from flask import Flask, render_template, Response
from camera_pi import Camera
import subprocess
import datetime
app = Flask(__name__)
@app.route("/")
def hello():
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d %H:%M")
templateData = {
'title' : 'HELLO!',
'time': timeString
}
return render_template('index.html', **templateData)
@app.route("/l1/")
def l1():
    subprocess.call('./l1.py', shell=True)  # shell=True takes a command string, not a list
return render_template('index.html')
@app.route("/r1/")
def r1():
    subprocess.call('./r1.py', shell=True)
return render_template('index.html')
@app.route("/d1/")
def d1():
    subprocess.call('./d1.py', shell=True)
return render_template('index.html')
@app.route("/u1/")
def u1():
    subprocess.call('./u1.py', shell=True)
return render_template('index.html')
# hello() above already owns the '/' rule and would shadow this view,
# so the streaming home page gets a distinct route.
@app.route('/stream/')
def index():
"""Video streaming home page."""
return render_template('index.html')
def gen(camera):
"""Video streaming generator function."""
while True:
frame = camera.get_frame()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
@app.route('/video_feed')
def video_feed():
"""Video streaming route. Put this in the src attribute of an img tag."""
return Response(gen(Camera()),
mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == "__main__":
    app.run(host='0.0.0.0', threaded=True)  # threaded expects a bool, not the string 'true'
# APIs used: flask.Flask, flask.render_template, camera_pi.Camera, datetime.datetime.now, subprocess.call
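# camera_pi is not included in this snippet; the routes above only assume an object
# exposing get_frame() -> bytes (one encoded JPEG frame). A minimal hypothetical
# stand-in for exercising /video_feed without a Pi camera:
class StubCamera:
    """Replays a single static JPEG frame forever."""

    def __init__(self, path='test_frame.jpg'):  # assumed test asset, not a file from the original project
        with open(path, 'rb') as fh:
            self._frame = fh.read()

    def get_frame(self):
        return self._frame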
import os
from flask import Flask, request
from werkzeug.utils import secure_filename
ALLOWED_EXTENSIONS = set(['csv', 'xlsx', 'xls', 'txt'])
UPLOAD_FOLDER = os.path.abspath(os.path.dirname(__file__))  # __file__, not __name__: dirname(__name__) is '' and resolves to the CWD
UPLOAD_FOLDER_PATH="/com/medicom/health/diabetes/data/upload/"
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER + UPLOAD_FOLDER_PATH
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)  # file.save() fails if the folder is missing
def allowed_file(filename):
return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
class FileHandler:
def __init__(self):
print("File handler init")
def save(self, request):
print("File uploading is started")
if 'file' not in request.files:
print("No file found")
return "No file found"
file = request.files['file']
completeFileName = ""
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
#today = date().today().isoformat(timespec='microseconds')
completeFileName=(os.path.join(app.config['UPLOAD_FOLDER'],filename))
print("filename >>>>>>>>>>>> ",completeFileName)
file.save(completeFileName)
return completeFileName
# APIs used: flask.Flask, os.path.dirname, os.path.join, werkzeug.utils.secure_filename
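# A sketch of how FileHandler might be mounted on an endpoint; the '/upload' rule
# and the response texts below are assumptions, not part of the original module:
file_handler = FileHandler()

@app.route('/upload', methods=['POST'])
def upload():
    saved_path = file_handler.save(request)
    if not saved_path or saved_path == "No file found":
        return "Upload rejected", 400
    return "Saved to " + saved_path, 200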
"""
RenderPipeline
Copyright (c) 2014-2016 tobspr <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from panda3d.core import SamplerState
from rpcore.globals import Globals
from rpcore.render_stage import RenderStage
from rpcore.stages.ambient_stage import AmbientStage
class SSRStage(RenderStage):
""" This stage does the SSR pass """
required_inputs = []
required_pipes = ["ShadedScene", "CombinedVelocity", "GBuffer",
"DownscaledDepth", "PreviousFrame::PostAmbientScene",
"PreviousFrame::SSRSpecular", "PreviousFrame::SceneDepth"]
@property
def produced_pipes(self):
return {"SSRSpecular": self.target_resolve.color_tex}
def create(self):
x_size, y_size = Globals.resolution.x, Globals.resolution.y
self.target = self.create_target("ComputeSSR")
self.target.size = -2
self.target.add_color_attachment(bits=(16, 16, 0, 0))
self.target.prepare_buffer()
self.target.color_tex.set_minfilter(SamplerState.FT_nearest)
self.target.color_tex.set_magfilter(SamplerState.FT_nearest)
self.target_velocity = self.create_target("ReflectionVelocity")
self.target_velocity.add_color_attachment(bits=(16, 16, 0, 0))
self.target_velocity.prepare_buffer()
self.target_velocity.set_shader_input("TraceResult", self.target.color_tex)
self.target_reproject_lighting = self.create_target("CopyLighting")
self.target_reproject_lighting.add_color_attachment(bits=16, alpha=True)
self.target_reproject_lighting.prepare_buffer()
self.target_upscale = self.create_target("UpscaleSSR")
self.target_upscale.add_color_attachment(bits=16, alpha=True)
self.target_upscale.prepare_buffer()
self.target_upscale.set_shader_input("SourceTex", self.target.color_tex)
self.target_upscale.set_shader_input(
"LastFrameColor", self.target_reproject_lighting.color_tex)
self.target_resolve = self.create_target("ResolveSSR")
self.target_resolve.add_color_attachment(bits=16, alpha=True)
self.target_resolve.prepare_buffer()
self.target_resolve.set_shader_input("CurrentTex", self.target_upscale.color_tex)
self.target_resolve.set_shader_input("VelocityTex", self.target_velocity.color_tex)
AmbientStage.required_pipes.append("SSRSpecular")
def reload_shaders(self):
self.target.shader = self.load_plugin_shader("ssr_trace.frag.glsl")
self.target_velocity.shader = self.load_plugin_shader("reflection_velocity.frag.glsl")
self.target_reproject_lighting.shader = self.load_plugin_shader("reproject_lighting.frag.glsl")
self.target_upscale.shader = self.load_plugin_shader("upscale_bilateral_brdf.frag.glsl")
self.target_resolve.shader = self.load_plugin_shader("resolve_ssr.frag.glsl")
# APIs used: rpcore.stages.ambient_stage.AmbientStage.required_pipes.append
"""
https://www.codewars.com/kata/60790e04cc9178003077db43/train/python
"""
from typing import Optional
from math import comb
from functools import lru_cache
@lru_cache
def balanced_paren_num(par_amt: int) -> int:
assert par_amt >= 0
# formula from https://en.wikipedia.org/wiki/Catalan_number
return comb(2 * par_amt, par_amt) // (par_amt + 1)
def balanced_parens(par_amt: int, ind: int) -> Optional[str]:
assert par_amt >= 0
total = balanced_paren_num(par_amt) # total number of balanced parenthesis
# simple bounds checking
if not 0 <= ind < total or par_amt < 0:
return None
if par_amt in [0, 1]: # idk if my function can handle these cases
return '' if par_amt == 0 else '()'
# this[o][c] = ways to complete given o opening parenthesis & c closing parenthesis
complete_ways = [[0 for _ in range(par_amt + 1)] for _ in range(par_amt + 1)]
complete_ways[par_amt][par_amt] = 1
for o in range(par_amt, -1, -1):
for c in range(o, -1, -1):
if o + 1 <= par_amt:
complete_ways[o][c] += complete_ways[o + 1][c]
if c + 1 <= par_amt:
complete_ways[o][c] += complete_ways[o][c + 1]
def actual_calc(curr_str: str, o_amt: int, c_amt: int, rel_ind: int) -> str:
if o_amt == par_amt:
return curr_str + ')' * (par_amt - c_amt)
# adding the o comes before adding the c
add_o = complete_ways[o_amt + 1][c_amt]
if rel_ind < add_o:
return actual_calc(curr_str + '(', o_amt + 1, c_amt, rel_ind)
return actual_calc(curr_str + ')', o_amt, c_amt + 1, rel_ind - add_o)
return actual_calc('', 0, 0, ind)
print(balanced_parens(2, 0)) # should output (())
print(balanced_parens(2, 1)) # should output ()()
print(balanced_parens(3, 3)) # should output ()(())
print(balanced_parens(3, 5)) # should output None
# APIs used: math.comb
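# Sanity check (an addition, not part of the original solution): enumerating every
# valid index for a given n must yield each balanced string exactly once, in
# lexicographic order, since '(' < ')' and the '(' branch is explored first.
def _check(n: int) -> None:
    seqs = [balanced_parens(n, i) for i in range(balanced_paren_num(n))]
    assert seqs == sorted(seqs) and len(seqs) == len(set(seqs))

_check(4)  # Catalan(4) == 14 distinct strings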
# Imports
import sys, getopt
import json
from tango import DeviceProxy, DevFailed
def cm_configure_attributes():
configure_success_count = 0
configure_fail_count = 0
already_configured_count = 0
total_attrib_count = 0
with open(attr_list_file, "r") as attrib_list_file:
attribute_list = json.load(attrib_list_file)
for attribute in attribute_list:
total_attrib_count += 1
## Set appropriate CM attributes
try:
# SetAttributeName
conf_manager_proxy.write_attribute("SetAttributeName", attribute)
# SetArchiver
conf_manager_proxy.write_attribute(
"SetArchiver", evt_subscriber_device_fqdn
)
# SetStrategy
conf_manager_proxy.write_attribute("SetStrategy", "ALWAYS")
# SetPollingPeriod
conf_manager_proxy.write_attribute("SetPollingPeriod", 1000)
                # SetPeriodEvent
conf_manager_proxy.write_attribute("SetPeriodEvent", 3000)
except Exception as except_occured:
print(
"Exception while setting configuration manager arrtibutes: ",
except_occured,
)
configure_fail_count += 1
continue
## Add Attribute for archiving
try:
conf_manager_proxy.command_inout("AttributeAdd")
except DevFailed as df:
str_df = str(df)
if "reason = Already archived" in str_df:
start_archiving(attribute)
else:
already_configured_count += 1
continue
configure_success_count += 1
return (
configure_success_count,
configure_fail_count,
already_configured_count,
total_attrib_count,
)
def start_archiving(str_attribute):
try:
conf_manager_proxy.command_inout("AttributeStart", str_attribute)
except Exception as except_occured:
print("start_archiving except_occured: ", except_occured)
# Main entrypoint of the script.
conf_manager_device_fqdn = ""
evt_subscriber_device_fqdn = ""
attr_list_file = ""
## parse arguments
try:
opts, args = getopt.getopt(sys.argv[1:], "c:e:a:", ["cm=", "es=", "attrfile="])
except getopt.GetoptError:
print("Please provide proper arguments.")
print(
"Usage: $python configure_hdbpp.py --cm=<FQDN> --es=<FQDN> --attrfile=<filepath> OR"
)
print(" $python configure_hdbpp.py -cm <FQDN> -e <FQDN> -a <filepath>")
print(" cm: FQDN of HDB++ Configuration Manager")
print(" es: FQDN of HDB++ Event subscriber")
print(" infile: File containing FQDNs of attributes to archive")
sys.exit(2)
for opt, arg in opts:
if opt in ("-c", "--cm"):
conf_manager_device_fqdn = arg
elif opt in ("-e", "--es"):
evt_subscriber_device_fqdn = arg
elif opt in ("-a", "--attrfile"):
attr_list_file = arg
try:
# create device proxies
conf_manager_proxy = DeviceProxy(conf_manager_device_fqdn)
evt_subscriber_proxy = DeviceProxy(evt_subscriber_device_fqdn)
# configure attribute
(
configure_success_count,
configure_fail_count,
already_configured_count,
total_attrib_count,
) = cm_configure_attributes()
print(
"Configured successfully: ",
configure_success_count,
"Failed: ",
configure_fail_count,
"Already configured: ",
already_configured_count,
"Total attributes: ",
total_attrib_count,
)
except Exception as exception:
print("Exception: ", exception)
# APIs used: tango.DeviceProxy, json.load, getopt.getopt, sys.exit
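# cm_configure_attributes() json.load()s the attribute file, so the file must hold a
# JSON array of fully qualified attribute names. A hypothetical way to produce one
# (the FQDNs below are illustrative, not real deployment devices):
def write_example_attr_list(path="attributes.json"):
    example = [
        "tango://databaseds:10000/sys/tg_test/1/ampli",
        "tango://databaseds:10000/sys/tg_test/1/double_scalar",
    ]
    with open(path, "w") as fh:
        json.dump(example, fh, indent=4)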
import numpy as np
from genetic_algo import Population
from numpy.polynomial.polynomial import polyval
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.patches import Rectangle
TARGET = np.random.rand(16)
TARGET_2 = np.random.rand(16)
test_range = np.linspace(0, 1, 10_000)
target_poly = polyval(test_range, TARGET, tensor=False)
"""
This script is not technically part of the project; it just generates the animations we use in our presentation. As a
result, its documentation and pair programming are not up to the standards of the project and should not be considered
beholden to those standards. It is outside the scope of the project as outlined in the proposal and is
explicitly referenced as a piece of code that would be written but not considered part of the project in the
proposal. It's included in the repository because it's cool, and because it allows anyone to see the algorithm in
action with their own modifications, fitness functions, SEX_PARAMS, etc. While running this script is a good way to
verify that the code in the project is working properly, it's not intended to be a test. That is the job of the
unittests in test_project.
"""
def dummy_func_1(input_array):
"""
a sample fitness function that uses the closeness of fit to a polynomial with random coefficients to calculate
fitness (loss)
Args:
input_array(array): iterable of 16 floats between 0 and 1
Returns:
loss(float): an approximation of how close the polynomial with coefficients determined by input is to the target
polynomial
(Ben)
"""
n_samples = 10_000
output = polyval(test_range, input_array, tensor=False)
loss = np.sum(abs(target_poly - output)) / n_samples
return -1 * loss
def dummy_func_2(input_array):
"""
a sample fitness function that uses the closeness of fit to a linear equation to calculate fitness (loss)
Args:
input_array(array): iterable of 16 floats between 0 and 1
Returns:
loss(float): an approximation of how close the polynomial with coefficients determined by input is to the target
linear equation
(Ben)
"""
output = np.array(input_array)
loss = np.sum(abs(TARGET - output))
return -1 * loss
goal = -0.010
p = Population(goal, dummy_func_1)
print(p.main())
nbins = 50
ymax = 10
xmin = -1
bins = np.linspace(xmin, 0, nbins + 1)
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(20, 12))
ax1 = axes[0][0]
ax2 = axes[0][1]
ax4 = axes[1][0]
ax3 = axes[1][1]
def generation_fit(gen):
return [x.fitness for x in gen.individuals]
def animate(i):
ax1.cla()
ax1.set(xlim=(xmin, 0), ylim=(0, ymax))
ax2.cla()
ax2.set(xlim=(0, 1), ylim=(0, 9))
ax3.cla()
ax3.set(xlim=(0, 1), ylim=(-2, 2))
ax4.cla()
ax4.set(xlim=(0, 15), ylim=(-.5, .5))
mean_fit = p.generations[i].mean_fitness()
best_fit = p.generations[i].top_fitness().fitness
n_individuals = len(p.generations[i].individuals)
best_new_fit = max([x.fitness for x in p.generations[i].individuals[1:]])
best_new_fit_indiv = max(p.generations[i].individuals[1:], key=lambda x: x.fitness)
best_new_fit_poly = polyval(test_range, best_new_fit_indiv.genes, tensor=False)
best_fit_poly = polyval(test_range, p.generations[i].individuals[0].genes, tensor=False)
ax1.set_title('Histogram of fitness for individuals of each generation')
ax1.hist(generation_fit(p.generations[i]), bins, range=(xmin, 0), density=True, color='c')
generations = ax1.add_artist(Rectangle((0, 0), 1, 1, fc="w", fill=False, edgecolor='none', linewidth=0,
label=f'Generation: {i}'))
mean = ax1.vlines([mean_fit], 0, ymax, linestyles='dashed', colors='k', label="Mean Fitness: {mean:.4f}".format(
mean=mean_fit))
goal_line = ax1.vlines([goal], 0, ymax, linestyles='dashed', colors='g', label="Goal: {goal:.4f}".format(goal=goal))
best = ax1.vlines([best_fit], 0, ymax, linestyles='dashed', colors='r', label="Top Fitness: {top:.4f}".format(
top=best_fit))
    best_new = ax1.vlines([best_new_fit], 0, ymax, linestyles='dashed', colors='m',
                          label="Top Fitness (born this gen.):"
                                " {top:.4f}".format(top=best_new_fit))
ax1.legend([generations, mean, goal_line, best, best_new], (f'Generation: {i} ({n_individuals} indiv.)',
"Mean Fitness: {mean:.4f}".format(mean=mean_fit),
"Goal: {goal:.4f}".format(goal=goal),
"Top Fitness (all time): {top:.4f}".format(
top=best_fit),
"Top Fitness (born this gen.):{top:.4f}".format(
top=best_new_fit)),
loc='upper left')
ax2.set_title('Comparison between target curve and fittest individuals')
ax2.plot(test_range, best_fit_poly, color='r', label='Fittest (all time)')
ax2.plot(test_range, best_new_fit_poly, color='m', label='Fittest (born this gen.)')
ax2.plot(test_range, target_poly, color='k', lw=.75, label="Target Curve")
ax2.legend(loc='upper left')
ax3.set_title(r'$\Delta$ between target curve and fittest individuals')
ax3.plot(test_range, best_fit_poly - target_poly, color='r', label='Fittest (all time)')
ax3.plot(test_range, best_new_fit_poly - target_poly, color='m', label='Fittest (born this gen.)')
ax3.plot(test_range, np.zeros(test_range.shape), ls='--', lw=.75, color='k', label="Target Curve")
ax3.legend(loc='upper left')
ax4.set_title('Comparison between genes of target and fittest individuals')
ax4.bar(np.arange(0, 16), best_new_fit_indiv.genes - TARGET, color='m', align='edge', width=.4,
label='Fittest (born this gen.)')
ax4.bar(np.arange(0., 16.)+.4, p.generations[i].individuals[0].genes - TARGET, color='r', align='edge', width=.4,
label='Fittest (all time)')
ax4.plot(np.arange(0, 16), np.zeros(16), ls='--', lw=.75, color='k')
ax4.legend(loc='upper left')
animate(0)
anim = animation.FuncAnimation(fig, animate, interval=100, frames=len(p.generations) - 1)
# only uncomment the below line if you have ffmpeg installed and are willing to wait for a while each time it runs with
# more than 100 generations.
# anim.save("fitness_histo_4x_2.mp4")
plt.draw()
plt.show()
# APIs used: genetic_algo.Population, numpy.polynomial.polynomial.polyval, numpy (random.rand, array, linspace, zeros, arange), matplotlib.pyplot (subplots, draw, show), matplotlib.patches.Rectangle
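# genetic_algo is not shown in this file; the script relies only on the interface
# sketched below. These Protocols reconstruct the attributes actually accessed
# above and are assumptions, not the real class definitions:
from typing import Protocol, Sequence

class IndividualLike(Protocol):
    fitness: float
    genes: np.ndarray

class GenerationLike(Protocol):
    individuals: Sequence[IndividualLike]
    def mean_fitness(self) -> float: ...
    def top_fitness(self) -> IndividualLike: ...

class PopulationLike(Protocol):
    generations: Sequence[GenerationLike]
    def main(self) -> object: ...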
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# print(tf.__version__)
tf.get_logger().setLevel('ERROR')
# Given 90-d non-reference video data, output video quality score 0-100
def build_model():
model = keras.Sequential([
layers.Dense(90, activation='relu', input_shape=[90]),
# layers.Dense(90, activation='relu', input_shape=[90]),
layers.Dense(90, activation='relu'),
layers.Dense(1)
])
optimizer = tf.keras.optimizers.Adam(0.001)
model.compile(loss='mae',
optimizer=optimizer,
metrics=['mae', 'mse'])
# model.compile(loss='mean_absolute_error', optimizer='adam', metrics=['mean_absolute_error'])
return model
def load_model():
NN_model = build_model()
# NN_model.summary()
    weights_file = 'Weights-494--5.61865.hdf5'  # choose the best checkpoint
    NN_model.load_weights(weights_file)  # load it
return NN_model
# APIs used: tensorflow.get_logger, tensorflow.keras.layers.Dense, tensorflow.keras.optimizers.Adam
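# Usage sketch: build_model() needs nothing on disk, while load_model() assumes the
# checkpoint named above sits next to the script. The random input is illustrative
# only; an untrained model produces a meaningless score.
if __name__ == "__main__":
    model = build_model()
    features = np.random.rand(1, 90)  # one 90-d non-reference feature vector
    score = model.predict(features)[0, 0]
    print("predicted quality score:", score)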
# -*- coding: utf-8 -*-
# Copyright 2019 the HERA Project
# Licensed under the MIT License
import pytest
import os
import shutil
import numpy as np
import sys
from collections import OrderedDict as odict
import copy
import glob
from pyuvdata import UVData
from pyuvdata import utils as uvutils
import unittest
from scipy import stats
from scipy import constants
from pyuvdata import UVFlag
from .. import datacontainer, io, frf
from ..data import DATA_PATH
@pytest.mark.filterwarnings("ignore:The default for the `center` keyword has changed")
def test_timeavg_waterfall():
fname = os.path.join(DATA_PATH, "zen.2458042.12552.xx.HH.uvXA")
uvd = UVData()
uvd.read_miriad(fname)
d = uvd.get_data(24, 25)
f = uvd.get_flags(24, 25)
n = uvd.get_nsamples(24, 25)
t = np.unique(uvd.time_array)
fr = uvd.freq_array.squeeze()
lsts = []
for _l in uvd.lst_array:
if _l not in lsts:
lsts.append(_l)
lsts = np.array(lsts)
antpos, ants = uvd.get_ENU_antpos()
blv = antpos[ants.tolist().index(24)] - antpos[ants.tolist().index(25)]
# test basic execution
ad, af, an, al, aea = frf.timeavg_waterfall(d, 25, verbose=False)
assert ad.shape == (3, 64)
assert af.shape == (3, 64)
assert an.shape == (3, 64)
assert not np.any(af)
assert np.allclose(an[1, 0], 25.0)
assert np.allclose(an[2, 0], 10.0)
# test rephase
ad, af, an, al, aea = frf.timeavg_waterfall(d, 25, flags=f, rephase=True, lsts=lsts, freqs=fr, bl_vec=blv,
nsamples=n, extra_arrays=dict(times=t), verbose=False)
assert ad.shape == (3, 64)
assert af.shape == (3, 64)
assert an.shape == (3, 64)
assert np.any(af)
assert len(al) == 3
assert len(aea['avg_times']) == 3
assert np.allclose(an.max(), 25.0)
# test various Navgs
ad, af, an, al, aea = frf.timeavg_waterfall(d, 1, flags=f, rephase=True, lsts=lsts, freqs=fr, bl_vec=blv,
nsamples=n, extra_arrays=dict(times=t), verbose=False)
assert ad.shape == (60, 64)
ad, af, an, al, aea = frf.timeavg_waterfall(d, 60, flags=f, rephase=True, lsts=lsts, freqs=fr, bl_vec=blv,
nsamples=n, extra_arrays=dict(times=t), verbose=False)
assert ad.shape == (1, 64)
# wrap lst
ad2, af2, an2, al2, aea2 = frf.timeavg_waterfall(d, 60, flags=f, rephase=True, lsts=lsts + 1.52917804, freqs=fr, bl_vec=blv,
nsamples=n, extra_arrays=dict(times=t), verbose=False)
assert ad.shape == (1, 64)
assert np.allclose(ad, ad2)
assert np.allclose(al, al2 - 1.52917804)
# Test Error
with pytest.raises(ValueError):
frf.timeavg_waterfall(d, 25, verbose=False, wgt_by_nsample=True, wgt_by_favg_nsample=True)
# test weightings
d = np.ones((4, 10))
d[0, :] *= 2
n = np.ones((4, 10))
n[0, 0:5] *= 2
ad, _, _, _, _ = frf.timeavg_waterfall(d, 2, rephase=False, nsamples=n, wgt_by_nsample=True)
np.testing.assert_array_equal(ad[1, :], 1.0)
np.testing.assert_array_equal(ad[0, 0:5], 5. / 3)
np.testing.assert_array_equal(ad[0, 5:10], 1.5)
ad, _, _, _, _ = frf.timeavg_waterfall(d, 2, rephase=False, nsamples=n, wgt_by_nsample=False, wgt_by_favg_nsample=True)
np.testing.assert_array_equal(ad[1, :], 1.0)
np.testing.assert_array_equal(ad[0, :], 1.6)
def test_fir_filtering():
# convert a high-pass frprofile to an FIR filter
frbins = np.linspace(-40e-3, 40e-3, 1024)
frp = np.ones(1024)
frp[512 - 9:512 + 10] = 0.0
fir, tbins = frf.frp_to_fir(frp, delta_bin=np.diff(frbins)[0])
# confirm its purely real
assert not np.any(np.isclose(np.abs(fir.real), 0.0))
assert np.allclose(np.abs(fir.imag), 0.0)
# convert back
_frp, _frbins = frf.frp_to_fir(fir, delta_bin=np.diff(tbins)[0], undo=True)
np.testing.assert_array_almost_equal(frp, _frp.real)
np.testing.assert_array_almost_equal(np.diff(frbins), np.diff(_frbins))
assert np.allclose(np.abs(_frp.imag), 0.0)
# test noise averaging properties
frp = np.zeros(1024)
frp[512] = 1.0
t_ratio = frf.fr_tavg(frp)
assert np.allclose(t_ratio, 1024)
@pytest.mark.filterwarnings("ignore:The default for the `center` keyword has changed")
class Test_FRFilter(object):
def setup_method(self):
self.fname = os.path.join(DATA_PATH, "zen.2458042.12552.xx.HH.uvXA")
self.F = frf.FRFilter(self.fname, filetype='miriad')
self.F.read()
def test_timeavg_data(self):
# test basic time average
self.F.timeavg_data(self.F.data, self.F.times, self.F.lsts, 35, rephase=True, keys=[(24, 25, 'ee')])
assert self.F.Navg == 3
assert len(self.F.avg_data) == 1
assert self.F.avg_data[(24, 25, 'ee')].shape == (20, 64)
# test full time average and overwrite
self.F.timeavg_data(self.F.data, self.F.times, self.F.lsts, 1e10, rephase=True, verbose=False, overwrite=False)
assert self.F.Navg == 60
assert len(self.F.avg_data) == 28
assert self.F.avg_data[(24, 25, 'ee')].shape == (20, 64)
assert self.F.avg_data[(24, 37, 'ee')].shape == (1, 64)
# test weight by nsample
F = copy.deepcopy(self.F)
k = (24, 25, 'ee')
F.nsamples[k][:3] = 0.0
F.timeavg_data(F.data, F.times, F.lsts, 35, nsamples=F.nsamples, keys=[k], overwrite=True,
wgt_by_nsample=True)
assert np.all(np.isclose(F.avg_data[k][0], 0.0)) # assert data is zero b/c I zeroed nsample
assert np.all(np.isclose(F.avg_nsamples[k][0], 0.0)) # assert avg_nsample is also zero
assert np.all(np.isclose(F.avg_nsamples[k][1:], 3.0)) # assert non-zeroed nsample is 3
# repeat without nsample wgt
F.timeavg_data(F.data, F.times, F.lsts, 35, nsamples=F.nsamples, keys=[k], overwrite=True,
wgt_by_nsample=False)
assert not np.any(np.isclose(F.avg_data[k][0, 5:-5], 0.0)) # assert non-edge data is now not zero
assert np.all(np.isclose(F.avg_nsamples[k][0], 0.0)) # avg_nsample should still be zero
# exceptions
pytest.raises(AssertionError, self.F.timeavg_data, self.F.data, self.F.times, self.F.lsts, 1.0)
def test_filter_data(self):
# construct high-pass filter
frates = np.fft.fftshift(np.fft.fftfreq(self.F.Ntimes, self.F.dtime)) * 1e3
        w = np.ones((self.F.Ntimes, self.F.Nfreqs), dtype=float)  # np.float was removed in NumPy 1.24
w[np.abs(frates) < 20] = 0.0
frps = datacontainer.DataContainer(dict([(k, w) for k in self.F.data]))
# make gaussian random noise
bl = (24, 25, 'ee')
window = 'blackmanharris'
ec = 0
np.random.seed(0)
self.F.data[bl] = np.reshape(stats.norm.rvs(0, 1, self.F.Ntimes * self.F.Nfreqs)
+ 1j * stats.norm.rvs(0, 1, self.F.Ntimes * self.F.Nfreqs), (self.F.Ntimes, self.F.Nfreqs))
# fr filter noise
self.F.filter_data(self.F.data, frps, overwrite=True, verbose=False, axis=0, keys=[bl])
# check key continue w/ ridiculous edgecut
self.F.filter_data(self.F.data, frps, overwrite=False, verbose=False, keys=[bl], edgecut_low=100, axis=0)
# fft
self.F.fft_data(data=self.F.data, assign='dfft', ax='freq', window=window, edgecut_low=ec, edgecut_hi=ec, overwrite=True)
self.F.fft_data(data=self.F.filt_data, assign='rfft', ax='freq', window=window, edgecut_low=ec, edgecut_hi=ec, overwrite=True)
# ensure drop in noise power is reflective of frf_nsamples
dfft = np.mean(np.abs(self.F.dfft[bl]), axis=0)
rfft = np.mean(np.abs(self.F.rfft[bl]), axis=0)
r = np.mean(dfft / rfft)
assert np.allclose(r, np.sqrt(np.mean(self.F.filt_nsamples[bl])), atol=1e-1)
def test_write_data(self):
self.F.timeavg_data(self.F.data, self.F.times, self.F.lsts, 35, rephase=False, verbose=False)
self.F.write_data(self.F.avg_data, "./out.uv", filetype='miriad', overwrite=True,
add_to_history='testing', times=self.F.avg_times, lsts=self.F.avg_lsts)
assert os.path.exists("./out.uv")
hd = io.HERAData('./out.uv', filetype='miriad')
hd.read()
assert 'testing' in hd.history.replace('\n', '').replace(' ', '')
assert 'Thisfilewasproducedbythefunction' in hd.history.replace('\n', '').replace(' ', '')
shutil.rmtree("./out.uv")
pytest.raises(AssertionError, self.F.write_data, self.F.avg_data, "./out.uv", times=self.F.avg_times)
pytest.raises(ValueError, self.F.write_data, self.F.data, "hi", filetype='foo')
def test_time_avg_data_and_write(self, tmpdir):
# time-averaged data written too file will be compared to this.
tmp_path = tmpdir.strpath
output = tmp_path + '/test_output.miriad'
flag_output = tmp_path + '/test_output.flags.h5'
self.F.timeavg_data(self.F.data, self.F.times, self.F.lsts, 35., rephase=True, overwrite=True,
wgt_by_nsample=True, flags=self.F.flags, nsamples=self.F.nsamples)
frf.time_avg_data_and_write(self.fname, output, t_avg=35., rephase=True, wgt_by_nsample=True, flag_output=flag_output, filetype='miriad')
data_out = frf.FRFilter(output, filetype='miriad')
data_out.read()
for k in data_out.data:
assert np.allclose(data_out.data[k], self.F.avg_data[k])
assert np.allclose(data_out.flags[k], self.F.avg_flags[k])
assert np.allclose(data_out.nsamples[k], self.F.avg_nsamples[k])
def test_time_avg_data_and_write_baseline_list(self, tmpdir):
# compare time averaging over baseline list versus time averaging
# without baseline list.
tmp_path = tmpdir.strpath
uvh5s = sorted(glob.glob(DATA_PATH + '/zen.2458045.*.uvh5'))
output_files = []
for file in uvh5s:
baseline_list = io.baselines_from_filelist_position(file, uvh5s)
output = tmp_path + '/' + file.split('/')[-1]
output_files.append(output)
output_flags = tmp_path + '/' + file.split('/')[-1].replace('.uvh5', '.flags.h5')
with pytest.warns(RuntimeWarning):
frf.time_avg_data_and_write(baseline_list=[], flag_output=output_flags,
input_data_list=uvh5s, rephase=True,
output_data=output, t_avg=35., wgt_by_nsample=True)
frf.time_avg_data_and_write(baseline_list=baseline_list, flag_output=output_flags,
input_data_list=uvh5s, rephase=True,
output_data=output, t_avg=35., wgt_by_nsample=True)
# now do everything at once:
output = tmp_path + '/combined.uvh5'
frf.time_avg_data_and_write(uvh5s, output, t_avg=35., rephase=True, wgt_by_nsample=True)
data_out = frf.FRFilter(output)
data_out_bls = frf.FRFilter(output_files)
data_out.read()
data_out_bls.read()
# check that data, flags, nsamples are all close.
for k in data_out.data:
assert np.all(np.isclose(data_out.data[k], data_out_bls.data[k]))
assert np.all(np.isclose(data_out.flags[k], data_out_bls.flags[k]))
assert np.all(np.isclose(data_out.nsamples[k], data_out_bls.nsamples[k]))
def test_time_average_argparser_multifile(self):
sys.argv = [sys.argv[0], "first.uvh5", "second.uvh5", "output.uvh5", "--cornerturnfile", "input.uvh5", "--t_avg", "35.", "--rephase"]
ap = frf.time_average_argparser()
args = ap.parse_args()
assert args.cornerturnfile == "input.uvh5"
assert args.output_data == "output.uvh5"
assert args.input_data_list == ['first.uvh5', 'second.uvh5']
assert args.t_avg == 35.
assert not args.clobber
assert not args.verbose
assert args.flag_output is None
assert args.filetype == "uvh5"
def test_tophat_frfilter(self):
fname = os.path.join(DATA_PATH, "zen.2458043.12552.xx.HH.uvORA")
k = (24, 25, 'ee')
frfil = frf.FRFilter(fname, filetype='miriad')
frfil.read(bls=[k])
bl = np.linalg.norm(frfil.antpos[24] - frfil.antpos[25]) / constants.c * 1e9
sdf = (frfil.freqs[1] - frfil.freqs[0]) / 1e9
frfil.tophat_frfilter(tol=1e-2, output_prefix='frfiltered')
for k in frfil.data.keys():
assert frfil.frfiltered_resid[k].shape == (60, 64)
assert frfil.frfiltered_model[k].shape == (60, 64)
assert k in frfil.frfiltered_info
# test skip_wgt imposition of flags
fname = os.path.join(DATA_PATH, "zen.2458043.12552.xx.HH.uvORA")
k = (24, 25, 'ee')
# check successful run when avg_red_bllens is True and when False.
for avg_red_bllens in [True, False]:
frfil = frf.FRFilter(fname, filetype='miriad')
frfil.read(bls=[k])
if avg_red_bllens:
frfil.avg_red_baseline_vectors()
            wgts = {k: np.ones_like(frfil.flags[k], dtype=float)}  # np.float was removed in NumPy 1.24
wgts[k][:, 0] = 0.0
frfil.tophat_frfilter(keys=[k], wgts=wgts, tol=1e-5, window='blackman-harris', skip_wgt=0.1, maxiter=100)
assert frfil.clean_info[k][(0, frfil.Nfreqs)]['status']['axis_0'][0] == 'skipped'
np.testing.assert_array_equal(frfil.clean_flags[k][:, 0], np.ones_like(frfil.flags[k][:, 0]))
np.testing.assert_array_equal(frfil.clean_model[k][:, 0], np.zeros_like(frfil.clean_resid[k][:, 0]))
np.testing.assert_array_equal(frfil.clean_resid[k][:, 0], np.zeros_like(frfil.clean_resid[k][:, 0]))
def test_load_tophat_frfilter_and_write_baseline_list(self, tmpdir):
tmp_path = tmpdir.strpath
uvh5 = [os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.first.uvh5"),
os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.second.uvh5")]
cals = [os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only.part1"),
os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only.part2")]
outfilename = os.path.join(tmp_path, 'temp.h5')
cdir = os.path.join(tmp_path, 'cache_temp')
# make a cache directory
if os.path.isdir(cdir):
shutil.rmtree(cdir)
os.mkdir(cdir)
# test graceful exit with baseline list length of zero.
with pytest.warns(RuntimeWarning):
frf.load_tophat_frfilter_and_write(datafile_list=uvh5, baseline_list=[],
calfile_list=cals, spw_range=[100, 200], cache_dir=cdir,
read_cache=True, write_cache=True, avg_red_bllens=True,
res_outfilename=outfilename, clobber=True,
mode='dayenu')
for avg_bl in [True, False]:
frf.load_tophat_frfilter_and_write(datafile_list=uvh5, baseline_list=[(53, 54)], polarizations=['ee'],
calfile_list=cals, spw_range=[100, 200], cache_dir=cdir,
read_cache=True, write_cache=True, avg_red_bllens=avg_bl,
res_outfilename=outfilename, clobber=True,
mode='dayenu')
hd = io.HERAData(outfilename)
d, f, n = hd.read()
assert len(list(d.keys())) == 1
assert d[(53, 54, 'ee')].shape[1] == 100
assert d[(53, 54, 'ee')].shape[0] == 60
# now do no spw range and no cal files just to cover those lines.
frf.load_tophat_frfilter_and_write(datafile_list=uvh5, baseline_list=[(53, 54)], polarizations=['ee'],
cache_dir=cdir,
read_cache=True, write_cache=True, avg_red_bllens=avg_bl,
res_outfilename=outfilename, clobber=True,
mode='dayenu')
hd = io.HERAData(outfilename)
d, f, n = hd.read()
assert len(list(d.keys())) == 1
assert d[(53, 54, 'ee')].shape[1] == 1024
assert d[(53, 54, 'ee')].shape[0] == 60
# now test flag factorization and time thresholding.
# prepare an input files for broadcasting flags
uvh5 = os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5")
input_file = os.path.join(tmp_path, 'temp_special_flags.h5')
shutil.copy(uvh5, input_file)
hd = io.HERAData(input_file)
_, flags, _ = hd.read()
ntimes_before = hd.Ntimes
nfreqs_before = hd.Nfreqs
freqs_before = hd.freqs
times_before = hd.times
for bl in flags:
flags[bl][:] = False
flags[bl][0, :hd.Nfreqs // 2] = True # first time has 50% flagged
flags[bl][-3:, -1] = True # last channel has flags for three integrations
hd.update(flags=flags)
hd.write_uvh5(input_file, clobber=True)
# this time_threshold will result in
        # entire first integration being flagged
# and entire final channel being flagged
# when flags are broadcasted.
time_thresh = 2. / hd.Ntimes
for blnum, bl in enumerate(flags.keys()):
outfilename = os.path.join(tmp_path, 'bl_chunk_%d.h5' % blnum)
frf.load_tophat_frfilter_and_write(datafile_list=[input_file], res_outfilename=outfilename,
tol=1e-4, baseline_list=[bl[:2]], polarizations=[bl[-1]],
cache_dir=cdir,
factorize_flags=True,
time_thresh=time_thresh, clobber=True)
# now load all of the outputs in
output_files = glob.glob(tmp_path + '/bl_chunk_*.h5')
hd = io.HERAData(output_files)
d, f, n = hd.read()
hd_original = io.HERAData(uvh5)
for bl in hd_original.bls:
assert bl in d.keys()
for bl in f:
assert np.all(f[bl][:, -1])
assert np.all(f[bl][0, :])
# test apriori flags and flag_yaml
flag_yaml = os.path.join(DATA_PATH, 'test_input/a_priori_flags_sample.yaml')
uvf = UVFlag(hd, mode='flag', copy_flags=True)
uvf.to_waterfall(keep_pol=False, method='and')
uvf.flag_array[:] = False
flagfile = os.path.join(tmp_path, 'test_flag.h5')
uvf.write(flagfile, clobber=True)
frf.load_tophat_frfilter_and_write(datafile_list=[input_file], res_outfilename=outfilename,
tol=1e-4, baseline_list=[bl[:2]], polarizations=[bl[-1]],
clobber=True, mode='dayenu',
external_flags=flagfile, overwrite_flags=True)
# test that all flags are False
hd = io.HERAData(outfilename)
d, f, n = hd.read()
for k in f:
assert np.all(~f[k])
# now do the external yaml
frf.load_tophat_frfilter_and_write(datafile_list=[input_file], res_outfilename=outfilename,
tol=1e-4, baseline_list=[bl[:2]], polarizations=[bl[-1]],
clobber=True, mode='dayenu',
external_flags=flagfile, overwrite_flags=True,
flag_yaml=flag_yaml)
# test that all flags are af yaml flags
hd = io.HERAData(outfilename)
d, f, n = hd.read()
for k in f:
assert np.all(f[k][:, 0])
assert np.all(f[k][:, 1])
assert np.all(f[k][:, 10:20])
assert np.all(f[k][:, 60])
os.remove(outfilename)
shutil.rmtree(cdir)
def test_load_tophat_frfilter_and_write_multifile(self, tmpdir):
# cover line where baseline-list is None and multiple files are provided.
uvh5s = sorted(glob.glob(DATA_PATH + '/zen.2458045.*.uvh5'))
tmp_path = tmpdir.strpath
outfilename = os.path.join(tmp_path, 'temp_output.uvh5')
frf.load_tophat_frfilter_and_write(uvh5s, filled_outfilename=outfilename, tol=1e-4, clobber=True)
hd = io.HERAData(uvh5s)
d, f, n = hd.read()
hdoutput = io.HERAData(outfilename)
doutput, foutput, nouput = hdoutput.read()
for k in doutput:
assert doutput[k].shape == d[k].shape
def test_load_tophat_frfilter_and_write(self, tmpdir):
tmp_path = tmpdir.strpath
uvh5 = os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5")
outfilename = os.path.join(tmp_path, 'temp.h5')
CLEAN_outfilename = os.path.join(tmp_path, 'temp_clean.h5')
filled_outfilename = os.path.join(tmp_path, 'temp_filled.h5')
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename, tol=1e-4, clobber=True, Nbls_per_load=1)
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for bl in d:
assert not np.all(np.isclose(d[bl], 0.))
frfil = frf.FRFilter(uvh5, filetype='uvh5')
frfil.read(bls=[(53, 54, 'ee')])
frfil.tophat_frfilter(keys=[(53, 54, 'ee')], tol=1e-4, verbose=True)
np.testing.assert_almost_equal(d[(53, 54, 'ee')], frfil.clean_resid[(53, 54, 'ee')], decimal=5)
np.testing.assert_array_equal(f[(53, 54, 'ee')], frfil.flags[(53, 54, 'ee')])
# test NotImplementedError
pytest.raises(NotImplementedError, frf.load_tophat_frfilter_and_write, uvh5, res_outfilename=outfilename, tol=1e-4,
clobber=True, Nbls_per_load=1, avg_red_bllens=True, baseline_list=[(54, 54)], polarizations=['ee'])
# test loading and writing all baselines at once.
uvh5 = os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5")
outfilename = os.path.join(tmp_path, 'temp.h5')
for avg_bl in [True, False]:
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename, tol=1e-4, clobber=True,
Nbls_per_load=None, avg_red_bllens=avg_bl)
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for bl in d:
assert not np.all(np.isclose(d[bl], 0.))
frfil = frf.FRFilter(uvh5, filetype='uvh5')
frfil.read(bls=[(53, 54, 'ee')])
frfil.tophat_frfilter(keys=[(53, 54, 'ee')], tol=1e-4, verbose=True)
np.testing.assert_almost_equal(d[(53, 54, 'ee')], frfil.clean_resid[(53, 54, 'ee')], decimal=5)
np.testing.assert_array_equal(f[(53, 54, 'ee')], frfil.flags[(53, 54, 'ee')])
cal = os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only")
outfilename = os.path.join(tmp_path, 'temp.h5')
os.remove(outfilename)
for avg_bl in [True, False]:
frf.load_tophat_frfilter_and_write(uvh5, calfile_list=cal, tol=1e-4, res_outfilename=outfilename,
Nbls_per_load=2, clobber=True, avg_red_bllens=avg_bl)
hd = io.HERAData(outfilename)
assert 'Thisfilewasproducedbythefunction' in hd.history.replace('\n', '').replace(' ', '')
d, f, n = hd.read()
for bl in d:
if not np.all(f[bl]):
assert not np.all(np.isclose(d[bl], 0.))
np.testing.assert_array_equal(f[(53, 54, 'ee')], True)
os.remove(outfilename)
# test skip_autos
frf.load_tophat_frfilter_and_write(uvh5, calfile_list=None, tol=1e-4, res_outfilename=outfilename,
filled_outfilename=filled_outfilename, CLEAN_outfilename=CLEAN_outfilename,
Nbls_per_load=2, clobber=True, avg_red_bllens=avg_bl, skip_autos=True)
hd = io.HERAData(outfilename)
d, f, n = hd.read()
hd_original = io.HERAData(uvh5)
do, fo, no = hd_original.read()
chd = io.HERAData(CLEAN_outfilename)
cd, cf, cn = chd.read()
fhd = io.HERAData(filled_outfilename)
fd, ff, fn = fhd.read()
# test that the resids are are equal to original data.
for bl in do:
if bl[0] == bl[1]:
assert np.allclose(do[bl], d[bl]) # check that resid equals original data.
assert np.allclose(fo[bl], f[bl])
assert np.allclose(no[bl], n[bl])
assert np.allclose(cd[bl], np.zeros_like(cd[bl])) # check that all model values are zero.
assert np.allclose(fd[bl][~f[bl]], d[bl][~f[bl]]) # check that filled data equals original data.
else:
assert not np.allclose(do[bl], d[bl])
assert np.allclose(no[bl], n[bl])
# prepare an input file for broadcasting flags
input_file = os.path.join(tmp_path, 'temp_special_flags.h5')
shutil.copy(uvh5, input_file)
hd = io.HERAData(input_file)
_, flags, _ = hd.read()
ntimes_before = hd.Ntimes
nfreqs_before = hd.Nfreqs
freqs_before = hd.freqs
times_before = hd.times
for bl in flags:
flags[bl][:] = False
flags[bl][0, :hd.Nfreqs // 2] = True # first time has 50% flagged
flags[bl][-3:, -1] = True # last channel has flags for three integrations
hd.update(flags=flags)
hd.write_uvh5(input_file, clobber=True)
# this time_threshold will result in
        # entire first integration being flagged
# and entire final channel being flagged
# when flags are broadcasted.
time_thresh = 2. / hd.Ntimes
frf.load_tophat_frfilter_and_write(input_file, res_outfilename=outfilename, tol=1e-4,
factorize_flags=True, time_thresh=time_thresh, clobber=True)
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for bl in f:
assert np.any(f[bl][:, :-1])
assert np.all(f[bl][0, :])
# test delay filtering and writing with factorized flags and partial i/o
frf.load_tophat_frfilter_and_write(input_file, res_outfilename=outfilename, tol=1e-4,
factorize_flags=True, time_thresh=time_thresh, clobber=True)
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for bl in f:
# check that flags were broadcasted.
assert np.all(f[bl][0, :])
assert np.all(f[bl][:, -1])
assert not np.all(np.isclose(d[bl], 0.))
frf.load_tophat_frfilter_and_write(input_file, res_outfilename=outfilename, tol=1e-4, Nbls_per_load=1,
factorize_flags=True, time_thresh=time_thresh, clobber=True)
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for bl in f:
# check that flags were broadcasted.
assert np.all(f[bl][0, :])
assert np.all(f[bl][:, -1])
assert not np.all(np.isclose(d[bl], 0.))
# test apriori flags and flag_yaml
hd = io.HERAData(uvh5)
hd.read()
flag_yaml = os.path.join(DATA_PATH, 'test_input/a_priori_flags_sample.yaml')
uvf = UVFlag(hd, mode='flag', copy_flags=True)
uvf.to_waterfall(keep_pol=False, method='and')
uvf.flag_array[:] = False
flagfile = os.path.join(tmp_path, 'test_flag.h5')
uvf.write(flagfile, clobber=True)
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename,
Nbls_per_load=1, clobber=True, mode='dayenu',
external_flags=flagfile,
overwrite_flags=True)
# test that all flags are False
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for k in f:
assert np.all(~f[k])
# now without parital io.
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename,
clobber=True, mode='dayenu',
external_flags=flagfile,
overwrite_flags=True)
# test that all flags are False
hd = io.HERAData(outfilename)
d, f, n = hd.read(bls=[(53, 54, 'ee')])
for k in f:
assert np.all(~f[k])
def test_sky_frates_minfrate_and_to_filter(self):
# test edge frates
V = frf.FRFilter(os.path.join(DATA_PATH, "PyGSM_Jy_downselect.uvh5"))
V.read()
for to_filter in [None, list(V.data.keys())[:1]]:
cfrates, wfrates = frf.sky_frates(uvd=V.hd, min_frate_half_width=1000, keys=to_filter)
# to_filter set to None -> all keys should be present.
if to_filter is None:
for k in V.data:
assert k in cfrates
assert k in wfrates
# min_frate = 1000 should set all wfrates to 1000
for k in cfrates:
assert wfrates[k] == 1000.
def test_load_dayenu_filter_and_write(self, tmpdir):
tmp_path = tmpdir.strpath
uvh5 = os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5")
cdir = os.path.join(tmp_path, 'cache_temp')
# make a cache directory
if os.path.isdir(cdir):
shutil.rmtree(cdir)
os.mkdir(cdir)
outfilename = os.path.join(tmp_path, 'temp.h5')
# run dayenu filter
avg_bl = True
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename,
cache_dir=cdir, mode='dayenu',
Nbls_per_load=1, clobber=True, avg_red_bllens=avg_bl,
spw_range=(0, 32), write_cache=True)
# generate duplicate cache files to test duplicate key handle for cache load.
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename, cache_dir=cdir,
mode='dayenu', avg_red_bllens=avg_bl,
Nbls_per_load=1, clobber=True, read_cache=False,
spw_range=(0, 32), write_cache=True)
# there should now be six cache files (one per i/o/filter). There are three baselines.
assert len(glob.glob(cdir + '/*')) == 6
hd = io.HERAData(outfilename)
assert 'Thisfilewasproducedbythefunction' in hd.history.replace('\n', '').replace(' ', '')
d, f, n = hd.read(bls=[(53, 54, 'ee')])
np.testing.assert_array_equal(f[(53, 54, 'ee')], True)
os.remove(outfilename)
shutil.rmtree(cdir)
os.mkdir(cdir)
# now do all the baselines at once.
for avg_bl in [True, False]:
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename,
cache_dir=cdir, mode='dayenu', avg_red_bllens=avg_bl,
Nbls_per_load=None, clobber=True,
spw_range=(0, 32), write_cache=True)
if avg_bl:
assert len(glob.glob(cdir + '/*')) == 1
hd = io.HERAData(outfilename)
assert 'Thisfilewasproducedbythefunction' in hd.history.replace('\n', '').replace(' ', '')
d, f, n = hd.read(bls=[(53, 54, 'ee')])
np.testing.assert_array_equal(f[(53, 54, 'ee')], True)
os.remove(outfilename)
shutil.rmtree(cdir)
os.mkdir(cdir)
# run again using computed cache.
calfile = os.path.join(DATA_PATH, "test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only")
frf.load_tophat_frfilter_and_write(uvh5, res_outfilename=outfilename, max_frate_coeffs=[0.0, 0.025],
cache_dir=cdir, calfile_list=calfile, read_cache=True,
Nbls_per_load=1, clobber=True, mode='dayenu',
spw_range=(0, 32), write_cache=True)
# no new cache files should be generated.
assert len(glob.glob(cdir + '/*')) == 1
hd = io.HERAData(outfilename)
assert 'Thisfilewasproducedbythefunction' in hd.history.replace('\n', '').replace(' ', '')
d, f, n = hd.read(bls=[(53, 54, 'ee')])
np.testing.assert_array_equal(f[(53, 54, 'ee')], True)
os.remove(outfilename)
shutil.rmtree(cdir)
def test_tophat_clean_argparser(self):
sys.argv = [sys.argv[0], 'a', '--clobber', '--window', 'blackmanharris', '--max_frate_coeffs', '0.024', '-0.229']
parser = frf.tophat_frfilter_argparser()
a = parser.parse_args()
assert a.datafilelist == ['a']
assert a.clobber is True
assert a.window == 'blackmanharris'
assert a.max_frate_coeffs[0] == 0.024
assert a.max_frate_coeffs[1] == -0.229
assert a.time_thresh == 0.05
assert not a.factorize_flags
def test_tophat_linear_argparser(self):
sys.argv = [sys.argv[0], 'a', '--clobber', '--write_cache', '--cache_dir', '/blah/', '--max_frate_coeffs', '0.024', '-0.229', '--mode', 'dayenu']
parser = frf.tophat_frfilter_argparser()
a = parser.parse_args()
assert a.datafilelist == ['a']
assert a.clobber is True
assert a.write_cache is True
assert a.cache_dir == '/blah/'
assert a.max_frate_coeffs[0] == 0.024
assert a.max_frate_coeffs[1] == -0.229
assert a.time_thresh == 0.05
assert not a.factorize_flags
parser = frf.tophat_frfilter_argparser()
a = parser.parse_args()
assert a.datafilelist == ['a']
assert a.clobber is True
assert a.write_cache is True
assert a.cache_dir == '/blah/'
assert a.max_frate_coeffs[0] == 0.024
assert a.max_frate_coeffs[1] == -0.229
assert a.time_thresh == 0.05
assert not a.factorize_flags
| [
"pytest.mark.filterwarnings",
"scipy.stats.norm.rvs",
"numpy.array",
"copy.deepcopy",
"numpy.linalg.norm",
"pyuvdata.UVData",
"os.remove",
"numpy.mean",
"os.path.exists",
"numpy.testing.assert_array_almost_equal",
"numpy.diff",
"pyuvdata.UVFlag",
"numpy.linspace",
"os.path.isdir",
"numpy.testing.assert_almost_equal",
"numpy.random.seed",
"os.mkdir",
"numpy.testing.assert_array_equal",
"glob.glob",
"numpy.abs",
"numpy.allclose",
"numpy.ones",
"numpy.any",
"pytest.raises",
"shutil.copy",
"numpy.ones_like",
"numpy.isclose",
"numpy.unique",
"numpy.fft.fftfreq",
"os.path.join",
"numpy.zeros",
"shutil.rmtree",
"numpy.all",
"numpy.zeros_like",
"pytest.warns"
] | [((463, 553), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:The default for the `center` keyword has changed"""'], {}), "(\n 'ignore:The default for the `center` keyword has changed')\n", (489, 553), False, 'import pytest\n'), ((4274, 4364), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:The default for the `center` keyword has changed"""'], {}), "(\n 'ignore:The default for the `center` keyword has changed')\n", (4300, 4364), False, 'import pytest\n'), ((591, 646), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""zen.2458042.12552.xx.HH.uvXA"""'], {}), "(DATA_PATH, 'zen.2458042.12552.xx.HH.uvXA')\n", (603, 646), False, 'import os\n'), ((658, 666), 'pyuvdata.UVData', 'UVData', ([], {}), '()\n', (664, 666), False, 'from pyuvdata import UVData\n'), ((795, 820), 'numpy.unique', 'np.unique', (['uvd.time_array'], {}), '(uvd.time_array)\n', (804, 820), True, 'import numpy as np\n'), ((964, 978), 'numpy.array', 'np.array', (['lsts'], {}), '(lsts)\n', (972, 978), True, 'import numpy as np\n'), ((1323, 1350), 'numpy.allclose', 'np.allclose', (['an[1, 0]', '(25.0)'], {}), '(an[1, 0], 25.0)\n', (1334, 1350), True, 'import numpy as np\n'), ((1362, 1389), 'numpy.allclose', 'np.allclose', (['an[2, 0]', '(10.0)'], {}), '(an[2, 0], 10.0)\n', (1373, 1389), True, 'import numpy as np\n'), ((1729, 1739), 'numpy.any', 'np.any', (['af'], {}), '(af)\n', (1735, 1739), True, 'import numpy as np\n'), ((2654, 2674), 'numpy.allclose', 'np.allclose', (['ad', 'ad2'], {}), '(ad, ad2)\n', (2665, 2674), True, 'import numpy as np\n'), ((2686, 2719), 'numpy.allclose', 'np.allclose', (['al', '(al2 - 1.52917804)'], {}), '(al, al2 - 1.52917804)\n', (2697, 2719), True, 'import numpy as np\n'), ((2904, 2920), 'numpy.ones', 'np.ones', (['(4, 10)'], {}), '((4, 10))\n', (2911, 2920), True, 'import numpy as np\n'), ((2946, 2962), 'numpy.ones', 'np.ones', (['(4, 10)'], {}), '((4, 10))\n', (2953, 2962), True, 'import numpy as np\n'), ((3083, 3127), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['ad[1, :]', '(1.0)'], {}), '(ad[1, :], 1.0)\n', (3112, 3127), True, 'import numpy as np\n'), ((3132, 3182), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['ad[0, 0:5]', '(5.0 / 3)'], {}), '(ad[0, 0:5], 5.0 / 3)\n', (3161, 3182), True, 'import numpy as np\n'), ((3186, 3233), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['ad[0, 5:10]', '(1.5)'], {}), '(ad[0, 5:10], 1.5)\n', (3215, 3233), True, 'import numpy as np\n'), ((3362, 3406), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['ad[1, :]', '(1.0)'], {}), '(ad[1, :], 1.0)\n', (3391, 3406), True, 'import numpy as np\n'), ((3411, 3455), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['ad[0, :]', '(1.6)'], {}), '(ad[0, :], 1.6)\n', (3440, 3455), True, 'import numpy as np\n'), ((3550, 3580), 'numpy.linspace', 'np.linspace', (['(-0.04)', '(0.04)', '(1024)'], {}), '(-0.04, 0.04, 1024)\n', (3561, 3580), True, 'import numpy as np\n'), ((3593, 3606), 'numpy.ones', 'np.ones', (['(1024)'], {}), '(1024)\n', (3600, 3606), True, 'import numpy as np\n'), ((3943, 3995), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['frp', '_frp.real'], {}), '(frp, _frp.real)\n', (3979, 3995), True, 'import numpy as np\n'), ((4168, 4182), 'numpy.zeros', 'np.zeros', (['(1024)'], {}), '(1024)\n', (4176, 4182), True, 'import numpy as np\n'), ((4244, 4270), 'numpy.allclose', 'np.allclose', (['t_ratio', '(1024)'], {}), '(t_ratio, 1024)\n', (4255, 4270), True, 'import numpy as np\n'), ((1301, 1311), 'numpy.any', 'np.any', (['af'], {}), '(af)\n', (1307, 1311), True, 'import numpy as np\n'), ((2747, 2772), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2760, 2772), False, 'import pytest\n'), ((3816, 3832), 'numpy.abs', 'np.abs', (['fir.imag'], {}), '(fir.imag)\n', (3822, 3832), True, 'import numpy as np\n'), ((4037, 4052), 'numpy.diff', 'np.diff', (['frbins'], {}), '(frbins)\n', (4044, 4052), True, 'import numpy as np\n'), ((4054, 4070), 'numpy.diff', 'np.diff', (['_frbins'], {}), '(_frbins)\n', (4061, 4070), True, 'import numpy as np\n'), ((4095, 4112), 'numpy.abs', 'np.abs', (['_frp.imag'], {}), '(_frp.imag)\n', (4101, 4112), True, 'import numpy as np\n'), ((4438, 4493), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""zen.2458042.12552.xx.HH.uvXA"""'], {}), "(DATA_PATH, 'zen.2458042.12552.xx.HH.uvXA')\n", (4450, 4493), False, 'import os\n'), ((5310, 5331), 'copy.deepcopy', 'copy.deepcopy', (['self.F'], {}), '(self.F)\n', (5323, 5331), False, 'import copy\n'), ((6243, 6343), 'pytest.raises', 'pytest.raises', (['AssertionError', 'self.F.timeavg_data', 'self.F.data', 'self.F.times', 'self.F.lsts', '(1.0)'], {}), '(AssertionError, self.F.timeavg_data, self.F.data, self.F.\n times, self.F.lsts, 1.0)\n', (6256, 6343), False, 'import pytest\n'), ((6505, 6560), 'numpy.ones', 'np.ones', (['(self.F.Ntimes, self.F.Nfreqs)'], {'dtype': 'np.float'}), '((self.F.Ntimes, self.F.Nfreqs), dtype=np.float)\n', (6512, 6560), True, 'import numpy as np\n'), ((6801, 6818), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (6815, 6818), True, 'import numpy as np\n'), ((7797, 7817), 'numpy.mean', 'np.mean', (['(dfft / rfft)'], {}), '(dfft / rfft)\n', (7804, 7817), True, 'import numpy as np\n'), ((8240, 8266), 'os.path.exists', 'os.path.exists', (['"""./out.uv"""'], {}), "('./out.uv')\n", (8254, 8266), False, 'import os\n'), ((8522, 8547), 'shutil.rmtree', 'shutil.rmtree', (['"""./out.uv"""'], {}), "('./out.uv')\n", (8535, 8547), False, 'import shutil\n'), ((8557, 8662), 'pytest.raises', 'pytest.raises', (['AssertionError', 'self.F.write_data', 'self.F.avg_data', '"""./out.uv"""'], {'times': 'self.F.avg_times'}), "(AssertionError, self.F.write_data, self.F.avg_data,\n './out.uv', times=self.F.avg_times)\n", (8570, 8662), False, 'import pytest\n'), ((8667, 8746), 'pytest.raises', 'pytest.raises', (['ValueError', 'self.F.write_data', 'self.F.data', '"""hi"""'], {'filetype': '"""foo"""'}), "(ValueError, self.F.write_data, self.F.data, 'hi', filetype='foo')\n", (8680, 8746), False, 'import pytest\n'), ((12186, 12242), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""zen.2458043.12552.xx.HH.uvORA"""'], {}), "(DATA_PATH, 'zen.2458043.12552.xx.HH.uvORA')\n", (12198, 12242), False, 'import os\n'), ((12830, 12886), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""zen.2458043.12552.xx.HH.uvORA"""'], {}), "(DATA_PATH, 'zen.2458043.12552.xx.HH.uvORA')\n", (12842, 12886), False, 'import os\n'), ((14413, 14446), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp.h5"""'], {}), "(tmp_path, 'temp.h5')\n", (14425, 14446), False, 'import os\n'), ((14462, 14498), 'os.path.join', 'os.path.join', (['tmp_path', '"""cache_temp"""'], {}), "(tmp_path, 'cache_temp')\n", (14474, 14498), False, 'import os\n'), ((14543, 14562), 'os.path.isdir', 'os.path.isdir', (['cdir'], {}), '(cdir)\n', (14556, 14562), False, 'import os\n'), ((14604, 14618), 'os.mkdir', 'os.mkdir', (['cdir'], {}), '(cdir)\n', (14612, 14618), False, 'import os\n'), ((16775, 16862), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5')\n", (16787, 16862), False, 'import os\n'), ((16880, 16927), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp_special_flags.h5"""'], {}), "(tmp_path, 'temp_special_flags.h5')\n", (16892, 16927), False, 'import os\n'), ((16936, 16965), 'shutil.copy', 'shutil.copy', (['uvh5', 'input_file'], {}), '(uvh5, input_file)\n', (16947, 16965), False, 'import shutil\n'), ((18304, 18342), 'glob.glob', 'glob.glob', (["(tmp_path + '/bl_chunk_*.h5')"], {}), "(tmp_path + '/bl_chunk_*.h5')\n", (18313, 18342), False, 'import glob\n'), ((18684, 18748), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/a_priori_flags_sample.yaml"""'], {}), "(DATA_PATH, 'test_input/a_priori_flags_sample.yaml')\n", (18696, 18748), False, 'import os\n'), ((18763, 18803), 'pyuvdata.UVFlag', 'UVFlag', (['hd'], {'mode': '"""flag"""', 'copy_flags': '(True)'}), "(hd, mode='flag', copy_flags=True)\n", (18769, 18803), False, 'from pyuvdata import UVFlag\n'), ((18912, 18950), 'os.path.join', 'os.path.join', (['tmp_path', '"""test_flag.h5"""'], {}), "(tmp_path, 'test_flag.h5')\n", (18924, 18950), False, 'import os\n'), ((20276, 20298), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (20285, 20298), False, 'import os\n'), ((20307, 20326), 'shutil.rmtree', 'shutil.rmtree', (['cdir'], {}), '(cdir)\n', (20320, 20326), False, 'import shutil\n'), ((20604, 20646), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp_output.uvh5"""'], {}), "(tmp_path, 'temp_output.uvh5')\n", (20616, 20646), False, 'import os\n'), ((21093, 21180), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5')\n", (21105, 21180), False, 'import os\n'), ((21199, 21232), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp.h5"""'], {}), "(tmp_path, 'temp.h5')\n", (21211, 21232), False, 'import os\n'), ((21261, 21300), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp_clean.h5"""'], {}), "(tmp_path, 'temp_clean.h5')\n", (21273, 21300), False, 'import os\n'), ((21330, 21370), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp_filled.h5"""'], {}), "(tmp_path, 'temp_filled.h5')\n", (21342, 21370), False, 'import os\n'), ((21829, 21924), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (["d[53, 54, 'ee']", "frfil.clean_resid[53, 54,\n 'ee']"], {'decimal': '(5)'}), "(d[53, 54, 'ee'], frfil.clean_resid[53, 54,\n 'ee'], decimal=5)\n", (21859, 21924), True, 'import numpy as np\n'), ((21933, 22006), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["f[53, 54, 'ee']", "frfil.flags[53, 54, 'ee']"], {}), "(f[53, 54, 'ee'], frfil.flags[53, 54, 'ee'])\n", (21962, 22006), True, 'import numpy as np\n'), ((22054, 22279), 'pytest.raises', 'pytest.raises', (['NotImplementedError', 'frf.load_tophat_frfilter_and_write', 'uvh5'], {'res_outfilename': 'outfilename', 'tol': '(0.0001)', 'clobber': '(True)', 'Nbls_per_load': '(1)', 'avg_red_bllens': '(True)', 'baseline_list': '[(54, 54)]', 'polarizations': "['ee']"}), "(NotImplementedError, frf.load_tophat_frfilter_and_write, uvh5,\n res_outfilename=outfilename, tol=0.0001, clobber=True, Nbls_per_load=1,\n avg_red_bllens=True, baseline_list=[(54, 54)], polarizations=['ee'])\n", (22067, 22279), False, 'import pytest\n'), ((22366, 22453), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5')\n", (22378, 22453), False, 'import os\n'), ((22472, 22505), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp.h5"""'], {}), "(tmp_path, 'temp.h5')\n", (22484, 22505), False, 'import os\n'), ((23094, 23189), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (["d[53, 54, 'ee']", "frfil.clean_resid[53, 54,\n 'ee']"], {'decimal': '(5)'}), "(d[53, 54, 'ee'], frfil.clean_resid[53, 54,\n 'ee'], decimal=5)\n", (23124, 23189), True, 'import numpy as np\n'), ((23198, 23271), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["f[53, 54, 'ee']", "frfil.flags[53, 54, 'ee']"], {}), "(f[53, 54, 'ee'], frfil.flags[53, 54, 'ee'])\n", (23227, 23271), True, 'import numpy as np\n'), ((23291, 23380), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only')\n", (23303, 23380), False, 'import os\n'), ((23399, 23432), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp.h5"""'], {}), "(tmp_path, 'temp.h5')\n", (23411, 23432), False, 'import os\n'), ((23441, 23463), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (23450, 23463), False, 'import os\n'), ((25510, 25557), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp_special_flags.h5"""'], {}), "(tmp_path, 'temp_special_flags.h5')\n", (25522, 25557), False, 'import os\n'), ((25566, 25595), 'shutil.copy', 'shutil.copy', (['uvh5', 'input_file'], {}), '(uvh5, input_file)\n', (25577, 25595), False, 'import shutil\n'), ((27888, 27952), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/a_priori_flags_sample.yaml"""'], {}), "(DATA_PATH, 'test_input/a_priori_flags_sample.yaml')\n", (27900, 27952), False, 'import os\n'), ((27967, 28007), 'pyuvdata.UVFlag', 'UVFlag', (['hd'], {'mode': '"""flag"""', 'copy_flags': '(True)'}), "(hd, mode='flag', copy_flags=True)\n", (27973, 28007), False, 'from pyuvdata import UVFlag\n'), ((28116, 28154), 'os.path.join', 'os.path.join', (['tmp_path', '"""test_flag.h5"""'], {}), "(tmp_path, 'test_flag.h5')\n", (28128, 28154), False, 'import os\n'), ((29962, 30049), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.uvh5')\n", (29974, 30049), False, 'import os\n'), ((30061, 30097), 'os.path.join', 'os.path.join', (['tmp_path', '"""cache_temp"""'], {}), "(tmp_path, 'cache_temp')\n", (30073, 30097), False, 'import os\n'), ((30142, 30161), 'os.path.isdir', 'os.path.isdir', (['cdir'], {}), '(cdir)\n', (30155, 30161), False, 'import os\n'), ((30203, 30217), 'os.mkdir', 'os.mkdir', (['cdir'], {}), '(cdir)\n', (30211, 30217), False, 'import os\n'), ((30240, 30273), 'os.path.join', 'os.path.join', (['tmp_path', '"""temp.h5"""'], {}), "(tmp_path, 'temp.h5')\n", (30252, 30273), False, 'import os\n'), ((31422, 31474), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["f[53, 54, 'ee']", '(True)'], {}), "(f[53, 54, 'ee'], True)\n", (31451, 31474), True, 'import numpy as np\n'), ((31485, 31507), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (31494, 31507), False, 'import os\n'), ((31516, 31535), 'shutil.rmtree', 'shutil.rmtree', (['cdir'], {}), '(cdir)\n', (31529, 31535), False, 'import shutil\n'), ((31544, 31558), 'os.mkdir', 'os.mkdir', (['cdir'], {}), '(cdir)\n', (31552, 31558), False, 'import os\n'), ((32374, 32393), 'shutil.rmtree', 'shutil.rmtree', (['cdir'], {}), '(cdir)\n', (32387, 32393), False, 'import shutil\n'), ((32402, 32416), 'os.mkdir', 'os.mkdir', (['cdir'], {}), '(cdir)\n', (32410, 32416), False, 'import os\n'), ((32477, 32566), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only')\n", (32489, 32566), False, 'import os\n'), ((33230, 33282), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["f[53, 54, 'ee']", '(True)'], {}), "(f[53, 54, 'ee'], True)\n", (33259, 33282), True, 'import numpy as np\n'), ((33293, 33315), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (33302, 33315), False, 'import os\n'), ((33324, 33343), 'shutil.rmtree', 'shutil.rmtree', (['cdir'], {}), '(cdir)\n', (33337, 33343), False, 'import shutil\n'), ((5556, 5589), 'numpy.isclose', 'np.isclose', (['F.avg_data[k][0]', '(0.0)'], {}), '(F.avg_data[k][0], 0.0)\n', (5566, 5589), True, 'import numpy as np\n'), ((5657, 5694), 'numpy.isclose', 'np.isclose', (['F.avg_nsamples[k][0]', '(0.0)'], {}), '(F.avg_nsamples[k][0], 0.0)\n', (5667, 5694), True, 'import numpy as np\n'), ((5753, 5791), 'numpy.isclose', 'np.isclose', (['F.avg_nsamples[k][1:]', '(3.0)'], {}), '(F.avg_nsamples[k][1:], 3.0)\n', (5763, 5791), True, 'import numpy as np\n'), ((6138, 6175), 'numpy.isclose', 'np.isclose', (['F.avg_nsamples[k][0]', '(0.0)'], {}), '(F.avg_nsamples[k][0], 0.0)\n', (6148, 6175), True, 'import numpy as np\n'), ((7696, 7719), 'numpy.abs', 'np.abs', (['self.F.dfft[bl]'], {}), '(self.F.dfft[bl])\n', (7702, 7719), True, 'import numpy as np\n'), ((7752, 7775), 'numpy.abs', 'np.abs', (['self.F.rfft[bl]'], {}), '(self.F.rfft[bl])\n', (7758, 7775), True, 'import numpy as np\n'), ((9491, 9540), 'numpy.allclose', 'np.allclose', (['data_out.data[k]', 'self.F.avg_data[k]'], {}), '(data_out.data[k], self.F.avg_data[k])\n', (9502, 9540), True, 'import numpy as np\n'), ((9560, 9611), 'numpy.allclose', 'np.allclose', (['data_out.flags[k]', 'self.F.avg_flags[k]'], {}), '(data_out.flags[k], self.F.avg_flags[k])\n', (9571, 9611), True, 'import numpy as np\n'), ((9631, 9688), 'numpy.allclose', 'np.allclose', (['data_out.nsamples[k]', 'self.F.avg_nsamples[k]'], {}), '(data_out.nsamples[k], self.F.avg_nsamples[k])\n', (9642, 9688), True, 'import numpy as np\n'), ((9920, 9964), 'glob.glob', 'glob.glob', (["(DATA_PATH + '/zen.2458045.*.uvh5')"], {}), "(DATA_PATH + '/zen.2458045.*.uvh5')\n", (9929, 9964), False, 'import glob\n'), ((13974, 14067), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.first.uvh5"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.first.uvh5')\n", (13986, 14067), False, 'import os\n'), ((14081, 14175), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.second.uvh5"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.OCR_53x_54x_only.second.uvh5')\n", (14093, 14175), False, 'import os\n'), ((14189, 14284), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only.part1"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only.part1')\n", (14201, 14284), False, 'import os\n'), ((14298, 14393), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only.part2"""'], {}), "(DATA_PATH,\n 'test_input/zen.2458101.46106.xx.HH.uv.abs.calfits_54x_only.part2')\n", (14310, 14393), False, 'import os\n'), ((14576, 14595), 'shutil.rmtree', 'shutil.rmtree', (['cdir'], {}), '(cdir)\n', (14589, 14595), False, 'import shutil\n'), ((14696, 14724), 'pytest.warns', 'pytest.warns', (['RuntimeWarning'], {}), '(RuntimeWarning)\n', (14708, 14724), False, 'import pytest\n'), ((17764, 17812), 'os.path.join', 'os.path.join', (['tmp_path', "('bl_chunk_%d.h5' % blnum)"], {}), "(tmp_path, 'bl_chunk_%d.h5' % blnum)\n", (17776, 17812), False, 'import os\n'), ((18560, 18580), 'numpy.all', 'np.all', (['f[bl][:, -1]'], {}), '(f[bl][:, -1])\n', (18566, 18580), True, 'import numpy as np\n'), ((18600, 18619), 'numpy.all', 'np.all', (['f[bl][0, :]'], {}), '(f[bl][0, :])\n', (18606, 18619), True, 'import numpy as np\n'), ((19501, 19514), 'numpy.all', 'np.all', (['(~f[k])'], {}), '(~f[k])\n', (19507, 19514), True, 'import numpy as np\n'), ((20130, 20148), 'numpy.all', 'np.all', (['f[k][:, 0]'], {}), '(f[k][:, 0])\n', (20136, 20148), True, 'import numpy as np\n'), ((20168, 20186), 'numpy.all', 'np.all', (['f[k][:, 1]'], {}), '(f[k][:, 1])\n', (20174, 20186), True, 'import numpy as np\n'), ((20206, 20228), 'numpy.all', 'np.all', (['f[k][:, 10:20]'], {}), '(f[k][:, 10:20])\n', (20212, 20228), True, 'import numpy as np\n'), ((20248, 20267), 'numpy.all', 'np.all', (['f[k][:, 60]'], {}), '(f[k][:, 60])\n', (20254, 20267), True, 'import numpy as np\n'), ((20502, 20546), 'glob.glob', 'glob.glob', (["(DATA_PATH + '/zen.2458045.*.uvh5')"], {}), "(DATA_PATH + '/zen.2458045.*.uvh5')\n", (20511, 20546), False, 'import glob\n'), ((24025, 24077), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["f[53, 54, 'ee']", '(True)'], {}), "(f[53, 54, 'ee'], True)\n", (24054, 24077), True, 'import numpy as np\n'), ((24092, 24114), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (24101, 24114), False, 'import os\n'), ((26642, 26663), 'numpy.any', 'np.any', (['f[bl][:, :-1]'], {}), '(f[bl][:, :-1])\n', (26648, 26663), True, 'import numpy as np\n'), ((26683, 26702), 'numpy.all', 'np.all', (['f[bl][0, :]'], {}), '(f[bl][0, :])\n', (26689, 26702), True, 'import numpy as np\n'), ((27158, 27177), 'numpy.all', 'np.all', (['f[bl][0, :]'], {}), '(f[bl][0, :])\n', (27164, 27177), True, 'import numpy as np\n'), ((27197, 27217), 'numpy.all', 'np.all', (['f[bl][:, -1]'], {}), '(f[bl][:, -1])\n', (27203, 27217), True, 'import numpy as np\n'), ((27662, 27681), 'numpy.all', 'np.all', (['f[bl][0, :]'], {}), '(f[bl][0, :])\n', (27668, 27681), True, 'import numpy as np\n'), ((27701, 27721), 'numpy.all', 'np.all', (['f[bl][:, -1]'], {}), '(f[bl][:, -1])\n', (27707, 27721), True, 'import numpy as np\n'), ((28662, 28675), 'numpy.all', 'np.all', (['(~f[k])'], {}), '(~f[k])\n', (28668, 28675), True, 'import numpy as np\n'), ((29158, 29171), 'numpy.all', 'np.all', (['(~f[k])'], {}), '(~f[k])\n', (29164, 29171), True, 'import numpy as np\n'), ((29279, 29330), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""PyGSM_Jy_downselect.uvh5"""'], {}), "(DATA_PATH, 'PyGSM_Jy_downselect.uvh5')\n", (29291, 29330), False, 'import os\n'), ((30175, 30194), 'shutil.rmtree', 'shutil.rmtree', (['cdir'], {}), '(cdir)\n', (30188, 30194), False, 'import shutil\n'), ((32276, 32328), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["f[53, 54, 'ee']", '(True)'], {}), "(f[53, 54, 'ee'], True)\n", (32305, 32328), True, 'import numpy as np\n'), ((32343, 32365), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (32352, 32365), False, 'import os\n'), ((3686, 3701), 'numpy.diff', 'np.diff', (['frbins'], {}), '(frbins)\n', (3693, 3701), True, 'import numpy as np\n'), ((3769, 3785), 'numpy.abs', 'np.abs', (['fir.real'], {}), '(fir.real)\n', (3775, 3785), True, 'import numpy as np\n'), ((3909, 3923), 'numpy.diff', 'np.diff', (['tbins'], {}), '(tbins)\n', (3916, 3923), True, 'import numpy as np\n'), ((6035, 6074), 'numpy.isclose', 'np.isclose', (['F.avg_data[k][0, 5:-5]', '(0.0)'], {}), '(F.avg_data[k][0, 5:-5], 0.0)\n', (6045, 6074), True, 'import numpy as np\n'), ((6442, 6485), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['self.F.Ntimes', 'self.F.dtime'], {}), '(self.F.Ntimes, self.F.dtime)\n', (6456, 6485), True, 'import numpy as np\n'), ((6571, 6585), 'numpy.abs', 'np.abs', (['frates'], {}), '(frates)\n', (6577, 6585), True, 'import numpy as np\n'), ((6856, 6907), 'scipy.stats.norm.rvs', 'stats.norm.rvs', (['(0)', '(1)', '(self.F.Ntimes * self.F.Nfreqs)'], {}), '(0, 1, self.F.Ntimes * self.F.Nfreqs)\n', (6870, 6907), False, 'from scipy import stats\n'), ((7856, 7889), 'numpy.mean', 'np.mean', (['self.F.filt_nsamples[bl]'], {}), '(self.F.filt_nsamples[bl])\n', (7863, 7889), True, 'import numpy as np\n'), ((10305, 10333), 'pytest.warns', 'pytest.warns', (['RuntimeWarning'], {}), '(RuntimeWarning)\n', (10317, 10333), False, 'import pytest\n'), ((11301, 11351), 'numpy.isclose', 'np.isclose', (['data_out.data[k]', 'data_out_bls.data[k]'], {}), '(data_out.data[k], data_out_bls.data[k])\n', (11311, 11351), True, 'import numpy as np\n'), ((11379, 11431), 'numpy.isclose', 'np.isclose', (['data_out.flags[k]', 'data_out_bls.flags[k]'], {}), '(data_out.flags[k], data_out_bls.flags[k])\n', (11389, 11431), True, 'import numpy as np\n'), ((11459, 11517), 'numpy.isclose', 'np.isclose', (['data_out.nsamples[k]', 'data_out_bls.nsamples[k]'], {}), '(data_out.nsamples[k], data_out_bls.nsamples[k])\n', (11469, 11517), True, 'import numpy as np\n'), ((12366, 12417), 'numpy.linalg.norm', 'np.linalg.norm', (['(frfil.antpos[24] - frfil.antpos[25])'], {}), '(frfil.antpos[24] - frfil.antpos[25])\n', (12380, 12417), True, 'import numpy as np\n'), ((13228, 13272), 'numpy.ones_like', 'np.ones_like', (['frfil.flags[k]'], {'dtype': 'np.float'}), '(frfil.flags[k], dtype=np.float)\n', (13240, 13272), True, 'import numpy as np\n'), ((13588, 13622), 'numpy.ones_like', 'np.ones_like', (['frfil.flags[k][:, 0]'], {}), '(frfil.flags[k][:, 0])\n', (13600, 13622), True, 'import numpy as np\n'), ((13694, 13735), 'numpy.zeros_like', 'np.zeros_like', (['frfil.clean_resid[k][:, 0]'], {}), '(frfil.clean_resid[k][:, 0])\n', (13707, 13735), True, 'import numpy as np\n'), ((13807, 13848), 'numpy.zeros_like', 'np.zeros_like', (['frfil.clean_resid[k][:, 0]'], {}), '(frfil.clean_resid[k][:, 0])\n', (13820, 13848), True, 'import numpy as np\n'), ((24921, 24947), 'numpy.allclose', 'np.allclose', (['do[bl]', 'd[bl]'], {}), '(do[bl], d[bl])\n', (24932, 24947), True, 'import numpy as np\n'), ((25013, 25039), 'numpy.allclose', 'np.allclose', (['fo[bl]', 'f[bl]'], {}), '(fo[bl], f[bl])\n', (25024, 25039), True, 'import numpy as np\n'), ((25063, 25089), 'numpy.allclose', 'np.allclose', (['no[bl]', 'n[bl]'], {}), '(no[bl], n[bl])\n', (25074, 25089), True, 'import numpy as np\n'), ((25220, 25262), 'numpy.allclose', 'np.allclose', (['fd[bl][~f[bl]]', 'd[bl][~f[bl]]'], {}), '(fd[bl][~f[bl]], d[bl][~f[bl]])\n', (25231, 25262), True, 'import numpy as np\n'), ((25406, 25432), 'numpy.allclose', 'np.allclose', (['no[bl]', 'n[bl]'], {}), '(no[bl], n[bl])\n', (25417, 25432), True, 'import numpy as np\n'), ((31200, 31222), 'glob.glob', 'glob.glob', (["(cdir + '/*')"], {}), "(cdir + '/*')\n", (31209, 31222), False, 'import glob\n'), ((33008, 33030), 'glob.glob', 'glob.glob', (["(cdir + '/*')"], {}), "(cdir + '/*')\n", (33017, 33030), False, 'import glob\n'), ((6952, 7003), 'scipy.stats.norm.rvs', 'stats.norm.rvs', (['(0)', '(1)', '(self.F.Ntimes * self.F.Nfreqs)'], {}), '(0, 1, self.F.Ntimes * self.F.Nfreqs)\n', (6966, 7003), False, 'from scipy import stats\n'), ((21627, 21649), 'numpy.isclose', 'np.isclose', (['d[bl]', '(0.0)'], {}), '(d[bl], 0.0)\n', (21637, 21649), True, 'import numpy as np\n'), ((23937, 23950), 'numpy.all', 'np.all', (['f[bl]'], {}), '(f[bl])\n', (23943, 23950), True, 'import numpy as np\n'), ((25133, 25154), 'numpy.zeros_like', 'np.zeros_like', (['cd[bl]'], {}), '(cd[bl])\n', (25146, 25154), True, 'import numpy as np\n'), ((25356, 25382), 'numpy.allclose', 'np.allclose', (['do[bl]', 'd[bl]'], {}), '(do[bl], d[bl])\n', (25367, 25382), True, 'import numpy as np\n'), ((27248, 27270), 'numpy.isclose', 'np.isclose', (['d[bl]', '(0.0)'], {}), '(d[bl], 0.0)\n', (27258, 27270), True, 'import numpy as np\n'), ((27752, 27774), 'numpy.isclose', 'np.isclose', (['d[bl]', '(0.0)'], {}), '(d[bl], 0.0)\n', (27762, 27774), True, 'import numpy as np\n'), ((22892, 22914), 'numpy.isclose', 'np.isclose', (['d[bl]', '(0.0)'], {}), '(d[bl], 0.0)\n', (22902, 22914), True, 'import numpy as np\n'), ((32038, 32060), 'glob.glob', 'glob.glob', (["(cdir + '/*')"], {}), "(cdir + '/*')\n", (32047, 32060), False, 'import glob\n'), ((23990, 24012), 'numpy.isclose', 'np.isclose', (['d[bl]', '(0.0)'], {}), '(d[bl], 0.0)\n', (24000, 24012), True, 'import numpy as np\n')]
from dash import Dash, dcc, html, Input, Output
import plotly.express as px
import numpy as np
app = Dash(__name__)
app.layout = html.Div(
[
html.H4("Interactive normal distribution"),
dcc.Graph(id="histograms-x-graph"),
html.P("Mean:"),
dcc.Slider(
id="histograms-x-mean", min=-3, max=3, value=0, marks={-3: "-3", 3: "3"}
),
html.P("Standard Deviation:"),
dcc.Slider(
id="histograms-x-std", min=1, max=3, value=1, marks={1: "1", 3: "3"}
),
]
)
@app.callback(
Output("histograms-x-graph", "figure"),
Input("histograms-x-mean", "value"),
Input("histograms-x-std", "value"),
)
def display_color(mean, std):
data = np.random.normal(mean, std, size=500) # replace with your own data source
fig = px.histogram(data, range_x=[-10, 10])
return fig
if __name__ == "__main__":
app.run_server(debug=True)
| [
"numpy.random.normal",
"plotly.express.histogram",
"dash.html.H4",
"dash.Input",
"dash.Output",
"dash.dcc.Slider",
"dash.dcc.Graph",
"dash.html.P",
"dash.Dash"
] | [((102, 116), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (106, 116), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((729, 766), 'numpy.random.normal', 'np.random.normal', (['mean', 'std'], {'size': '(500)'}), '(mean, std, size=500)\n', (745, 766), True, 'import numpy as np\n'), ((814, 851), 'plotly.express.histogram', 'px.histogram', (['data'], {'range_x': '[-10, 10]'}), '(data, range_x=[-10, 10])\n', (826, 851), True, 'import plotly.express as px\n'), ((565, 603), 'dash.Output', 'Output', (['"""histograms-x-graph"""', '"""figure"""'], {}), "('histograms-x-graph', 'figure')\n", (571, 603), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((609, 644), 'dash.Input', 'Input', (['"""histograms-x-mean"""', '"""value"""'], {}), "('histograms-x-mean', 'value')\n", (614, 644), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((650, 684), 'dash.Input', 'Input', (['"""histograms-x-std"""', '"""value"""'], {}), "('histograms-x-std', 'value')\n", (655, 684), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((156, 198), 'dash.html.H4', 'html.H4', (['"""Interactive normal distribution"""'], {}), "('Interactive normal distribution')\n", (163, 198), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((208, 242), 'dash.dcc.Graph', 'dcc.Graph', ([], {'id': '"""histograms-x-graph"""'}), "(id='histograms-x-graph')\n", (217, 242), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((252, 267), 'dash.html.P', 'html.P', (['"""Mean:"""'], {}), "('Mean:')\n", (258, 267), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((277, 369), 'dash.dcc.Slider', 'dcc.Slider', ([], {'id': '"""histograms-x-mean"""', 'min': '(-3)', 'max': '(3)', 'value': '(0)', 'marks': "{(-3): '-3', (3): '3'}"}), "(id='histograms-x-mean', min=-3, max=3, value=0, marks={(-3):\n '-3', (3): '3'})\n", (287, 369), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((393, 422), 'dash.html.P', 'html.P', (['"""Standard Deviation:"""'], {}), "('Standard Deviation:')\n", (399, 422), False, 'from dash import Dash, dcc, html, Input, Output\n'), ((432, 521), 'dash.dcc.Slider', 'dcc.Slider', ([], {'id': '"""histograms-x-std"""', 'min': '(1)', 'max': '(3)', 'value': '(1)', 'marks': "{(1): '1', (3): '3'}"}), "(id='histograms-x-std', min=1, max=3, value=1, marks={(1): '1', (\n 3): '3'})\n", (442, 521), False, 'from dash import Dash, dcc, html, Input, Output\n')] |
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.http import (Http404, HttpResponse, HttpResponseForbidden,
HttpResponseRedirect)
from django.template import loader
from secretballot.utils import get_vote_model
Vote = get_vote_model()
def vote(
request,
content_type,
object_id,
vote,
can_vote_test=None,
redirect_url=None,
template_name=None,
template_loader=loader,
extra_context=None,
context_processors=None,
mimetype=None,
):
# get the token from a SecretBallotMiddleware
if not hasattr(request, "secretballot_token"):
raise ImproperlyConfigured(
"To use secretballot a SecretBallotMiddleware must be installed. (see secretballot/middleware.py)"
)
token = request.secretballot_token
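    # resolve content_type: accept a ContentType instance, a model class, or an "app.modelname" string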
if isinstance(content_type, ContentType):
pass
elif isinstance(content_type, ModelBase):
content_type = ContentType.objects.get_for_model(content_type)
elif isinstance(content_type, str) and "." in content_type:
app, modelname = content_type.split(".")
content_type = ContentType.objects.get(app_label=app, model__iexact=modelname)
else:
raise ValueError("content_type must be an instance of ContentType, a model, or \"app.modelname\" string")
# do the action
if vote:
# 404 if object to be voted upon doesn't exist
if content_type.model_class().objects.filter(pk=object_id).count() == 0:
raise Http404
# if there is a can_vote_test func specified, test then 403 if needed
if can_vote_test:
if not can_vote_test(request, content_type, object_id, vote):
return HttpResponseForbidden("vote was forbidden")
vobj, new = Vote.objects.get_or_create(
content_type=content_type, object_id=object_id, token=token, defaults={"vote": vote}
)
if not new:
vobj.vote = vote
vobj.save()
else:
Vote.objects.filter(content_type=content_type, object_id=object_id, token=token).delete()
# build the response
if redirect_url:
return HttpResponseRedirect(redirect_url)
elif template_name:
# get_object_for_this_type uses _base_manager, but we only set
# _default_manager. Drop to lower level API.
content_obj = content_type.model_class()._default_manager.using(content_type._state.db).get(pk=object_id)
c = {"content_obj": content_obj}
# copy extra_context into context, calling any callables
if extra_context:
for k, v in extra_context.items():
if callable(v):
c[k] = v()
else:
c[k] = v
t = template_loader.get_template(template_name)
body = t.render(c, request)
else:
votes = Vote.objects.filter(content_type=content_type, object_id=object_id).count()
body = '{"num_votes":%d}' % votes
return HttpResponse(body, content_type=mimetype)
| [
"django.http.HttpResponseRedirect",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"secretballot.utils.get_vote_model",
"django.contrib.contenttypes.models.ContentType.objects.get",
"django.http.HttpResponse",
"django.http.HttpResponseForbidden",
"django.core.exceptions.ImproperlyConfigured"
] | [((367, 383), 'secretballot.utils.get_vote_model', 'get_vote_model', ([], {}), '()\n', (381, 383), False, 'from secretballot.utils import get_vote_model\n'), ((3106, 3147), 'django.http.HttpResponse', 'HttpResponse', (['body'], {'content_type': 'mimetype'}), '(body, content_type=mimetype)\n', (3118, 3147), False, 'from django.http import Http404, HttpResponse, HttpResponseForbidden, HttpResponseRedirect\n'), ((742, 872), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['"""To use secretballot a SecretBallotMiddleware must be installed. (see secretballot/middleware.py)"""'], {}), "(\n 'To use secretballot a SecretBallotMiddleware must be installed. (see secretballot/middleware.py)'\n )\n", (762, 872), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((2266, 2300), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['redirect_url'], {}), '(redirect_url)\n', (2286, 2300), False, 'from django.http import Http404, HttpResponse, HttpResponseForbidden, HttpResponseRedirect\n'), ((1053, 1100), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['content_type'], {}), '(content_type)\n', (1086, 1100), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1237, 1300), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'app_label': 'app', 'model__iexact': 'modelname'}), '(app_label=app, model__iexact=modelname)\n', (1260, 1300), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1823, 1866), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', (['"""vote was forbidden"""'], {}), "('vote was forbidden')\n", (1844, 1866), False, 'from django.http import Http404, HttpResponse, HttpResponseForbidden, HttpResponseRedirect\n')] |
# Created by <NAME> on 2021/8/28, 23:59
from common.tools.utils import check_state
from node.tagTreeNote.utils.utils import verify_folder, DEFAULT_PATH_SEPARATOR, common_heading_sub_array
import json
class Path:
# Object should be immutable after init
def __init__(self, path="", validate_folder_name=True):
# "/" is reserved for the default separator.
self.separator = DEFAULT_PATH_SEPARATOR
# a string in the format of "a/b/c" to represent a path
# "" can be used
self.path = path.strip().strip(self.separator).strip()
if validate_folder_name and self.separator in self.path:
self._strip_folder_name()
if validate_folder_name:
for folder in self.split():
check_state(verify_folder(folder))
def _strip_folder_name(self):
all_folder_in_order = self.path.split(self.separator)
result = ""
for folder_name in all_folder_in_order:
folder_name = folder_name.strip()
check_state(len(folder_name) > 0, self.path + "is invalid")
result += folder_name + self.separator
self.path = result[:-1]
def get_path(self):
return self.path
def common_path(self, other):
self_folders = self.split()
other_folders = other.split()
shared_parent = common_heading_sub_array(self_folders, other_folders)
return Path(self.separator.join(shared_parent))
# turn the path into a list of folder name
def split(self) -> list:
return self.path.split(self.separator) if len(self.path) > 0 else []
# return depth of the path (number of folders along the way)
def depth(self) -> int:
return self.path.count(self.separator) + 1 if len(self.path) > 0 else 0
def parent(self):
if self.depth() <= 1:
return Path()
else:
return Path(self.path[: self.path.rindex(self.separator)], validate_folder_name=False)
def parent_(self, level=1):
result = self
for i in range(level):
result = result.parent()
return result
def copy(self):
return Path(self.path, validate_folder_name=False)
def parent_or_self(self):
if self.depth() <= 1:
return self.copy()
else:
return Path(self.path[: self.path.rindex(self.separator)], validate_folder_name=False)
def child(self, child_name: str):
child_name = child_name.strip().strip(self.separator).strip()
check_state(len(child_name) > 0, child_name + "is invalid")
if len(self.path) == 0:
return Path(child_name)
return Path(self.path + self.separator + child_name)
def sibling(self, sibling_name: str):
sibling_name = sibling_name.strip().strip(self.separator).strip()
if self.depth() <= 1:
return Path(sibling_name)
else:
return Path(self.path[: self.path.rindex(self.separator)] + self.separator + sibling_name)
def get_leaf(self) -> str:
if self.separator in self.path:
            # slice from just past the last separator to the end (indexing a single char was a bug)
            return self.path[self.path.rindex(self.separator) + len(self.separator):]
else:
return self.path
def is_child_of(self, other_path) -> bool:
check_state(self.separator == other_path.separator, "The separator used in two path are not the same")
return self.path.startswith(other_path.path)
def is_parent_of(self, other_path) -> bool:
return other_path.is_child_of(self)
def to_map(self) -> dict:
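        # build a nested single-key dict from the innermost folder outward, e.g. "a/b" -> {'a': {'b': {}}}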
all_folder = self.split()
child = {}
for i in range(len(all_folder) - 1, -1, -1):
current = {all_folder[i]: child}
child = current
return child
def __repr__(self):
return self.path
def __str__(self):
return self.path
def __lt__(self, other):
a = self.split()
b = other.split()
l = min(self.depth(), other.depth())
for i in range(l):
if a[i] != b[i]:
return a[i] < b[i]
return self.depth() < other.depth()
def __gt__(self, other):
a = self.split()
b = other.split()
l = min(self.depth(), other.depth())
for i in range(l):
if a[i] != b[i]:
return a[i] > b[i]
return self.depth() > other.depth()
def __eq__(self, other):
return self.path == other.path
def toJson(self):
return json.dumps(self, default=lambda o: o.__dict__)
if __name__ == '__main__':
path1 = Path("a/b/c")
path2 = Path("b/c")
path3 = Path("a/")
path4 = Path(" / a/b")
print(path1.is_child_of(path2))
print(path1.is_child_of(path3))
print(path1.is_child_of(path4))
print(path1 > path4)
t = path4.get_path()
t = "a/c"
print(path4)
print(path4.to_map())
print(",".join([]))
| [
"common.tools.utils.check_state",
"json.dumps",
"node.tagTreeNote.utils.utils.common_heading_sub_array",
"node.tagTreeNote.utils.utils.verify_folder"
] | [((1347, 1400), 'node.tagTreeNote.utils.utils.common_heading_sub_array', 'common_heading_sub_array', (['self_folders', 'other_folders'], {}), '(self_folders, other_folders)\n', (1371, 1400), False, 'from node.tagTreeNote.utils.utils import verify_folder, DEFAULT_PATH_SEPARATOR, common_heading_sub_array\n'), ((3271, 3377), 'common.tools.utils.check_state', 'check_state', (['(self.separator == other_path.separator)', '"""The separator used in two path are not the same"""'], {}), "(self.separator == other_path.separator,\n 'The separator used in two path are not the same')\n", (3282, 3377), False, 'from common.tools.utils import check_state\n'), ((4479, 4525), 'json.dumps', 'json.dumps', (['self'], {'default': '(lambda o: o.__dict__)'}), '(self, default=lambda o: o.__dict__)\n', (4489, 4525), False, 'import json\n'), ((775, 796), 'node.tagTreeNote.utils.utils.verify_folder', 'verify_folder', (['folder'], {}), '(folder)\n', (788, 796), False, 'from node.tagTreeNote.utils.utils import verify_folder, DEFAULT_PATH_SEPARATOR, common_heading_sub_array\n')] |
import json
import logging
from pathlib import Path, WindowsPath
from modules.steam_utils import SteamApps
from shared_modules import Condition, Gate, Process, Profile, Task
from shared_modules.migrate import Session
class ProfileImportExport:
_count = 0
_import_models = (Profile, Task, Condition, Gate, Process)
auto_detected_msg_ls = list()
steam_apps: SteamApps = None
known_app_executables = dict()
@staticmethod
def export(profile: Profile, path: Path):
try:
data = profile.to_dict()
with open(path.as_posix(), 'w') as f:
json.dump(data, f, indent=4, sort_keys=True)
except Exception as e:
logging.error('Error exporting profile: %s', e)
@classmethod
def _prepare_known_apps(cls, use_known_apps: bool):
cls.auto_detected_msg_ls = list()
if not use_known_apps:
return
if cls.steam_apps is None:
cls.steam_apps = SteamApps()
for app_id, manifest in cls.steam_apps.known_apps.items():
cls.known_app_executables[manifest.get('executable')] = manifest
@classmethod
def _update_process_locations_known_apps(cls, entry):
""" Update path locations """
if not isinstance(entry, Process):
return
# Do not alter entries that already have a valid path
exe_path = Path(Path(entry.path) / entry.executable)
if exe_path.is_file() and exe_path.exists():
return
if entry.executable in cls.known_app_executables:
manifest = cls.known_app_executables.get(entry.executable)
path = Path(manifest.get('path') or '')
if path.exists():
win_path = str(WindowsPath(path))
entry.path = win_path
cls.auto_detected_msg_ls.append(manifest.get('name'))
logging.info('Updated Process Entry #%s with auto-detected location: %s', entry.id or -1, win_path)
@classmethod
def _get_single_foreign_attributes(cls, entry, data, use_known_apps):
for k, v in data.items():
if k in entry.json_foreign_attributes:
cls._get_single_relationship(k, entry, v, use_known_apps)
@classmethod
def _get_single_relationship(cls, table_name, parent_entry, data, use_known_apps):
for Model in cls._import_models:
if Model.__tablename__ == table_name:
entry = Model()
entry.from_dict(data)
if use_known_apps:
cls._update_process_locations_known_apps(entry)
Session.add(entry)
setattr(parent_entry, table_name, entry)
@classmethod
def _get_child_relations(cls, parent_entry, data, use_known_apps: bool):
# -- Update Process entries based on KnownApps
if use_known_apps:
cls._update_process_locations_known_apps(parent_entry)
# -- Get Children eg. profile.processes as dictionary
child_relationships = parent_entry.get_children_lists(data)
# -- Collect One to One Relationships
# eg. task.process
cls._get_single_foreign_attributes(parent_entry, data, use_known_apps)
# -- Collect One to Many Relationships
# eg. task.conditions
for Model in cls._import_models:
if Model.json_list_name not in child_relationships:
continue
children = list()
for child_data_entry in child_relationships[Model.json_list_name]:
child = Model()
child.from_dict(child_data_entry)
cls._get_child_relations(child, child_data_entry, use_known_apps)
Session.add(child)
Session.flush() # generate id's
children.append(child)
# Add entries to parent
setattr(parent_entry, Model.json_list_name, children)
@classmethod
def import_profile(cls, file: Path, use_known_apps: bool = False) -> bool:
try:
with open(file.as_posix(), 'r') as f:
data = json.load(f)
except Exception as e:
logging.error('Error opening file for profile import: %s', e)
return False
# -- Read out known Apps and Steam library
cls._prepare_known_apps(use_known_apps)
try:
profile_names = {p.name for p in Session.query(Profile).all()}
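            # make the imported profile name unique by appending or bumping a two-digit numeric suffix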
while data['name'] in profile_names:
if data['name'][-2:].isdigit():
cls._count += 1
data['name'] = f"{data['name'][:-2]}{cls._count:02d}"
else:
data['name'] = f"{data.get('name')}_{cls._count:02d}"
profile = Profile()
profile.from_dict(data)
Session.add(profile)
Session.flush() # Generate Id's
cls._get_child_relations(profile, data, use_known_apps)
Session.commit()
except Exception as e:
logging.error('Error importing profile: %s', e)
Session.rollback()
return False
return True
| [
"shared_modules.migrate.Session.add",
"shared_modules.migrate.Session.commit",
"shared_modules.migrate.Session.rollback",
"shared_modules.migrate.Session.flush",
"pathlib.Path",
"json.dump",
"logging.info",
"pathlib.WindowsPath",
"shared_modules.migrate.Session.query",
"modules.steam_utils.SteamApps",
"json.load",
"logging.error",
"shared_modules.Profile"
] | [((977, 988), 'modules.steam_utils.SteamApps', 'SteamApps', ([], {}), '()\n', (986, 988), False, 'from modules.steam_utils import SteamApps\n'), ((4798, 4807), 'shared_modules.Profile', 'Profile', ([], {}), '()\n', (4805, 4807), False, 'from shared_modules import Condition, Gate, Process, Profile, Task\n'), ((4856, 4876), 'shared_modules.migrate.Session.add', 'Session.add', (['profile'], {}), '(profile)\n', (4867, 4876), False, 'from shared_modules.migrate import Session\n'), ((4889, 4904), 'shared_modules.migrate.Session.flush', 'Session.flush', ([], {}), '()\n', (4902, 4904), False, 'from shared_modules.migrate import Session\n'), ((5004, 5020), 'shared_modules.migrate.Session.commit', 'Session.commit', ([], {}), '()\n', (5018, 5020), False, 'from shared_modules.migrate import Session\n'), ((609, 653), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(4)', 'sort_keys': '(True)'}), '(data, f, indent=4, sort_keys=True)\n', (618, 653), False, 'import json\n'), ((697, 744), 'logging.error', 'logging.error', (['"""Error exporting profile: %s"""', 'e'], {}), "('Error exporting profile: %s', e)\n", (710, 744), False, 'import logging\n'), ((1405, 1421), 'pathlib.Path', 'Path', (['entry.path'], {}), '(entry.path)\n', (1409, 1421), False, 'from pathlib import Path, WindowsPath\n'), ((1901, 2005), 'logging.info', 'logging.info', (['"""Updated Process Entry #%s with auto-detected location: %s"""', '(entry.id or -1)', 'win_path'], {}), "('Updated Process Entry #%s with auto-detected location: %s', \n entry.id or -1, win_path)\n", (1913, 2005), False, 'import logging\n'), ((2637, 2655), 'shared_modules.migrate.Session.add', 'Session.add', (['entry'], {}), '(entry)\n', (2648, 2655), False, 'from shared_modules.migrate import Session\n'), ((3747, 3765), 'shared_modules.migrate.Session.add', 'Session.add', (['child'], {}), '(child)\n', (3758, 3765), False, 'from shared_modules.migrate import Session\n'), ((3782, 3797), 'shared_modules.migrate.Session.flush', 'Session.flush', ([], {}), '()\n', (3795, 3797), False, 'from shared_modules.migrate import Session\n'), ((4140, 4152), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4149, 4152), False, 'import json\n'), ((4196, 4257), 'logging.error', 'logging.error', (['"""Error opening file for profile import: %s"""', 'e'], {}), "('Error opening file for profile import: %s', e)\n", (4209, 4257), False, 'import logging\n'), ((5064, 5111), 'logging.error', 'logging.error', (['"""Error importing profile: %s"""', 'e'], {}), "('Error importing profile: %s', e)\n", (5077, 5111), False, 'import logging\n'), ((5124, 5142), 'shared_modules.migrate.Session.rollback', 'Session.rollback', ([], {}), '()\n', (5140, 5142), False, 'from shared_modules.migrate import Session\n'), ((1758, 1775), 'pathlib.WindowsPath', 'WindowsPath', (['path'], {}), '(path)\n', (1769, 1775), False, 'from pathlib import Path, WindowsPath\n'), ((4442, 4464), 'shared_modules.migrate.Session.query', 'Session.query', (['Profile'], {}), '(Profile)\n', (4455, 4464), False, 'from shared_modules.migrate import Session\n')] |
from model.contact import Contact
from random import randrange
import random
def test_modify_contact_firstname(app,db,check_ui):
#contact = json_contacts
if len(db.get_contact_list()) == 0:
app.contact.create(Contact(firstname ="firstname"))
old_contacts = db.get_contact_list()
contact = random.choice(old_contacts)
id = contact.id
old_contacts.remove(contact)
new_contact = Contact(firstname="<NAME>")
old_contacts.append(new_contact)
app.contact.modify_contact_by_id(id,new_contact)
new_contact.id = contact.id
new_contact.lastname = contact.lastname
new_contacts = db.get_contact_list()
assert len(old_contacts) == len(new_contacts)
assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)
if check_ui:
assert sorted(new_contacts, key = Contact.id_or_max) == sorted(app.group.get_contact_list(),key = Contact.id_or_max)
#def test_modify_contact_middlename(app):
#if app.contact.count() == 0:
#app.contact.create(Contact(middlename="middlename"))
    #app.contact.modify_first_contact(Contact(middlename="New middlename"))
| [
"random.choice",
"model.contact.Contact"
] | [((314, 341), 'random.choice', 'random.choice', (['old_contacts'], {}), '(old_contacts)\n', (327, 341), False, 'import random\n'), ((413, 440), 'model.contact.Contact', 'Contact', ([], {'firstname': '"""<NAME>"""'}), "(firstname='<NAME>')\n", (420, 440), False, 'from model.contact import Contact\n'), ((226, 256), 'model.contact.Contact', 'Contact', ([], {'firstname': '"""firstname"""'}), "(firstname='firstname')\n", (233, 256), False, 'from model.contact import Contact\n')] |
import wandb
import torch
import torch.optim as optim
import torch.nn.functional as F
import torch.nn as nn
from torchvision import datasets, transforms
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def train(config=None):
# Initialize a new wandb run
with wandb.init(config=config):
# If called by wandb.agent, as below,
# this config will be set by Sweep Controller
config = wandb.config
loader = build_dataset(config.batch_size)
network = build_network(config.fc_layer_size, config.dropout)
optimizer = build_optimizer(network, config.optimizer, config.learning_rate)
for epoch in range(config.epochs):
avg_loss = train_epoch(network, loader, optimizer)
wandb.log({"loss": avg_loss, "epoch": epoch})
def build_dataset(batch_size):
transform = transforms.Compose(
[transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))])
# download MNIST training dataset
dataset = datasets.MNIST(".", train=True, download=True,
transform=transform)
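    # subsample the dataset: keep every 5th image so sweep runs stay fast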
sub_dataset = torch.utils.data.Subset(
dataset, indices=range(0, len(dataset), 5))
loader = torch.utils.data.DataLoader(sub_dataset, batch_size=batch_size)
return loader
def build_network(fc_layer_size, dropout):
network = nn.Sequential( # fully-connected, single hidden layer
nn.Flatten(),
nn.Linear(784, fc_layer_size), nn.ReLU(),
nn.Dropout(dropout),
nn.Linear(fc_layer_size, 10),
nn.LogSoftmax(dim=1))
return network.to(device)
def build_optimizer(network, optimizer, learning_rate):
if optimizer == "sgd":
optimizer = optim.SGD(network.parameters(),
lr=learning_rate, momentum=0.9)
elif optimizer == "adam":
optimizer = optim.Adam(network.parameters(),
lr=learning_rate)
return optimizer
def train_epoch(network, loader, optimizer):
cumu_loss = 0
for _, (data, target) in enumerate(loader):
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
# ➡ Forward pass
loss = F.nll_loss(network(data), target)
cumu_loss += loss.item()
# ⬅ Backward pass + weight update
loss.backward()
optimizer.step()
wandb.log({"batch loss": loss.item()})
return cumu_loss / len(loader)
if __name__ == '__main__':
    train()
| [
"torch.nn.ReLU",
"torch.nn.Dropout",
"wandb.log",
"torch.nn.Flatten",
"wandb.init",
"torch.cuda.is_available",
"torchvision.datasets.MNIST",
"torch.utils.data.DataLoader",
"torch.nn.Linear",
"torch.nn.LogSoftmax",
"torchvision.transforms.ToTensor",
"torchvision.transforms.Normalize"
] | [((1030, 1097), 'torchvision.datasets.MNIST', 'datasets.MNIST', (['"""."""'], {'train': '(True)', 'download': '(True)', 'transform': 'transform'}), "('.', train=True, download=True, transform=transform)\n", (1044, 1097), False, 'from torchvision import datasets, transforms\n'), ((1235, 1298), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['sub_dataset'], {'batch_size': 'batch_size'}), '(sub_dataset, batch_size=batch_size)\n', (1262, 1298), False, 'import torch\n'), ((186, 211), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (209, 211), False, 'import torch\n'), ((292, 317), 'wandb.init', 'wandb.init', ([], {'config': 'config'}), '(config=config)\n', (302, 317), False, 'import wandb\n'), ((1440, 1452), 'torch.nn.Flatten', 'nn.Flatten', ([], {}), '()\n', (1450, 1452), True, 'import torch.nn as nn\n'), ((1462, 1491), 'torch.nn.Linear', 'nn.Linear', (['(784)', 'fc_layer_size'], {}), '(784, fc_layer_size)\n', (1471, 1491), True, 'import torch.nn as nn\n'), ((1493, 1502), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1500, 1502), True, 'import torch.nn as nn\n'), ((1512, 1531), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (1522, 1531), True, 'import torch.nn as nn\n'), ((1541, 1569), 'torch.nn.Linear', 'nn.Linear', (['fc_layer_size', '(10)'], {}), '(fc_layer_size, 10)\n', (1550, 1569), True, 'import torch.nn as nn\n'), ((1579, 1599), 'torch.nn.LogSoftmax', 'nn.LogSoftmax', ([], {'dim': '(1)'}), '(dim=1)\n', (1592, 1599), True, 'import torch.nn as nn\n'), ((774, 819), 'wandb.log', 'wandb.log', (["{'loss': avg_loss, 'epoch': epoch}"], {}), "({'loss': avg_loss, 'epoch': epoch})\n", (783, 819), False, 'import wandb\n'), ((901, 922), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (920, 922), False, 'from torchvision import datasets, transforms\n'), ((933, 975), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.1307,)', '(0.3081,)'], {}), '((0.1307,), (0.3081,))\n', (953, 975), False, 'from torchvision import datasets, transforms\n')] |
from django.conf.urls import include, url
from django.contrib import admin
from HMBBF.views import home_information as information
from HMBBF.views import home_seacher_keyword
from HMBBF.views import home_seacher,assembly_days,theme_day,live,load_guests
urlpatterns = [
    # home page data
url(r"index/$",information),
    # home page trending search keywords
url(r"get_keywords/$",home_seacher_keyword),
    # search
url(r"search_result/$",home_seacher),
#
url(r"assembly/$",assembly_days,name="assembly_days"),
url(r"theme/$",theme_day,name="theme_day"),
url(r"guests/$",load_guests,name="guests_data"),
url(r"live/$",live,name="theme_live"),
]
| [
"django.conf.urls.url"
] | [((285, 312), 'django.conf.urls.url', 'url', (['"""index/$"""', 'information'], {}), "('index/$', information)\n", (288, 312), False, 'from django.conf.urls import include, url\n'), ((330, 373), 'django.conf.urls.url', 'url', (['"""get_keywords/$"""', 'home_seacher_keyword'], {}), "('get_keywords/$', home_seacher_keyword)\n", (333, 373), False, 'from django.conf.urls import include, url\n'), ((387, 423), 'django.conf.urls.url', 'url', (['"""search_result/$"""', 'home_seacher'], {}), "('search_result/$', home_seacher)\n", (390, 423), False, 'from django.conf.urls import include, url\n'), ((435, 489), 'django.conf.urls.url', 'url', (['"""assembly/$"""', 'assembly_days'], {'name': '"""assembly_days"""'}), "('assembly/$', assembly_days, name='assembly_days')\n", (438, 489), False, 'from django.conf.urls import include, url\n'), ((494, 537), 'django.conf.urls.url', 'url', (['"""theme/$"""', 'theme_day'], {'name': '"""theme_day"""'}), "('theme/$', theme_day, name='theme_day')\n", (497, 537), False, 'from django.conf.urls import include, url\n'), ((542, 590), 'django.conf.urls.url', 'url', (['"""guests/$"""', 'load_guests'], {'name': '"""guests_data"""'}), "('guests/$', load_guests, name='guests_data')\n", (545, 590), False, 'from django.conf.urls import include, url\n'), ((596, 634), 'django.conf.urls.url', 'url', (['"""live/$"""', 'live'], {'name': '"""theme_live"""'}), "('live/$', live, name='theme_live')\n", (599, 634), False, 'from django.conf.urls import include, url\n')] |
from monero.wallet import Wallet
from targets.fields import Fields
from targets.lineitem import LineItem
import csv
import time
# print(jsonRPC.raw_request('get_attribute', 'wallet2.description')) #What's the key??? ATTRIBUTE_DESCRIPTION????
class target(object):
min_height = 0
mywallet = None
currency = None
def __init__(self, mywallet, min_height=0, currency='XMR'):
self.mywallet = mywallet
self.min_height = min_height
self.currency = currency
self.etl()
def extract(self):
return "extract functionality unavailable at this time"
def load(self, stagedData):
return "load functionality unavailable at this time"
def transform(self, stagedData):
return "transform functionality unavailable at this time"
def etl(self):
stagedData = self.extract()
stagedData = self.transform(stagedData)
self.load(stagedData)
class csvfile(target):
def extract(self):
lineitems = []
incoming = self.mywallet.incoming(min_height=self.min_height)
outgoing = self.mywallet.outgoing(min_height=self.min_height)
for payment in incoming:
lineitem = LineItem(payment)
lineitems.append(lineitem)
multiDestPayment = None
for payment in outgoing:
while True:
lineitem = LineItem(payment)
lineitems.append(lineitem)
if not payment.destinations: # don't add any more line items if there are no more destinations
break
return lineitems
def transform(self, lineitems):
# sort list of incoming and outgoing payments(lineitem objects) by timestamp
lineitems = sorted(lineitems, key=lambda lineitem: lineitem.timestamp)
# set initial balance value
balance = 0
# calculate balance for each line item after debit or credit
i = 0
lines = len(lineitems)
for lineitem in lineitems:
i += 1
finalBalance = True if lines == i else False
balance = lineitem.calcBalance(balance, self.currency, finalBalance)
# most recent tx should be on top with most recent balance
lineitems = sorted(lineitems, key=lambda lineitem: lineitem.timestamp, reverse=True)
return lineitems
def load(self, lineitems):
with open('Monero-ETL-'+time.strftime("%d-%m-%Y") + '_'+self.currency+'.csv', 'w') as csvfile:
writer = csv.writer(csvfile)
fieldnames = []
for field in Fields.values:
fieldnames.append(field)
# write the field headers first
writer.writerow(fieldnames)
# load line item variables into row
for lineitem in lineitems:
row = []
# using if statements to allow for fields to be rearranged
# adding fields requires an additional if statement
for field in fieldnames:
if field == "Timestamp":
row.append(lineitem.timestamp)
if field == "Transaction Id":
row.append(lineitem.transaction_id)
if field == "Payment ID":
row.append(lineitem.payment_id)
if field == "Note":
row.append(lineitem.note)
if field == "Receive/Send Address":
row.append(lineitem.address)
if field == "Debit":
row.append(lineitem.debit)
if field == "Credit":
row.append(lineitem.credit)
if field == "Network Fee":
row.append(lineitem.transaction_fee)
if field == "Balance":
row.append(str(lineitem.balance))
if field == "Currency":
row.append(self.currency)
# write the line item into csv
writer.writerow(row)
class SQL(target):
def __init__(self, mywallet, min_height=0):
target.__init__(self, mywallet, min_height=0)
class beanCounter(target):
def __init__(self, mywallet, min_height=0):
target.__init__(self, mywallet, min_height=0)
| [
"csv.writer",
"targets.lineitem.LineItem",
"time.strftime"
] | [((1197, 1214), 'targets.lineitem.LineItem', 'LineItem', (['payment'], {}), '(payment)\n', (1205, 1214), False, 'from targets.lineitem import LineItem\n'), ((2502, 2521), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (2512, 2521), False, 'import csv\n'), ((1370, 1387), 'targets.lineitem.LineItem', 'LineItem', (['payment'], {}), '(payment)\n', (1378, 1387), False, 'from targets.lineitem import LineItem\n'), ((2410, 2435), 'time.strftime', 'time.strftime', (['"""%d-%m-%Y"""'], {}), "('%d-%m-%Y')\n", (2423, 2435), False, 'import time\n')] |
import time
alarm_time = input('What time do you want to have an alarm? format HH24:MI ')
while True:
current_time = time.localtime()
    # zero-pad each field so the clock string matches the zero-padded HH24:MI input format
    hour_string = str(current_time.tm_hour).zfill(2)
    minute_string = str(current_time.tm_min).zfill(2)
    second_string = str(current_time.tm_sec).zfill(2)
time_string = hour_string+':'+minute_string+':'+second_string
time_string_minute = hour_string+':'+minute_string
print(time_string)
if alarm_time == time_string_minute:
print('ALARM!!!!!!!!!!!!!')
time.sleep(1)
| [
"time.localtime",
"time.sleep"
] | [((124, 140), 'time.localtime', 'time.localtime', ([], {}), '()\n', (138, 140), False, 'import time\n'), ((503, 516), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (513, 516), False, 'import time\n')] |
#!/bin/python3
# -*- coding: utf-8 -*-
from Utils import Utils
from Dataset import Dataset
class NNDatasetContainer:
def deployScaler(self):
self.dataset.normalization_method=Dataset.Normalization.NORMALIZE_WITH_EXTERNAL_MAXES
self.dataset.normalization_params=self.scaler+tuple()
def importScaler(self):
self.scaler=self.dataset.normalization_params+tuple()
def getNormalizationMethod(self):
norm_method=Dataset.Normalization.DONT_NORMALIZE
norm_param=None
if self.normalize:
if len(self.scaler)>0:
norm_param=self.scaler
norm_method=Dataset.Normalization.NORMALIZE_WITH_EXTERNAL_MAXES
else:
norm_method=Dataset.Normalization.NORMALIZE_WITH_GAP
return norm_method,norm_param
def generateNNArrays(self):
if not self.dataset.converted:
self.dataset.convertToTemporalValues(self.back_samples,self.forward_samples)
norm_method,norm_param=self.getNormalizationMethod()
start_index,dataset_x,dataset_y=self.dataset.getNeuralNetworkArrays(include_test_data=True,normalization=norm_method,external_maxes=norm_param)
if len(self.scaler)==0:
self.importScaler()
if self.train_percent==0:
self.train_x=None
self.train_y=None
self.val_x=None
self.val_y=None
self.test_x=dataset_x
self.test_y=dataset_y
self.train_start_idx=None
self.val_start_idx=None
self.test_start_idx=start_index
if self.verbose:
print()
print('test_x',self.test_x.shape)
print('test_y',self.test_y.shape)
print()
else:
test_index,train_x,test_x=Dataset.splitNeuralNetworkArray(dataset_x,self.train_percent)
_,train_y,test_y=Dataset.splitNeuralNetworkArray(dataset_y,part2_index=test_index)
val_index,train_x,val_x=Dataset.splitNeuralNetworkArray(train_x,1-self.val_percent)
_,train_y,val_y=Dataset.splitNeuralNetworkArray(train_y,part2_index=val_index)
self.train_x=train_x
self.train_y=train_y
self.val_x=val_x
self.val_y=val_y
self.test_x=test_x
self.test_y=test_y
self.train_start_idx=start_index
self.val_start_idx=start_index+val_index
self.test_start_idx=start_index+test_index
if self.verbose:
print()
print('train_x',self.train_x.shape)
print('train_y',self.train_y.shape)
print()
print('val_x',self.val_x.shape)
print('val_y',self.val_y.shape)
print()
print('test_x',self.test_x.shape)
print('test_y',self.test_y.shape)
print()
def getValuesSplittedByFeature(self):
norm_method,norm_param=self.getNormalizationMethod()
normalize=self.dataset.setNormalizationMethod(normalization=norm_method,external_maxes=norm_param)
return self.dataset.getValuesSplittedByFeature(normalize=normalize)
def __init__(self,dataset,scaler,train_percent,val_percent,back_samples,forward_samples,normalize,verbose):
self.dataset=dataset
self.scaler=scaler
self.train_percent=train_percent
self.val_percent=val_percent
self.back_samples=back_samples
self.forward_samples=forward_samples
self.normalize=normalize
self.train_x=None
self.train_y=None
self.val_x=None
self.val_y=None
self.test_x=None
self.test_y=None
self.train_start_idx=None
self.val_start_idx=None
self.test_start_idx=None
self.verbose=verbose | [
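
# Minimal usage sketch (illustrative only -- the Dataset instance and the
# split/window sizes below are assumptions, not values from the original project):
#
#   container = NNDatasetContainer(dataset=my_dataset, scaler=tuple(),
#                                  train_percent=0.7, val_percent=0.2,
#                                  back_samples=30, forward_samples=7,
#                                  normalize=True, verbose=True)
#   container.generateNNArrays()  # fills the train/val/test arrays and start indices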
"Dataset.Dataset.splitNeuralNetworkArray"
] | [((1572, 1634), 'Dataset.Dataset.splitNeuralNetworkArray', 'Dataset.splitNeuralNetworkArray', (['dataset_x', 'self.train_percent'], {}), '(dataset_x, self.train_percent)\n', (1603, 1634), False, 'from Dataset import Dataset\n'), ((1655, 1721), 'Dataset.Dataset.splitNeuralNetworkArray', 'Dataset.splitNeuralNetworkArray', (['dataset_y'], {'part2_index': 'test_index'}), '(dataset_y, part2_index=test_index)\n', (1686, 1721), False, 'from Dataset import Dataset\n'), ((1749, 1811), 'Dataset.Dataset.splitNeuralNetworkArray', 'Dataset.splitNeuralNetworkArray', (['train_x', '(1 - self.val_percent)'], {}), '(train_x, 1 - self.val_percent)\n', (1780, 1811), False, 'from Dataset import Dataset\n'), ((1829, 1892), 'Dataset.Dataset.splitNeuralNetworkArray', 'Dataset.splitNeuralNetworkArray', (['train_y'], {'part2_index': 'val_index'}), '(train_y, part2_index=val_index)\n', (1860, 1892), False, 'from Dataset import Dataset\n')] |
"""
Unit tests for third_party_auth SAML auth providers
"""
from unittest import mock
from common.djangoapps.third_party_auth.saml import EdXSAMLIdentityProvider, get_saml_idp_class
from common.djangoapps.third_party_auth.tests.data.saml_identity_provider_mock_data import (
expected_user_details,
mock_attributes,
mock_conf
)
from common.djangoapps.third_party_auth.tests.testutil import SAMLTestCase
class TestEdXSAMLIdentityProvider(SAMLTestCase):
"""
Test EdXSAMLIdentityProvider.
"""
@mock.patch('common.djangoapps.third_party_auth.saml.log')
def test_get_saml_idp_class_with_fake_identifier(self, log_mock):
error_mock = log_mock.error
idp_class = get_saml_idp_class('fake_idp_class_option')
error_mock.assert_called_once_with(
'[THIRD_PARTY_AUTH] Invalid EdXSAMLIdentityProvider subclass--'
'using EdXSAMLIdentityProvider base class. Provider: {provider}'.format(provider='fake_idp_class_option')
)
assert idp_class is EdXSAMLIdentityProvider
def test_get_user_details(self):
""" test get_attr and get_user_details of EdXSAMLIdentityProvider"""
edx_saml_identity_provider = EdXSAMLIdentityProvider('demo', **mock_conf)
assert edx_saml_identity_provider.get_user_details(mock_attributes) == expected_user_details
| [
"common.djangoapps.third_party_auth.saml.EdXSAMLIdentityProvider",
"unittest.mock.patch",
"common.djangoapps.third_party_auth.saml.get_saml_idp_class"
] | [((527, 584), 'unittest.mock.patch', 'mock.patch', (['"""common.djangoapps.third_party_auth.saml.log"""'], {}), "('common.djangoapps.third_party_auth.saml.log')\n", (537, 584), False, 'from unittest import mock\n'), ((711, 754), 'common.djangoapps.third_party_auth.saml.get_saml_idp_class', 'get_saml_idp_class', (['"""fake_idp_class_option"""'], {}), "('fake_idp_class_option')\n", (729, 754), False, 'from common.djangoapps.third_party_auth.saml import EdXSAMLIdentityProvider, get_saml_idp_class\n'), ((1207, 1251), 'common.djangoapps.third_party_auth.saml.EdXSAMLIdentityProvider', 'EdXSAMLIdentityProvider', (['"""demo"""'], {}), "('demo', **mock_conf)\n", (1230, 1251), False, 'from common.djangoapps.third_party_auth.saml import EdXSAMLIdentityProvider, get_saml_idp_class\n')] |
from datetime import datetime
import cv2
import face_recognition
import numpy as np
import openpyxl
def addInExcel(d):
fp = "Attendance.xlsx"
wb = openpyxl.load_workbook(fp)
    sheet = wb.active  # get_active_sheet() is deprecated in recent openpyxl
max_row = sheet.max_row
max_column = sheet.max_column
now = datetime.now()
# print("Data Acquired :", d)
sheet.cell(row=1, column=max_column + 1).value = now.strftime("%m/%d/%Y, %H:%M:%S")
for index in range(1, max_row):
sheet.cell(row=index + 1, column=max_column + 1).value = d[sheet.cell(row=index + 1, column=2).value]
wb.save(fp)
video_capture = cv2.VideoCapture(0)
obama_image = face_recognition.load_image_file("images/obama.jpg")
obama_face_encoding = face_recognition.face_encodings(obama_image)[0]
gates_image = face_recognition.load_image_file("images/gates.jpg")
gates_face_encoding = face_recognition.face_encodings(gates_image)[0]
known_face_encodings = [
obama_face_encoding,
gates_face_encoding
]
known_face_names = [
"<NAME>",
"<NAME>"
]
attendance_list = {"<NAME>": "Absent", "<NAME>": "Absent", " ": " "}
face_locations = []
face_encodings = []
face_names = []
process_this_frame = True
while True:
ret, frame = video_capture.read()
small_frame = cv2.resize(frame, (0, 0), fx=0.25, fy=0.25)
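    # the frame was shrunk to 1/4 size for speed; face_recognition expects RGB,
    # while OpenCV delivers BGR, so reverse the channel order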
rgb_small_frame = small_frame[:, :, ::-1]
if process_this_frame:
face_locations = face_recognition.face_locations(rgb_small_frame)
face_encodings = face_recognition.face_encodings(rgb_small_frame, face_locations)
face_names = []
for face_encoding in face_encodings:
matches = face_recognition.compare_faces(known_face_encodings, face_encoding)
name = "Unknown"
# # If a match was found in known_face_encodings, just use the first one.
# if True in matches:
# first_match_index = matches.index(True)
# name = known_face_names[first_match_index]
# Or instead, use the known face with the smallest distance to the new face
face_distances = face_recognition.face_distance(known_face_encodings, face_encoding)
best_match_index = np.argmin(face_distances)
if matches[best_match_index]:
name = known_face_names[best_match_index]
if name != "Unknown":
face_names.append(name)
attendance_list[name] = "Present"
process_this_frame = not process_this_frame
for (top, right, bottom, left), name in zip(face_locations, face_names):
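        # scale the box coordinates back up, since detection ran on a 1/4-size frame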
top *= 4
right *= 4
bottom *= 4
left *= 4
cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
cv2.rectangle(frame, (left, bottom - 35), (right, bottom), (0, 0, 255), cv2.FILLED)
font = cv2.FONT_HERSHEY_DUPLEX
cv2.putText(frame, name, (left + 6, bottom - 6), font, 1.0, (255, 255, 255), 1)
cv2.imshow('Video', frame)
# Hit 'esc' on the keyboard to quit!
k = cv2.waitKey(30) & 0xff
if k == 27:
present = 0
absent = 0
print("------------------------------------------")
print("Attendance as per", datetime.now())
print("Name Absent/Present")
for i in attendance_list:
print(i, ":", attendance_list[i])
if attendance_list[i] == "Present":
present += 1
elif attendance_list[i] == "Absent":
absent += 1
print("Total Present :", present)
print("Total Absent :", absent)
print("------------------------------------------")
attendance_list['Total Present'] = present
attendance_list['Total Absent'] = absent
# print(attendance_list)
addInExcel(attendance_list)
break
cv2.destroyAllWindows()
video_capture.release()
"""
------------------------------------------
Attendance as per 2020-05-06 09:48:11.096916
Name Absent/Present
<NAME> : Present
<NAME> : Absent
:
Total Present : 1
Total Absent : 1
------------------------------------------
"""
| [
"cv2.rectangle",
"face_recognition.face_locations",
"openpyxl.load_workbook",
"cv2.imshow",
"cv2.putText",
"datetime.datetime.now",
"face_recognition.face_distance",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"face_recognition.load_image_file",
"face_recognition.face_encodings",
"face_recognition.compare_faces",
"numpy.argmin",
"cv2.resize",
"cv2.waitKey"
] | [((651, 670), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (667, 670), False, 'import cv2\n'), ((688, 740), 'face_recognition.load_image_file', 'face_recognition.load_image_file', (['"""images/obama.jpg"""'], {}), "('images/obama.jpg')\n", (720, 740), False, 'import face_recognition\n'), ((827, 879), 'face_recognition.load_image_file', 'face_recognition.load_image_file', (['"""images/gates.jpg"""'], {}), "('images/gates.jpg')\n", (859, 879), False, 'import face_recognition\n'), ((3917, 3940), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3938, 3940), False, 'import cv2\n'), ((185, 211), 'openpyxl.load_workbook', 'openpyxl.load_workbook', (['fp'], {}), '(fp)\n', (207, 211), False, 'import openpyxl\n'), ((326, 340), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (338, 340), False, 'from datetime import datetime\n'), ((764, 808), 'face_recognition.face_encodings', 'face_recognition.face_encodings', (['obama_image'], {}), '(obama_image)\n', (795, 808), False, 'import face_recognition\n'), ((903, 947), 'face_recognition.face_encodings', 'face_recognition.face_encodings', (['gates_image'], {}), '(gates_image)\n', (934, 947), False, 'import face_recognition\n'), ((1316, 1359), 'cv2.resize', 'cv2.resize', (['frame', '(0, 0)'], {'fx': '(0.25)', 'fy': '(0.25)'}), '(frame, (0, 0), fx=0.25, fy=0.25)\n', (1326, 1359), False, 'import cv2\n'), ((3024, 3050), 'cv2.imshow', 'cv2.imshow', (['"""Video"""', 'frame'], {}), "('Video', frame)\n", (3034, 3050), False, 'import cv2\n'), ((1463, 1511), 'face_recognition.face_locations', 'face_recognition.face_locations', (['rgb_small_frame'], {}), '(rgb_small_frame)\n', (1494, 1511), False, 'import face_recognition\n'), ((1538, 1602), 'face_recognition.face_encodings', 'face_recognition.face_encodings', (['rgb_small_frame', 'face_locations'], {}), '(rgb_small_frame, face_locations)\n', (1569, 1602), False, 'import face_recognition\n'), ((2728, 2794), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(left, top)', '(right, bottom)', '(0, 0, 255)', '(2)'], {}), '(frame, (left, top), (right, bottom), (0, 0, 255), 2)\n', (2741, 2794), False, 'import cv2\n'), ((2804, 2892), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(left, bottom - 35)', '(right, bottom)', '(0, 0, 255)', 'cv2.FILLED'], {}), '(frame, (left, bottom - 35), (right, bottom), (0, 0, 255), cv2\n .FILLED)\n', (2817, 2892), False, 'import cv2\n'), ((2937, 3016), 'cv2.putText', 'cv2.putText', (['frame', 'name', '(left + 6, bottom - 6)', 'font', '(1.0)', '(255, 255, 255)', '(1)'], {}), '(frame, name, (left + 6, bottom - 6), font, 1.0, (255, 255, 255), 1)\n', (2948, 3016), False, 'import cv2\n'), ((3104, 3119), 'cv2.waitKey', 'cv2.waitKey', (['(30)'], {}), '(30)\n', (3115, 3119), False, 'import cv2\n'), ((1697, 1764), 'face_recognition.compare_faces', 'face_recognition.compare_faces', (['known_face_encodings', 'face_encoding'], {}), '(known_face_encodings, face_encoding)\n', (1727, 1764), False, 'import face_recognition\n'), ((2157, 2224), 'face_recognition.face_distance', 'face_recognition.face_distance', (['known_face_encodings', 'face_encoding'], {}), '(known_face_encodings, face_encoding)\n', (2187, 2224), False, 'import face_recognition\n'), ((2257, 2282), 'numpy.argmin', 'np.argmin', (['face_distances'], {}), '(face_distances)\n', (2266, 2282), True, 'import numpy as np\n'), ((3282, 3296), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3294, 3296), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
import os
from conda_kapsel.internal.test.tmpfile_utils import with_directory_contents
from conda_kapsel.conda_meta_file import (CondaMetaFile, META_DIRECTORY, DEFAULT_RELATIVE_META_PATH,
possible_meta_file_names)
def _use_existing_meta_file(relative_name):
def check_file(dirname):
filename = os.path.join(dirname, relative_name)
assert os.path.exists(filename)
meta_file = CondaMetaFile.load_for_directory(dirname)
assert 'foo' == meta_file.name
sample_content = "package:\n name: foo\n"
with_directory_contents({relative_name: sample_content}, check_file)
def test_use_existing_meta_file_default_name():
_use_existing_meta_file(DEFAULT_RELATIVE_META_PATH)
def test_use_existing_meta_file_all_names():
for name in possible_meta_file_names:
_use_existing_meta_file(os.path.join(META_DIRECTORY, name))
| [
"os.path.exists",
"conda_kapsel.conda_meta_file.CondaMetaFile.load_for_directory",
"conda_kapsel.internal.test.tmpfile_utils.with_directory_contents",
"os.path.join"
] | [((911, 979), 'conda_kapsel.internal.test.tmpfile_utils.with_directory_contents', 'with_directory_contents', (['{relative_name: sample_content}', 'check_file'], {}), '({relative_name: sample_content}, check_file)\n', (934, 979), False, 'from conda_kapsel.internal.test.tmpfile_utils import with_directory_contents\n'), ((681, 717), 'os.path.join', 'os.path.join', (['dirname', 'relative_name'], {}), '(dirname, relative_name)\n', (693, 717), False, 'import os\n'), ((733, 757), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (747, 757), False, 'import os\n'), ((778, 819), 'conda_kapsel.conda_meta_file.CondaMetaFile.load_for_directory', 'CondaMetaFile.load_for_directory', (['dirname'], {}), '(dirname)\n', (810, 819), False, 'from conda_kapsel.conda_meta_file import CondaMetaFile, META_DIRECTORY, DEFAULT_RELATIVE_META_PATH, possible_meta_file_names\n'), ((1207, 1241), 'os.path.join', 'os.path.join', (['META_DIRECTORY', 'name'], {}), '(META_DIRECTORY, name)\n', (1219, 1241), False, 'import os\n')] |
# ==============================================================================
# Copyright (C) [2022] by Cambricon, Inc. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ==============================================================================
"""Sample utilities
This module defines SSD postproc, Yolov3MM postproc APIs.
"""
import cnis
def clip(x):
"""Limit the number in range [0, 1].
if x < 0, x = 0
x > 1, x = 1
otherwise x = x
"""
return max(0, min(1, x))
def ssd_postproc(model_outputs, model_info, threshold):
"""SSD postproc"""
data = model_outputs.buffers[0].data(model_info.output_shape(0), model_info.output_layout(0))
data = data.reshape(model_info.output_shape(0)[3])
box_num = int(data[0])
objs = []
for i in range(box_num):
obj = cnis.DetectObject()
if data[64 + i * 7 + 1] == 0:
continue
obj.label = int(data[64 + i * 7 + 1] - 1)
obj.score = data[64 + i * 7 + 2]
if threshold > 0 and obj.score < threshold:
continue
# clip to 0-1
obj.bbox.x = clip(data[64 + i * 7 + 3])
obj.bbox.y = clip(data[64 + i * 7 + 4])
obj.bbox.w = clip(data[64 + i * 7 + 5]) - obj.bbox.x
obj.bbox.h = clip(data[64 + i * 7 + 6]) - obj.bbox.y
objs.append(obj)
return objs
def yolov3mm_postproc(model_outputs, model_info, image_size, threshold):
"""Yolov3mm postproc"""
image_w = int(image_size["image_width"])
image_h = int(image_size["image_height"])
model_input_w = model_info.input_shape(0)[2]
model_input_h = model_info.input_shape(0)[1]
if model_info.input_layout(0).order == cnis.DimOrder.NCHW:
model_input_w = model_info.input_shape(0)[3]
model_input_h = model_info.input_shape(0)[2]
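  # the model input is letterboxed: the image is scaled with its aspect ratio
  # preserved, so boxes must be rescaled and the padding offsets removed below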
scaling_factors = min(1.0 * model_input_w / image_w, 1.0 * model_input_h / image_h)
scaled_w = scaling_factors * image_w
scaled_h = scaling_factors * image_h
box_num = model_outputs.buffers[1].data(dtype=cnis.DataType.INT32)[0]
data = model_outputs.buffers[0].data(dtype=cnis.DataType.FLOAT32)
objs = []
box_step = 7
for i in range(box_num):
left = clip(data[i * box_step + 3])
right = clip(data[i * box_step + 5])
top = clip(data[i * box_step + 4])
bottom = clip(data[i * box_step + 6])
# rectify
left = (left * model_input_w - (model_input_w - scaled_w) / 2) / scaled_w
    right = (right * model_input_w - (model_input_w - scaled_w) / 2) / scaled_w
top = (top * model_input_h - (model_input_h - scaled_h) / 2) / scaled_h
bottom = (bottom * model_input_h - (model_input_h - scaled_h) / 2) / scaled_h
left = max(0, left)
right = max(0, right)
top = max(0, top)
bottom = max(0, bottom)
obj = cnis.DetectObject()
obj.label = int(data[i * box_step + 1])
obj.score = data[i * box_step + 2]
obj.bbox.x = left
obj.bbox.y = top
obj.bbox.w = min(1 - obj.bbox.x, right - left)
obj.bbox.h = min(1 - obj.bbox.y, bottom - top)
if (threshold > 0 and obj.score < threshold) or obj.bbox.w <= 0 or obj.bbox.h <= 0:
continue
objs.append(obj)
return objs
def print_objs(objs):
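  # print a human-readable summary of the detected objects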
if len(objs) == 0:
print("[EasyDK PyAPISamples] @@@@@@@@@@@ No objects detected in frame ")
return
print("[EasyDK PyAPISamples] objects number: ", len(objs))
for obj in objs:
print("[EasyDK PyAPISamples] obj label: {} score: {:.4f} bbox : {:.4f}, {:.4f}, {:.4f}, {:.4f}".format(
obj.label, obj.score, obj.bbox.x, obj.bbox.y, obj.bbox.w, obj.bbox.h)) | [
"cnis.DetectObject"
] | [((1605, 1624), 'cnis.DetectObject', 'cnis.DetectObject', ([], {}), '()\n', (1622, 1624), False, 'import cnis\n'), ((3482, 3501), 'cnis.DetectObject', 'cnis.DetectObject', ([], {}), '()\n', (3499, 3501), False, 'import cnis\n')] |
# myTeam.py
# ---------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by <NAME>
# (<EMAIL>) and <NAME> (<EMAIL>).
# Student side autograding was added by <NAME>, <NAME>, and
# <NAME> (<EMAIL>).
from captureAgents import CaptureAgent
import random, time, util
from game import Directions, Actions
import game
from util import nearestPoint
#################
# Team creation #
#################
NUM_TRAINING = 0
TRAINING = False
def createTeam(firstIndex, secondIndex, isRed,
first = 'ApproxQLearningOffense', second = 'DefensiveReflexAgent', numTraining = 0, **args):
"""
This function should return a list of two agents that will form the
team, initialized using firstIndex and secondIndex as their agent
index numbers. isRed is True if the red team is being created, and
will be False if the blue team is being created.
As a potentially helpful development aid, this function can take
additional string-valued keyword arguments ("first" and "second" are
such arguments in the case of this function), which will come from
the --redOpts and --blueOpts command-line arguments to capture.py.
For the nightly contest, however, your team will be created without
any extra arguments, so you should make sure that the default
behavior is what you want for the nightly contest.
"""
# The following line is an example only; feel free to change it.
  global NUM_TRAINING  # without this, the assignment below would only bind a local name
  NUM_TRAINING = numTraining
return [eval(first)(firstIndex), eval(second)(secondIndex)]
class ApproxQLearningOffense(CaptureAgent):
def registerInitialState(self, gameState):
self.epsilon = 0.1
self.alpha = 0.2
self.discount = 0.9
self.numTraining = NUM_TRAINING
self.episodesSoFar = 0
self.weights = {'closest-food': -3.099192562140742,
'bias': -9.280875042529367,
'#-of-ghosts-1-step-away': -16.6612110039328,
'eats-food': 11.127808437648863}
self.start = gameState.getAgentPosition(self.index)
self.featuresExtractor = FeaturesExtractor(self)
CaptureAgent.registerInitialState(self, gameState)
def chooseAction(self, gameState):
"""
Picks among the actions with the highest Q(s,a).
"""
legalActions = gameState.getLegalActions(self.index)
if len(legalActions) == 0:
return None
foodLeft = len(self.getFood(gameState).asList())
if foodLeft <= 2:
bestDist = 9999
for action in legalActions:
successor = self.getSuccessor(gameState, action)
pos2 = successor.getAgentPosition(self.index)
dist = self.getMazeDistance(self.start, pos2)
if dist < bestDist:
bestAction = action
bestDist = dist
return bestAction
action = None
if TRAINING:
for action in legalActions:
self.updateWeights(gameState, action)
if not util.flipCoin(self.epsilon):
# exploit
action = self.getPolicy(gameState)
else:
# explore
action = random.choice(legalActions)
return action
def getWeights(self):
return self.weights
def getQValue(self, gameState, action):
"""
Should return Q(state,action) = w * featureVector
where * is the dotProduct operator
"""
# features vector
features = self.featuresExtractor.getFeatures(gameState, action)
return features * self.weights
def update(self, gameState, action, nextState, reward):
"""
Should update your weights based on transition
"""
features = self.featuresExtractor.getFeatures(gameState, action)
oldValue = self.getQValue(gameState, action)
futureQValue = self.getValue(nextState)
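    # TD error: (reward + discounted future value) minus the current Q estimate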
difference = (reward + self.discount * futureQValue) - oldValue
# for each feature i
for feature in features:
newWeight = self.alpha * difference * features[feature]
self.weights[feature] += newWeight
# print(self.weights)
def updateWeights(self, gameState, action):
nextState = self.getSuccessor(gameState, action)
reward = self.getReward(gameState, nextState)
self.update(gameState, action, nextState, reward)
def getReward(self, gameState, nextState):
reward = 0
agentPosition = gameState.getAgentPosition(self.index)
# check if I have updated the score
if self.getScore(nextState) > self.getScore(gameState):
diff = self.getScore(nextState) - self.getScore(gameState)
reward = diff * 10
# check if food eaten in nextState
myFoods = self.getFood(gameState).asList()
distToFood = min([self.getMazeDistance(agentPosition, food) for food in myFoods])
# I am 1 step away, will I be able to eat it?
if distToFood == 1:
nextFoods = self.getFood(nextState).asList()
if len(myFoods) - len(nextFoods) == 1:
reward = 10
# check if I am eaten
enemies = [gameState.getAgentState(i) for i in self.getOpponents(gameState)]
ghosts = [a for a in enemies if not a.isPacman and a.getPosition() != None]
if len(ghosts) > 0:
minDistGhost = min([self.getMazeDistance(agentPosition, g.getPosition()) for g in ghosts])
if minDistGhost == 1:
nextPos = nextState.getAgentState(self.index).getPosition()
if nextPos == self.start:
# I die in the next state
reward = -100
return reward
def final(self, state):
"Called at the end of each game."
# call the super-class final method
CaptureAgent.final(self, state)
# print(self.weights)
# did we finish training?
def getSuccessor(self, gameState, action):
"""
Finds the next successor which is a grid position (location tuple).
"""
successor = gameState.generateSuccessor(self.index, action)
pos = successor.getAgentState(self.index).getPosition()
if pos != nearestPoint(pos):
# Only half a grid position was covered
return successor.generateSuccessor(self.index, action)
else:
return successor
def computeValueFromQValues(self, gameState):
"""
Returns max_action Q(state,action)
where the max is over legal actions. Note that if
there are no legal actions, which is the case at the
terminal state, you should return a value of 0.0.
"""
allowedActions = gameState.getLegalActions(self.index)
if len(allowedActions) == 0:
return 0.0
bestAction = self.getPolicy(gameState)
return self.getQValue(gameState, bestAction)
def computeActionFromQValues(self, gameState):
"""
Compute the best action to take in a state. Note that if there
are no legal actions, which is the case at the terminal state,
you should return None.
"""
legalActions = gameState.getLegalActions(self.index)
if len(legalActions) == 0:
return None
actionVals = {}
bestQValue = float('-inf')
for action in legalActions:
targetQValue = self.getQValue(gameState, action)
actionVals[action] = targetQValue
if targetQValue > bestQValue:
bestQValue = targetQValue
bestActions = [k for k, v in actionVals.items() if v == bestQValue]
# random tie-breaking
return random.choice(bestActions)
def getPolicy(self, gameState):
return self.computeActionFromQValues(gameState)
def getValue(self, gameState):
return self.computeValueFromQValues(gameState)
class FeaturesExtractor:
def __init__(self, agentInstance):
self.agentInstance = agentInstance
def getFeatures(self, gameState, action):
# extract the grid of food and wall locations and get the ghost locations
food = self.agentInstance.getFood(gameState)
walls = gameState.getWalls()
enemies = [gameState.getAgentState(i) for i in self.agentInstance.getOpponents(gameState)]
ghosts = [a.getPosition() for a in enemies if not a.isPacman and a.getPosition() != None]
# ghosts = state.getGhostPositions()
features = util.Counter()
features["bias"] = 1.0
# compute the location of pacman after he takes the action
agentPosition = gameState.getAgentPosition(self.agentInstance.index)
x, y = agentPosition
dx, dy = Actions.directionToVector(action)
next_x, next_y = int(x + dx), int(y + dy)
# count the number of ghosts 1-step away
features["#-of-ghosts-1-step-away"] = sum((next_x, next_y) in Actions.getLegalNeighbors(g, walls) for g in ghosts)
# if len(ghosts) > 0:
# minGhostDistance = min([self.agentInstance.getMazeDistance(agentPosition, g) for g in ghosts])
# if minGhostDistance < 3:
# features["minGhostDistance"] = minGhostDistance
# successor = self.agentInstance.getSuccessor(gameState, action)
# features['successorScore'] = self.agentInstance.getScore(successor)
# if there is no danger of ghosts then add the food feature
if not features["#-of-ghosts-1-step-away"] and food[next_x][next_y]:
features["eats-food"] = 1.0
# capsules = self.agentInstance.getCapsules(gameState)
# if len(capsules) > 0:
# closestCap = min([self.agentInstance.getMazeDistance(agentPosition, cap) for cap in self.agentInstance.getCapsules(gameState)])
# features["closestCapsule"] = closestCap
dist = self.closestFood((next_x, next_y), food, walls)
if dist is not None:
# make the distance a number less than one otherwise the update
# will diverge wildly
features["closest-food"] = float(dist) / (walls.width * walls.height)
features.divideAll(10.0)
# print(features)
return features
def closestFood(self, pos, food, walls):
"""
closestFood -- this is similar to the function that we have
worked on in the search project; here its all in one place
"""
fringe = [(pos[0], pos[1], 0)]
expanded = set()
while fringe:
pos_x, pos_y, dist = fringe.pop(0)
if (pos_x, pos_y) in expanded:
continue
expanded.add((pos_x, pos_y))
# if we find a food at this location then exit
if food[pos_x][pos_y]:
return dist
# otherwise spread out from the location to its neighbours
nbrs = Actions.getLegalNeighbors((pos_x, pos_y), walls)
for nbr_x, nbr_y in nbrs:
fringe.append((nbr_x, nbr_y, dist + 1))
# no food found
return None
##########
# Agents #
##########
class ReflexCaptureAgent(CaptureAgent):
"""
A base class for reflex agents that chooses score-maximizing actions
"""
def registerInitialState(self, gameState):
self.start = gameState.getAgentPosition(self.index)
CaptureAgent.registerInitialState(self, gameState)
def chooseAction(self, gameState):
"""
Picks among the actions with the highest Q(s,a).
"""
actions = gameState.getLegalActions(self.index)
# You can profile your evaluation time by uncommenting these lines
# start = time.time()
values = [self.evaluate(gameState, a) for a in actions]
# print 'eval time for agent %d: %.4f' % (self.index, time.time() - start)
maxValue = max(values)
bestActions = [a for a, v in zip(actions, values) if v == maxValue]
foodLeft = len(self.getFood(gameState).asList())
if foodLeft <= 2:
bestDist = 9999
for action in actions:
successor = self.getSuccessor(gameState, action)
pos2 = successor.getAgentPosition(self.index)
dist = self.getMazeDistance(self.start, pos2)
if dist < bestDist:
bestAction = action
bestDist = dist
return bestAction
return random.choice(bestActions)
def getSuccessor(self, gameState, action):
"""
Finds the next successor which is a grid position (location tuple).
"""
successor = gameState.generateSuccessor(self.index, action)
pos = successor.getAgentState(self.index).getPosition()
if pos != nearestPoint(pos):
# Only half a grid position was covered
return successor.generateSuccessor(self.index, action)
else:
return successor
def evaluate(self, gameState, action):
"""
Computes a linear combination of features and feature weights
"""
features = self.getFeatures(gameState, action)
weights = self.getWeights(gameState, action)
return features * weights
def getFeatures(self, gameState, action):
"""
Returns a counter of features for the state
"""
features = util.Counter()
successor = self.getSuccessor(gameState, action)
features['successorScore'] = self.getScore(successor)
return features
def getWeights(self, gameState, action):
"""
Normally, weights do not depend on the gamestate. They can be either
a counter or a dictionary.
"""
return {'successorScore': 1.0}
class DefensiveReflexAgent(ReflexCaptureAgent):
"""
A reflex agent that keeps its side Pacman-free. Again,
this is to give you an idea of what a defensive agent
could be like. It is not the best or only way to make
such an agent.
"""
def getFeatures(self, gameState, action):
features = util.Counter()
successor = self.getSuccessor(gameState, action)
myState = successor.getAgentState(self.index)
myPos = myState.getPosition()
# Computes whether we're on defense (1) or offense (0)
features['onDefense'] = 1
if myState.isPacman: features['onDefense'] = 0
# Computes distance to invaders we can see
enemies = [successor.getAgentState(i) for i in self.getOpponents(successor)]
invaders = [a for a in enemies if a.isPacman and a.getPosition() != None]
features['numInvaders'] = len(invaders)
if len(invaders) > 0:
dists = [self.getMazeDistance(myPos, a.getPosition()) for a in invaders]
features['invaderDistance'] = min(dists)
if action == Directions.STOP: features['stop'] = 1
rev = Directions.REVERSE[gameState.getAgentState(self.index).configuration.direction]
if action == rev: features['reverse'] = 1
return features
def getWeights(self, gameState, action):
return {'numInvaders': -1000, 'onDefense': 100, 'invaderDistance': -10, 'stop': -100, 'reverse': -2} | [
"util.flipCoin",
"random.choice",
"util.nearestPoint",
"game.Actions.getLegalNeighbors",
"game.Actions.directionToVector",
"captureAgents.CaptureAgent.registerInitialState",
"util.Counter",
"captureAgents.CaptureAgent.final"
] | [((2426, 2476), 'captureAgents.CaptureAgent.registerInitialState', 'CaptureAgent.registerInitialState', (['self', 'gameState'], {}), '(self, gameState)\n', (2459, 2476), False, 'from captureAgents import CaptureAgent\n'), ((5778, 5809), 'captureAgents.CaptureAgent.final', 'CaptureAgent.final', (['self', 'state'], {}), '(self, state)\n', (5796, 5809), False, 'from captureAgents import CaptureAgent\n'), ((7474, 7500), 'random.choice', 'random.choice', (['bestActions'], {}), '(bestActions)\n', (7487, 7500), False, 'import random, time, util\n'), ((8227, 8241), 'util.Counter', 'util.Counter', ([], {}), '()\n', (8239, 8241), False, 'import random, time, util\n'), ((8445, 8478), 'game.Actions.directionToVector', 'Actions.directionToVector', (['action'], {}), '(action)\n', (8470, 8478), False, 'from game import Directions, Actions\n'), ((10830, 10880), 'captureAgents.CaptureAgent.registerInitialState', 'CaptureAgent.registerInitialState', (['self', 'gameState'], {}), '(self, gameState)\n', (10863, 10880), False, 'from captureAgents import CaptureAgent\n'), ((11790, 11816), 'random.choice', 'random.choice', (['bestActions'], {}), '(bestActions)\n', (11803, 11816), False, 'import random, time, util\n'), ((12626, 12640), 'util.Counter', 'util.Counter', ([], {}), '()\n', (12638, 12640), False, 'import random, time, util\n'), ((13280, 13294), 'util.Counter', 'util.Counter', ([], {}), '()\n', (13292, 13294), False, 'import random, time, util\n'), ((3227, 3254), 'util.flipCoin', 'util.flipCoin', (['self.epsilon'], {}), '(self.epsilon)\n', (3240, 3254), False, 'import random, time, util\n'), ((3354, 3381), 'random.choice', 'random.choice', (['legalActions'], {}), '(legalActions)\n', (3367, 3381), False, 'import random, time, util\n'), ((6138, 6155), 'util.nearestPoint', 'nearestPoint', (['pos'], {}), '(pos)\n', (6150, 6155), False, 'from util import nearestPoint\n'), ((10400, 10448), 'game.Actions.getLegalNeighbors', 'Actions.getLegalNeighbors', (['(pos_x, pos_y)', 'walls'], {}), '((pos_x, pos_y), walls)\n', (10425, 10448), False, 'from game import Directions, Actions\n'), ((12089, 12106), 'util.nearestPoint', 'nearestPoint', (['pos'], {}), '(pos)\n', (12101, 12106), False, 'from util import nearestPoint\n'), ((8637, 8672), 'game.Actions.getLegalNeighbors', 'Actions.getLegalNeighbors', (['g', 'walls'], {}), '(g, walls)\n', (8662, 8672), False, 'from game import Directions, Actions\n')] |
import core
import sub_menu
import perc_finder
def menu():
print("--------------------------------------------------------")
print("!!!Welcome to Sparks Multipurpose Calculator!!!")
print("--------------------------------------------------------")
print("Please select a option: \n 1) Add\n 2) Subtract\n 3) Multiply\n 4) Divide\n 5) Page 2\n 0) Quit")
user_input_A = 100
#so it won't default to quit if no input is entered
user_input_A = int(input(": "))
if user_input_A == 1:
core.add()
elif user_input_A == 2:
core.subtract()
elif user_input_A == 3:
core.multi()
elif user_input_A == 4:
core.main.divide()
elif user_input_A == 5:
sub_menu.page_2()
elif user_input_A == 0:
sub_menu.sure()
elif user_input_A == 1114:
print("MEOW!")
menu()
else:
print("Invalid option!")
menu()
| [
"sub_menu.sure",
"core.multi",
"core.add",
"core.subtract",
"sub_menu.page_2",
"core.main.divide"
] | [((537, 547), 'core.add', 'core.add', ([], {}), '()\n', (545, 547), False, 'import core\n'), ((584, 599), 'core.subtract', 'core.subtract', ([], {}), '()\n', (597, 599), False, 'import core\n'), ((636, 648), 'core.multi', 'core.multi', ([], {}), '()\n', (646, 648), False, 'import core\n'), ((685, 703), 'core.main.divide', 'core.main.divide', ([], {}), '()\n', (701, 703), False, 'import core\n'), ((740, 757), 'sub_menu.page_2', 'sub_menu.page_2', ([], {}), '()\n', (755, 757), False, 'import sub_menu\n'), ((794, 809), 'sub_menu.sure', 'sub_menu.sure', ([], {}), '()\n', (807, 809), False, 'import sub_menu\n')] |
import time
import asyncio
import aiosmtplib
import yagmail
class AIOSMTP(yagmail.SMTP):
async def login(self):
# aiosmtplib implementation specific
use_tls = str(self.port) == "465"
self.smtp_starttls = not use_tls
if self.oauth2_file is not None:
await self._login_oauth2(self.credentials, use_tls)
else:
await self._login(self.credentials, use_tls=use_tls)
async def _login_oauth2(self, oauth2_info, use_tls):
if "email_address" in oauth2_info:
oauth2_info.pop("email_address")
self.smtp = self.connection(self.host, self.port, use_tls=use_tls, **self.kwargs)
await self.smtp.connect()
auth_string = self.get_oauth_string(self.user, oauth2_info)
await self.smtp.ehlo(oauth2_info["google_client_id"])
if self.starttls is True:
await self.smtp.starttls()
await self.smtp.execute_command(b"AUTH", b"XOAUTH2", bytes(auth_string, "ascii"))
@property
def connection(self):
return aiosmtplib.SMTP
async def send(
self,
to=None,
subject=None,
contents=None,
attachments=None,
cc=None,
bcc=None,
preview_only=False,
headers=None,
):
""" Use this to send an email with gmail"""
recipients, msg_string = self.prepare_send(
to, subject, contents, attachments, cc, bcc, headers
)
if preview_only:
return (recipients, msg_string)
return await self._attempt_send(recipients, msg_string)
async def _attempt_send(self, recipients, msg_string):
attempts = 0
while attempts < 3:
try:
result = await self.smtp.sendmail(self.user, recipients, msg_string)
self.log.info("Message sent to %s", recipients)
self.num_mail_sent += 1
return result
except aiosmtplib.SMTPServerDisconnected as e:
self.log.error(e)
attempts += 1
                await asyncio.sleep(attempts * 3)  # back off without blocking the event loop
self.unsent.append((recipients, msg_string))
return False
    async def send_unsent(self):
        """
        Emails that could not be sent are stored in :attr:`self.unsent`.
        Use this function to attempt to send them again.
        """
        while self.unsent:
            # drain the current batch; sends that still fail re-queue themselves in self.unsent
            batch = [self.unsent.pop() for _ in range(len(self.unsent))]
            await asyncio.gather(*(self._attempt_send(recipients, msg_string)
                                  for recipients, msg_string in batch))
async def close(self):
raise ValueError("Should be `async with`")
async def __aenter__(self):
await self.login()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if not self.is_closed:
await self.aclose()
return False
async def aclose(self):
""" Close the connection to the SMTP server """
self.is_closed = True
try:
await self.smtp.quit()
except (TypeError, AttributeError, aiosmtplib.SMTPServerDisconnected):
pass
async def _login(self, password, use_tls):
"""
Login to the SMTP server using password. `login` only needs to be manually run when the
connection to the SMTP server was closed by the user.
"""
self.smtp = self.connection(self.host, self.port)
await self.smtp.connect(port=self.port, use_tls=use_tls)
if self.starttls:
await self.smtp.starttls()
if not self.smtp_skip_login:
password = self.handle_password(self.user, password)
await self.smtp.login(self.user, password)
self.is_closed = False
def __del__(self):
""" Not required in async"""
| [
"time.sleep",
"asyncio.gather"
] | [((2584, 2608), 'asyncio.gather', 'asyncio.gather', (['*futures'], {}), '(*futures)\n', (2598, 2608), False, 'import asyncio\n'), ((2078, 2102), 'time.sleep', 'time.sleep', (['(attempts * 3)'], {}), '(attempts * 3)\n', (2088, 2102), False, 'import time\n')] |
from random import randint
import time
import pdb
def do_random_screen():
    result = 1
    while True:
        result += 1
        line = ''
        for i in range(0, 128):
            line = line + chr(7)  # chr(7) is the terminal bell; chr(randint(65, 127)) would print random characters instead
            time.sleep(randint(1, 10000) * .001)
        print(line)
        if result == 30:
            break
    return result
if __name__ == '__main__':
    x = do_random_screen()
    print(x)
| [
"random.randint"
] | [((263, 280), 'random.randint', 'randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (270, 280), False, 'from random import randint\n')] |
#!/usr/local/bin/python3
import re
_INITIAL_STATE_RE = re.compile(r"initial state: ([#.]+)")
pattern_map = {}
with open("input.txt") as f:
initial_state_line = f.readline().strip()
state = _INITIAL_STATE_RE.match(initial_state_line).group(1)
assert not f.readline().strip()
for rule_line in f.readlines():
[input_pattern, output] = rule_line.strip().split(" => ")
pattern_map[input_pattern] = output
start_pot_offset = 0
for generation in range(20):
next_state = ""
state = ".." + state + ".."
start_pot_offset -= 2
for i in range(len(state)):
pattern = ""
for j in range(i-2, i+3):
if j < 0 or j >= len(state):
pattern += "."
else:
pattern += state[j]
next_state += pattern_map.get(pattern, ".")
state = next_state
result = 0
for i, value in enumerate(state):
if value == "#":
result += i + start_pot_offset
print(result)
| [
"re.compile"
] | [((57, 93), 're.compile', 're.compile', (['"""initial state: ([#.]+)"""'], {}), "('initial state: ([#.]+)')\n", (67, 93), False, 'import re\n')] |
import os, re
from ... import ValidateError, FailPage, ServerError
from ... import skilift
from ....skilift import editfolder, fromjson
from ....ski.project_class_definition import SectionData
# a search for anything non-alphanumeric that is also not a dot, an underscore or a hyphen
_ANDH = re.compile(r'[^\w\.\-]')
def _get_folder_info(project, folder):
"Given a folder ident string such as number or 'project,number' or 'project_number' return FolderInfo, folder_url"
try:
foldernumber = skilift.get_itemnumber(project, folder)
if foldernumber is None:
raise FailPage(message="Parent folder not recognised")
folder_info = skilift.folder_info(project, foldernumber)
folder_url = skilift.page_path(project, foldernumber)
except ServerError as e:
raise FailPage(message=e.message)
return folder_info, folder_url
def retrieve_add_folder(skicall):
"Fill in the add a folder page"
call_data = skicall.call_data
pd = call_data['pagedata']
project = call_data['editedprojname']
# parent is the folder a new folder is to be added to
# the value in call_data is the string ident submitted by the ftree add_folder button
# or by a value in session_data
if 'parent' in call_data:
parent_info, parent_url = _get_folder_info(project, call_data['parent'])
elif 'add_to_foldernumber' in call_data:
parent_info, parent_url = _get_folder_info(project, call_data['add_to_foldernumber'])
else:
raise FailPage(message = "Parent folder missing")
sd_adminhead = SectionData("adminhead")
sd_adminhead["page_head","large_text"] = "Add folder to : %s" % (parent_url,)
pd.update(sd_adminhead)
pd['staticpath','input_text'] = os.path.join(project, 'static')
pd['newfolderform','parent'] = project+","+str(parent_info.number)
call_data['add_to_foldernumber'] = parent_info.number
# st1: new folder name
if 'new_folder' in call_data:
pd['foldername','new_folder'] = call_data['new_folder']
# cb1: restricted checkbox
if ('checkbox' in call_data) and call_data['checkbox']:
pd['cb1','checked'] = True
else:
pd['cb1','checked'] = False
if parent_info.restricted:
pd['cb1','show_restricted'] = False
else:
pd['cb1','show_restricted'] = True
# it1: text input for folder brief
if ('folder_brief' in call_data) and call_data['folder_brief']:
pd['it1','folder_brief'] = call_data['folder_brief']
# it2: folder ident number
if 'folder_ident_number' in call_data:
pd['it2','folder_ident_number'] = str(call_data['folder_ident_number'])
else:
pd['it2','folder_ident_number'] = str(skilift.next_ident_number(project))
def submit_addfolder(skicall):
""" Creates a folder by making a dictionary similar to:
{
"name":"folder_name",
"ident":999,
"brief":"brief description of the folder",
"restricted":False
}
And then calling editfolder.make_new_folder.
Also calls _make_static_folder if folderpath is in call data
"""
call_data = skicall.call_data
project = call_data['editedprojname']
folder_dict = {}
if 'parent' not in call_data:
raise FailPage(message = "Parent folder missing")
# the parent value in call_data is the string ident submitted by the button
parentinfo, parent_url = _get_folder_info(project, call_data['parent'])
# parentinfo is a named tuple with members
# 'name', 'number', 'restricted', 'brief', 'contains_pages', 'contains_folders'
if ('new_folder' not in call_data) or ('checkbox' not in call_data) or ('folder_brief' not in call_data) or ('folder_ident_number' not in call_data):
raise FailPage("New folder information missing")
try:
folder_ident_number = int(call_data['folder_ident_number'])
except Exception:
raise FailPage("The Folder Ident number must be an integer")
if folder_ident_number<1:
raise FailPage("The Folder Ident number must be a positive integer greater than zero")
folder_dict["ident"] = folder_ident_number
folder_dict["brief"] = call_data['folder_brief']
folder_dict["restricted"] = call_data['checkbox']
new_folder_name = call_data['new_folder']
    # check the name is alphanumeric or underscore or dot or hyphen only
    if _ANDH.search(new_folder_name):
        raise FailPage(message = "Folder names must be alphanumeric and may also have dots or underscores or hyphens")
folder_dict["name"] = new_folder_name
if 'folderpath' in call_data and call_data['folderpath']:
folderpath = call_data['folderpath'].strip()
folderpath = folderpath.strip('/')
folderpath = folderpath.strip('\\')
if not folderpath:
raise FailPage("Sorry, the given static folder is invalid.")
fullpath = os.path.join(skilift.get_projectfiles_dir(project), folderpath)
if not os.path.isdir(fullpath):
raise FailPage("Sorry, the given static folder location cannot be found.")
if not call_data['folder_brief']:
folder_dict["brief"] = "Link to %s" % folderpath
else:
folderpath = None
fullpath = None
# folderpath is the server folder path relative to projectfiles
# fullpath is the absolute server folder path
try:
# create the folder
editfolder.make_new_folder(project, parentinfo.number, folder_dict)
except ServerError as e:
raise FailPage(message = e.message)
if fullpath:
# add subfolders and file pages
_make_static_folder(project, folder_dict, fullpath, folderpath)
call_data['status'] = 'Static folder tree added'
return
call_data['status'] = 'New folder %s added.' % (parent_url + folder_dict["name"] + '/',)
def _make_static_folder(project, folder_dict, fullpath, folderpath):
"""Creates containing sub folders and Filepages pointing to static server files
folderpath is the server folder path relative to projectfiles
fullpath is the absolute server folder path
"""
try:
# loads everything under folderpath as Folders and FilePages
# ident_dict maps folderpath to newly created folder ident numbers
ident_dict = {}
ident_dict[folderpath] = folder_dict["ident"]
ident = folder_dict["ident"]
ident_number_list = skilift.ident_numbers(project)
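        # walk the static tree: each sub-directory becomes a Folder and each file a
        # FilePage, with ident_dict mapping server paths to the idents of created folders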
for root, dirs, files in os.walk(fullpath):
#fpath = root[len(skilift.get_projectdir(project))+1:]
fpath = root[len(skilift.get_projectfiles_dir(project))+1:]
parent_ident = ident_dict[fpath]
if files:
# create files
for filename in files:
new_filepath=os.path.join(fpath, filename)
new_page_dict = {"name":filename,
"brief":"Link to %s" % (new_filepath,),
"FilePage": {
"filepath": new_filepath,
}
}
if ident:
ident +=1
if ident not in ident_number_list:
new_page_dict["ident"] = ident
editfolder.make_new_page(project, parent_ident, new_page_dict)
if dirs:
# create folders
for foldername in dirs:
new_folderpath=os.path.join(fpath, foldername)
new_folder_dict = {"name":foldername,
"brief":"Link to %s" % (new_folderpath,),
"restricted":False
}
if ident:
ident +=1
if ident not in ident_number_list:
new_folder_dict["ident"] = ident
ident_dict[new_folderpath] = editfolder.make_new_folder(project, parent_ident, new_folder_dict)
except ServerError as e:
raise FailPage(e.message)
def submit_upload_folder(skicall):
"Copy a folder from uploaded file"
call_data = skicall.call_data
project = call_data['editedprojname']
# add_to_foldernumber is the folder a new folder is to be added to
if 'add_to_foldernumber' not in call_data:
raise FailPage(message = "Parent folder missing")
# get submitted data for new folder
try:
addident = int(call_data['addident'])
except Exception:
raise FailPage(message = "Addition integer is invalid")
importname = call_data['importname']
uploadfile = call_data['uploadfile']
json_string = uploadfile.decode(encoding='utf-8')
# create the folder
try:
# note: restricted is set to False
fromjson.create_folder(project, call_data['add_to_foldernumber'], addident, importname, False, json_string)
except ServerError as e:
raise FailPage(message = e.message, widget='import_folder')
del call_data['add_to_foldernumber']
call_data['status'] = 'New folder and contents added'
| [
"os.path.isdir",
"os.path.join",
"os.walk",
"re.compile"
] | [((300, 326), 're.compile', 're.compile', (['"""[^\\\\w\\\\.\\\\-]"""'], {}), "('[^\\\\w\\\\.\\\\-]')\n", (310, 326), False, 'import os, re\n'), ((1766, 1797), 'os.path.join', 'os.path.join', (['project', '"""static"""'], {}), "(project, 'static')\n", (1778, 1797), False, 'import os, re\n'), ((6482, 6499), 'os.walk', 'os.walk', (['fullpath'], {}), '(fullpath)\n', (6489, 6499), False, 'import os, re\n'), ((4971, 4994), 'os.path.isdir', 'os.path.isdir', (['fullpath'], {}), '(fullpath)\n', (4984, 4994), False, 'import os, re\n'), ((6810, 6839), 'os.path.join', 'os.path.join', (['fpath', 'filename'], {}), '(fpath, filename)\n', (6822, 6839), False, 'import os, re\n'), ((7565, 7596), 'os.path.join', 'os.path.join', (['fpath', 'foldername'], {}), '(fpath, foldername)\n', (7577, 7596), False, 'import os, re\n')] |
from functools import wraps
from pprint import pprint
import requests
from django_simple_slack_app import slack_commands
from papago_slack.font import make_art
def authorized(func):
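    # decorator: rejects slash commands from users who have not completed the OAuth install flow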
@wraps(func)
def wrapper(*args, **kwargs):
event_data = args[0]
if 'user' not in event_data:
send_response(event_data,
"You need to _authorize_ *Papago* to use auto translation! :smirk: \n" +
"Visit <https://yangpago.com/slack/install|Authorize Page> to accept Papago :rocket:\n\n"
"양파고를 이용하시려면 *양파고*를 _권한 인증_ 해주셔야 합니다! :smirk: \n" +
"인증하시려면 <https://yangpago.com/slack/install|인증 페이지>를 방문해주세요! :rocket:")
return
return func(*args, **kwargs)
return wrapper
def send_response(event_data, text, response_type="ephemeral"):
requests.post(event_data['response_url'], json={
"text": text,
"response_type": response_type
})
@slack_commands.on("error")
@authorized
def on_command_error(error):
pprint(error)
@slack_commands.on("/papago")
@slack_commands.on("/papago.usage")
@slack_commands.on("/papago.on")
@slack_commands.on("/papago.off")
@slack_commands.on("/papago.saysorry")
@slack_commands.on("/papago.blaming")
@slack_commands.on("/papago.pepe")
def papago_help(event_data):
send_response(event_data,
":sob: My name is changed, friend! Please use `/yangpago` instead of `/papago`")
@slack_commands.on("/yangpago")
@authorized
def papago_command(event_data):
if event_data['text']:
return
status = "ON" if event_data['channel_id'] in event_data['user'].papago.channels else "OFF"
status_kr = "켜져" if event_data['channel_id'] in event_data['user'].papago.channels else "꺼져"
send_response(event_data,
"You can turn on/off Papago for you in this channel using `/yangpago on`, `/yangpago off`.\n" +
f"Papago is turned *{status}* in this channel for you\n\n" +
"`/yangpago on`과 `/yangpago off` 명령으로 이 채널의 양파고 동작를 개인 설정을 켜고 끌 수 있습니다..\n" +
f"현재 이 채널에서 양파고 동작은 *{status_kr}*있습니다.\n")
@slack_commands.on("/yangpago.usage")
@authorized
def papago_command_team(event_data):
user = event_data['user']
count, letters = user.team.papago.monthly_usage()
send_response(event_data,
f"Your team use {count} requests for {letters} letters in this month\n" +
f"이 팀의 이번달 양파고 사용 횟수는 {count}번이고 총 {letters}글자를 번역 했습니다.")
@slack_commands.on("/yangpago.on")
@authorized
def papago_command_on(event_data):
if 'user' not in event_data:
return
user = event_data['user']
user.papago.channels.append(event_data['channel_id'])
user.papago.save()
send_response(event_data,
"Papago will translate on this channel for you!\n" +
"이제부터 이 채널에 포스팅 하시는 내용을 양파고가 번역 하겠습니다!")
print("PAPAGO ON", event_data['user'].id, "in", event_data['channel_id'])
@slack_commands.on("/yangpago.off")
@authorized
def papago_command_off(event_data):
if 'user' not in event_data:
return
user = event_data['user']
user.papago.channels.remove(event_data['channel_id'])
user.papago.save()
send_response(event_data, "Papago translation is off!\n" +
"이 채널에서 양파고 번역을 정지합니다! 안되잖아... 안되...")
print("PAPAGO OFF", event_data['user'].id, "in", event_data['channel_id'])
@slack_commands.on("/yangpago.saysorry")
@authorized
def papago_saysorry(event_data):
if 'user' not in event_data:
return
send_response(event_data, "죄송합니다... 앞으로 제대로 하겠습니다... :sob:", response_type="in_channel")
@slack_commands.on("/yangpago.blaming")
@authorized
def papago_blaming(event_data):
if 'user' not in event_data:
return
send_response(event_data, "아.. 이런 시부렁 못해먹겠네... :expressionless:", response_type="in_channel")
@slack_commands.on("/yangpago.pepe")
@authorized
def papago_pepe(event_data):  # renamed: previously shadowed papago_blaming defined above
if 'user' not in event_data:
return
pepe_art = make_art(event_data['text'][5:])
send_response(event_data, pepe_art, response_type="in_channel")
| [
"django_simple_slack_app.slack_commands.on",
"requests.post",
"papago_slack.font.make_art",
"functools.wraps",
"pprint.pprint"
] | [((1000, 1026), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""error"""'], {}), "('error')\n", (1017, 1026), False, 'from django_simple_slack_app import slack_commands\n'), ((1089, 1117), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago"""'], {}), "('/papago')\n", (1106, 1117), False, 'from django_simple_slack_app import slack_commands\n'), ((1119, 1153), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago.usage"""'], {}), "('/papago.usage')\n", (1136, 1153), False, 'from django_simple_slack_app import slack_commands\n'), ((1155, 1186), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago.on"""'], {}), "('/papago.on')\n", (1172, 1186), False, 'from django_simple_slack_app import slack_commands\n'), ((1188, 1220), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago.off"""'], {}), "('/papago.off')\n", (1205, 1220), False, 'from django_simple_slack_app import slack_commands\n'), ((1222, 1259), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago.saysorry"""'], {}), "('/papago.saysorry')\n", (1239, 1259), False, 'from django_simple_slack_app import slack_commands\n'), ((1261, 1297), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago.blaming"""'], {}), "('/papago.blaming')\n", (1278, 1297), False, 'from django_simple_slack_app import slack_commands\n'), ((1299, 1332), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/papago.pepe"""'], {}), "('/papago.pepe')\n", (1316, 1332), False, 'from django_simple_slack_app import slack_commands\n'), ((1494, 1524), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago"""'], {}), "('/yangpago')\n", (1511, 1524), False, 'from django_simple_slack_app import slack_commands\n'), ((2188, 2224), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago.usage"""'], {}), "('/yangpago.usage')\n", (2205, 2224), False, 'from django_simple_slack_app import slack_commands\n'), ((2561, 2594), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago.on"""'], {}), "('/yangpago.on')\n", (2578, 2594), False, 'from django_simple_slack_app import slack_commands\n'), ((3044, 3078), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago.off"""'], {}), "('/yangpago.off')\n", (3061, 3078), False, 'from django_simple_slack_app import slack_commands\n'), ((3490, 3529), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago.saysorry"""'], {}), "('/yangpago.saysorry')\n", (3507, 3529), False, 'from django_simple_slack_app import slack_commands\n'), ((3720, 3758), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago.blaming"""'], {}), "('/yangpago.blaming')\n", (3737, 3758), False, 'from django_simple_slack_app import slack_commands\n'), ((3953, 3988), 'django_simple_slack_app.slack_commands.on', 'slack_commands.on', (['"""/yangpago.pepe"""'], {}), "('/yangpago.pepe')\n", (3970, 3988), False, 'from django_simple_slack_app import slack_commands\n'), ((191, 202), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (196, 202), False, 'from functools import wraps\n'), ((880, 978), 'requests.post', 'requests.post', (["event_data['response_url']"], {'json': "{'text': text, 'response_type': response_type}"}), "(event_data['response_url'], json={'text': text,\n 'response_type': response_type})\n", (893, 978), False, 'import requests\n'), ((1072, 1085), 'pprint.pprint', 'pprint', (['error'], {}), '(error)\n', (1078, 1085), False, 'from pprint import pprint\n'), ((4097, 4129), 'papago_slack.font.make_art', 'make_art', (["event_data['text'][5:]"], {}), "(event_data['text'][5:])\n", (4105, 4129), False, 'from papago_slack.font import make_art\n')]
from guizero import App, Text
from tkinter import Spinbox
from tkinter.ttk import Progressbar
a = App(title="Using tk widgets")
Text(a, text="Spinbox")
sp = Spinbox(from_=0, to=10)
a.add_tk_widget(sp)
Text(a, text="and Progressbar")
pb = Progressbar()
a.add_tk_widget(pb)
pb.start()
Text(a, text="in guizero")
a.display() | [
"tkinter.Spinbox",
"tkinter.ttk.Progressbar",
"guizero.Text",
"guizero.App"
] | [((99, 128), 'guizero.App', 'App', ([], {'title': '"""Using tk widgets"""'}), "(title='Using tk widgets')\n", (102, 128), False, 'from guizero import App, Text\n'), ((130, 153), 'guizero.Text', 'Text', (['a'], {'text': '"""Spinbox"""'}), "(a, text='Spinbox')\n", (134, 153), False, 'from guizero import App, Text\n'), ((160, 183), 'tkinter.Spinbox', 'Spinbox', ([], {'from_': '(0)', 'to': '(10)'}), '(from_=0, to=10)\n', (167, 183), False, 'from tkinter import Spinbox\n'), ((205, 236), 'guizero.Text', 'Text', (['a'], {'text': '"""and Progressbar"""'}), "(a, text='and Progressbar')\n", (209, 236), False, 'from guizero import App, Text\n'), ((243, 256), 'tkinter.ttk.Progressbar', 'Progressbar', ([], {}), '()\n', (254, 256), False, 'from tkinter.ttk import Progressbar\n'), ((289, 315), 'guizero.Text', 'Text', (['a'], {'text': '"""in guizero"""'}), "(a, text='in guizero')\n", (293, 315), False, 'from guizero import App, Text\n')] |
import cv2
import numpy as np
from darty.gui import GUI
from darty.image_tools import Image_Tools
print(cv2.__version__)
class Dartboard_Detector:
ENV = {
'DARTBOARD_SHAPE' : (1000,1000),
'DETECTION_BLUR' : (5,5),
'DETECTION_GREEN_LOW' : 90,
'DETECTION_GREEN_HIGH' : 95,
'DETECTION_RED_LOW' : 0,
'DETECTION_RED_HIGH' : 20,
'DETECTION_STRUCTURING_ELEMENT' : (100,100),
'DETECTION_BINARY_THRESHOLD_MIN' : 127,
'DETECTION_BINARY_THRESHOLD_MAX' : 255,
'DETECTION_OFFSET' : 200,
'ORIENTATION_BLUR' : (5,5),
'ORIENTATION_COLOR_LOW' : 45,
'ORIENTATION_COLOR_HIGH': 60,
'ORIENTATION_KERNEL' : (100,100),
'ORIENTATION_ELEMENT_SIZE_MIN' : 350,
'ORIENTATION_ELEMENT_SIZE_MAX' : 600,
'ORIENTATION_TEMPLATES' : ['shape_top.png','shape_bottom.png','shape_left.png','shape_right.png']
}
def scaleROI(self,IM):
if(IM.ndim == 3):
IM_normal = np.zeros((self.ENV['DARTBOARD_SHAPE'][0],self.ENV['DARTBOARD_SHAPE'][1],IM.shape[2]),"uint8")
else:
IM_normal = np.zeros((self.ENV['DARTBOARD_SHAPE'][0],self.ENV['DARTBOARD_SHAPE'][1]),"uint8")
scale = 1
if IM.shape[0] > IM.shape[1]:
            # taller than wide
scale = IM_normal.shape[0] / IM.shape[0]
else:
            # wider than tall
scale = IM_normal.shape[1] / IM.shape[1]
new_y = int(IM.shape[0] * scale)
new_x = int(IM.shape[1] * scale)
offset_y = int((IM_normal.shape[0] - new_y)/2)
offset_x = int((IM_normal.shape[1] - new_x)/2)
        # note: interpolation must be passed by keyword; the third positional
        # argument of cv2.resize is the destination array, not the flag
        IM_resized = cv2.resize(IM, (new_x, new_y), interpolation=cv2.INTER_AREA)
if(IM.ndim == 3):
IM_normal[offset_y:offset_y+new_y,offset_x:offset_x+new_x,:] = IM_resized
else:
IM_normal[offset_y:offset_y+new_y,offset_x:offset_x+new_x] = IM_resized
return IM_normal
def detectDartboard(self,IM):
IM_blur = cv2.blur(IM,Dartboard_Detector.ENV['DETECTION_BLUR'])
#convert to HSV
base_frame_hsv = cv2.cvtColor(IM_blur, cv2.COLOR_BGR2HSV)
# Extract Green
green_thres_low = int(Dartboard_Detector.ENV['DETECTION_GREEN_LOW'] /255. * 180)
green_thres_high = int(Dartboard_Detector.ENV['DETECTION_GREEN_HIGH'] /255. * 180)
green_min = np.array([green_thres_low, 100, 100],np.uint8)
green_max = np.array([green_thres_high, 255, 255],np.uint8)
frame_threshed_green = cv2.inRange(base_frame_hsv, green_min, green_max)
#Extract Red
red_thres_low = int(Dartboard_Detector.ENV['DETECTION_RED_LOW'] /255. * 180)
red_thres_high = int(Dartboard_Detector.ENV['DETECTION_RED_HIGH'] /255. * 180)
red_min = np.array([red_thres_low, 100, 100],np.uint8)
red_max = np.array([red_thres_high, 255, 255],np.uint8)
frame_threshed_red = cv2.inRange(base_frame_hsv, red_min, red_max)
#Combine
combined = frame_threshed_red + frame_threshed_green
#Close
kernel = np.ones(Dartboard_Detector.ENV['DETECTION_STRUCTURING_ELEMENT'],np.uint8)
closing = cv2.morphologyEx(combined, cv2.MORPH_CLOSE, kernel)
#GUI.show(closing, "Dart_Detector")
#find contours
ret,thresh = cv2.threshold(combined,Dartboard_Detector.ENV['DETECTION_BINARY_THRESHOLD_MIN'],Dartboard_Detector.ENV['DETECTION_BINARY_THRESHOLD_MAX'],0)
im2, contours, hierarchy = cv2.findContours(closing.copy(),cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
max_cont = -1
max_idx = 0
for i in range(len(contours)):
length = cv2.arcLength(contours[i], True)
if length > max_cont:
max_idx = i
max_cont = length
x,y,w,h = cv2.boundingRect(contours[max_idx])
x = x-Dartboard_Detector.ENV['DETECTION_OFFSET']
y = y-Dartboard_Detector.ENV['DETECTION_OFFSET']
w = w+int(2*Dartboard_Detector.ENV['DETECTION_OFFSET'])
h = h+int(2*Dartboard_Detector.ENV['DETECTION_OFFSET'])
return x,y,w,h,closing,frame_threshed_green,frame_threshed_red
def getOrientation(self,IM_ROI,IM_ROI_board):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,Dartboard_Detector.ENV['ORIENTATION_KERNEL'])
#Segment zones
IM_ROI_blur = cv2.blur(IM_ROI,Dartboard_Detector.ENV['ORIENTATION_BLUR'])
#convert to HSV
IM_ROI_HSV = cv2.cvtColor(IM_ROI_blur, cv2.COLOR_BGR2HSV)
purple_thres_low = int(Dartboard_Detector.ENV['ORIENTATION_COLOR_LOW'] /255. * 180)
purple_thres_high = int(Dartboard_Detector.ENV['ORIENTATION_COLOR_HIGH'] /255. * 180)
purple_min = np.array([purple_thres_low, 100, 100],np.uint8)
purple_max = np.array([purple_thres_high, 255, 255],np.uint8)
frame_thres_color = cv2.inRange(IM_ROI_HSV, purple_min, purple_max)
#Mask
frame_thres_color = cv2.subtract(frame_thres_color,IM_ROI_board)
frame_thres_color_closed = cv2.morphologyEx(frame_thres_color, cv2.MORPH_CLOSE, kernel)
#Compute contours
im2, contours, hierarchy = cv2.findContours(frame_thres_color_closed.copy(),cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
contour_lengths = []
contours_structure = []
for i in range(len(contours)):
length = cv2.arcLength(contours[i],True)
contour_lengths.append(length)
if length > Dartboard_Detector.ENV['ORIENTATION_ELEMENT_SIZE_MIN'] and length < Dartboard_Detector.ENV['ORIENTATION_ELEMENT_SIZE_MAX']:
contours_structure.append(contours[i])
        # debug histogram
#print(len(point_contours))
#plt.hist(contour_lengths, bins=20, range=(50,1000), normed=False, weights=None, cumulative=False, bottom=None, histtype='bar', align='mid', orientation='vertical', rwidth=None, log=False, color=None, label=None, stacked=False, hold=None, data=None)
#plt.show()
return frame_thres_color,frame_thres_color_closed,contours_structure
def getOrientationCorr(self,IM_ROI,base_dir):
kernel_l = cv2.imread(base_dir + self.ENV['ORIENTATION_TEMPLATES'][2])
kernel_r = cv2.imread(base_dir + self.ENV['ORIENTATION_TEMPLATES'][3])
kernel_t = cv2.imread(base_dir + self.ENV['ORIENTATION_TEMPLATES'][0])
kernel_b = cv2.imread(base_dir + self.ENV['ORIENTATION_TEMPLATES'][1])
h = kernel_l.shape[0]
w = kernel_l.shape[1]
#right
res = cv2.matchTemplate(IM_ROI,kernel_r,cv2.TM_CCORR_NORMED)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
right_top_left = max_loc
right = (right_top_left[0] + w, right_top_left[1] + h//2)
#GUI.imShow(kernel_r)
#left
res = cv2.matchTemplate(IM_ROI,kernel_l,cv2.TM_CCORR_NORMED)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
left_top_left = max_loc
left = (left_top_left[0], left_top_left[1] + h//2)
#GUI.imShow(kernel_l)
h = kernel_t.shape[0]
w = kernel_t.shape[1]
#top
res = cv2.matchTemplate(IM_ROI,kernel_t,cv2.TM_CCORR_NORMED)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
top_top_left = max_loc
top = (top_top_left[0] + w//2, top_top_left[1])
#GUI.imShow(kernel_t)
#GUI.imShow(res)
#print(max_loc)
#bottom
res = cv2.matchTemplate(IM_ROI,kernel_b,cv2.TM_CCORR_NORMED)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
bottom_top_left = max_loc
bottom = (bottom_top_left[0] + w//2, bottom_top_left[1] + h)
#GUI.imShow(kernel_b)
return top_top_left,bottom_top_left,left_top_left,right_top_left,top,bottom,left,right
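# --- Illustrative usage sketch (not part of the original file). The image
# path and overall flow are assumptions; it simply chains the methods above.
if __name__ == '__main__':
    detector = Dartboard_Detector()
    frame = cv2.imread('dartboard.jpg')  # hypothetical input image
    if frame is not None:
        frame = detector.scaleROI(frame)  # normalize to 1000x1000
        x, y, w, h, board_mask, green, red = detector.detectDartboard(frame)
        print('board bounding box:', x, y, w, h)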
| [
"cv2.imread",
"numpy.ones",
"cv2.threshold",
"cv2.inRange",
"cv2.arcLength",
"cv2.minMaxLoc",
"numpy.array",
"cv2.morphologyEx",
"numpy.zeros",
"cv2.cvtColor",
"cv2.matchTemplate",
"cv2.resize",
"cv2.subtract",
"cv2.getStructuringElement",
"cv2.blur",
"cv2.boundingRect"
] | [((1676, 1722), 'cv2.resize', 'cv2.resize', (['IM', '(new_x, new_y)', 'cv2.INTER_AREA'], {}), '(IM, (new_x, new_y), cv2.INTER_AREA)\n', (1686, 1722), False, 'import cv2\n'), ((2009, 2063), 'cv2.blur', 'cv2.blur', (['IM', "Dartboard_Detector.ENV['DETECTION_BLUR']"], {}), "(IM, Dartboard_Detector.ENV['DETECTION_BLUR'])\n", (2017, 2063), False, 'import cv2\n'), ((2112, 2152), 'cv2.cvtColor', 'cv2.cvtColor', (['IM_blur', 'cv2.COLOR_BGR2HSV'], {}), '(IM_blur, cv2.COLOR_BGR2HSV)\n', (2124, 2152), False, 'import cv2\n'), ((2377, 2424), 'numpy.array', 'np.array', (['[green_thres_low, 100, 100]', 'np.uint8'], {}), '([green_thres_low, 100, 100], np.uint8)\n', (2385, 2424), True, 'import numpy as np\n'), ((2444, 2492), 'numpy.array', 'np.array', (['[green_thres_high, 255, 255]', 'np.uint8'], {}), '([green_thres_high, 255, 255], np.uint8)\n', (2452, 2492), True, 'import numpy as np\n'), ((2523, 2572), 'cv2.inRange', 'cv2.inRange', (['base_frame_hsv', 'green_min', 'green_max'], {}), '(base_frame_hsv, green_min, green_max)\n', (2534, 2572), False, 'import cv2\n'), ((2784, 2829), 'numpy.array', 'np.array', (['[red_thres_low, 100, 100]', 'np.uint8'], {}), '([red_thres_low, 100, 100], np.uint8)\n', (2792, 2829), True, 'import numpy as np\n'), ((2847, 2893), 'numpy.array', 'np.array', (['[red_thres_high, 255, 255]', 'np.uint8'], {}), '([red_thres_high, 255, 255], np.uint8)\n', (2855, 2893), True, 'import numpy as np\n'), ((2922, 2967), 'cv2.inRange', 'cv2.inRange', (['base_frame_hsv', 'red_min', 'red_max'], {}), '(base_frame_hsv, red_min, red_max)\n', (2933, 2967), False, 'import cv2\n'), ((3078, 3152), 'numpy.ones', 'np.ones', (["Dartboard_Detector.ENV['DETECTION_STRUCTURING_ELEMENT']", 'np.uint8'], {}), "(Dartboard_Detector.ENV['DETECTION_STRUCTURING_ELEMENT'], np.uint8)\n", (3085, 3152), True, 'import numpy as np\n'), ((3170, 3221), 'cv2.morphologyEx', 'cv2.morphologyEx', (['combined', 'cv2.MORPH_CLOSE', 'kernel'], {}), '(combined, cv2.MORPH_CLOSE, kernel)\n', (3186, 3221), False, 'import cv2\n'), ((3310, 3462), 'cv2.threshold', 'cv2.threshold', (['combined', "Dartboard_Detector.ENV['DETECTION_BINARY_THRESHOLD_MIN']", "Dartboard_Detector.ENV['DETECTION_BINARY_THRESHOLD_MAX']", '(0)'], {}), "(combined, Dartboard_Detector.ENV[\n 'DETECTION_BINARY_THRESHOLD_MIN'], Dartboard_Detector.ENV[\n 'DETECTION_BINARY_THRESHOLD_MAX'], 0)\n", (3323, 3462), False, 'import cv2\n'), ((3809, 3844), 'cv2.boundingRect', 'cv2.boundingRect', (['contours[max_idx]'], {}), '(contours[max_idx])\n', (3825, 3844), False, 'import cv2\n'), ((4227, 4322), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_ELLIPSE', "Dartboard_Detector.ENV['ORIENTATION_KERNEL']"], {}), "(cv2.MORPH_ELLIPSE, Dartboard_Detector.ENV[\n 'ORIENTATION_KERNEL'])\n", (4252, 4322), False, 'import cv2\n'), ((4362, 4422), 'cv2.blur', 'cv2.blur', (['IM_ROI', "Dartboard_Detector.ENV['ORIENTATION_BLUR']"], {}), "(IM_ROI, Dartboard_Detector.ENV['ORIENTATION_BLUR'])\n", (4370, 4422), False, 'import cv2\n'), ((4467, 4511), 'cv2.cvtColor', 'cv2.cvtColor', (['IM_ROI_blur', 'cv2.COLOR_BGR2HSV'], {}), '(IM_ROI_blur, cv2.COLOR_BGR2HSV)\n', (4479, 4511), False, 'import cv2\n'), ((4719, 4767), 'numpy.array', 'np.array', (['[purple_thres_low, 100, 100]', 'np.uint8'], {}), '([purple_thres_low, 100, 100], np.uint8)\n', (4727, 4767), True, 'import numpy as np\n'), ((4788, 4837), 'numpy.array', 'np.array', (['[purple_thres_high, 255, 255]', 'np.uint8'], {}), '([purple_thres_high, 255, 255], np.uint8)\n', (4796, 4837), True, 'import numpy as np\n'), ((4865, 
4912), 'cv2.inRange', 'cv2.inRange', (['IM_ROI_HSV', 'purple_min', 'purple_max'], {}), '(IM_ROI_HSV, purple_min, purple_max)\n', (4876, 4912), False, 'import cv2\n'), ((4955, 5000), 'cv2.subtract', 'cv2.subtract', (['frame_thres_color', 'IM_ROI_board'], {}), '(frame_thres_color, IM_ROI_board)\n', (4967, 5000), False, 'import cv2\n'), ((5035, 5095), 'cv2.morphologyEx', 'cv2.morphologyEx', (['frame_thres_color', 'cv2.MORPH_CLOSE', 'kernel'], {}), '(frame_thres_color, cv2.MORPH_CLOSE, kernel)\n', (5051, 5095), False, 'import cv2\n'), ((6141, 6200), 'cv2.imread', 'cv2.imread', (["(base_dir + self.ENV['ORIENTATION_TEMPLATES'][2])"], {}), "(base_dir + self.ENV['ORIENTATION_TEMPLATES'][2])\n", (6151, 6200), False, 'import cv2\n'), ((6220, 6279), 'cv2.imread', 'cv2.imread', (["(base_dir + self.ENV['ORIENTATION_TEMPLATES'][3])"], {}), "(base_dir + self.ENV['ORIENTATION_TEMPLATES'][3])\n", (6230, 6279), False, 'import cv2\n'), ((6299, 6358), 'cv2.imread', 'cv2.imread', (["(base_dir + self.ENV['ORIENTATION_TEMPLATES'][0])"], {}), "(base_dir + self.ENV['ORIENTATION_TEMPLATES'][0])\n", (6309, 6358), False, 'import cv2\n'), ((6378, 6437), 'cv2.imread', 'cv2.imread', (["(base_dir + self.ENV['ORIENTATION_TEMPLATES'][1])"], {}), "(base_dir + self.ENV['ORIENTATION_TEMPLATES'][1])\n", (6388, 6437), False, 'import cv2\n'), ((6535, 6591), 'cv2.matchTemplate', 'cv2.matchTemplate', (['IM_ROI', 'kernel_r', 'cv2.TM_CCORR_NORMED'], {}), '(IM_ROI, kernel_r, cv2.TM_CCORR_NORMED)\n', (6552, 6591), False, 'import cv2\n'), ((6635, 6653), 'cv2.minMaxLoc', 'cv2.minMaxLoc', (['res'], {}), '(res)\n', (6648, 6653), False, 'import cv2\n'), ((6821, 6877), 'cv2.matchTemplate', 'cv2.matchTemplate', (['IM_ROI', 'kernel_l', 'cv2.TM_CCORR_NORMED'], {}), '(IM_ROI, kernel_l, cv2.TM_CCORR_NORMED)\n', (6838, 6877), False, 'import cv2\n'), ((6921, 6939), 'cv2.minMaxLoc', 'cv2.minMaxLoc', (['res'], {}), '(res)\n', (6934, 6939), False, 'import cv2\n'), ((7157, 7213), 'cv2.matchTemplate', 'cv2.matchTemplate', (['IM_ROI', 'kernel_t', 'cv2.TM_CCORR_NORMED'], {}), '(IM_ROI, kernel_t, cv2.TM_CCORR_NORMED)\n', (7174, 7213), False, 'import cv2\n'), ((7257, 7275), 'cv2.minMaxLoc', 'cv2.minMaxLoc', (['res'], {}), '(res)\n', (7270, 7275), False, 'import cv2\n'), ((7473, 7529), 'cv2.matchTemplate', 'cv2.matchTemplate', (['IM_ROI', 'kernel_b', 'cv2.TM_CCORR_NORMED'], {}), '(IM_ROI, kernel_b, cv2.TM_CCORR_NORMED)\n', (7490, 7529), False, 'import cv2\n'), ((7573, 7591), 'cv2.minMaxLoc', 'cv2.minMaxLoc', (['res'], {}), '(res)\n', (7586, 7591), False, 'import cv2\n'), ((1008, 1108), 'numpy.zeros', 'np.zeros', (["(self.ENV['DARTBOARD_SHAPE'][0], self.ENV['DARTBOARD_SHAPE'][1], IM.shape[2])", '"""uint8"""'], {}), "((self.ENV['DARTBOARD_SHAPE'][0], self.ENV['DARTBOARD_SHAPE'][1],\n IM.shape[2]), 'uint8')\n", (1016, 1108), True, 'import numpy as np\n'), ((1140, 1227), 'numpy.zeros', 'np.zeros', (["(self.ENV['DARTBOARD_SHAPE'][0], self.ENV['DARTBOARD_SHAPE'][1])", '"""uint8"""'], {}), "((self.ENV['DARTBOARD_SHAPE'][0], self.ENV['DARTBOARD_SHAPE'][1]),\n 'uint8')\n", (1148, 1227), True, 'import numpy as np\n'), ((3661, 3693), 'cv2.arcLength', 'cv2.arcLength', (['contours[i]', '(True)'], {}), '(contours[i], True)\n', (3674, 3693), False, 'import cv2\n'), ((5375, 5407), 'cv2.arcLength', 'cv2.arcLength', (['contours[i]', '(True)'], {}), '(contours[i], True)\n', (5388, 5407), False, 'import cv2\n')] |
import json
import os
from aws import aws_current_account, aws_discover_org_accounts, current_region
from carve import get_deploy_key, load_graph, unique_node_values
def lambda_handler(event, context):
'''
    Prepare to clean up carve-managed stacks in all accounts that are no longer used by the deployed graph
- get a list of all accounts
- create a list of stacks to protect that are used by the deployed graph
- return the list to the step function
'''
deploy_key = get_deploy_key()
G = load_graph(deploy_key, local=False)
# remove external beacons from the graph
external = [node for node in G.nodes() if G.nodes().data()[node]['Type'] == 'external']
G.remove_nodes_from(external)
print(f'cleaning up after graph deploy: {deploy_key}')
accounts = aws_discover_org_accounts()
# create a list for carve stacks to not delete
safe_stacks = []
# add the s3 bucket stacks for active regions to safe stacks
deploy_region_list = set(sorted(unique_node_values(G, 'Region')))
deploy_region_list.add(current_region)
for region in deploy_region_list:
s3_stack = f"{os.environ['Prefix']}carve-managed-bucket-{region}"
safe_stacks.append({
'StackName': s3_stack,
'Account': context.invoked_function_arn.split(":")[4],
'Region': region
})
# add all VPC stacks in the graph to safe stacks
vpcs = []
for subnet in list(G.nodes):
vpc = G.nodes().data()[subnet]['VpcId']
if vpc not in vpcs:
vpcs.append(vpc)
safe_stacks.append({
'StackName': f"{os.environ['Prefix']}carve-managed-beacons-{vpc}",
'Account': G.nodes().data()[subnet]['Account'],
'Region': G.nodes().data()[subnet]['Region']
})
# add all private link stacks from the current account to safe stacks
for region in sorted(unique_node_values(G, 'Region')):
safe_stacks.append({
'StackName': f"{os.environ['Prefix']}carve-managed-privatelink-{region}",
'Account': aws_current_account(),
'Region': region
})
print(f'all safe stacks: {safe_stacks}')
# create discovery list of all accounts for step function
discover_stacks = []
for account_id, account_name in accounts.items():
cleanup = {}
cleanup['Account'] = account_id
cleanup['SafeStacks'] = []
for stack in safe_stacks:
if stack['Account'] == account_id:
# cleanup['SafeStacks'] = safe_stacks
cleanup['SafeStacks'].append(stack['StackName'])
discover_stacks.append(cleanup)
# returns to a step function iterator
# return json.dumps(discover_stacks, default=str)
return discover_stacks
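# --- Illustrative local smoke test (not part of the original module). The fake
# context/ARN below are assumptions; real invocations come from the step function.
if __name__ == '__main__':
    class _FakeContext:
        invoked_function_arn = 'arn:aws:lambda:us-east-1:123456789012:function:carve-cleanup'
    print(lambda_handler({}, _FakeContext()))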
| [
"carve.get_deploy_key",
"carve.load_graph",
"carve.unique_node_values",
"aws.aws_discover_org_accounts",
"aws.aws_current_account"
] | [((481, 497), 'carve.get_deploy_key', 'get_deploy_key', ([], {}), '()\n', (495, 497), False, 'from carve import get_deploy_key, load_graph, unique_node_values\n'), ((506, 541), 'carve.load_graph', 'load_graph', (['deploy_key'], {'local': '(False)'}), '(deploy_key, local=False)\n', (516, 541), False, 'from carve import get_deploy_key, load_graph, unique_node_values\n'), ((790, 817), 'aws.aws_discover_org_accounts', 'aws_discover_org_accounts', ([], {}), '()\n', (815, 817), False, 'from aws import aws_current_account, aws_discover_org_accounts, current_region\n'), ((1923, 1954), 'carve.unique_node_values', 'unique_node_values', (['G', '"""Region"""'], {}), "(G, 'Region')\n", (1941, 1954), False, 'from carve import get_deploy_key, load_graph, unique_node_values\n'), ((993, 1024), 'carve.unique_node_values', 'unique_node_values', (['G', '"""Region"""'], {}), "(G, 'Region')\n", (1011, 1024), False, 'from carve import get_deploy_key, load_graph, unique_node_values\n'), ((2095, 2116), 'aws.aws_current_account', 'aws_current_account', ([], {}), '()\n', (2114, 2116), False, 'from aws import aws_current_account, aws_discover_org_accounts, current_region\n')] |
import logging
from requests.exceptions import HTTPError
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from api.serializers import (SchemaLedgerSerializer,
TransformationLedgerSerializer)
from core.models import SchemaLedger, TransformationLedger
logger = logging.getLogger('dict_config_logger')
@api_view(['GET'])
def schemaledger_requests(request):
"""Handles fetching and returning requested schemas"""
# all requests must provide the schema name
messages = []
name = request.GET.get('name')
version = request.GET.get('version')
errorMsg = {
"message": messages
}
if not name:
messages.append("Error; query parameter 'name' is required")
if len(messages) == 0:
# look for a model with the provided name
querySet = SchemaLedger.objects.all()\
.filter(schema_name=name)
if not querySet:
messages.append("Error; no schema found with the name '" +
name + "'")
errorMsg = {
"message": messages
}
return Response(errorMsg, status.HTTP_400_BAD_REQUEST)
# if the schema name is found, filter for the version. If no version is
# provided, we fetch the latest version
if not version:
querySet = querySet.order_by('-major_version', '-minor_version',
'-patch_version')
else:
querySet = querySet.filter(version=version)
if not querySet:
messages.append("Error; no schema found for version '" +
version + "'")
errorMsg = {
"message": messages
}
return Response(errorMsg, status.HTTP_400_BAD_REQUEST)
try:
serializer_class = SchemaLedgerSerializer(querySet.first())
logger.info(querySet.first().metadata)
# only way messages gets sent is if there was an error serializing
# or in the response process.
messages.append("Error fetching records please check the logs.")
except HTTPError as http_err:
logger.error(http_err)
return Response(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)
except Exception as err:
logger.error(err)
return Response(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)
else:
return Response(serializer_class.data, status.HTTP_200_OK)
else:
logger.error(messages)
return Response(errorMsg, status.HTTP_400_BAD_REQUEST)
@api_view(['GET'])
def transformationledger_requests(request):
"""Handles fetching and returning requested schema mappings"""
# all requests must provide the source and target schema names and versions
messages = []
source_name = request.GET.get('sourceName')
target_name = request.GET.get('targetName')
source_version = request.GET.get('sourceVersion')
target_version = request.GET.get('targetVersion')
errorMsg = {
"message": messages
}
if not source_name:
messages.append("Error; query parameter 'sourceName' is required")
if not source_version:
messages.append("Error; query parameter 'sourceVersion' is required")
if not target_name:
messages.append("Error; query parameter 'targetName' is required")
if not target_version:
messages.append("Error; query parameter 'targetVersion' is required")
if len(messages) == 0:
# look for a model with the provided name
querySet = TransformationLedger.objects.all()\
.filter(source_schema_name=source_name,
target_schema_name=target_name,
source_schema_version=source_version,
target_schema_version=target_version)
if not querySet:
messages.append("Error; no schema mapping found with the "
"sourceName '" + source_name + "', targetName '" +
target_name + "', sourceVersion '" +
source_version + "', targetVersion '" +
target_version + "'.")
errorMsg = {
"message": messages
}
return Response(errorMsg, status.HTTP_400_BAD_REQUEST)
try:
serializer_class = TransformationLedgerSerializer(querySet.first())
messages.append("Error fetching records please check the logs.")
except HTTPError as http_err:
logger.error(http_err)
return Response(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)
except Exception as err:
logger.error(err)
return Response(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)
else:
return Response(serializer_class.data, status.HTTP_200_OK)
else:
logger.error(messages)
return Response(errorMsg, status.HTTP_400_BAD_REQUEST)
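# --- Illustrative sketch (not part of the original module): exercising the
# schema endpoint with DRF's APIRequestFactory. The URL path and parameter
# values are assumptions.
def _example_schemaledger_call():
    from rest_framework.test import APIRequestFactory
    factory = APIRequestFactory()
    request = factory.get('/schemaledger/', {'name': 'example', 'version': '1.0.0'})
    return schemaledger_requests(request)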
| [
"logging.getLogger",
"core.models.SchemaLedger.objects.all",
"rest_framework.response.Response",
"core.models.TransformationLedger.objects.all",
"rest_framework.decorators.api_view"
] | [((368, 407), 'logging.getLogger', 'logging.getLogger', (['"""dict_config_logger"""'], {}), "('dict_config_logger')\n", (385, 407), False, 'import logging\n'), ((411, 428), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (419, 428), False, 'from rest_framework.decorators import api_view\n'), ((2702, 2719), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (2710, 2719), False, 'from rest_framework.decorators import api_view\n'), ((2651, 2698), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_400_BAD_REQUEST'], {}), '(errorMsg, status.HTTP_400_BAD_REQUEST)\n', (2659, 2698), False, 'from rest_framework.response import Response\n'), ((5053, 5100), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_400_BAD_REQUEST'], {}), '(errorMsg, status.HTTP_400_BAD_REQUEST)\n', (5061, 5100), False, 'from rest_framework.response import Response\n'), ((1198, 1245), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_400_BAD_REQUEST'], {}), '(errorMsg, status.HTTP_400_BAD_REQUEST)\n', (1206, 1245), False, 'from rest_framework.response import Response\n'), ((1837, 1884), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_400_BAD_REQUEST'], {}), '(errorMsg, status.HTTP_400_BAD_REQUEST)\n', (1845, 1884), False, 'from rest_framework.response import Response\n'), ((2543, 2594), 'rest_framework.response.Response', 'Response', (['serializer_class.data', 'status.HTTP_200_OK'], {}), '(serializer_class.data, status.HTTP_200_OK)\n', (2551, 2594), False, 'from rest_framework.response import Response\n'), ((4403, 4450), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_400_BAD_REQUEST'], {}), '(errorMsg, status.HTTP_400_BAD_REQUEST)\n', (4411, 4450), False, 'from rest_framework.response import Response\n'), ((4945, 4996), 'rest_framework.response.Response', 'Response', (['serializer_class.data', 'status.HTTP_200_OK'], {}), '(serializer_class.data, status.HTTP_200_OK)\n', (4953, 4996), False, 'from rest_framework.response import Response\n'), ((901, 927), 'core.models.SchemaLedger.objects.all', 'SchemaLedger.objects.all', ([], {}), '()\n', (925, 927), False, 'from core.models import SchemaLedger, TransformationLedger\n'), ((2312, 2369), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_500_INTERNAL_SERVER_ERROR'], {}), '(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)\n', (2320, 2369), False, 'from rest_framework.response import Response\n'), ((2452, 2509), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_500_INTERNAL_SERVER_ERROR'], {}), '(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)\n', (2460, 2509), False, 'from rest_framework.response import Response\n'), ((3693, 3727), 'core.models.TransformationLedger.objects.all', 'TransformationLedger.objects.all', ([], {}), '()\n', (3725, 3727), False, 'from core.models import SchemaLedger, TransformationLedger\n'), ((4714, 4771), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_500_INTERNAL_SERVER_ERROR'], {}), '(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)\n', (4722, 4771), False, 'from rest_framework.response import Response\n'), ((4854, 4911), 'rest_framework.response.Response', 'Response', (['errorMsg', 'status.HTTP_500_INTERNAL_SERVER_ERROR'], {}), '(errorMsg, status.HTTP_500_INTERNAL_SERVER_ERROR)\n', (4862, 4911), False, 'from rest_framework.response import Response\n')] |
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''This file contains utility functions used by both the test suite and the
single test executor.'''
from __future__ import absolute_import
import os
import importlib
import sys
def load_py_module(path):
'''Load a python file from disk.
Args:
path: String path to python file.
Returns:
python module if success, None otherwise.
'''
assert isinstance(path, str)
try:
if not os.path.exists(path):
print('Path does not exist: ' + path)
return None
path = os.path.abspath(path)
module_dir, module_file = os.path.split(path)
module_name, _ = os.path.splitext(module_file)
# adjust sys.path, runtime counterpart of PYTHONPATH, to temporarily
# include the folder containing the user configuration module
sys.path.append(module_dir)
module_obj = importlib.import_module(module_name)
        sys.path.pop()  # remove the directory we appended above; pop(0) removed the wrong entry
return module_obj
except ImportError as err:
print(str(err))
print("Looking in directory ")
print(module_dir)
return None
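# Illustrative usage (not part of the original file): load this very file as a
# module and show what was imported.
if __name__ == '__main__':
    loaded = load_py_module(os.path.abspath(__file__))
    print(loaded)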
| [
"os.path.exists",
"importlib.import_module",
"os.path.splitext",
"os.path.split",
"sys.path.pop",
"os.path.abspath",
"sys.path.append"
] | [((1139, 1160), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (1154, 1160), False, 'import os\n'), ((1195, 1214), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (1208, 1214), False, 'import os\n'), ((1240, 1269), 'os.path.splitext', 'os.path.splitext', (['module_file'], {}), '(module_file)\n', (1256, 1269), False, 'import os\n'), ((1425, 1452), 'sys.path.append', 'sys.path.append', (['module_dir'], {}), '(module_dir)\n', (1440, 1452), False, 'import sys\n'), ((1474, 1510), 'importlib.import_module', 'importlib.import_module', (['module_name'], {}), '(module_name)\n', (1497, 1510), False, 'import importlib\n'), ((1519, 1534), 'sys.path.pop', 'sys.path.pop', (['(0)'], {}), '(0)\n', (1531, 1534), False, 'import sys\n'), ((1028, 1048), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1042, 1048), False, 'import os\n')] |
import logging
from dataclasses import dataclass, replace
from typing import Tuple, Any, Optional
import numpy as np
from numpy import ndarray
logger = logging.getLogger(__name__)
@dataclass
class COOData:
indices: ndarray
data: ndarray
shape: Tuple[int, ...]
local_shape: Optional[Tuple[int, ...]]
@staticmethod
def _assemble_scipy_csr(
indices: ndarray,
data: ndarray,
shape: Tuple[int, ...],
local_shape: Optional[Tuple[int, ...]]
):
from scipy.sparse import coo_matrix
K = coo_matrix((data, (indices[0], indices[1])), shape=shape)
K.eliminate_zeros()
return K.tocsr()
def __radd__(self, other):
return self.__add__(other)
def tolocal(self, basis=None):
"""Return an array of local finite element matrices.
Parameters
----------
basis
Optionally, sum local facet matrices to form elemental matrices if
the corresponding :class:`skfem.assembly.FacetBasis` is provided.
"""
if self.local_shape is None:
raise NotImplementedError("Cannot build local matrices if "
"local_shape is not specified.")
assert len(self.local_shape) == 2
local = np.moveaxis(self.data.reshape(self.local_shape + (-1,),
order='C'), -1, 0)
if basis is not None:
out = np.zeros((basis.mesh.nfacets,) + local.shape[1:])
out[basis.find] = local
local = np.sum(out[basis.mesh.t2f], axis=0)
return local
def fromlocal(self, local):
"""Reverse of :meth:`COOData.tolocal`."""
return replace(
self,
data=np.moveaxis(local, 0, -1).flatten('C'),
)
def inverse(self):
"""Invert each elemental matrix."""
return self.fromlocal(np.linalg.inv(self.tolocal()))
def __add__(self, other):
if isinstance(other, int):
return self
return replace(
self,
indices=np.hstack((self.indices, other.indices)),
data=np.hstack((self.data, other.data)),
shape=tuple(max(self.shape[i],
other.shape[i]) for i in range(len(self.shape))),
local_shape=None,
)
def tocsr(self):
"""Return a sparse SciPy CSR matrix."""
return self._assemble_scipy_csr(
self.indices,
self.data,
self.shape,
self.local_shape,
)
def toarray(self) -> ndarray:
"""Return a dense numpy array."""
if len(self.shape) == 1:
from scipy.sparse import coo_matrix
return coo_matrix(
(self.data, (self.indices[0], np.zeros_like(self.indices[0]))),
shape=self.shape + (1,),
).toarray().T[0]
elif len(self.shape) == 2:
return self.tocsr().toarray()
# slow implementation for testing N-tensors
out = np.zeros(self.shape)
for itr in range(self.indices.shape[1]):
out[tuple(self.indices[:, itr])] += self.data[itr]
return out
def astuple(self):
return self.indices, self.data, self.shape
def todefault(self) -> Any:
"""Return the default data type.
Scalar for 0-tensor, numpy array for 1-tensor, scipy csr matrix for
2-tensor, self otherwise.
"""
if len(self.shape) == 0:
return np.sum(self.data, axis=0)
elif len(self.shape) == 1:
return self.toarray()
elif len(self.shape) == 2:
return self.tocsr()
return self
def dot(self, x, D=None):
"""Matrix-vector product.
Parameters
----------
x
The vector to multiply with.
D
Optionally, keep some DOFs unchanged. An array of DOF indices.
"""
y = self.data * x[self.indices[1]]
z = np.zeros_like(x)
np.add.at(z, self.indices[0], y)
if D is not None:
z[D] = x[D]
return z
def solve(self, b, D=None, tol=1e-10, maxiters=500):
"""Solve linear system using the conjugate gradient method.
Parameters
----------
b
The right-hand side vector.
D
An optional array of Dirichlet DOF indices for which the fixed
value is taken from ``b``.
tol
A tolerance for terminating the conjugate gradient method.
maxiters
The maximum number of iterations before forced termination.
"""
x = b
r = b - self.dot(x, D=D)
p = r
rsold = np.dot(r, r)
for k in range(maxiters):
Ap = self.dot(p, D=D)
alpha = rsold / np.dot(p, Ap)
x = x + alpha * p
r = r - alpha * Ap
rsnew = np.dot(r, r)
if np.sqrt(rsnew) < tol:
break
p = r + (rsnew / rsold) * p
rsold = rsnew
        if k == maxiters - 1:  # loop index is 0-based, so this is the final iteration
logger.warning("Iterative solver did not converge.")
return x
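if __name__ == "__main__":
    # Illustrative usage sketch (not part of the module): assemble the diagonal
    # system diag(2, 3) in COO form and exercise dot() and solve().
    A = COOData(indices=np.array([[0, 1], [0, 1]]),
                data=np.array([2.0, 3.0]),
                shape=(2, 2),
                local_shape=None)
    print(A.dot(np.array([2.0, 6.0])))    # -> [ 4. 18.]
    print(A.solve(np.array([2.0, 6.0])))  # CG solution, approx [1. 2.]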
| [
"logging.getLogger",
"numpy.sqrt",
"numpy.hstack",
"numpy.sum",
"numpy.zeros",
"numpy.dot",
"numpy.moveaxis",
"scipy.sparse.coo_matrix",
"numpy.add.at",
"numpy.zeros_like"
] | [((156, 183), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (173, 183), False, 'import logging\n'), ((577, 634), 'scipy.sparse.coo_matrix', 'coo_matrix', (['(data, (indices[0], indices[1]))'], {'shape': 'shape'}), '((data, (indices[0], indices[1])), shape=shape)\n', (587, 634), False, 'from scipy.sparse import coo_matrix\n'), ((3085, 3105), 'numpy.zeros', 'np.zeros', (['self.shape'], {}), '(self.shape)\n', (3093, 3105), True, 'import numpy as np\n'), ((4053, 4069), 'numpy.zeros_like', 'np.zeros_like', (['x'], {}), '(x)\n', (4066, 4069), True, 'import numpy as np\n'), ((4078, 4110), 'numpy.add.at', 'np.add.at', (['z', 'self.indices[0]', 'y'], {}), '(z, self.indices[0], y)\n', (4087, 4110), True, 'import numpy as np\n'), ((4779, 4791), 'numpy.dot', 'np.dot', (['r', 'r'], {}), '(r, r)\n', (4785, 4791), True, 'import numpy as np\n'), ((1483, 1532), 'numpy.zeros', 'np.zeros', (['((basis.mesh.nfacets,) + local.shape[1:])'], {}), '((basis.mesh.nfacets,) + local.shape[1:])\n', (1491, 1532), True, 'import numpy as np\n'), ((1589, 1624), 'numpy.sum', 'np.sum', (['out[basis.mesh.t2f]'], {'axis': '(0)'}), '(out[basis.mesh.t2f], axis=0)\n', (1595, 1624), True, 'import numpy as np\n'), ((3562, 3587), 'numpy.sum', 'np.sum', (['self.data'], {'axis': '(0)'}), '(self.data, axis=0)\n', (3568, 3587), True, 'import numpy as np\n'), ((4983, 4995), 'numpy.dot', 'np.dot', (['r', 'r'], {}), '(r, r)\n', (4989, 4995), True, 'import numpy as np\n'), ((2122, 2162), 'numpy.hstack', 'np.hstack', (['(self.indices, other.indices)'], {}), '((self.indices, other.indices))\n', (2131, 2162), True, 'import numpy as np\n'), ((2181, 2215), 'numpy.hstack', 'np.hstack', (['(self.data, other.data)'], {}), '((self.data, other.data))\n', (2190, 2215), True, 'import numpy as np\n'), ((4888, 4901), 'numpy.dot', 'np.dot', (['p', 'Ap'], {}), '(p, Ap)\n', (4894, 4901), True, 'import numpy as np\n'), ((5011, 5025), 'numpy.sqrt', 'np.sqrt', (['rsnew'], {}), '(rsnew)\n', (5018, 5025), True, 'import numpy as np\n'), ((1789, 1814), 'numpy.moveaxis', 'np.moveaxis', (['local', '(0)', '(-1)'], {}), '(local, 0, -1)\n', (1800, 1814), True, 'import numpy as np\n'), ((2837, 2867), 'numpy.zeros_like', 'np.zeros_like', (['self.indices[0]'], {}), '(self.indices[0])\n', (2850, 2867), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# coding: utf-8
# # Pneumonia Diagnosis
# The task is to predict whether a person has pneumonia or not from a chest X-ray.
#
# We will train a Convolutional Neural Network (CNN) that detects whether a patient has pneumonia, both bacterial and viral, based on an X-ray image of their chest. Each patient is classified as either having pneumonia or not, so this is a binary classification problem.
# **Credits**: Kaggle (https://www.kaggle.com/paultimothymooney/chest-xray-pneumonia)
# First, we will create a CNN from scratch and check the test accuracy. And then, we will use transfer learning (using a DenseNet-169 pre-trained model) to create a CNN that will greatly improve the test accuracy.
# In[1]:
get_ipython().system(' wget --load-cookies /tmp/cookies.txt "https://docs.google.com/uc?export=download&confirm=$(wget --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate \'https://docs.google.com/uc?export=download&id=1li6ctqAvGFgIGMSt-mYrLoM_tbYkzqdO\' -O- | sed -rn \'s/.*confirm=([0-9A-Za-z_]+).*/\\1\\n/p\')&id=1li6ctqAvGFgIGMSt-mYrLoM_tbYkzqdO" -O chest_xray.zip && rm -rf /tmp/cookies.txt')
# In[2]:
get_ipython().system('unzip -qq chest_xray.zip')
# In[3]:
# importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from collections import Counter
from datetime import datetime
import torch
import torch.nn.functional as F
from torchvision import transforms, datasets, models
from torch import nn, optim
# In[4]:
# specify the data directory path
data_dir = './chest_xray'
train_dir = data_dir + '/train'
valid_dir = data_dir + '/val'
test_dir = data_dir + '/test'
# In[5]:
# check if CUDA support is available
use_cuda = torch.cuda.is_available()
print('Cuda support available? - {}'.format(use_cuda))
# In[6]:
# normalization supported by transfer learning models
normalize = transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
# transform the data
train_transforms = transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize
])
valid_transforms = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize
])
test_transforms = transforms.Compose([
transforms.Resize(size=(224, 224)),
transforms.ToTensor(),
normalize
])
# I have applied RandomResizedCrop and RandomHorizontalFlip to the training data. These image augmentation techniques effectively enlarge the training set with randomly resized and flipped variants, which improves the model's performance and helps prevent overfitting. For the validation data, I have only applied the Resize and CenterCrop transformations, and for the test data only an image resize. A quick sanity check of the pipeline follows below.
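# In[ ]:
# (Added, illustrative) sanity-check the training pipeline output: a synthetic
# RGB image run through train_transforms yields a 3 x 224 x 224 tensor.
from PIL import Image
_probe = Image.new('RGB', (1024, 1024))
print(train_transforms(_probe).shape)  # -> torch.Size([3, 224, 224])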
# In[7]:
# specify the image folders
train_data = datasets.ImageFolder(train_dir, transform=train_transforms)
valid_data = datasets.ImageFolder(valid_dir, transform=valid_transforms)
test_data = datasets.ImageFolder(test_dir, transform=test_transforms)
# In[8]:
batch_size = 32 # samples per batch
num_workers = 0 # number of subprocesses
# data loaders
trainloader = torch.utils.data.DataLoader(train_data, batch_size=batch_size,
shuffle=True, num_workers=num_workers)
validloader = torch.utils.data.DataLoader(valid_data, batch_size=batch_size,
shuffle=True, num_workers=num_workers)
testloader = torch.utils.data.DataLoader(test_data, batch_size=batch_size,
num_workers=num_workers)
# In[48]:
# select a batch from training data
images, labels = next(iter(trainloader))
# In[10]:
# shape of an image
images[0].shape # rgb image of 224 x 224
# In[11]:
# output classes
print(train_data.classes)
print(train_data.class_to_idx)
# We will now have a look at the distribution of samples in the training, validation and testing dataset.
# In[12]:
# distribution of train dataset
cnt = Counter()
for target in train_data.targets:
cnt[target] += 1
normal_count = cnt[0]
pneumonia_count = cnt[1]
sns.barplot(x=['Pneumonia Cases', 'Normal Cases'],
y=[pneumonia_count, normal_count], palette='magma')
plt.title('Train Dataset Label Count')
plt.show()
pneumonia_count, normal_count
# In[13]:
# distribution of validation dataset
cnt = Counter()
for target in valid_data.targets:
cnt[target] += 1
normal_count = cnt[0]
pneumonia_count = cnt[1]
sns.barplot(x=['Pneumonia Cases', 'Normal Cases'],
y=[pneumonia_count, normal_count], palette='magma')
plt.title('Validation Dataset Label Count')
plt.show()
pneumonia_count, normal_count
# In[14]:
# distribution of test dataset
cnt = Counter()
for target in test_data.targets:
cnt[target] += 1
normal_count = cnt[0]
pneumonia_count = cnt[1]
sns.barplot(x=['Pneumonia Cases', 'Normal Cases'],
y=[pneumonia_count, normal_count], palette='magma')
plt.title('Test Dataset Label Count')
plt.show()
pneumonia_count, normal_count
# We will have a look at the normal and pneumonia images of chest x-rays.
# In[51]:
num_classes = 2 # total classes of diagnosis (Normal, Pneumonia)
classes = ['NORMAL', 'PNEUMONIA']
# In[16]:
# un-normalize and display an image
def imshow(inp):
"""Imshow for Tensor."""
inp = inp.numpy().transpose((1, 2, 0))
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
inp = std * inp + mean
inp = np.clip(inp, 0, 1)
plt.imshow(inp)
# In[53]:
# plot the images in the batch, along with predicted and true labels
fig = plt.figure(figsize=(25, 8))
for idx in np.arange(10):
    ax = fig.add_subplot(2, 10//2, idx+1, xticks=[], yticks=[])
imshow(images.cpu()[idx])
ax.set_title(classes[labels[idx]])
# Let's create a CNN from scratch and check the test accuracy. Then we will try to improve the accuracy using transfer learning.
# In[18]:
# CNN architecture
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
## cnn layers
self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1)
self.conv2 = nn.Conv2d(32, 64, 3, stride=2, padding=1)
self.conv3 = nn.Conv2d(64, 128, 3, padding=1)
# max-pool
self.pool = nn.MaxPool2d(2, 2)
# fully-connected
self.fc1 = nn.Linear(7 * 7 * 128, 512)
self.fc2 = nn.Linear(512, 512)
self.fc3 = nn.Linear(512, num_classes)
# drop-out
self.dropout = nn.Dropout(0.3)
def forward(self, x):
x = F.relu(self.conv1(x))
x = self.pool(x)
x = F.relu(self.conv2(x))
x = self.pool(x)
x = F.relu(self.conv3(x))
x = self.pool(x)
# flatten the images with batch
x = x.view(-1, 7 * 7 * 128)
x = self.dropout(x)
x = F.relu(self.fc1(x))
x = self.dropout(x)
x = F.relu(self.fc2(x))
x = self.dropout(x)
x = self.fc3(x)
return x
# In[19]:
# instantiate the CNN
model_init = Net()
print(model_init)
# move tensors to GPU if CUDA is available
if use_cuda:
model_init.cuda()
# The first convolution layer will have a kernel size of 3 and stride 2, this will decrease the input image size by half. The second convolution layer will also have a kernel size of 3 and stride 2, which will decrease the input image size by half. The third convolution layer will have a kernel size of 3.
#
# I have applied the max-pooling of stride 2 after each convolution layer to reduce the image size by half. I have also applied Relu activation for each of the convolution layers.
#
# Then, I have flattened the inputs and applied a dropout layer with probability as 0.3. Three fully connected layers are applied with Relu activation and dropout 0.3 to produce the final output that will predict the classes of a chest x-ray.
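# In[ ]:
# (Added, illustrative) verify the spatial math behind fc1's 7 * 7 * 128 input:
# 224 -conv1(s2)-> 112 -pool-> 56 -conv2(s2)-> 28 -pool-> 14 -conv3-> 14 -pool-> 7.
with torch.no_grad():
    print(Net()(torch.zeros(1, 3, 224, 224)).shape)  # -> torch.Size([1, 2])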
# In[20]:
# define loss function criteria and optimizer
criterion_init = nn.CrossEntropyLoss()
optimizer_init = optim.Adam(model_init.parameters(), lr=0.03)
# Let's define a function to train the model and save the final model parameters as 'model_init.pt'
# In[21]:
def train(n_epochs, train_loader, valid_loader, model, optimizer, criterion,
use_cuda, save_path):
    valid_loss_min = np.Inf # initialize minimum validation loss to infinity
train_loss_list = []
valid_loss_list = []
for epoch in range(1, n_epochs+1):
epoch_start = datetime.utcnow()
# initialize variables to monitor training and validation loss
train_loss = 0.0
valid_loss = 0.0
# train the model
model.train()
for data, target in train_loader:
# move to GPU
if use_cuda:
data, target = data.cuda(), target.cuda()
# initialize weights to zero
optimizer.zero_grad()
# predict the output
output = model(data)
# calculate loss
loss = criterion(output, target)
            # backpropagation
loss.backward()
# update gradients
optimizer.step()
train_loss += loss.item() * data.size(0)
# validate the model
model.eval()
for data, target in valid_loader:
if use_cuda:
data, target = data.cuda(), target.cuda()
output = model(data)
loss = criterion(output, target)
            valid_loss += loss.item() * data.size(0)
# calculate average losses
train_loss = train_loss/len(train_loader.sampler)
valid_loss = valid_loss/len(valid_loader.sampler)
train_loss_list.append(train_loss)
valid_loss_list.append(valid_loss)
print('Epoch training time: {}'.format(datetime.utcnow() - epoch_start))
print('Epoch: {} \tTraining Loss: {:.6f} \tValidation Loss: {:.6f}'.format(
epoch, train_loss, valid_loss))
# save the model if validation loss has decreased
if valid_loss < valid_loss_min:
torch.save(model.state_dict(), save_path)
print('Model saved!\t\tValidation loss decreased ({:.6f} -> {:.6f})'.format(
valid_loss_min, valid_loss))
valid_loss_min = valid_loss
# plot the training and validation loss
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(range(len(train_loss_list)), train_loss_list,
label='Training Loss')
plt.plot(range(len(valid_loss_list)), valid_loss_list,
label='Validation Loss')
plt.legend(loc='upper right')
plt.xlabel('Number of epochs')
    plt.ylabel('Loss')
plt.title('Training and Validation Loss')
# return the trained model
return model
# In[22]:
# train the model
start = datetime.utcnow()
model_init = train(10, trainloader, validloader, model_init, optimizer_init,
criterion_init, use_cuda, 'model_init.pt')
print("model_init training time: {}".format(datetime.utcnow() - start))
# In[23]:
# load the model that got the best validation accuracy
model_init.load_state_dict(torch.load('model_init.pt'))
# In[24]:
def test(test_loader, model, criterion, use_cuda):
test_loss = 0.
correct = 0.
total = 0.
for data, target in test_loader:
# move to GPU
if use_cuda:
data, target = data.cuda(), target.cuda()
# predict output
output = model(data)
# calculate the loss
loss = criterion(output, target)
# update average test loss
test_loss += loss.item() * data.size(0)
# convert output probabilities to predicted class
pred = output.data.max(1, keepdim=True)[1]
# compare predictions to true label
correct += np.sum(np.squeeze(pred.eq(target.data.view_as(pred))).cpu().numpy())
total += data.size(0)
    print('Test Loss: {:.6f}\n'.format(test_loss/len(test_loader.sampler)))
print('\nTest Accuracy: %2d%% (%2d/%2d)' %
(100. * correct / total, correct, total))
# In[25]:
# test the model
test(testloader, model_init, criterion_init, use_cuda)
# In[26]:
# visualize the confusion matrix
def plot_confusion_matrix(C):
plt.figure(figsize=(20, 4))
labels = [0, 1]
cmap=sns.light_palette("blue")
plt.subplot(1, 3, 1)
sns.heatmap(C, annot=True, cmap=cmap, fmt=".0f",
xticklabels=labels, yticklabels=labels)
plt.xlabel('Predicted Class')
plt.ylabel('Original Class')
plt.title("Confusion matrix")
# In[58]:
# generate confusion matrix
confusion_matrix = torch.zeros(num_classes, num_classes)
with torch.no_grad():
for i, (inputs, labels) in enumerate(testloader):
if use_cuda:
inputs = inputs.cuda()
labels = labels.cuda()
outputs = model_init(inputs)
_, preds = torch.max(outputs, 1)
for t, p in zip(labels.view(-1), preds.view(-1)):
confusion_matrix[t.long(), p.long()] += 1
print(confusion_matrix)
# In[59]:
# get the per-class accuracy
print(confusion_matrix.diag()/confusion_matrix.sum(1))
# In[60]:
# plot the confusion matrix
plot_confusion_matrix(confusion_matrix)
# Now, we will use transfer learning to create a CNN that can diagnose pneumonia from images.
#
# We will use the DenseNet-169 model as it performs well on image classification. The main idea of this architecture is dense connectivity: shortcut connections give each layer direct access to the feature maps of all preceding layers, which eases gradient flow and helps prevent overfitting while training. I then replace the final fully connected layer so that it outputs the probabilities of the 2 classes, normal or pneumonia.
# In[30]:
# download the pretrained DenseNet-169 model
model = models.densenet169(pretrained=True)
# In[31]:
# freeze the model parameters
for param in model.parameters():
param.requires_grad = False
# In[32]:
# check the number of input and output features
model.classifier
# We will keep the number of input features same, however we will change the number of output features to 2 as we want to predict only two classes i.e. Normal and Pneumonia.
# In[33]:
# update the out_features for model
model.classifier = nn.Linear(model.classifier.in_features, num_classes)
# In[34]:
fc_parameters = model.classifier.parameters()
# In[35]:
for param in fc_parameters:
param.requires_grad = True
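# In[ ]:
# (Added, illustrative) confirm that only the classifier layer is trainable
# after freezing the DenseNet backbone above.
print('trainable params:', sum(p.numel() for p in model.parameters() if p.requires_grad))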
# In[36]:
# move model to gpu
if use_cuda:
model = model.cuda()
# In[37]:
# DenseNet-169 model architecture
model
# In[38]:
# define the loss function and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.classifier.parameters(), lr=0.001)
# We will use the same function to train the model and save the final model parameters as 'model.pt'
# In[39]:
# train the model
start = datetime.utcnow()
model = train(30, trainloader, validloader, model, optimizer,
criterion, use_cuda, 'model.pt')
print("model training time: {}".format(datetime.utcnow() - start))
# In[40]:
# load the model that got the best validation accuracy
model.load_state_dict(torch.load('model.pt'))
# In[41]:
# test the model
test(testloader, model, criterion, use_cuda)
# Let's visualize the model's predictions on a few x-ray images.
# In[42]:
dataiter = iter(testloader)
images, labels = next(dataiter)
images.numpy()
if use_cuda:
images = images.cuda()
# get sample outputs
output = model(images)
# convert output probabilities to predicted class
_, preds_tensor = torch.max(output, 1)
if use_cuda:
preds = np.squeeze(preds_tensor.cpu().numpy())
else:
preds = np.squeeze(preds_tensor.numpy())
# plot the images in the batch, along with predicted and true labels
fig = plt.figure(figsize=(25, 8))
for idx in np.arange(10):
    ax = fig.add_subplot(2, 10//2, idx+1, xticks=[], yticks=[])
imshow(images.cpu()[idx])
ax.set_title("{} ({})".format(classes[preds[idx]], classes[labels[idx]]),
color=("green" if preds[idx] == labels[idx].item() else "red"))
# In[61]:
# generate confusion matrix
confusion_matrix = torch.zeros(num_classes, num_classes)
with torch.no_grad():
for i, (inputs, labels) in enumerate(testloader):
if use_cuda:
inputs = inputs.cuda()
labels = labels.cuda()
outputs = model(inputs)
_, preds = torch.max(outputs, 1)
for t, p in zip(labels.view(-1), preds.view(-1)):
confusion_matrix[t.long(), p.long()] += 1
print(confusion_matrix)
# In[62]:
# get the per-class accuracy
print(confusion_matrix.diag()/confusion_matrix.sum(1))
# In[63]:
# plot the confusion matrix
plot_confusion_matrix(confusion_matrix)
# In[43]:
| [
"numpy.clip",
"torch.nn.Dropout",
"torch.nn.CrossEntropyLoss",
"matplotlib.pyplot.ylabel",
"torch.max",
"numpy.array",
"torch.cuda.is_available",
"numpy.arange",
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.xlabel",
"torchvision.datasets.ImageFolder",
"torchvision.transforms.ToTensor",
"torchvision.transforms.RandomResizedCrop",
"seaborn.light_palette",
"torchvision.transforms.RandomHorizontalFlip",
"seaborn.heatmap",
"torchvision.transforms.Normalize",
"torchvision.transforms.Resize",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show",
"torchvision.transforms.CenterCrop",
"datetime.datetime.utcnow",
"torch.load",
"torch.nn.Conv2d",
"collections.Counter",
"matplotlib.pyplot.figure",
"torch.nn.MaxPool2d",
"torchvision.models.densenet169",
"torch.nn.Linear",
"torch.utils.data.DataLoader",
"torch.no_grad",
"seaborn.barplot",
"matplotlib.pyplot.subplot",
"torch.zeros"
] | [((1791, 1816), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1814, 1816), False, 'import torch\n'), ((1951, 2017), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (1971, 2017), False, 'from torchvision import transforms, datasets, models\n'), ((3001, 3060), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', (['train_dir'], {'transform': 'train_transforms'}), '(train_dir, transform=train_transforms)\n', (3021, 3060), False, 'from torchvision import transforms, datasets, models\n'), ((3074, 3133), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', (['valid_dir'], {'transform': 'valid_transforms'}), '(valid_dir, transform=valid_transforms)\n', (3094, 3133), False, 'from torchvision import transforms, datasets, models\n'), ((3146, 3203), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', (['test_dir'], {'transform': 'test_transforms'}), '(test_dir, transform=test_transforms)\n', (3166, 3203), False, 'from torchvision import transforms, datasets, models\n'), ((3326, 3431), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_data'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': 'num_workers'}), '(train_data, batch_size=batch_size, shuffle=True,\n num_workers=num_workers)\n', (3353, 3431), False, 'import torch\n'), ((3485, 3590), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['valid_data'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': 'num_workers'}), '(valid_data, batch_size=batch_size, shuffle=True,\n num_workers=num_workers)\n', (3512, 3590), False, 'import torch\n'), ((3643, 3734), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_data'], {'batch_size': 'batch_size', 'num_workers': 'num_workers'}), '(test_data, batch_size=batch_size, num_workers=\n num_workers)\n', (3670, 3734), False, 'import torch\n'), ((4186, 4195), 'collections.Counter', 'Counter', ([], {}), '()\n', (4193, 4195), False, 'from collections import Counter\n'), ((4300, 4406), 'seaborn.barplot', 'sns.barplot', ([], {'x': "['Pneumonia Cases', 'Normal Cases']", 'y': '[pneumonia_count, normal_count]', 'palette': '"""magma"""'}), "(x=['Pneumonia Cases', 'Normal Cases'], y=[pneumonia_count,\n normal_count], palette='magma')\n", (4311, 4406), True, 'import seaborn as sns\n'), ((4416, 4454), 'matplotlib.pyplot.title', 'plt.title', (['"""Train Dataset Label Count"""'], {}), "('Train Dataset Label Count')\n", (4425, 4454), True, 'import matplotlib.pyplot as plt\n'), ((4455, 4465), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4463, 4465), True, 'import matplotlib.pyplot as plt\n'), ((4553, 4562), 'collections.Counter', 'Counter', ([], {}), '()\n', (4560, 4562), False, 'from collections import Counter\n'), ((4667, 4773), 'seaborn.barplot', 'sns.barplot', ([], {'x': "['Pneumonia Cases', 'Normal Cases']", 'y': '[pneumonia_count, normal_count]', 'palette': '"""magma"""'}), "(x=['Pneumonia Cases', 'Normal Cases'], y=[pneumonia_count,\n normal_count], palette='magma')\n", (4678, 4773), True, 'import seaborn as sns\n'), ((4783, 4826), 'matplotlib.pyplot.title', 'plt.title', (['"""Validation Dataset Label Count"""'], {}), "('Validation Dataset Label Count')\n", (4792, 4826), True, 'import matplotlib.pyplot as plt\n'), ((4827, 4837), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4835, 4837), True, 'import matplotlib.pyplot as plt\n'), ((4919, 4928), 
'collections.Counter', 'Counter', ([], {}), '()\n', (4926, 4928), False, 'from collections import Counter\n'), ((5032, 5138), 'seaborn.barplot', 'sns.barplot', ([], {'x': "['Pneumonia Cases', 'Normal Cases']", 'y': '[pneumonia_count, normal_count]', 'palette': '"""magma"""'}), "(x=['Pneumonia Cases', 'Normal Cases'], y=[pneumonia_count,\n normal_count], palette='magma')\n", (5043, 5138), True, 'import seaborn as sns\n'), ((5148, 5185), 'matplotlib.pyplot.title', 'plt.title', (['"""Test Dataset Label Count"""'], {}), "('Test Dataset Label Count')\n", (5157, 5185), True, 'import matplotlib.pyplot as plt\n'), ((5186, 5196), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5194, 5196), True, 'import matplotlib.pyplot as plt\n'), ((5804, 5831), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(25, 8)'}), '(figsize=(25, 8))\n', (5814, 5831), True, 'import matplotlib.pyplot as plt\n'), ((5843, 5856), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (5852, 5856), True, 'import numpy as np\n'), ((8203, 8224), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (8222, 8224), False, 'from torch import nn, optim\n'), ((11065, 11082), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11080, 11082), False, 'from datetime import datetime\n'), ((12875, 12912), 'torch.zeros', 'torch.zeros', (['num_classes', 'num_classes'], {}), '(num_classes, num_classes)\n', (12886, 12912), False, 'import torch\n'), ((14010, 14045), 'torchvision.models.densenet169', 'models.densenet169', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (14028, 14045), False, 'from torchvision import transforms, datasets, models\n'), ((14478, 14530), 'torch.nn.Linear', 'nn.Linear', (['model.classifier.in_features', 'num_classes'], {}), '(model.classifier.in_features, num_classes)\n', (14487, 14530), False, 'from torch import nn, optim\n'), ((14857, 14878), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (14876, 14878), False, 'from torch import nn, optim\n'), ((15085, 15102), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (15100, 15102), False, 'from datetime import datetime\n'), ((15801, 15821), 'torch.max', 'torch.max', (['output', '(1)'], {}), '(output, 1)\n', (15810, 15821), False, 'import torch\n'), ((16014, 16041), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(25, 8)'}), '(figsize=(25, 8))\n', (16024, 16041), True, 'import matplotlib.pyplot as plt\n'), ((16053, 16066), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (16062, 16066), True, 'import numpy as np\n'), ((16383, 16420), 'torch.zeros', 'torch.zeros', (['num_classes', 'num_classes'], {}), '(num_classes, num_classes)\n', (16394, 16420), False, 'import torch\n'), ((5565, 5596), 'numpy.array', 'np.array', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (5573, 5596), True, 'import numpy as np\n'), ((5607, 5638), 'numpy.array', 'np.array', (['[0.229, 0.224, 0.225]'], {}), '([0.229, 0.224, 0.225])\n', (5615, 5638), True, 'import numpy as np\n'), ((5676, 5694), 'numpy.clip', 'np.clip', (['inp', '(0)', '(1)'], {}), '(inp, 0, 1)\n', (5683, 5694), True, 'import numpy as np\n'), ((5699, 5714), 'matplotlib.pyplot.imshow', 'plt.imshow', (['inp'], {}), '(inp)\n', (5709, 5714), True, 'import matplotlib.pyplot as plt\n'), ((10582, 10608), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (10592, 10608), True, 'import matplotlib.pyplot as plt\n'), ((10613, 10633), 'matplotlib.pyplot.subplot', 'plt.subplot', 
(['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (10624, 10633), True, 'import matplotlib.pyplot as plt\n'), ((10838, 10867), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (10848, 10867), True, 'import matplotlib.pyplot as plt\n'), ((10872, 10902), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of epochs"""'], {}), "('Number of epochs')\n", (10882, 10902), True, 'import matplotlib.pyplot as plt\n'), ((10907, 10929), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (10917, 10929), True, 'import matplotlib.pyplot as plt\n'), ((10934, 10975), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Loss"""'], {}), "('Training and Validation Loss')\n", (10943, 10975), True, 'import matplotlib.pyplot as plt\n'), ((11390, 11417), 'torch.load', 'torch.load', (['"""model_init.pt"""'], {}), "('model_init.pt')\n", (11400, 11417), False, 'import torch\n'), ((12490, 12517), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 4)'}), '(figsize=(20, 4))\n', (12500, 12517), True, 'import matplotlib.pyplot as plt\n'), ((12551, 12576), 'seaborn.light_palette', 'sns.light_palette', (['"""blue"""'], {}), "('blue')\n", (12568, 12576), True, 'import seaborn as sns\n'), ((12581, 12601), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(1)'], {}), '(1, 3, 1)\n', (12592, 12601), True, 'import matplotlib.pyplot as plt\n'), ((12606, 12698), 'seaborn.heatmap', 'sns.heatmap', (['C'], {'annot': '(True)', 'cmap': 'cmap', 'fmt': '""".0f"""', 'xticklabels': 'labels', 'yticklabels': 'labels'}), "(C, annot=True, cmap=cmap, fmt='.0f', xticklabels=labels,\n yticklabels=labels)\n", (12617, 12698), True, 'import seaborn as sns\n'), ((12716, 12745), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Predicted Class"""'], {}), "('Predicted Class')\n", (12726, 12745), True, 'import matplotlib.pyplot as plt\n'), ((12750, 12778), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Original Class"""'], {}), "('Original Class')\n", (12760, 12778), True, 'import matplotlib.pyplot as plt\n'), ((12783, 12812), 'matplotlib.pyplot.title', 'plt.title', (['"""Confusion matrix"""'], {}), "('Confusion matrix')\n", (12792, 12812), True, 'import matplotlib.pyplot as plt\n'), ((12918, 12933), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (12931, 12933), False, 'import torch\n'), ((15376, 15398), 'torch.load', 'torch.load', (['"""model.pt"""'], {}), "('model.pt')\n", (15386, 15398), False, 'import torch\n'), ((16426, 16441), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (16439, 16441), False, 'import torch\n'), ((2118, 2151), 'torchvision.transforms.RandomResizedCrop', 'transforms.RandomResizedCrop', (['(224)'], {}), '(224)\n', (2146, 2151), False, 'from torchvision import transforms, datasets, models\n'), ((2157, 2190), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (2188, 2190), False, 'from torchvision import transforms, datasets, models\n'), ((2196, 2217), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2215, 2217), False, 'from torchvision import transforms, datasets, models\n'), ((2280, 2302), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (2297, 2302), False, 'from torchvision import transforms, datasets, models\n'), ((2308, 2334), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (2329, 2334), False, 'from torchvision import transforms, datasets, 
models\n'), ((2340, 2361), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2359, 2361), False, 'from torchvision import transforms, datasets, models\n'), ((2423, 2457), 'torchvision.transforms.Resize', 'transforms.Resize', ([], {'size': '(224, 224)'}), '(size=(224, 224))\n', (2440, 2457), False, 'from torchvision import transforms, datasets, models\n'), ((2463, 2484), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2482, 2484), False, 'from torchvision import transforms, datasets, models\n'), ((6280, 6320), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(32)', '(3)'], {'stride': '(2)', 'padding': '(1)'}), '(3, 32, 3, stride=2, padding=1)\n', (6289, 6320), False, 'from torch import nn, optim\n'), ((6342, 6383), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)', '(3)'], {'stride': '(2)', 'padding': '(1)'}), '(32, 64, 3, stride=2, padding=1)\n', (6351, 6383), False, 'from torch import nn, optim\n'), ((6405, 6437), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)', '(3)'], {'padding': '(1)'}), '(64, 128, 3, padding=1)\n', (6414, 6437), False, 'from torch import nn, optim\n'), ((6478, 6496), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (6490, 6496), False, 'from torch import nn, optim\n'), ((6551, 6578), 'torch.nn.Linear', 'nn.Linear', (['(7 * 7 * 128)', '(512)'], {}), '(7 * 7 * 128, 512)\n', (6560, 6578), False, 'from torch import nn, optim\n'), ((6598, 6617), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(512)'], {}), '(512, 512)\n', (6607, 6617), False, 'from torch import nn, optim\n'), ((6638, 6665), 'torch.nn.Linear', 'nn.Linear', (['(512)', 'num_classes'], {}), '(512, num_classes)\n', (6647, 6665), False, 'from torch import nn, optim\n'), ((6718, 6733), 'torch.nn.Dropout', 'nn.Dropout', (['(0.3)'], {}), '(0.3)\n', (6728, 6733), False, 'from torch import nn, optim\n'), ((8691, 8708), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (8706, 8708), False, 'from datetime import datetime\n'), ((13136, 13157), 'torch.max', 'torch.max', (['outputs', '(1)'], {}), '(outputs, 1)\n', (13145, 13157), False, 'import torch\n'), ((16639, 16660), 'torch.max', 'torch.max', (['outputs', '(1)'], {}), '(outputs, 1)\n', (16648, 16660), False, 'import torch\n'), ((11266, 11283), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11281, 11283), False, 'from datetime import datetime\n'), ((15257, 15274), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (15272, 15274), False, 'from datetime import datetime\n'), ((10044, 10061), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (10059, 10061), False, 'from datetime import datetime\n')] |
from gym_minigrid.minigrid import *
from gym_minigrid.register import register
class FetchObjEnv(MiniGridEnv):
"""
Environment in which the agent has to fetch either a yellow key or a blue ball
named using English text strings
"""
def __init__(
self,
size=8,
numObjs=3,
):
self.numObjs = numObjs
super().__init__(
grid_size=size,
max_steps=5*size**2,
# Set this to True for maximum speed
see_through_walls=True
)
def _gen_grid(self, width, height):
self.grid = Grid(width, height)
# Generate the surrounding walls
self.grid.horz_wall(0, 0)
self.grid.horz_wall(0, height-1)
self.grid.vert_wall(0, 0)
self.grid.vert_wall(width-1, 0)
        types = ['key', 'ball', 'box', 'goal', 'door']
        objs = []
        # The fetch target is drawn from the first three types (key, ball, box);
        # goals and doors only appear as distractor objects.
        objType = self._rand_elem(types[0:3])
if objType == 'key':
obj = Key('yellow')
elif objType == 'ball':
obj = Ball('yellow')
elif objType == 'box':
obj = Box('yellow')
self.place_obj(obj)
objs.append(obj)
while len(objs) < self.numObjs:
objType = self._rand_elem(types)
#objColor = self._rand_elem(COLOR_NAMES)
if objType == 'key':
obj = Key('yellow')
elif objType == 'ball':
obj = Ball('yellow')
            elif objType == 'goal':
                obj = Goal()  # Goal takes no color argument in gym_minigrid
elif objType == 'box':
obj = Box('yellow')
elif objType == 'door':
obj = Door('yellow')
self.place_obj(obj)
objs.append(obj)
# Randomize the player start position and orientation
self.place_agent()
# Choose a random object to be picked up
target = objs[0]
self.targetType = target.type
self.targetColor = target.color
descStr = '%s %s' % (self.targetColor, self.targetType)
# Generate the mission string
idx = self._rand_int(0, 5)
if idx == 0:
self.mission = 'get a %s' % descStr
elif idx == 1:
self.mission = 'go get a %s' % descStr
elif idx == 2:
self.mission = 'fetch a %s' % descStr
elif idx == 3:
self.mission = 'go fetch a %s' % descStr
elif idx == 4:
self.mission = 'you must fetch a %s' % descStr
assert hasattr(self, 'mission')
def step(self, action):
obs, reward, done, info = MiniGridEnv.step(self, action)
        # The episode ends as soon as any object is picked up;
        # only fetching the named target yields a reward.
        if self.carrying:
if self.carrying.color == self.targetColor and \
self.carrying.type == self.targetType:
reward = self._reward()
done = True
else:
reward = 0
done = True
return obs, reward, done, info
class FetchObjEnv5x5N2(FetchObjEnv):
def __init__(self):
super().__init__(size=5, numObjs=2)
class FetchObjEnv6x6N2(FetchObjEnv):
def __init__(self):
super().__init__(size=6, numObjs=2)
class FetchObjEnv8x8N2(FetchObjEnv):
def __init__(self):
super().__init__(size=8, numObjs=2)
class FetchObjEnv16x16N2(FetchObjEnv):
def __init__(self):
super().__init__(size=16, numObjs=2)
register(
id='MiniGrid-FetchObj-5x5-N2-v0',
entry_point='gym_minigrid.envs:FetchObjEnv5x5N2'
)
register(
id='MiniGrid-FetchObj-6x6-N2-v0',
entry_point='gym_minigrid.envs:FetchObjEnv6x6N2'
)
register(
id='MiniGrid-FetchObj-8x8-N2-v0',
entry_point='gym_minigrid.envs:FetchObjEnv8x8N2'
)
register(
id='MiniGrid-FetchObj-16x16-N2-v0',
entry_point='gym_minigrid.envs:FetchObjEnv16x16N2'
)
##########################################################################
class FetchObjEnv5x5N3(FetchObjEnv):
def __init__(self):
super().__init__(size=5, numObjs=3)
class FetchObjEnv6x6N3(FetchObjEnv):
def __init__(self):
super().__init__(size=6, numObjs=3)
class FetchObjEnv8x8N3(FetchObjEnv):
def __init__(self):
super().__init__(size=8, numObjs=3)
class FetchObjEnv16x16N3(FetchObjEnv):
def __init__(self):
super().__init__(size=16, numObjs=3)
register(
id='MiniGrid-FetchObj-5x5-N3-v0',
entry_point='gym_minigrid.envs:FetchObjEnv5x5N3'
)
register(
id='MiniGrid-FetchObj-6x6-N3-v0',
entry_point='gym_minigrid.envs:FetchObjEnv6x6N3'
)
register(
id='MiniGrid-FetchObj-8x8-N3-v0',
entry_point='gym_minigrid.envs:FetchObjEnv8x8N3'
)
register(
id='MiniGrid-FetchObj-16x16-N3-v0',
entry_point='gym_minigrid.envs:FetchObjEnv16x16N3'
)
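# Example usage sketch (assumes gym and gym_minigrid are installed; the env id
# is one of those registered above):
#   import gym
#   env = gym.make('MiniGrid-FetchObj-8x8-N2-v0')
#   obs = env.reset()
#   obs, reward, done, info = env.step(env.action_space.sample())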
| [
"gym_minigrid.register.register"
] | [((3419, 3516), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-5x5-N2-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv5x5N2"""'}), "(id='MiniGrid-FetchObj-5x5-N2-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv5x5N2')\n", (3427, 3516), False, 'from gym_minigrid.register import register\n'), ((3523, 3620), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-6x6-N2-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv6x6N2"""'}), "(id='MiniGrid-FetchObj-6x6-N2-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv6x6N2')\n", (3531, 3620), False, 'from gym_minigrid.register import register\n'), ((3627, 3724), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-8x8-N2-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv8x8N2"""'}), "(id='MiniGrid-FetchObj-8x8-N2-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv8x8N2')\n", (3635, 3724), False, 'from gym_minigrid.register import register\n'), ((3731, 3832), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-16x16-N2-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv16x16N2"""'}), "(id='MiniGrid-FetchObj-16x16-N2-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv16x16N2')\n", (3739, 3832), False, 'from gym_minigrid.register import register\n'), ((4346, 4443), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-5x5-N3-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv5x5N3"""'}), "(id='MiniGrid-FetchObj-5x5-N3-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv5x5N3')\n", (4354, 4443), False, 'from gym_minigrid.register import register\n'), ((4450, 4547), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-6x6-N3-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv6x6N3"""'}), "(id='MiniGrid-FetchObj-6x6-N3-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv6x6N3')\n", (4458, 4547), False, 'from gym_minigrid.register import register\n'), ((4554, 4651), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-8x8-N3-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv8x8N3"""'}), "(id='MiniGrid-FetchObj-8x8-N3-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv8x8N3')\n", (4562, 4651), False, 'from gym_minigrid.register import register\n'), ((4658, 4759), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-FetchObj-16x16-N3-v0"""', 'entry_point': '"""gym_minigrid.envs:FetchObjEnv16x16N3"""'}), "(id='MiniGrid-FetchObj-16x16-N3-v0', entry_point=\n 'gym_minigrid.envs:FetchObjEnv16x16N3')\n", (4666, 4759), False, 'from gym_minigrid.register import register\n')] |
import heterocl as hcl
import heterocl.tvm as tvm
import numpy as np
def test_two_stages():
hcl.init()
A = hcl.placeholder((10,), "A")
B = hcl.placeholder((10,), "B")
C = hcl.placeholder((10,), "C")
def kernel(A, B, C):
@hcl.def_([A.shape, B.shape])
def M1(A, B):
with hcl.for_(0, 10) as i:
B[i] = A[i] + 1
@hcl.def_([B.shape, C.shape])
def M2(B, C):
with hcl.for_(0, 10) as i:
C[i] = B[i] + 1
M1(A, B)
M2(B, C)
s = hcl.create_schedule([A, B, C], kernel)
    # Stream tensor B from producer stage M1 to consumer stage M2
    # through a FIFO of depth 1.
    s.to(B, s[kernel.M2], s[kernel.M1], depth=1)
f = hcl.build(s)
a = np.random.randint(100, size=(10,))
b = np.random.randint(100, size=(10,))
c = np.random.randint(100, size=(10,))
hcl_A = hcl.asarray(a)
hcl_B = hcl.asarray(b)
hcl_C = hcl.asarray(c)
f(hcl_A, hcl_B, hcl_C)
np.testing.assert_array_equal(hcl_C.asnumpy(), a + 2)
def test_three_stages():
hcl.init()
A = hcl.placeholder((10,), "A")
B = hcl.placeholder((10,), "B")
C = hcl.placeholder((10,), "C")
D = hcl.placeholder((10,), "D")
def kernel(A, B, C, D):
@hcl.def_([A.shape, B.shape])
def M1(A, B):
with hcl.for_(0, 10) as i:
B[i] = A[i] + 1
@hcl.def_([B.shape, C.shape])
def M2(B, C):
with hcl.for_(0, 10) as i:
C[i] = B[i] + 1
@hcl.def_([C.shape, D.shape])
def M3(C, D):
with hcl.for_(0, 10) as i:
D[i] = C[i] + 1
M1(A, B)
M2(B, C)
M3(C, D)
s = hcl.create_schedule([A, B, C, D], kernel)
s.to(B, s[kernel.M2], s[kernel.M1], depth=1)
s.to(C, s[kernel.M3], s[kernel.M2], depth=1)
f = hcl.build(s)
a = np.random.randint(100, size=(10,))
b = np.random.randint(100, size=(10,))
c = np.random.randint(100, size=(10,))
d = np.random.randint(100, size=(10,))
hcl_A = hcl.asarray(a)
hcl_B = hcl.asarray(b)
hcl_C = hcl.asarray(c)
hcl_D = hcl.asarray(d)
f(hcl_A, hcl_B, hcl_C, hcl_D)
np.testing.assert_array_equal(hcl_D.asnumpy(), a + 3)
def test_internal_stages():
hcl.init()
A = hcl.placeholder((10,), "A")
B = hcl.placeholder((10,), "B")
C = hcl.placeholder((10,), "C")
D = hcl.placeholder((10,), "D")
def kernel(A, B, C, D):
@hcl.def_([A.shape, B.shape, C.shape, D.shape])
def M1(A, B, C, D):
with hcl.for_(0, 10) as i:
B[i] = A[i] + 1
D[i] = C[i] + 1
@hcl.def_([B.shape, C.shape])
def M2(B, C):
with hcl.for_(0, 10) as i:
C[i] = B[i] + 1
M1(A, B, C, D)
M2(B, C)
s = hcl.create_schedule([A, B, C, D], kernel)
s.to(B, s[kernel.M2], s[kernel.M1], depth=1)
s.to(C, s[kernel.M1], s[kernel.M2], depth=1)
f = hcl.build(s)
a = np.random.randint(100, size=(10,))
b = np.random.randint(100, size=(10,))
c = np.random.randint(100, size=(10,))
d = np.random.randint(100, size=(10,))
hcl_A = hcl.asarray(a)
hcl_B = hcl.asarray(b)
hcl_C = hcl.asarray(c)
hcl_D = hcl.asarray(d)
f(hcl_A, hcl_B, hcl_C, hcl_D)
np.testing.assert_array_equal(hcl_D.asnumpy(), a + 3)
def test_fork_stages():
hcl.init()
A = hcl.placeholder((10,), "A")
B = hcl.placeholder((10,), "B")
C = hcl.placeholder((10,), "C")
D = hcl.placeholder((10,), "D")
E = hcl.placeholder((10,), "E")
def kernel(A, B, C, D, E):
@hcl.def_([A.shape, B.shape, C.shape])
def M1(A, B, C):
with hcl.for_(0, 10) as i:
B[i] = A[i] + 1
C[i] = A[i] - 1
@hcl.def_([B.shape, D.shape])
def M2(B, D):
with hcl.for_(0, 10) as i:
D[i] = B[i] + 1
@hcl.def_([C.shape, E.shape])
def M3(C, E):
with hcl.for_(0, 10) as i:
E[i] = C[i] - 1
M1(A, B, C)
M2(B, D)
M3(C, E)
hcl.init()
s1 = hcl.create_schedule([A, B, C, D, E], kernel)
s1.to(B, s1[kernel.M2], s1[kernel.M1], depth=1)
hcl.init()
s2 = hcl.create_schedule([A, B, C, D, E], kernel)
s2.to(C, s2[kernel.M3], s2[kernel.M1], depth=1)
hcl.init()
s3 = hcl.create_schedule([A, B, C, D, E], kernel)
s3.to(B, s3[kernel.M2], s3[kernel.M1], depth=1)
s3.to(C, s3[kernel.M3], s3[kernel.M1], depth=1)
a = np.random.randint(100, size=(10,))
b = np.random.randint(100, size=(10,))
c = np.random.randint(100, size=(10,))
d = np.random.randint(100, size=(10,))
e = np.random.randint(100, size=(10,))
def _test_stream(s):
f = hcl.build(s)
hcl_A = hcl.asarray(a)
hcl_B = hcl.asarray(b)
hcl_C = hcl.asarray(c)
hcl_D = hcl.asarray(d)
hcl_E = hcl.asarray(e)
f(hcl_A, hcl_B, hcl_C, hcl_D, hcl_E)
np.testing.assert_array_equal(hcl_D.asnumpy(), a + 2)
_test_stream(s1)
_test_stream(s2)
_test_stream(s3)
def test_merge_stages():
hcl.init()
A = hcl.placeholder((10,), "A")
B = hcl.placeholder((10,), "B")
C = hcl.placeholder((10,), "C")
D = hcl.placeholder((10,), "D")
E = hcl.placeholder((10,), "E")
def kernel(A, B, C, D, E):
@hcl.def_([A.shape, B.shape])
def M1(A, B):
with hcl.for_(0, 10) as i:
B[i] = A[i] + 1
@hcl.def_([C.shape, D.shape])
def M2(C, D):
with hcl.for_(0, 10) as i:
D[i] = C[i] - 1
@hcl.def_([B.shape, D.shape, E.shape])
def M3(B, D, E):
with hcl.for_(0, 10) as i:
E[i] = B[i] + D[i]
M1(A, B)
M2(C, D)
M3(B, D, E)
hcl.init()
s1 = hcl.create_schedule([A, B, C, D, E], kernel)
s1.to(B, s1[kernel.M3], s1[kernel.M1], depth=1)
hcl.init()
s2 = hcl.create_schedule([A, B, C, D, E], kernel)
s2.to(D, s2[kernel.M3], s2[kernel.M2], depth=1)
hcl.init()
s3 = hcl.create_schedule([A, B, C, D, E], kernel)
s3.to(B, s3[kernel.M3], s3[kernel.M1], depth=1)
s3.to(D, s3[kernel.M3], s3[kernel.M2], depth=1)
    print(hcl.lower(s3))  # inspect the lowered IR of the doubly-streamed schedule
a = np.random.randint(100, size=(10,))
b = np.random.randint(100, size=(10,))
c = np.random.randint(100, size=(10,))
d = np.random.randint(100, size=(10,))
e = np.random.randint(100, size=(10,))
def _test_stream(s):
f = hcl.build(s)
hcl_A = hcl.asarray(a)
hcl_B = hcl.asarray(b)
hcl_C = hcl.asarray(c)
hcl_D = hcl.asarray(d)
hcl_E = hcl.asarray(e)
f(hcl_A, hcl_B, hcl_C, hcl_D, hcl_E)
np.testing.assert_array_equal(hcl_E.asnumpy(), a + c)
_test_stream(s1)
_test_stream(s2)
_test_stream(s3)
def test_loop_stages():
hcl.init()
A = hcl.placeholder((10,), "A")
B = hcl.placeholder((10,), "B")
C = hcl.placeholder((10,), "C")
def kernel(A, B, C):
@hcl.def_([A.shape, B.shape, C.shape])
def M1(A, B, C):
with hcl.for_(0, 10) as i:
with hcl.for_(0, 10) as j:
with hcl.if_(i == 0):
B[j] = A[j]
with hcl.elif_(i < 9):
B[j] = B[j] + 1
with hcl.else_():
C[j] = B[j]
M1(A, B, C)
s = hcl.create_schedule([A, B, C], kernel)
    # Stream B from stage M1 back into itself (a self-loop) with a FIFO of depth 10.
    s.to(B, s[kernel.M1], s[kernel.M1], depth=10)
f = hcl.build(s)
a = np.random.randint(100, size=(10,))
b = np.random.randint(100, size=(10,))
c = np.random.randint(100, size=(10,))
hcl_A = hcl.asarray(a)
hcl_B = hcl.asarray(b)
hcl_C = hcl.asarray(c)
f(hcl_A, hcl_B, hcl_C)
np.testing.assert_array_equal(hcl_C.asnumpy(), a + 8)
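# The tests above are written for pytest discovery; a direct-run entry point
# is sketched here for convenience (assumption: no fixtures or CLI flags are needed).
if __name__ == "__main__":
    test_two_stages()
    test_three_stages()
    test_internal_stages()
    test_fork_stages()
    test_merge_stages()
    test_loop_stages()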
| [
"heterocl.for_",
"heterocl.placeholder",
"heterocl.def_",
"heterocl.if_",
"heterocl.create_schedule",
"heterocl.build",
"numpy.random.randint",
"heterocl.init",
"heterocl.elif_",
"heterocl.asarray",
"heterocl.lower",
"heterocl.else_"
] | [((97, 107), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (105, 107), True, 'import heterocl as hcl\n'), ((116, 143), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""A"""'], {}), "((10,), 'A')\n", (131, 143), True, 'import heterocl as hcl\n'), ((152, 179), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""B"""'], {}), "((10,), 'B')\n", (167, 179), True, 'import heterocl as hcl\n'), ((188, 215), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""C"""'], {}), "((10,), 'C')\n", (203, 215), True, 'import heterocl as hcl\n'), ((550, 588), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C]', 'kernel'], {}), '([A, B, C], kernel)\n', (569, 588), True, 'import heterocl as hcl\n'), ((646, 658), 'heterocl.build', 'hcl.build', (['s'], {}), '(s)\n', (655, 658), True, 'import heterocl as hcl\n'), ((668, 702), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (685, 702), True, 'import numpy as np\n'), ((711, 745), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (728, 745), True, 'import numpy as np\n'), ((754, 788), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (771, 788), True, 'import numpy as np\n'), ((801, 815), 'heterocl.asarray', 'hcl.asarray', (['a'], {}), '(a)\n', (812, 815), True, 'import heterocl as hcl\n'), ((828, 842), 'heterocl.asarray', 'hcl.asarray', (['b'], {}), '(b)\n', (839, 842), True, 'import heterocl as hcl\n'), ((855, 869), 'heterocl.asarray', 'hcl.asarray', (['c'], {}), '(c)\n', (866, 869), True, 'import heterocl as hcl\n'), ((986, 996), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (994, 996), True, 'import heterocl as hcl\n'), ((1005, 1032), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""A"""'], {}), "((10,), 'A')\n", (1020, 1032), True, 'import heterocl as hcl\n'), ((1041, 1068), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""B"""'], {}), "((10,), 'B')\n", (1056, 1068), True, 'import heterocl as hcl\n'), ((1077, 1104), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""C"""'], {}), "((10,), 'C')\n", (1092, 1104), True, 'import heterocl as hcl\n'), ((1113, 1140), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""D"""'], {}), "((10,), 'D')\n", (1128, 1140), True, 'import heterocl as hcl\n'), ((1627, 1668), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D]', 'kernel'], {}), '([A, B, C, D], kernel)\n', (1646, 1668), True, 'import heterocl as hcl\n'), ((1775, 1787), 'heterocl.build', 'hcl.build', (['s'], {}), '(s)\n', (1784, 1787), True, 'import heterocl as hcl\n'), ((1797, 1831), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (1814, 1831), True, 'import numpy as np\n'), ((1840, 1874), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (1857, 1874), True, 'import numpy as np\n'), ((1883, 1917), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (1900, 1917), True, 'import numpy as np\n'), ((1926, 1960), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (1943, 1960), True, 'import numpy as np\n'), ((1973, 1987), 'heterocl.asarray', 'hcl.asarray', (['a'], {}), '(a)\n', (1984, 1987), True, 'import heterocl as hcl\n'), ((2000, 2014), 'heterocl.asarray', 'hcl.asarray', (['b'], {}), '(b)\n', (2011, 2014), True, 'import heterocl as hcl\n'), 
((2027, 2041), 'heterocl.asarray', 'hcl.asarray', (['c'], {}), '(c)\n', (2038, 2041), True, 'import heterocl as hcl\n'), ((2054, 2068), 'heterocl.asarray', 'hcl.asarray', (['d'], {}), '(d)\n', (2065, 2068), True, 'import heterocl as hcl\n'), ((2195, 2205), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (2203, 2205), True, 'import heterocl as hcl\n'), ((2214, 2241), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""A"""'], {}), "((10,), 'A')\n", (2229, 2241), True, 'import heterocl as hcl\n'), ((2250, 2277), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""B"""'], {}), "((10,), 'B')\n", (2265, 2277), True, 'import heterocl as hcl\n'), ((2286, 2313), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""C"""'], {}), "((10,), 'C')\n", (2301, 2313), True, 'import heterocl as hcl\n'), ((2322, 2349), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""D"""'], {}), "((10,), 'D')\n", (2337, 2349), True, 'import heterocl as hcl\n'), ((2749, 2790), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D]', 'kernel'], {}), '([A, B, C, D], kernel)\n', (2768, 2790), True, 'import heterocl as hcl\n'), ((2897, 2909), 'heterocl.build', 'hcl.build', (['s'], {}), '(s)\n', (2906, 2909), True, 'import heterocl as hcl\n'), ((2919, 2953), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (2936, 2953), True, 'import numpy as np\n'), ((2962, 2996), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (2979, 2996), True, 'import numpy as np\n'), ((3005, 3039), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (3022, 3039), True, 'import numpy as np\n'), ((3048, 3082), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (3065, 3082), True, 'import numpy as np\n'), ((3095, 3109), 'heterocl.asarray', 'hcl.asarray', (['a'], {}), '(a)\n', (3106, 3109), True, 'import heterocl as hcl\n'), ((3122, 3136), 'heterocl.asarray', 'hcl.asarray', (['b'], {}), '(b)\n', (3133, 3136), True, 'import heterocl as hcl\n'), ((3149, 3163), 'heterocl.asarray', 'hcl.asarray', (['c'], {}), '(c)\n', (3160, 3163), True, 'import heterocl as hcl\n'), ((3176, 3190), 'heterocl.asarray', 'hcl.asarray', (['d'], {}), '(d)\n', (3187, 3190), True, 'import heterocl as hcl\n'), ((3313, 3323), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (3321, 3323), True, 'import heterocl as hcl\n'), ((3332, 3359), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""A"""'], {}), "((10,), 'A')\n", (3347, 3359), True, 'import heterocl as hcl\n'), ((3368, 3395), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""B"""'], {}), "((10,), 'B')\n", (3383, 3395), True, 'import heterocl as hcl\n'), ((3404, 3431), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""C"""'], {}), "((10,), 'C')\n", (3419, 3431), True, 'import heterocl as hcl\n'), ((3440, 3467), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""D"""'], {}), "((10,), 'D')\n", (3455, 3467), True, 'import heterocl as hcl\n'), ((3476, 3503), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""E"""'], {}), "((10,), 'E')\n", (3491, 3503), True, 'import heterocl as hcl\n'), ((4036, 4046), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (4044, 4046), True, 'import heterocl as hcl\n'), ((4056, 4100), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D, E]', 'kernel'], {}), '([A, B, C, D, E], kernel)\n', (4075, 4100), True, 'import heterocl as hcl\n'), 
((4158, 4168), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (4166, 4168), True, 'import heterocl as hcl\n'), ((4178, 4222), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D, E]', 'kernel'], {}), '([A, B, C, D, E], kernel)\n', (4197, 4222), True, 'import heterocl as hcl\n'), ((4280, 4290), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (4288, 4290), True, 'import heterocl as hcl\n'), ((4300, 4344), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D, E]', 'kernel'], {}), '([A, B, C, D, E], kernel)\n', (4319, 4344), True, 'import heterocl as hcl\n'), ((4458, 4492), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (4475, 4492), True, 'import numpy as np\n'), ((4501, 4535), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (4518, 4535), True, 'import numpy as np\n'), ((4544, 4578), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (4561, 4578), True, 'import numpy as np\n'), ((4587, 4621), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (4604, 4621), True, 'import numpy as np\n'), ((4630, 4664), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (4647, 4664), True, 'import numpy as np\n'), ((5074, 5084), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (5082, 5084), True, 'import heterocl as hcl\n'), ((5093, 5120), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""A"""'], {}), "((10,), 'A')\n", (5108, 5120), True, 'import heterocl as hcl\n'), ((5129, 5156), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""B"""'], {}), "((10,), 'B')\n", (5144, 5156), True, 'import heterocl as hcl\n'), ((5165, 5192), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""C"""'], {}), "((10,), 'C')\n", (5180, 5192), True, 'import heterocl as hcl\n'), ((5201, 5228), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""D"""'], {}), "((10,), 'D')\n", (5216, 5228), True, 'import heterocl as hcl\n'), ((5237, 5264), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""E"""'], {}), "((10,), 'E')\n", (5252, 5264), True, 'import heterocl as hcl\n'), ((5768, 5778), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (5776, 5778), True, 'import heterocl as hcl\n'), ((5788, 5832), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D, E]', 'kernel'], {}), '([A, B, C, D, E], kernel)\n', (5807, 5832), True, 'import heterocl as hcl\n'), ((5890, 5900), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (5898, 5900), True, 'import heterocl as hcl\n'), ((5910, 5954), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D, E]', 'kernel'], {}), '([A, B, C, D, E], kernel)\n', (5929, 5954), True, 'import heterocl as hcl\n'), ((6012, 6022), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (6020, 6022), True, 'import heterocl as hcl\n'), ((6032, 6076), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C, D, E]', 'kernel'], {}), '([A, B, C, D, E], kernel)\n', (6051, 6076), True, 'import heterocl as hcl\n'), ((6215, 6249), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (6232, 6249), True, 'import numpy as np\n'), ((6258, 6292), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (6275, 6292), True, 'import numpy as np\n'), ((6301, 6335), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': 
'(10,)'}), '(100, size=(10,))\n', (6318, 6335), True, 'import numpy as np\n'), ((6344, 6378), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (6361, 6378), True, 'import numpy as np\n'), ((6387, 6421), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (6404, 6421), True, 'import numpy as np\n'), ((6830, 6840), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (6838, 6840), True, 'import heterocl as hcl\n'), ((6849, 6876), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""A"""'], {}), "((10,), 'A')\n", (6864, 6876), True, 'import heterocl as hcl\n'), ((6885, 6912), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""B"""'], {}), "((10,), 'B')\n", (6900, 6912), True, 'import heterocl as hcl\n'), ((6921, 6948), 'heterocl.placeholder', 'hcl.placeholder', (['(10,)', '"""C"""'], {}), "((10,), 'C')\n", (6936, 6948), True, 'import heterocl as hcl\n'), ((7395, 7433), 'heterocl.create_schedule', 'hcl.create_schedule', (['[A, B, C]', 'kernel'], {}), '([A, B, C], kernel)\n', (7414, 7433), True, 'import heterocl as hcl\n'), ((7492, 7504), 'heterocl.build', 'hcl.build', (['s'], {}), '(s)\n', (7501, 7504), True, 'import heterocl as hcl\n'), ((7514, 7548), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (7531, 7548), True, 'import numpy as np\n'), ((7557, 7591), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (7574, 7591), True, 'import numpy as np\n'), ((7600, 7634), 'numpy.random.randint', 'np.random.randint', (['(100)'], {'size': '(10,)'}), '(100, size=(10,))\n', (7617, 7634), True, 'import numpy as np\n'), ((7647, 7661), 'heterocl.asarray', 'hcl.asarray', (['a'], {}), '(a)\n', (7658, 7661), True, 'import heterocl as hcl\n'), ((7674, 7688), 'heterocl.asarray', 'hcl.asarray', (['b'], {}), '(b)\n', (7685, 7688), True, 'import heterocl as hcl\n'), ((7701, 7715), 'heterocl.asarray', 'hcl.asarray', (['c'], {}), '(c)\n', (7712, 7715), True, 'import heterocl as hcl\n'), ((252, 280), 'heterocl.def_', 'hcl.def_', (['[A.shape, B.shape]'], {}), '([A.shape, B.shape])\n', (260, 280), True, 'import heterocl as hcl\n'), ((384, 412), 'heterocl.def_', 'hcl.def_', (['[B.shape, C.shape]'], {}), '([B.shape, C.shape])\n', (392, 412), True, 'import heterocl as hcl\n'), ((1180, 1208), 'heterocl.def_', 'hcl.def_', (['[A.shape, B.shape]'], {}), '([A.shape, B.shape])\n', (1188, 1208), True, 'import heterocl as hcl\n'), ((1312, 1340), 'heterocl.def_', 'hcl.def_', (['[B.shape, C.shape]'], {}), '([B.shape, C.shape])\n', (1320, 1340), True, 'import heterocl as hcl\n'), ((1444, 1472), 'heterocl.def_', 'hcl.def_', (['[C.shape, D.shape]'], {}), '([C.shape, D.shape])\n', (1452, 1472), True, 'import heterocl as hcl\n'), ((2389, 2435), 'heterocl.def_', 'hcl.def_', (['[A.shape, B.shape, C.shape, D.shape]'], {}), '([A.shape, B.shape, C.shape, D.shape])\n', (2397, 2435), True, 'import heterocl as hcl\n'), ((2577, 2605), 'heterocl.def_', 'hcl.def_', (['[B.shape, C.shape]'], {}), '([B.shape, C.shape])\n', (2585, 2605), True, 'import heterocl as hcl\n'), ((3546, 3583), 'heterocl.def_', 'hcl.def_', (['[A.shape, B.shape, C.shape]'], {}), '([A.shape, B.shape, C.shape])\n', (3554, 3583), True, 'import heterocl as hcl\n'), ((3722, 3750), 'heterocl.def_', 'hcl.def_', (['[B.shape, D.shape]'], {}), '([B.shape, D.shape])\n', (3730, 3750), True, 'import heterocl as hcl\n'), ((3854, 3882), 'heterocl.def_', 'hcl.def_', (['[C.shape, 
E.shape]'], {}), '([C.shape, E.shape])\n', (3862, 3882), True, 'import heterocl as hcl\n'), ((4703, 4715), 'heterocl.build', 'hcl.build', (['s'], {}), '(s)\n', (4712, 4715), True, 'import heterocl as hcl\n'), ((4733, 4747), 'heterocl.asarray', 'hcl.asarray', (['a'], {}), '(a)\n', (4744, 4747), True, 'import heterocl as hcl\n'), ((4764, 4778), 'heterocl.asarray', 'hcl.asarray', (['b'], {}), '(b)\n', (4775, 4778), True, 'import heterocl as hcl\n'), ((4795, 4809), 'heterocl.asarray', 'hcl.asarray', (['c'], {}), '(c)\n', (4806, 4809), True, 'import heterocl as hcl\n'), ((4826, 4840), 'heterocl.asarray', 'hcl.asarray', (['d'], {}), '(d)\n', (4837, 4840), True, 'import heterocl as hcl\n'), ((4857, 4871), 'heterocl.asarray', 'hcl.asarray', (['e'], {}), '(e)\n', (4868, 4871), True, 'import heterocl as hcl\n'), ((5307, 5335), 'heterocl.def_', 'hcl.def_', (['[A.shape, B.shape]'], {}), '([A.shape, B.shape])\n', (5315, 5335), True, 'import heterocl as hcl\n'), ((5439, 5467), 'heterocl.def_', 'hcl.def_', (['[C.shape, D.shape]'], {}), '([C.shape, D.shape])\n', (5447, 5467), True, 'import heterocl as hcl\n'), ((5571, 5608), 'heterocl.def_', 'hcl.def_', (['[B.shape, D.shape, E.shape]'], {}), '([B.shape, D.shape, E.shape])\n', (5579, 5608), True, 'import heterocl as hcl\n'), ((6191, 6204), 'heterocl.lower', 'hcl.lower', (['s3'], {}), '(s3)\n', (6200, 6204), True, 'import heterocl as hcl\n'), ((6460, 6472), 'heterocl.build', 'hcl.build', (['s'], {}), '(s)\n', (6469, 6472), True, 'import heterocl as hcl\n'), ((6490, 6504), 'heterocl.asarray', 'hcl.asarray', (['a'], {}), '(a)\n', (6501, 6504), True, 'import heterocl as hcl\n'), ((6521, 6535), 'heterocl.asarray', 'hcl.asarray', (['b'], {}), '(b)\n', (6532, 6535), True, 'import heterocl as hcl\n'), ((6552, 6566), 'heterocl.asarray', 'hcl.asarray', (['c'], {}), '(c)\n', (6563, 6566), True, 'import heterocl as hcl\n'), ((6583, 6597), 'heterocl.asarray', 'hcl.asarray', (['d'], {}), '(d)\n', (6594, 6597), True, 'import heterocl as hcl\n'), ((6614, 6628), 'heterocl.asarray', 'hcl.asarray', (['e'], {}), '(e)\n', (6625, 6628), True, 'import heterocl as hcl\n'), ((6985, 7022), 'heterocl.def_', 'hcl.def_', (['[A.shape, B.shape, C.shape]'], {}), '([A.shape, B.shape, C.shape])\n', (6993, 7022), True, 'import heterocl as hcl\n'), ((320, 335), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (328, 335), True, 'import heterocl as hcl\n'), ((452, 467), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (460, 467), True, 'import heterocl as hcl\n'), ((1248, 1263), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (1256, 1263), True, 'import heterocl as hcl\n'), ((1380, 1395), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (1388, 1395), True, 'import heterocl as hcl\n'), ((1512, 1527), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (1520, 1527), True, 'import heterocl as hcl\n'), ((2481, 2496), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (2489, 2496), True, 'import heterocl as hcl\n'), ((2645, 2660), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (2653, 2660), True, 'import heterocl as hcl\n'), ((3626, 3641), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (3634, 3641), True, 'import heterocl as hcl\n'), ((3790, 3805), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (3798, 3805), True, 'import heterocl as hcl\n'), ((3922, 3937), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (3930, 3937), True, 'import 
heterocl as hcl\n'), ((5375, 5390), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (5383, 5390), True, 'import heterocl as hcl\n'), ((5507, 5522), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (5515, 5522), True, 'import heterocl as hcl\n'), ((5651, 5666), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (5659, 5666), True, 'import heterocl as hcl\n'), ((7065, 7080), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (7073, 7080), True, 'import heterocl as hcl\n'), ((7108, 7123), 'heterocl.for_', 'hcl.for_', (['(0)', '(10)'], {}), '(0, 10)\n', (7116, 7123), True, 'import heterocl as hcl\n'), ((7155, 7170), 'heterocl.if_', 'hcl.if_', (['(i == 0)'], {}), '(i == 0)\n', (7162, 7170), True, 'import heterocl as hcl\n'), ((7233, 7249), 'heterocl.elif_', 'hcl.elif_', (['(i < 9)'], {}), '(i < 9)\n', (7242, 7249), True, 'import heterocl as hcl\n'), ((7316, 7327), 'heterocl.else_', 'hcl.else_', ([], {}), '()\n', (7325, 7327), True, 'import heterocl as hcl\n')] |
#!/usr/bin/env python
import cv2
import glob
num = 0
for filename in glob.glob('amazontest10/*.jpg'):  # every JPEG in the folder
    img = cv2.imread(filename)
    #crop_img = img[260:649, 477:1023]  # earlier crop region
    crop_img = img[68:313, 448:810]  # crop region for the book
    # Slicing is img[y1:y2, x1:x2], i.e. img[y: y + h, x: x + w]
    # and *not* img[x: x + w, y: y + h]
    #cv2.imshow("cropped", crop_img)
    #cv2.waitKey(0)
    cv2.imwrite('amazontest10/pic' + str(num) + '.jpg', crop_img)
    num += 1
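# Note: the crops are written back into the globbed folder, so a second run
# would pick up and re-crop the outputs. A sketch that writes to a separate
# output directory instead (the 'cropped' folder name is an assumption):
#   import os
#   out_dir = 'amazontest10/cropped'
#   os.makedirs(out_dir, exist_ok=True)
#   cv2.imwrite(os.path.join(out_dir, 'pic' + str(num) + '.jpg'), crop_img)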
| [
"cv2.imread",
"glob.glob"
] | [((83, 114), 'glob.glob', 'glob.glob', (['"""amazontest10/*.jpg"""'], {}), "('amazontest10/*.jpg')\n", (92, 114), False, 'import glob\n'), ((145, 165), 'cv2.imread', 'cv2.imread', (['filename'], {}), '(filename)\n', (155, 165), False, 'import cv2\n')] |
# -*- coding: utf-8 -*-
"""02-insurance-linear.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1j3SczKlApjIG0N7ajJjQR8dm1p29boTw
# Insurance cost prediction using linear regression
In this project we're going to use information like a person's age, sex, BMI, no. of children and smoking habit to predict the price of yearly medical bills. This kind of model is useful for insurance companies to determine the yearly insurance premium for a person. The dataset for this project is taken from [Kaggle](https://www.kaggle.com/mirichoi0218/insurance).
We will create a model with the following steps:
1. Download and explore the dataset
2. Prepare the dataset for training
3. Create a linear regression model
4. Train the model to fit the data
5. Make predictions using the trained model
"""
# Uncomment and run the appropriate command for your operating system, if required
# Linux / Binder
# !pip install numpy matplotlib pandas torch==1.7.0+cpu torchvision==0.8.1+cpu torchaudio==0.7.0 -f https://download.pytorch.org/whl/torch_stable.html
# Windows
# !pip install numpy matplotlib pandas torch==1.7.0+cpu torchvision==0.8.1+cpu torchaudio==0.7.0 -f https://download.pytorch.org/whl/torch_stable.html
# MacOS
# !pip install numpy matplotlib pandas torch torchvision torchaudio
# Import all the necessary libraries
import torch
import torchvision
import torch.nn as nn
import pandas as pd
import matplotlib.pyplot as plt
import torch.nn.functional as F
from torchvision.datasets.utils import download_url
from torch.utils.data import DataLoader, TensorDataset, random_split
# Commented out IPython magic to ensure Python compatibility.
import matplotlib.pyplot as plt
# %matplotlib inline
import seaborn as sns
import matplotlib
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
import plotly.express as px
import plotly.graph_objects as go
import numpy as np
"""## Step 1: Download and explore the data
Let us begin by downloading the data. We'll use the `download_url` function from PyTorch to get the data as a CSV (comma-separated values) file.
"""
DATASET_URL = "https://hub.jovian.ml/wp-content/uploads/2020/05/insurance.csv"
DATA_FILENAME = "insurance.csv"
download_url(DATASET_URL, '.')
# read csv we downloaded
dataframe_raw = pd.read_csv(DATA_FILENAME)
dataframe_raw.head()
def customize_dataset(dataframe_raw, rand_str):
dataframe = dataframe_raw.copy(deep=True)
# drop some rows
dataframe = dataframe.sample(int(0.95*len(dataframe)), random_state=int(ord(rand_str[0])))
# scale input
dataframe.bmi = dataframe.bmi * ord(rand_str[1])/100.
# scale target
dataframe.charges = dataframe.charges * ord(rand_str[2])/100.
# drop column
if ord(rand_str[3]) % 2 == 1:
dataframe = dataframe.drop(['region'], axis=1)
return dataframe
dataframe = customize_dataset(dataframe_raw, 'vrajesh')
dataframe.head()
num_rows = dataframe.shape[0]
num_cols = dataframe.shape[1]
print("num_rows",num_rows,"num_cols",num_cols)
print("Shape of the DataFrame",dataframe.shape)
# input variables
input_cols = dataframe.drop('charges', axis=1).columns
# non-numeric (categorical) input variables; checking the column dtype is
# safer than inspecting a single row label, which sampling may have dropped
categorical_cols = [x for x in dataframe.columns if dataframe[x].dtype == object]
# output/target variable(s)
output_cols = [dataframe["charges"].name]
"""**Get the minimum, maximum and average value of the `charges` column and display it on a graph.**"""
# We sort the dataframe by the values in the charges column and store it in a different variable
data = dataframe.sort_values('charges')
maximum = data['charges'].max()
minimum = data['charges'].min()
# Take the mean of the charges column directly rather than picking a
# positional entry out of data.mean(), which breaks if columns move.
average = data['charges'].mean()
# We use the plotly.express library to represent the data graphically,
# and seaborn to set the background style of the graph
sns.set_style('darkgrid')
fig = px.scatter(dataframe, x=dataframe.index, y=dataframe['charges'],
hover_data=[dataframe['charges']])
fig.show()
data2 = dataframe.sort_values('age')
uniq = data2['age'].unique().tolist()
# Average charges per unique age; averaging 'age' itself, as originally
# written, would just return x for each group.
data3 = [data2['charges'].loc[data2.age == x].mean() for x in uniq]
#fig1 = plt.plot(x=[data2['age'].loc[data2.age == x]for x in uniq], y = [data2['age'].loc[data2.age == x].mean()for x in uniq])
#fig1.show()
"""## Step 2: Prepare the dataset for training
We need to convert the data from the Pandas dataframe into PyTorch tensors for training. To do this, the first step is to convert it to numpy arrays. If you've filled out `input_cols`, `categorical_cols` and `output_cols` correctly, the following function will perform the conversion to numpy arrays.
"""
def dataframe_to_arrays(dataframe):
# Make a copy of the original dataframe
dataframe1 = dataframe.copy(deep=True)
# Convert non-numeric categorical columns to numbers
for col in categorical_cols:
dataframe1[col] = dataframe1[col].astype('category').cat.codes
    # Extract inputs & outputs as numpy arrays
inputs_array = dataframe1[input_cols].to_numpy()
targets_array = dataframe1[output_cols].to_numpy()
return inputs_array, targets_array
inputs_array, targets_array = dataframe_to_arrays(dataframe)
inputs_array, targets_array
# Convert values to torch.float32 before we start manipulating the data
inputs = torch.from_numpy(inputs_array).type(torch.float32)
targets = torch.from_numpy(targets_array).type(torch.float32)
inputs.dtype, targets.dtype
# Create a tensor dataset of inputs and targets
dataset = TensorDataset(inputs, targets)
# Splitting our data
val_percent = 0.1756 # between 0.1 and 0.2
val_size = int(num_rows * val_percent)
train_size = num_rows - val_size
train_ds, val_ds = torch.utils.data.random_split(dataset,[train_size,val_size]) # Use the random_split function to split dataset into 2 parts of the desired length
# Fix a batch size to distribute data
batch_size = 128
train_loader = DataLoader(train_ds, batch_size, shuffle=True)
val_loader = DataLoader(val_ds, batch_size)
for xb, yb in train_loader:
print("inputs:", xb)
print("targets:", yb)
break
"""## Step 3: Create a Linear Regression Model
Our model itself is a fairly straightforward linear regression (we'll build more complex models in the next assignment).
"""
input_size = len(input_cols)
output_size = len(output_cols)
input_size,output_size
# Main Model for Linear Regression
# We use nn.Module class and initialize it using super().__init__()
# We calculate loss using F.l1_loss()
class InsuranceModel(nn.Module):
def __init__(self):
super().__init__()
        self.linear = nn.Linear(input_size, output_size)
    def forward(self, xb):
        out = self.linear(xb)
        return out
    def training_step(self, batch):
        inputs, targets = batch
        # Generate predictions
        out = self(inputs)
        # Calculate loss
        loss = F.l1_loss(out, targets)
        return loss
    def validation_step(self, batch):
        inputs, targets = batch
        # Generate predictions
        out = self(inputs)
        # Calculate loss
        loss = F.l1_loss(out, targets)
return {'val_loss': loss.detach()}
def validation_epoch_end(self, outputs):
batch_losses = [x['val_loss'] for x in outputs]
epoch_loss = torch.stack(batch_losses).mean() # Combine losses
return {'val_loss': epoch_loss.item()}
def epoch_end(self, epoch, result, num_epochs):
# Print result every 20th epoch
if (epoch+1) % 20 == 0 or epoch == num_epochs-1:
print("Epoch [{}], val_loss: {:.4f}".format(epoch+1, result['val_loss']))
model = InsuranceModel()
list(model.parameters())
"""## Step 4: Train the model to fit the data
To train our model, we'll use the same `fit` function explained in the lecture. That's the benefit of defining a generic training loop - you can use it for any problem.
"""
def evaluate(model, val_loader):
outputs = [model.validation_step(batch) for batch in val_loader]
return model.validation_epoch_end(outputs)
def fit(epochs, lr, model, train_loader, val_loader, opt_func=torch.optim.SGD):
history = []
optimizer = opt_func(model.parameters(), lr)
for epoch in range(epochs):
# Training Phase
for batch in train_loader:
loss = model.training_step(batch)
loss.backward()
optimizer.step()
optimizer.zero_grad()
# Validation phase
result = evaluate(model, val_loader)
model.epoch_end(epoch, result, epochs)
history.append(result)
return history
result = evaluate(model, val_loader)  # Use the evaluate function
print(result)
"""# Lets Train the Model"""
epochs = 1000
lr = 0.18
history1 = fit(epochs, lr, model, train_loader, val_loader)
epochs = 1000
lr = 1
history2 = fit(epochs, lr, model, train_loader, val_loader)
epochs = 2000
lr = 1e-3
history3 = fit(epochs, lr, model, train_loader, val_loader)
epochs = 1000
lr = 3
history4 = fit(epochs, lr, model, train_loader, val_loader)
epochs = 1000
lr = 2.5
history5 = fit(epochs, lr, model, train_loader, val_loader)
import itertools
def seq(start, end, step):
    if step == 0:
        raise ValueError("step must not be 0")
    # Walk from start toward end: flip the sign of step when counting down,
    # otherwise itertools.count would move away from end.
    step = abs(step) if end > start else -abs(step)
    sample_count = int(abs(end - start) / abs(step))
    return itertools.islice(itertools.count(start, step), sample_count)
lrs = list(seq(1.0, 0.001, 0.01))  # materialise: the generator could only be iterated once
for x in lrs:
epochs = 1000
lr = x
history5 = fit(epochs, lr, model, train_loader, val_loader)
for x in lrs:
print(x)
"""**final validation loss of our model?**"""
val_loss = 8066
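# The value above was read off a training run; a sketch that pulls the final
# validation loss straight from the last recorded history instead:
#   val_loss = history5[-1]['val_loss']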
"""## Step 5: Make predictions using the trained model
"""
def predict_single(input, target, model):
    inputs = input.unsqueeze(0)  # add a batch dimension
    predictions = model(inputs)  # was model(input); the batched tensor was never used
prediction = predictions[0].detach()
print("Input:", input)
print("Target:", target)
print("Prediction:", prediction)
input, target = val_ds[0]
predict_single(input, target, model)
input, target = val_ds[10]
predict_single(input, target, model)
input, target = val_ds[23]
predict_single(input, target, model)
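# A minimal sketch for persisting the trained weights (the filename is an
# assumption; reload later with model.load_state_dict(torch.load(...))):
#   torch.save(model.state_dict(), 'insurance-linear.pth')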
| [
"plotly.express.scatter",
"torch.nn.functional.l1_loss",
"pandas.read_csv",
"torch.utils.data.random_split",
"torch.stack",
"torch.utils.data.TensorDataset",
"torchvision.datasets.utils.download_url",
"seaborn.set_style",
"torch.from_numpy",
"itertools.count",
"torch.nn.Linear",
"torch.utils.data.DataLoader"
] | [((2310, 2340), 'torchvision.datasets.utils.download_url', 'download_url', (['DATASET_URL', '"""."""'], {}), "(DATASET_URL, '.')\n", (2322, 2340), False, 'from torchvision.datasets.utils import download_url\n'), ((2383, 2409), 'pandas.read_csv', 'pd.read_csv', (['DATA_FILENAME'], {}), '(DATA_FILENAME)\n', (2394, 2409), True, 'import pandas as pd\n'), ((3918, 3943), 'seaborn.set_style', 'sns.set_style', (['"""darkgrid"""'], {}), "('darkgrid')\n", (3931, 3943), True, 'import seaborn as sns\n'), ((3950, 4054), 'plotly.express.scatter', 'px.scatter', (['dataframe'], {'x': 'dataframe.index', 'y': "dataframe['charges']", 'hover_data': "[dataframe['charges']]"}), "(dataframe, x=dataframe.index, y=dataframe['charges'], hover_data\n =[dataframe['charges']])\n", (3960, 4054), True, 'import plotly.express as px\n'), ((5562, 5592), 'torch.utils.data.TensorDataset', 'TensorDataset', (['inputs', 'targets'], {}), '(inputs, targets)\n', (5575, 5592), False, 'from torch.utils.data import DataLoader, TensorDataset, random_split\n'), ((5752, 5814), 'torch.utils.data.random_split', 'torch.utils.data.random_split', (['dataset', '[train_size, val_size]'], {}), '(dataset, [train_size, val_size])\n', (5781, 5814), False, 'import torch\n'), ((5970, 6016), 'torch.utils.data.DataLoader', 'DataLoader', (['train_ds', 'batch_size'], {'shuffle': '(True)'}), '(train_ds, batch_size, shuffle=True)\n', (5980, 6016), False, 'from torch.utils.data import DataLoader, TensorDataset, random_split\n'), ((6030, 6060), 'torch.utils.data.DataLoader', 'DataLoader', (['val_ds', 'batch_size'], {}), '(val_ds, batch_size)\n', (6040, 6060), False, 'from torch.utils.data import DataLoader, TensorDataset, random_split\n'), ((5361, 5391), 'torch.from_numpy', 'torch.from_numpy', (['inputs_array'], {}), '(inputs_array)\n', (5377, 5391), False, 'import torch\n'), ((5422, 5453), 'torch.from_numpy', 'torch.from_numpy', (['targets_array'], {}), '(targets_array)\n', (5438, 5453), False, 'import torch\n'), ((6661, 6695), 'torch.nn.Linear', 'nn.Linear', (['input_size', 'output_size'], {}), '(input_size, output_size)\n', (6670, 6695), True, 'import torch.nn as nn\n'), ((7078, 7101), 'torch.nn.functional.l1_loss', 'F.l1_loss', (['out', 'targets'], {}), '(out, targets)\n', (7087, 7101), True, 'import torch.nn.functional as F\n'), ((7331, 7354), 'torch.nn.functional.l1_loss', 'F.l1_loss', (['out', 'targets'], {}), '(out, targets)\n', (7340, 7354), True, 'import torch.nn.functional as F\n'), ((9595, 9623), 'itertools.count', 'itertools.count', (['start', 'step'], {}), '(start, step)\n', (9610, 9623), False, 'import itertools\n'), ((7569, 7594), 'torch.stack', 'torch.stack', (['batch_losses'], {}), '(batch_losses)\n', (7580, 7594), False, 'import torch\n')] |
# -*- encoding: utf-8 -*-
import numpy as np
import copy
from scipy.ndimage.filters import gaussian_filter
import cv2
def im_to_double(im):
    """
    Rescale an image to floats in the range [0, 1].
    """
    min_val = np.min(im.ravel())
    max_val = np.max(im.ravel())
    return (im.astype('float') - min_val) / (max_val - min_val)
def im_to_int(im):
    """
    Rescale a [0, 1] float image back to integer values in [0, 255].
    """
    # Multiply before casting; casting first would truncate [0, 1] floats to 0 or 1.
    return (im * 255).astype('int')
def ridge_segmentation(normalised_im, blksize, thresh):
    """
    Mark blksize x blksize blocks whose grey-level variance exceeds thresh as
    ridge region, then re-normalise the image to zero mean and unit standard
    deviation over that region.
    """
    rows, cols = normalised_im.shape
    segmented_im = np.zeros((rows, cols))
    rows_block = (rows // blksize) * blksize  # largest multiple of blksize that fits
    cols_block = (cols // blksize) * blksize
    for i in range(0, rows_block, blksize):
        for j in range(0, cols_block, blksize):
            if (normalised_im[i:i + blksize,
                              j: j + blksize].var() >= thresh):
                segmented_im[i:i + blksize, j:j + blksize] = 1
    # Re-normalise using the mean and std of the ridge (foreground) pixels only
    im1 = normalised_im - np.mean(normalised_im[np.where(segmented_im > 0)])
    stdh = np.std(im1[np.where(segmented_im > 0)])
    normalised_im = im1 / stdh
    return normalised_im, segmented_im
def ridge_orientation(im, orient_smooth_sigma):
    """
    Estimate the local ridge orientation from the image gradients, smoothing
    the doubled angles with a Gaussian filter of the given sigma.
    """
    sobelx = cv2.Sobel(im, cv2.CV_64F, 1, 0, ksize=3)
    sobely = cv2.Sobel(im, cv2.CV_64F, 0, 1, ksize=3)
    orient = np.arctan2(sobely, sobelx)
    # Double the angles so that opposite gradient directions reinforce
    # rather than cancel during smoothing.
    Ox = np.cos(2 * orient)
    Oy = np.sin(2 * orient)
    sin2theta = gaussian_filter(Oy, orient_smooth_sigma, 0)
    cos2theta = gaussian_filter(Ox, orient_smooth_sigma, 0)
    return np.arctan2(sin2theta, cos2theta) / 2
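# Minimal usage sketch (the image path, block size, variance threshold and
# smoothing sigma are assumptions, not values taken from this module):
#   gray = cv2.imread('fingerprint.png', cv2.IMREAD_GRAYSCALE)
#   norm = im_to_double(gray)
#   norm, mask = ridge_segmentation(norm, blksize=16, thresh=0.1)
#   orientations = ridge_orientation(norm, orient_smooth_sigma=7)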
| [
"scipy.ndimage.filters.gaussian_filter",
"numpy.where",
"numpy.zeros",
"numpy.arctan2",
"numpy.cos",
"numpy.sin",
"cv2.Sobel"
] | [((496, 518), 'numpy.zeros', 'np.zeros', (['(rows, cols)'], {}), '((rows, cols))\n', (504, 518), True, 'import numpy as np\n'), ((1162, 1202), 'cv2.Sobel', 'cv2.Sobel', (['im', 'cv2.CV_64F', '(1)', '(0)'], {'ksize': '(3)'}), '(im, cv2.CV_64F, 1, 0, ksize=3)\n', (1171, 1202), False, 'import cv2\n'), ((1216, 1256), 'cv2.Sobel', 'cv2.Sobel', (['im', 'cv2.CV_64F', '(0)', '(1)'], {'ksize': '(3)'}), '(im, cv2.CV_64F, 0, 1, ksize=3)\n', (1225, 1256), False, 'import cv2\n'), ((1275, 1301), 'numpy.arctan2', 'np.arctan2', (['sobely', 'sobelx'], {}), '(sobely, sobelx)\n', (1285, 1301), True, 'import numpy as np\n'), ((1320, 1338), 'numpy.cos', 'np.cos', (['(2 * orient)'], {}), '(2 * orient)\n', (1326, 1338), True, 'import numpy as np\n'), ((1348, 1366), 'numpy.sin', 'np.sin', (['(2 * orient)'], {}), '(2 * orient)\n', (1354, 1366), True, 'import numpy as np\n'), ((1388, 1431), 'scipy.ndimage.filters.gaussian_filter', 'gaussian_filter', (['Oy', 'orient_smooth_sigma', '(0)'], {}), '(Oy, orient_smooth_sigma, 0)\n', (1403, 1431), False, 'from scipy.ndimage.filters import gaussian_filter\n'), ((1448, 1491), 'scipy.ndimage.filters.gaussian_filter', 'gaussian_filter', (['Ox', 'orient_smooth_sigma', '(0)'], {}), '(Ox, orient_smooth_sigma, 0)\n', (1463, 1491), False, 'from scipy.ndimage.filters import gaussian_filter\n'), ((1509, 1541), 'numpy.arctan2', 'np.arctan2', (['sin2theta', 'cos2theta'], {}), '(sin2theta, cos2theta)\n', (1519, 1541), True, 'import numpy as np\n'), ((982, 1008), 'numpy.where', 'np.where', (['(segmented_im > 0)'], {}), '(segmented_im > 0)\n', (990, 1008), True, 'import numpy as np\n'), ((931, 957), 'numpy.where', 'np.where', (['(segmented_im > 0)'], {}), '(segmented_im > 0)\n', (939, 957), True, 'import numpy as np\n')] |
import os
from django.http import JsonResponse
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework import permissions
from rest_framework.parsers import JSONParser
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from fedex_wrapper import tracking as fedex_tracking
from estafeta_wrapper import tracking as estafeta_tracking
from tosp_auth.permissions import IsAuthenticatedWithSecret
from .models import Shipment, PostalCode
from .serializers import AddressSerializer, ShipmentSerializer, \
InitialShipmentSerializer, FillInformationSerializer, \
ExternalShipmentSerializer, PostalCodeSerializer
@api_view(['POST'])
@permission_classes((permissions.IsAuthenticated, ))
def create_address(request):
"""
    Create a new address.
"""
if request.method == 'POST':
data = JSONParser().parse(request)
serializer = AddressSerializer(data=data)
if serializer.is_valid():
serializer.save()
return JsonResponse({'Message': 'Address created successfully'}, status=201)
return JsonResponse(serializer.errors, status=400)
@api_view(['POST'])
@permission_classes((permissions.IsAuthenticated, ))
def create_waybill(request):
"""
Allows you to create an entirely new waybill
"""
if request.method == 'POST':
data = JSONParser().parse(request)
serializer = ShipmentSerializer(data=data, context={'request': request})
if serializer.is_valid():
waybill = serializer.save()
if waybill.create_waybill():
return JsonResponse({'carrier': waybill.carrier.name,
'tracking_number': waybill.tracking_number,
'waybill_link': waybill.waybill_link},
status=201)
else:
return JsonResponse({'error': 'Impossible to generate waybill'},
status=500)
return JsonResponse(serializer.errors, status=400)
@api_view(['POST'])
@permission_classes((IsAuthenticatedWithSecret, ))
def create_shipment_token(request):
"""
Gets the minimum information required for
creating a new shipment and returns its
uuid.
"""
if request.method == 'POST':
data = JSONParser().parse(request)
serializer = InitialShipmentSerializer(data=data,
context={'request': request})
if serializer.is_valid():
shipment = serializer.save()
return JsonResponse({'shipment_token': shipment.unique_id},
status=201)
return JsonResponse(serializer.errors, status=400)
@api_view(['POST'])
@permission_classes((permissions.IsAuthenticated, ))
def fill_shipment_information(request, shipment_token):
"""
Fills the rest of the missing information for a package
"""
if request.method == 'POST':
shipment_data = JSONParser().parse(request)
shipment = Shipment.objects.filter(unique_id=shipment_token).get()
serializer = FillInformationSerializer(instance=shipment, data=shipment_data)
if serializer.is_valid():
shipment = serializer.save()
return JsonResponse({'confirmation_token': shipment.unique_id},
status=201)
return JsonResponse(serializer.errors, status=400)
@api_view(['POST'])
@permission_classes((IsAuthenticatedWithSecret, ))
def generate_waybill(request, shipment_token):
"""
    Generates the waybill for an order.
"""
if request.method == 'POST':
shipment = Shipment.objects.filter(unique_id=shipment_token).get()
if shipment.waybill_link or shipment.create_waybill():
return JsonResponse({'carrier': shipment.carrier.name,
'tracking_number': shipment.tracking_number,
'waybill_link': shipment.waybill_link}, status=201)
return JsonResponse({'error': 'Impossible to generate waybill'}, status=500)
@api_view(['GET'])
@permission_classes((permissions.IsAuthenticated, ))
def track_shipment(request):
if request.method == 'GET':
data = request.query_params
carrier = data['carrier']
tracking_number = data['tracking_number']
if os.environ['DJANGO_SETTINGS_MODULE'] == 'iupick.settings.production':
production = True
else:
production = False
if carrier == 'Fedex':
track_info = fedex_tracking.track(
production=production,
key=os.environ['FEDEX_KEY'],
password=os.environ['FEDEX_PASSWORD'],
account_number=os.environ['FEDEX_ACCOUNT_NUMBER'],
meter_number=os.environ['FEDEX_METER_NUMBER'],
tracking_number=tracking_number
)
status_detail = track_info.CompletedTrackDetails[0].TrackDetails[0].StatusDetail
street_line_dict = dict(enumerate(status_detail.Location.StreetLines))
response = {'description': status_detail.Description,
'address': {'line_one': street_line_dict.get(0, None),
'line_two': street_line_dict.get(1, None),
'city': status_detail.Location.City,
'state_code': status_detail.Location.StateOrProvinceCode,
'postal_code': status_detail.Location.PostalCode,
'country_name': status_detail.Location.CountryName}}
return JsonResponse(response, status=200)
if carrier == 'Estafeta':
track_info = estafeta_tracking.track(
production=production,
login=os.environ['ESTAFETA_TRACK_LOGIN'],
password=os.environ['<PASSWORD>PASSWORD'],
subscriber_id=os.environ['ESTAFETA_TRACK_SUBSCRIBER'],
waybill=tracking_number
)
history_last_event = track_info.trackingData.TrackingData[0].history.History[-1]
response = {'status': track_info.trackingData.TrackingData[0].statusSPA,
'description': history_last_event.eventDescriptionSPA,
'dateTime': history_last_event.eventDateTime,
'address': history_last_event.eventPlaceName}
return JsonResponse(response, status=200)
return JsonResponse({'Message': 'Datos invalidos'}, status=400)
@api_view(['POST'])
@permission_classes((IsAuthenticatedWithSecret, ))
def confirm_waypoint_shipment(request):
"""
    Creates an external shipment associated with the user of the secret token
    and the waypoint they are sending.
"""
if request.method == 'POST':
data = JSONParser().parse(request)
serializer = ExternalShipmentSerializer(data=data,
context={'request': request})
if serializer.is_valid():
external_shipment = serializer.save()
return JsonResponse({'shipment_token': external_shipment.unique_id},
status=201)
return JsonResponse(serializer.errors, status=400)
@api_view(['GET'])
@permission_classes((permissions.IsAuthenticated, ))
def postal_code_info(request, code):
if request.method == 'GET':
print(code)
postal_code = get_object_or_404(PostalCode, code=code)
serializer = PostalCodeSerializer(postal_code)
return Response(serializer.data)
| [
"rest_framework.decorators.permission_classes",
"django.http.JsonResponse",
"django.shortcuts.get_object_or_404",
"fedex_wrapper.tracking.track",
"rest_framework.response.Response",
"estafeta_wrapper.tracking.track",
"rest_framework.parsers.JSONParser",
"rest_framework.decorators.api_view"
] | [((765, 783), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (773, 783), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((785, 835), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(permissions.IsAuthenticated,)'], {}), '((permissions.IsAuthenticated,))\n', (803, 835), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((1276, 1294), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (1284, 1294), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((1296, 1346), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(permissions.IsAuthenticated,)'], {}), '((permissions.IsAuthenticated,))\n', (1314, 1346), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2198, 2216), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (2206, 2216), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2218, 2266), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(IsAuthenticatedWithSecret,)'], {}), '((IsAuthenticatedWithSecret,))\n', (2236, 2266), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2884, 2902), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (2892, 2902), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2904, 2954), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(permissions.IsAuthenticated,)'], {}), '((permissions.IsAuthenticated,))\n', (2922, 2954), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((3609, 3627), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (3617, 3627), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((3629, 3677), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(IsAuthenticatedWithSecret,)'], {}), '((IsAuthenticatedWithSecret,))\n', (3647, 3677), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((4271, 4288), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (4279, 4288), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((4290, 4340), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(permissions.IsAuthenticated,)'], {}), '((permissions.IsAuthenticated,))\n', (4308, 4340), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((6779, 6797), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (6787, 6797), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((6799, 6847), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(IsAuthenticatedWithSecret,)'], {}), '((IsAuthenticatedWithSecret,))\n', (6817, 6847), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((7505, 7522), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (7513, 7522), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((7524, 7574), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(permissions.IsAuthenticated,)'], {}), '((permissions.IsAuthenticated,))\n', (7542, 7574), False, 'from 
rest_framework.decorators import api_view, permission_classes\n'), ((1229, 1272), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (1241, 1272), False, 'from django.http import JsonResponse\n'), ((2151, 2194), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (2163, 2194), False, 'from django.http import JsonResponse\n'), ((2837, 2880), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (2849, 2880), False, 'from django.http import JsonResponse\n'), ((3562, 3605), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (3574, 3605), False, 'from django.http import JsonResponse\n'), ((4198, 4267), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'Impossible to generate waybill'}"], {'status': '(500)'}), "({'error': 'Impossible to generate waybill'}, status=500)\n", (4210, 4267), False, 'from django.http import JsonResponse\n'), ((6719, 6775), 'django.http.JsonResponse', 'JsonResponse', (["{'Message': 'Datos invalidos'}"], {'status': '(400)'}), "({'Message': 'Datos invalidos'}, status=400)\n", (6731, 6775), False, 'from django.http import JsonResponse\n'), ((7458, 7501), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (7470, 7501), False, 'from django.http import JsonResponse\n'), ((7687, 7727), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['PostalCode'], {'code': 'code'}), '(PostalCode, code=code)\n', (7704, 7727), False, 'from django.shortcuts import get_object_or_404\n'), ((7798, 7823), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (7806, 7823), False, 'from rest_framework.response import Response\n'), ((1144, 1213), 'django.http.JsonResponse', 'JsonResponse', (["{'Message': 'Address created successfully'}"], {'status': '(201)'}), "({'Message': 'Address created successfully'}, status=201)\n", (1156, 1213), False, 'from django.http import JsonResponse\n'), ((2725, 2789), 'django.http.JsonResponse', 'JsonResponse', (["{'shipment_token': shipment.unique_id}"], {'status': '(201)'}), "({'shipment_token': shipment.unique_id}, status=201)\n", (2737, 2789), False, 'from django.http import JsonResponse\n'), ((3446, 3514), 'django.http.JsonResponse', 'JsonResponse', (["{'confirmation_token': shipment.unique_id}"], {'status': '(201)'}), "({'confirmation_token': shipment.unique_id}, status=201)\n", (3458, 3514), False, 'from django.http import JsonResponse\n'), ((3972, 4121), 'django.http.JsonResponse', 'JsonResponse', (["{'carrier': shipment.carrier.name, 'tracking_number': shipment.\n tracking_number, 'waybill_link': shipment.waybill_link}"], {'status': '(201)'}), "({'carrier': shipment.carrier.name, 'tracking_number': shipment\n .tracking_number, 'waybill_link': shipment.waybill_link}, status=201)\n", (3984, 4121), False, 'from django.http import JsonResponse\n'), ((4737, 4992), 'fedex_wrapper.tracking.track', 'fedex_tracking.track', ([], {'production': 'production', 'key': "os.environ['FEDEX_KEY']", 'password': "os.environ['FEDEX_PASSWORD']", 'account_number': "os.environ['FEDEX_ACCOUNT_NUMBER']", 'meter_number': "os.environ['FEDEX_METER_NUMBER']", 'tracking_number': 'tracking_number'}), "(production=production, key=os.environ['FEDEX_KEY'],\n 
password=os.environ['FEDEX_PASSWORD'], account_number=os.environ[\n 'FEDEX_ACCOUNT_NUMBER'], meter_number=os.environ['FEDEX_METER_NUMBER'],\n tracking_number=tracking_number)\n", (4757, 4992), True, 'from fedex_wrapper import tracking as fedex_tracking\n'), ((5852, 5886), 'django.http.JsonResponse', 'JsonResponse', (['response'], {'status': '(200)'}), '(response, status=200)\n', (5864, 5886), False, 'from django.http import JsonResponse\n'), ((5946, 6171), 'estafeta_wrapper.tracking.track', 'estafeta_tracking.track', ([], {'production': 'production', 'login': "os.environ['ESTAFETA_TRACK_LOGIN']", 'password': "os.environ['<PASSWORD>PASSWORD']", 'subscriber_id': "os.environ['ESTAFETA_TRACK_SUBSCRIBER']", 'waybill': 'tracking_number'}), "(production=production, login=os.environ[\n 'ESTAFETA_TRACK_LOGIN'], password=os.environ['<PASSWORD>PASSWORD'],\n subscriber_id=os.environ['ESTAFETA_TRACK_SUBSCRIBER'], waybill=\n tracking_number)\n", (5969, 6171), True, 'from estafeta_wrapper import tracking as estafeta_tracking\n'), ((6669, 6703), 'django.http.JsonResponse', 'JsonResponse', (['response'], {'status': '(200)'}), '(response, status=200)\n', (6681, 6703), False, 'from django.http import JsonResponse\n'), ((7337, 7410), 'django.http.JsonResponse', 'JsonResponse', (["{'shipment_token': external_shipment.unique_id}"], {'status': '(201)'}), "({'shipment_token': external_shipment.unique_id}, status=201)\n", (7349, 7410), False, 'from django.http import JsonResponse\n'), ((983, 995), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (993, 995), False, 'from rest_framework.parsers import JSONParser\n'), ((1490, 1502), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (1500, 1502), False, 'from rest_framework.parsers import JSONParser\n'), ((1737, 1883), 'django.http.JsonResponse', 'JsonResponse', (["{'carrier': waybill.carrier.name, 'tracking_number': waybill.\n tracking_number, 'waybill_link': waybill.waybill_link}"], {'status': '(201)'}), "({'carrier': waybill.carrier.name, 'tracking_number': waybill.\n tracking_number, 'waybill_link': waybill.waybill_link}, status=201)\n", (1749, 1883), False, 'from django.http import JsonResponse\n'), ((2030, 2099), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'Impossible to generate waybill'}"], {'status': '(500)'}), "({'error': 'Impossible to generate waybill'}, status=500)\n", (2042, 2099), False, 'from django.http import JsonResponse\n'), ((2468, 2480), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (2478, 2480), False, 'from rest_framework.parsers import JSONParser\n'), ((3145, 3157), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (3155, 3157), False, 'from rest_framework.parsers import JSONParser\n'), ((7069, 7081), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (7079, 7081), False, 'from rest_framework.parsers import JSONParser\n')] |
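A minimal URLconf sketch for wiring the views above; the module layout, route paths, and converter names are assumptions, not part of the original project.
# Hypothetical urls.py for the shipment views above; every path is an assumption.
from django.urls import path
from . import views

urlpatterns = [
    path('addresses/', views.create_address),
    path('waybills/', views.create_waybill),
    path('shipments/', views.create_shipment_token),
    path('shipments/<shipment_token>/', views.fill_shipment_information),
    path('shipments/<shipment_token>/waybill/', views.generate_waybill),
    path('shipments/track/', views.track_shipment),
    path('waypoint-shipments/', views.confirm_waypoint_shipment),
    path('postal-codes/<code>/', views.postal_code_info),
]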
import configparser
class AppConf(object):
def __init__(self, config_path='config/init.ini'):
try:
self.conf_file = config_path
config = configparser.ConfigParser()
config.read(self.conf_file)
self.log_level = config['app']['log_level']
self.nuclio_url = config['nuclio']['url']
        except Exception as ex:
            # Errors are only reported; attributes remain unset if loading fails.
            print(ex)
| [
"configparser.ConfigParser"
] | [((175, 202), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (200, 202), False, 'import configparser\n')] |
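A minimal usage sketch for AppConf above; the section/key names follow the defaults in the class, and the ini contents shown are an assumption.
# Hypothetical config/init.ini:
#   [app]
#   log_level = INFO
#   [nuclio]
#   url = http://localhost:8070
conf = AppConf('config/init.ini')
print(conf.log_level, conf.nuclio_url)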
import re
from typing import List
from operator import xor
tag_re = re.compile(
r"""^(?!<[xX][mM][lL])
(<\s*([a-zA-Z_][-a-zA-Z_.\d]*)
((\s*[a-zA-Z_][-a-zA-Z_.\d]*=(['\"]).*\5)*)
\s*(/)?>|</\s*([a-zA-Z_][-a-zA-Z_.\d]*)\s*>)""",
re.X)
open_tag_re = re.compile(
r"""^(?!<[xX][mM][lL])
<\s*(?P<name>[a-zA-Z_][-a-zA-Z_.\d]*)
(?P<attrs>(\s*[a-zA-Z_][-a-zA-Z_.\d]*=(['\"]).*\4)*)\s*>""",
re.X)
close_tag_re = re.compile(
r"""^(?!<[xX][mM][lL])</(?P<name>[a-zA-Z_][-a-zA-Z_.\d]*)\s*>""",
re.X)
self_closed_tag_re = re.compile(r"""^(?!<[xX][mM][lL])
<\s*(?P<name>[a-zA-Z_][-a-zA-Z_.\d]*)
(?P<attrs>(\s*[a-zA-Z_][-a-zA-Z_.\d]*=(['\"]).*\4)*)\s*/>""",
re.X)
data_re = re.compile(r'\s*(?P<data>\S[^<>]+)')
attribute_re = re.compile(
r"""[a-zA-Z_][-a-zA-Z_.\d]*=(["']).*\1""")
decl_re = re.compile(
r"""<\?xml\s+
version=['"](?P<ver>\d\.\d)['"]
(?:\s+encoding=['"](?P<enc>[-a-zA-Z\d]+)['"])?
(?:\s+standalone=['"](?P<stand>yes|no)['"])?
\s*\?>""", re.X)
class XmlLexerError(Exception):
pass
def read_xml_file(f) -> str:
s = f.read().replace('\n', '').lstrip().rstrip()
return s
def get_tokens(f) -> List[str]:
tokens: List[str] = []
s = read_xml_file(f)
tmp_p = 0
for p in range(len(s)):
c = s[p]
if c == '<':
pros_data_match = data_re.match(s[tmp_p:p])
if pros_data_match and pros_data_match.group('data'):
tokens.append(pros_data_match.group('data'))
tmp_p = p
elif c == '>':
pros_tag_match = tag_re.match(s[tmp_p:p + 1])
pros_decl_match = decl_re.match(s[tmp_p:p + 1])
if xor(bool(pros_tag_match), bool(pros_decl_match)):
match = pros_tag_match or pros_decl_match
tokens.append(
match.group(0))
else:
raise XmlLexerError(
'Invalid tag: {}'.format(s[tmp_p:p + 1]))
tmp_p = p + 1
return tokens
| [
"re.compile"
] | [((69, 263), 're.compile', 're.compile', (['"""^(?!<[xX][mM][lL])\n (<\\\\s*([a-zA-Z_][-a-zA-Z_.\\\\d]*)\n ((\\\\s*[a-zA-Z_][-a-zA-Z_.\\\\d]*=([\'\\\\"]).*\\\\5)*)\n \\\\s*(/)?>|</\\\\s*([a-zA-Z_][-a-zA-Z_.\\\\d]*)\\\\s*>)"""', 're.X'], {}), '(\n """^(?!<[xX][mM][lL])\n (<\\\\s*([a-zA-Z_][-a-zA-Z_.\\\\d]*)\n ((\\\\s*[a-zA-Z_][-a-zA-Z_.\\\\d]*=([\'\\\\"]).*\\\\5)*)\n \\\\s*(/)?>|</\\\\s*([a-zA-Z_][-a-zA-Z_.\\\\d]*)\\\\s*>)"""\n , re.X)\n', (79, 263), False, 'import re\n'), ((269, 431), 're.compile', 're.compile', (['"""^(?!<[xX][mM][lL])\n <\\\\s*(?P<name>[a-zA-Z_][-a-zA-Z_.\\\\d]*)\n (?P<attrs>(\\\\s*[a-zA-Z_][-a-zA-Z_.\\\\d]*=([\'\\\\"]).*\\\\4)*)\\\\s*>"""', 're.X'], {}), '(\n """^(?!<[xX][mM][lL])\n <\\\\s*(?P<name>[a-zA-Z_][-a-zA-Z_.\\\\d]*)\n (?P<attrs>(\\\\s*[a-zA-Z_][-a-zA-Z_.\\\\d]*=([\'\\\\"]).*\\\\4)*)\\\\s*>"""\n , re.X)\n', (279, 431), False, 'import re\n'), ((441, 520), 're.compile', 're.compile', (['"""^(?!<[xX][mM][lL])</(?P<name>[a-zA-Z_][-a-zA-Z_.\\\\d]*)\\\\s*>"""', 're.X'], {}), "('^(?!<[xX][mM][lL])</(?P<name>[a-zA-Z_][-a-zA-Z_.\\\\d]*)\\\\s*>', re.X)\n", (451, 520), False, 'import re\n'), ((555, 710), 're.compile', 're.compile', (['"""^(?!<[xX][mM][lL])\n<\\\\s*(?P<name>[a-zA-Z_][-a-zA-Z_.\\\\d]*)\n(?P<attrs>(\\\\s*[a-zA-Z_][-a-zA-Z_.\\\\d]*=([\'\\\\"]).*\\\\4)*)\\\\s*/>"""', 're.X'], {}), '(\n """^(?!<[xX][mM][lL])\n<\\\\s*(?P<name>[a-zA-Z_][-a-zA-Z_.\\\\d]*)\n(?P<attrs>(\\\\s*[a-zA-Z_][-a-zA-Z_.\\\\d]*=([\'\\\\"]).*\\\\4)*)\\\\s*/>"""\n , re.X)\n', (565, 710), False, 'import re\n'), ((738, 775), 're.compile', 're.compile', (['"""\\\\s*(?P<data>\\\\S[^<>]+)"""'], {}), "('\\\\s*(?P<data>\\\\S[^<>]+)')\n", (748, 775), False, 'import re\n'), ((791, 842), 're.compile', 're.compile', (['"""[a-zA-Z_][-a-zA-Z_.\\\\d]*=(["\']).*\\\\1"""'], {}), '(\'[a-zA-Z_][-a-zA-Z_.\\\\d]*=(["\\\']).*\\\\1\')\n', (801, 842), False, 'import re\n'), ((861, 1061), 're.compile', 're.compile', (['"""<\\\\?xml\\\\s+\n version=[\'"](?P<ver>\\\\d\\\\.\\\\d)[\'"]\n (?:\\\\s+encoding=[\'"](?P<enc>[-a-zA-Z\\\\d]+)[\'"])?\n (?:\\\\s+standalone=[\'"](?P<stand>yes|no)[\'"])?\n \\\\s*\\\\?>"""', 're.X'], {}), '(\n """<\\\\?xml\\\\s+\n version=[\'"](?P<ver>\\\\d\\\\.\\\\d)[\'"]\n (?:\\\\s+encoding=[\'"](?P<enc>[-a-zA-Z\\\\d]+)[\'"])?\n (?:\\\\s+standalone=[\'"](?P<stand>yes|no)[\'"])?\n \\\\s*\\\\?>"""\n , re.X)\n', (871, 1061), False, 'import re\n')] |
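A small usage sketch for get_tokens above; the sample document is arbitrary, and io.StringIO stands in for an open file.
import io

doc = io.StringIO('<note id="1">hello</note>')
print(get_tokens(doc))  # ['<note id="1">', 'hello', '</note>']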
import cv2
import zmq
import json
import base64
import numpy as np
context = zmq.Context()
footage_socket = context.socket(zmq.SUB)
try:
with open('_config.json') as config_file:
cfg = json.load(config_file)
footage_socket.bind('tcp://{ip}:{port}'.format(
ip=str(cfg['ip']),
port=str(cfg['port'])
))
except Exception:
ip = input('IP your camera [* = localhost]: ')
port = input('Port your camera [default: 5555]: ')
footage_socket.bind('tcp://{ip}:{port}'.format(
ip=str(ip),
port=str(port)
))
finally:
    footage_socket.setsockopt_string(zmq.SUBSCRIBE, np.unicode(''))  # subscribe to all topics; np.unicode is removed in modern NumPy, plain '' suffices
while True:
try:
frame = footage_socket.recv_string()
img = base64.b64decode(frame)
        npimg = np.fromstring(img, dtype=np.uint8)  # np.fromstring is deprecated; np.frombuffer is the drop-in replacement
source = cv2.imdecode(npimg, 1)
cv2.imshow("Stream", source)
cv2.waitKey(1)
except KeyboardInterrupt:
cv2.destroyAllWindows()
break
| [
"base64.b64decode",
"numpy.unicode",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.imdecode",
"json.load",
"numpy.fromstring",
"zmq.Context"
] | [((78, 91), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (89, 91), False, 'import zmq\n'), ((200, 222), 'json.load', 'json.load', (['config_file'], {}), '(config_file)\n', (209, 222), False, 'import json\n'), ((622, 636), 'numpy.unicode', 'np.unicode', (['""""""'], {}), "('')\n", (632, 636), True, 'import numpy as np\n'), ((736, 759), 'base64.b64decode', 'base64.b64decode', (['frame'], {}), '(frame)\n', (752, 759), False, 'import base64\n'), ((780, 814), 'numpy.fromstring', 'np.fromstring', (['img'], {'dtype': 'np.uint8'}), '(img, dtype=np.uint8)\n', (793, 814), True, 'import numpy as np\n'), ((837, 859), 'cv2.imdecode', 'cv2.imdecode', (['npimg', '(1)'], {}), '(npimg, 1)\n', (849, 859), False, 'import cv2\n'), ((873, 901), 'cv2.imshow', 'cv2.imshow', (['"""Stream"""', 'source'], {}), "('Stream', source)\n", (883, 901), False, 'import cv2\n'), ((914, 928), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (925, 928), False, 'import cv2\n'), ((976, 999), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (997, 999), False, 'import cv2\n')] |
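A hypothetical matching sender for the receiver above: a PUB socket streaming base64-encoded JPEG frames. The camera index and endpoint are assumptions.
import base64
import cv2
import zmq

context = zmq.Context()
sender = context.socket(zmq.PUB)
sender.connect('tcp://localhost:5555')  # the receiver binds, so the sender connects
camera = cv2.VideoCapture(0)
while True:
    grabbed, frame = camera.read()
    if not grabbed:
        break
    ok, buffer = cv2.imencode('.jpg', frame)
    if ok:
        sender.send_string(base64.b64encode(buffer).decode('ascii'))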
import numpy as np
import numba
import math
def dtft(x, omegas):
"""
Exact evaluation the DTFT at the indicated points omega for the signal x
Note this is incredibly slow
Note x runs from 0 to N-1
"""
N = len(x)
ns = np.arange(N)
W = np.zeros((len(omegas), N), dtype=np.complex128)
for wi, w in enumerate(omegas):
W[wi, :] = np.exp(-1.0j * w * ns)
return np.dot(W, x)
@numba.jit(nopython=True)
def nextpow2(n):
"""
Return the smallest power of two greater than or equal to n.
"""
return int(math.ceil(math.log(n)/math.log(2)))
# now try ourselves a chirp-z transform
@numba.jit
def chirpz(x, M, A, W):
"""
chirp z transform per Rabiner derivation pp1256
x is our (complex) signal of length N
"""
N = len(x)
    L = 2**(nextpow2(N + M - 1))  # next power of two >= N + M - 1
yn = np.zeros(L, dtype=np.complex128)
for n in range(N):
yn_scale = A**(-n) * W**((n**2.0)/2.0)
yn[n] = x[n] * yn_scale
Yr = np.fft.fft(yn)
vn = np.zeros(L, dtype=np.complex128)
for n in range(M):
vn[n] = W**((-n**2.0)/2.0)
for n in range(L-N+1, L):
vn[n] = W**(-((L-n)**2.0)/2.0)
Vr = np.fft.fft(vn)
Gr = Yr * Vr
gk = np.fft.ifft(Gr)
#gk = np.convolve(yn, vn)
Xk = np.zeros(M, dtype=np.complex128)
for k in range(M):
g_scale = W**((k**2.0)/2.0)
Xk[k] = g_scale * gk[k]
return Xk
@numba.jit
def chirpz2d(x, M, A, W):
N = len(x)
x = x.T
out = np.zeros((N, M), dtype=np.complex128)
for i in range(N):
out[i] = chirpz(x[i], M, A, W)
out2d = np.zeros((M, M), dtype=np.complex128)
for i in range(M):
out2d[i] = chirpz(out[:, i], M, A, W)
return out2d
@numba.jit
def fchirpz2d(x, M, A, W):
"""
chirp z transform per Rabiner derivation pp1256
x is our (complex) signal of length N
assume x is square, output M will be square, dims are the same on all sides
"""
N = len(x)
    L = 2**(nextpow2(N + M - 1))  # next power of two >= N + M - 1
yn = np.zeros((L, L), dtype=np.complex128)
ns = np.arange(N)
ms = np.arange(M)
yn_scale = A**(-ns) * W**((ns**2.0)/2.0)
    a = np.outer(yn_scale, yn_scale)  # note: unused; the outer product is recomputed inline below
yn[:N, :N] = x * np.outer(yn_scale, yn_scale)
Yr = np.fft.fft2(yn)
vn = np.zeros(L, dtype=np.complex128)
for n in range(M):
vn[n] = W**((-n**2.0)/2.0)
for n in range(L-N+1, L):
vn[n] = W**(-((L-n)**2.0)/2.0)
Vr = np.fft.fft2(np.outer(vn, vn))
Gr = Yr * Vr
gk = np.fft.ifft2(Gr)
Xk = W**((ms**2.0)/2.0)
return gk[:M, :M] * np.outer(Xk, Xk)
def zoom_fft(x, theta_start, step_size, M):
"""
"zoomed" version of the fft, produces M step_sized samples
around the unit circle starting at theta_start
"""
A = np.exp(1j * theta_start)
W = np.exp(-1j * step_size)
return chirpz(x, M, A, W)
def zoom_fft2(x, theta_start, step_size, M):
"""
"zoomed" version of the fft2, produces M step_sized samples
around the unit circle starting at theta_start
"""
A = np.exp(1j * theta_start)
W = np.exp(-1j * step_size)
return fchirpz2d(x, M, A, W)
| [
"numpy.fft.ifft2",
"numpy.fft.fft",
"numpy.fft.fft2",
"numpy.exp",
"math.log",
"numpy.dot",
"numba.jit",
"numpy.zeros",
"numpy.outer",
"numpy.fft.ifft",
"numpy.arange"
] | [((434, 458), 'numba.jit', 'numba.jit', ([], {'nopython': '(True)'}), '(nopython=True)\n', (443, 458), False, 'import numba\n'), ((252, 264), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (261, 264), True, 'import numpy as np\n'), ((419, 431), 'numpy.dot', 'np.dot', (['W', 'x'], {}), '(W, x)\n', (425, 431), True, 'import numpy as np\n'), ((888, 920), 'numpy.zeros', 'np.zeros', (['L'], {'dtype': 'np.complex128'}), '(L, dtype=np.complex128)\n', (896, 920), True, 'import numpy as np\n'), ((1033, 1047), 'numpy.fft.fft', 'np.fft.fft', (['yn'], {}), '(yn)\n', (1043, 1047), True, 'import numpy as np\n'), ((1062, 1094), 'numpy.zeros', 'np.zeros', (['L'], {'dtype': 'np.complex128'}), '(L, dtype=np.complex128)\n', (1070, 1094), True, 'import numpy as np\n'), ((1249, 1263), 'numpy.fft.fft', 'np.fft.fft', (['vn'], {}), '(vn)\n', (1259, 1263), True, 'import numpy as np\n'), ((1300, 1315), 'numpy.fft.ifft', 'np.fft.ifft', (['Gr'], {}), '(Gr)\n', (1311, 1315), True, 'import numpy as np\n'), ((1360, 1392), 'numpy.zeros', 'np.zeros', (['M'], {'dtype': 'np.complex128'}), '(M, dtype=np.complex128)\n', (1368, 1392), True, 'import numpy as np\n'), ((1583, 1620), 'numpy.zeros', 'np.zeros', (['(N, M)'], {'dtype': 'np.complex128'}), '((N, M), dtype=np.complex128)\n', (1591, 1620), True, 'import numpy as np\n'), ((1695, 1732), 'numpy.zeros', 'np.zeros', (['(M, M)'], {'dtype': 'np.complex128'}), '((M, M), dtype=np.complex128)\n', (1703, 1732), True, 'import numpy as np\n'), ((2142, 2179), 'numpy.zeros', 'np.zeros', (['(L, L)'], {'dtype': 'np.complex128'}), '((L, L), dtype=np.complex128)\n', (2150, 2179), True, 'import numpy as np\n'), ((2189, 2201), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (2198, 2201), True, 'import numpy as np\n'), ((2211, 2223), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (2220, 2223), True, 'import numpy as np\n'), ((2288, 2316), 'numpy.outer', 'np.outer', (['yn_scale', 'yn_scale'], {}), '(yn_scale, yn_scale)\n', (2296, 2316), True, 'import numpy as np\n'), ((2378, 2393), 'numpy.fft.fft2', 'np.fft.fft2', (['yn'], {}), '(yn)\n', (2389, 2393), True, 'import numpy as np\n'), ((2408, 2440), 'numpy.zeros', 'np.zeros', (['L'], {'dtype': 'np.complex128'}), '(L, dtype=np.complex128)\n', (2416, 2440), True, 'import numpy as np\n'), ((2661, 2677), 'numpy.fft.ifft2', 'np.fft.ifft2', (['Gr'], {}), '(Gr)\n', (2673, 2677), True, 'import numpy as np\n'), ((2954, 2980), 'numpy.exp', 'np.exp', (['(1.0j * theta_start)'], {}), '(1.0j * theta_start)\n', (2960, 2980), True, 'import numpy as np\n'), ((2987, 3012), 'numpy.exp', 'np.exp', (['(-1.0j * step_size)'], {}), '(-1.0j * step_size)\n', (2993, 3012), True, 'import numpy as np\n'), ((3236, 3262), 'numpy.exp', 'np.exp', (['(1.0j * theta_start)'], {}), '(1.0j * theta_start)\n', (3242, 3262), True, 'import numpy as np\n'), ((3269, 3294), 'numpy.exp', 'np.exp', (['(-1.0j * step_size)'], {}), '(-1.0j * step_size)\n', (3275, 3294), True, 'import numpy as np\n'), ((376, 398), 'numpy.exp', 'np.exp', (['(-1.0j * w * ns)'], {}), '(-1.0j * w * ns)\n', (382, 398), True, 'import numpy as np\n'), ((2339, 2367), 'numpy.outer', 'np.outer', (['yn_scale', 'yn_scale'], {}), '(yn_scale, yn_scale)\n', (2347, 2367), True, 'import numpy as np\n'), ((2607, 2623), 'numpy.outer', 'np.outer', (['vn', 'vn'], {}), '(vn, vn)\n', (2615, 2623), True, 'import numpy as np\n'), ((2749, 2765), 'numpy.outer', 'np.outer', (['Xk', 'Xk'], {}), '(Xk, Xk)\n', (2757, 2765), True, 'import numpy as np\n'), ((582, 593), 'math.log', 'math.log', (['n'], {}), '(n)\n', (590, 593), 
False, 'import math\n'), ((594, 605), 'math.log', 'math.log', (['(2)'], {}), '(2)\n', (602, 605), False, 'import math\n')] |
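A quick sanity sketch; the sizes and frequency grid are arbitrary choices. zoom_fft evaluates the DTFT at theta_start + step_size*k, so it should match the slow dtft above up to floating-point error.
rng = np.random.RandomState(0)
x = rng.randn(64) + 1j * rng.randn(64)
omegas = 0.5 + 0.01 * np.arange(32)
print(np.allclose(dtft(x, omegas), zoom_fft(x, 0.5, 0.01, 32)))  # expected: True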
import sys
import pytest
from hypothesis import strategies as st
from hypothesis import given, settings, example
from unicodedata import normalize
# For every (n1, n2, n3) triple, applying n1 then n2 must be the same
# as applying n3.
# Reference: http://unicode.org/reports/tr15/#Design_Goals
compositions = [
('NFC', 'NFC', 'NFC'),
('NFC', 'NFD', 'NFD'),
('NFC', 'NFKC', 'NFKC'),
('NFC', 'NFKD', 'NFKD'),
('NFD', 'NFC', 'NFC'),
('NFD', 'NFD', 'NFD'),
('NFD', 'NFKC', 'NFKC'),
('NFD', 'NFKD', 'NFKD'),
('NFKC', 'NFC', 'NFKC'),
('NFKC', 'NFD', 'NFKD'),
('NFKC', 'NFKC', 'NFKC'),
('NFKC', 'NFKD', 'NFKD'),
('NFKD', 'NFC', 'NFKC'),
('NFKD', 'NFD', 'NFKD'),
('NFKD', 'NFKC', 'NFKC'),
('NFKD', 'NFKD', 'NFKD'),
]
@pytest.mark.parametrize('norm1, norm2, norm3', compositions)
@settings(max_examples=1000)
@example(s=u'---\uafb8\u11a7---') # issue 2289
@given(s=st.text())
def test_composition(s, norm1, norm2, norm3):
assert normalize(norm2, normalize(norm1, s)) == normalize(norm3, s)
@given(st.text(), st.text(), st.text())
def test_find(u, prefix, suffix):
s = prefix + u + suffix
assert 0 <= s.find(u) <= len(prefix)
assert s.find(u, len(prefix), len(s) - len(suffix)) == len(prefix)
@given(st.text(), st.text(), st.text())
def test_index(u, prefix, suffix):
s = prefix + u + suffix
assert 0 <= s.index(u) <= len(prefix)
assert s.index(u, len(prefix), len(s) - len(suffix)) == len(prefix)
@given(st.text(), st.text(), st.text())
def test_rfind(u, prefix, suffix):
s = prefix + u + suffix
assert s.rfind(u) >= len(prefix)
assert s.rfind(u, len(prefix), len(s) - len(suffix)) == len(prefix)
@given(st.text(), st.text(), st.text())
def test_rindex(u, prefix, suffix):
s = prefix + u + suffix
assert s.rindex(u) >= len(prefix)
assert s.rindex(u, len(prefix), len(s) - len(suffix)) == len(prefix)
def adjust_indices(u, start, end):
if end < 0:
end = max(end + len(u), 0)
else:
end = min(end, len(u))
if start < 0:
start = max(start + len(u), 0)
return start, end
@given(st.text(), st.text())
def test_startswith_basic(u, v):
assert u.startswith(v) is (u[:len(v)] == v)
@example(u'x', u'', 1)
@example(u'x', u'', 2)
@given(st.text(), st.text(), st.integers())
def test_startswith_2(u, v, start):
if v or sys.version_info[0] == 2:
expected = u[start:].startswith(v)
else: # CPython leaks implementation details in this case
expected = start <= len(u)
assert u.startswith(v, start) is expected
@example(u'x', u'', 1, 0)
@example(u'xx', u'', -1, 0)
@given(st.text(), st.text(), st.integers(), st.integers())
def test_startswith_3(u, v, start, end):
if v or sys.version_info[0] == 2:
expected = u[start:end].startswith(v)
else: # CPython leaks implementation details in this case
start0, end0 = adjust_indices(u, start, end)
expected = start0 <= len(u) and start0 <= end0
assert u.startswith(v, start, end) is expected
@given(st.text(), st.text())
def test_endswith_basic(u, v):
if len(v) > len(u):
assert u.endswith(v) is False
else:
assert u.endswith(v) is (u[len(u) - len(v):] == v)
@example(u'x', u'', 1)
@example(u'x', u'', 2)
@given(st.text(), st.text(), st.integers())
def test_endswith_2(u, v, start):
if v or sys.version_info[0] == 2:
expected = u[start:].endswith(v)
else: # CPython leaks implementation details in this case
expected = start <= len(u)
assert u.endswith(v, start) is expected
@example(u'x', u'', 1, 0)
@example(u'xx', u'', -1, 0)
@given(st.text(), st.text(), st.integers(), st.integers())
def test_endswith_3(u, v, start, end):
if v or sys.version_info[0] == 2:
expected = u[start:end].endswith(v)
else: # CPython leaks implementation details in this case
start0, end0 = adjust_indices(u, start, end)
expected = start0 <= len(u) and start0 <= end0
assert u.endswith(v, start, end) is expected
| [
"hypothesis.strategies.text",
"hypothesis.example",
"hypothesis.strategies.integers",
"pytest.mark.parametrize",
"hypothesis.settings",
"unicodedata.normalize"
] | [((777, 837), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""norm1, norm2, norm3"""', 'compositions'], {}), "('norm1, norm2, norm3', compositions)\n", (800, 837), False, 'import pytest\n'), ((839, 866), 'hypothesis.settings', 'settings', ([], {'max_examples': '(1000)'}), '(max_examples=1000)\n', (847, 866), False, 'from hypothesis import given, settings, example\n'), ((868, 890), 'hypothesis.example', 'example', ([], {'s': 'u"""---꾸ᆧ---"""'}), "(s=u'---꾸ᆧ---')\n", (875, 890), False, 'from hypothesis import given, settings, example\n'), ((2235, 2256), 'hypothesis.example', 'example', (['u"""x"""', 'u""""""', '(1)'], {}), "(u'x', u'', 1)\n", (2242, 2256), False, 'from hypothesis import given, settings, example\n'), ((2258, 2279), 'hypothesis.example', 'example', (['u"""x"""', 'u""""""', '(2)'], {}), "(u'x', u'', 2)\n", (2265, 2279), False, 'from hypothesis import given, settings, example\n'), ((2587, 2611), 'hypothesis.example', 'example', (['u"""x"""', 'u""""""', '(1)', '(0)'], {}), "(u'x', u'', 1, 0)\n", (2594, 2611), False, 'from hypothesis import given, settings, example\n'), ((2613, 2639), 'hypothesis.example', 'example', (['u"""xx"""', 'u""""""', '(-1)', '(0)'], {}), "(u'xx', u'', -1, 0)\n", (2620, 2639), False, 'from hypothesis import given, settings, example\n'), ((3240, 3261), 'hypothesis.example', 'example', (['u"""x"""', 'u""""""', '(1)'], {}), "(u'x', u'', 1)\n", (3247, 3261), False, 'from hypothesis import given, settings, example\n'), ((3263, 3284), 'hypothesis.example', 'example', (['u"""x"""', 'u""""""', '(2)'], {}), "(u'x', u'', 2)\n", (3270, 3284), False, 'from hypothesis import given, settings, example\n'), ((3586, 3610), 'hypothesis.example', 'example', (['u"""x"""', 'u""""""', '(1)', '(0)'], {}), "(u'x', u'', 1, 0)\n", (3593, 3610), False, 'from hypothesis import given, settings, example\n'), ((3612, 3638), 'hypothesis.example', 'example', (['u"""xx"""', 'u""""""', '(-1)', '(0)'], {}), "(u'xx', u'', -1, 0)\n", (3619, 3638), False, 'from hypothesis import given, settings, example\n'), ((1061, 1070), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1068, 1070), True, 'from hypothesis import strategies as st\n'), ((1072, 1081), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1079, 1081), True, 'from hypothesis import strategies as st\n'), ((1083, 1092), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1090, 1092), True, 'from hypothesis import strategies as st\n'), ((1276, 1285), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1283, 1285), True, 'from hypothesis import strategies as st\n'), ((1287, 1296), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1294, 1296), True, 'from hypothesis import strategies as st\n'), ((1298, 1307), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1305, 1307), True, 'from hypothesis import strategies as st\n'), ((1494, 1503), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1501, 1503), True, 'from hypothesis import strategies as st\n'), ((1505, 1514), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1512, 1514), True, 'from hypothesis import strategies as st\n'), ((1516, 1525), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1523, 1525), True, 'from hypothesis import strategies as st\n'), ((1707, 1716), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1714, 1716), True, 'from hypothesis import strategies as st\n'), ((1718, 1727), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1725, 1727), True, 'from 
hypothesis import strategies as st\n'), ((1729, 1738), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (1736, 1738), True, 'from hypothesis import strategies as st\n'), ((2130, 2139), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (2137, 2139), True, 'from hypothesis import strategies as st\n'), ((2141, 2150), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (2148, 2150), True, 'from hypothesis import strategies as st\n'), ((2287, 2296), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (2294, 2296), True, 'from hypothesis import strategies as st\n'), ((2298, 2307), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (2305, 2307), True, 'from hypothesis import strategies as st\n'), ((2309, 2322), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (2320, 2322), True, 'from hypothesis import strategies as st\n'), ((2647, 2656), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (2654, 2656), True, 'from hypothesis import strategies as st\n'), ((2658, 2667), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (2665, 2667), True, 'from hypothesis import strategies as st\n'), ((2669, 2682), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (2680, 2682), True, 'from hypothesis import strategies as st\n'), ((2684, 2697), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (2695, 2697), True, 'from hypothesis import strategies as st\n'), ((3054, 3063), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (3061, 3063), True, 'from hypothesis import strategies as st\n'), ((3065, 3074), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (3072, 3074), True, 'from hypothesis import strategies as st\n'), ((3292, 3301), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (3299, 3301), True, 'from hypothesis import strategies as st\n'), ((3303, 3312), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (3310, 3312), True, 'from hypothesis import strategies as st\n'), ((3314, 3327), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (3325, 3327), True, 'from hypothesis import strategies as st\n'), ((3646, 3655), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (3653, 3655), True, 'from hypothesis import strategies as st\n'), ((3657, 3666), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (3664, 3666), True, 'from hypothesis import strategies as st\n'), ((3668, 3681), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (3679, 3681), True, 'from hypothesis import strategies as st\n'), ((3683, 3696), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (3694, 3696), True, 'from hypothesis import strategies as st\n'), ((1033, 1052), 'unicodedata.normalize', 'normalize', (['norm3', 's'], {}), '(norm3, s)\n', (1042, 1052), False, 'from unicodedata import normalize\n'), ((924, 933), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (931, 933), True, 'from hypothesis import strategies as st\n'), ((1009, 1028), 'unicodedata.normalize', 'normalize', (['norm1', 's'], {}), '(norm1, s)\n', (1018, 1028), False, 'from unicodedata import normalize\n')] |
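A standalone illustration of one row of the composition table above; the sample string is arbitrary.
# NFKD then NFC must equal NFKC (row ('NFKD', 'NFC', 'NFKC') above).
s = u'\u00c5\ufb01'  # A-with-ring + 'fi' ligature
assert normalize('NFC', normalize('NFKD', s)) == normalize('NFKC', s)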
from reader import Reader,TRAIN,TEST,DEV
import matplotlib.pyplot as plt
from preprocess import preprocess
from gensim.models.word2vec import Word2Vec
from gensim.models.phrases import Phrases
from progressbar import AnimatedMarker, Bar, BouncingBar, Counter, ETA, \
AdaptiveETA, FileTransferSpeed, FormatLabel, Percentage, \
ProgressBar, ReverseBar, RotatingMarker, \
SimpleProgress, Timer
from scipy import stats
class Analysis:
def __init__(self):
reader = Reader()
print('loading data')
self.X_train=reader.getData(TRAIN)
print('train data has been loaded!')
self.X_valid=reader.getData(DEV)
print('valid data has been loaded!')
self.X_test=reader.getData(TEST)
print('test data has been loaded!')
self.c_title=[]
self.c_body=[]
self.bigram=Phrases.load('./data/bigram.dat')
self.trigram=Phrases.load('./data/trigram.dat')
def count(self, ori_q, rel_q):
ori_q[0]=preprocess(ori_q[0],bigram=self.bigram,trigram=self.trigram)
rel_q[0]=preprocess(rel_q[0],bigram=self.bigram,trigram=self.trigram)
ori_q[1]=preprocess(ori_q[1],bigram=self.bigram,trigram=self.trigram)
rel_q[1]=preprocess(rel_q[1],bigram=self.bigram,trigram=self.trigram)
self.c_title.append(len(ori_q[0].split()))
self.c_title.append(len(rel_q[0].split()))
self.c_body.append(len(ori_q[1].split()))
self.c_body.append(len(rel_q[1].split()))
def lenDistribution(self, data):
pbar = ProgressBar(widgets=[Percentage(), Bar(), ETA()], maxval=len(data)).start()
for i in range(len(data)):
samples = data[i]
ori_q_id=samples[0]['ORGQ_ID']
ori_q=samples[1]
for j in range(2,len(samples),2):
rel_q=samples[j+1]
self.count(ori_q,rel_q)
pbar.update(i)
def show(self):
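        # The 95th-percentile lengths below are a common basis for choosing a
        # maximum sequence length when padding or truncating.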
print(stats.scoreatpercentile(self.c_title, 95))
plt.hist(self.c_title)
plt.title("Gaussian Histogram")
plt.xlabel("Value")
plt.ylabel("Frequency")
plt.show()
print(stats.scoreatpercentile(self.c_body, 95))
plt.hist(self.c_body)
plt.title("Gaussian Histogram")
plt.xlabel("Value")
plt.ylabel("Frequency")
plt.show()
a = Analysis()
a.lenDistribution(a.X_train)
a.lenDistribution(a.X_valid)
a.lenDistribution(a.X_test)
a.show()
| [
"progressbar.Bar",
"matplotlib.pyplot.hist",
"scipy.stats.scoreatpercentile",
"gensim.models.phrases.Phrases.load",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"preprocess.preprocess",
"progressbar.Percentage",
"progressbar.ETA",
"reader.Reader",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show"
] | [((484, 492), 'reader.Reader', 'Reader', ([], {}), '()\n', (490, 492), False, 'from reader import Reader, TRAIN, TEST, DEV\n'), ((849, 882), 'gensim.models.phrases.Phrases.load', 'Phrases.load', (['"""./data/bigram.dat"""'], {}), "('./data/bigram.dat')\n", (861, 882), False, 'from gensim.models.phrases import Phrases\n'), ((904, 938), 'gensim.models.phrases.Phrases.load', 'Phrases.load', (['"""./data/trigram.dat"""'], {}), "('./data/trigram.dat')\n", (916, 938), False, 'from gensim.models.phrases import Phrases\n'), ((992, 1054), 'preprocess.preprocess', 'preprocess', (['ori_q[0]'], {'bigram': 'self.bigram', 'trigram': 'self.trigram'}), '(ori_q[0], bigram=self.bigram, trigram=self.trigram)\n', (1002, 1054), False, 'from preprocess import preprocess\n'), ((1070, 1132), 'preprocess.preprocess', 'preprocess', (['rel_q[0]'], {'bigram': 'self.bigram', 'trigram': 'self.trigram'}), '(rel_q[0], bigram=self.bigram, trigram=self.trigram)\n', (1080, 1132), False, 'from preprocess import preprocess\n'), ((1148, 1210), 'preprocess.preprocess', 'preprocess', (['ori_q[1]'], {'bigram': 'self.bigram', 'trigram': 'self.trigram'}), '(ori_q[1], bigram=self.bigram, trigram=self.trigram)\n', (1158, 1210), False, 'from preprocess import preprocess\n'), ((1226, 1288), 'preprocess.preprocess', 'preprocess', (['rel_q[1]'], {'bigram': 'self.bigram', 'trigram': 'self.trigram'}), '(rel_q[1], bigram=self.bigram, trigram=self.trigram)\n', (1236, 1288), False, 'from preprocess import preprocess\n'), ((1993, 2015), 'matplotlib.pyplot.hist', 'plt.hist', (['self.c_title'], {}), '(self.c_title)\n', (2001, 2015), True, 'import matplotlib.pyplot as plt\n'), ((2024, 2055), 'matplotlib.pyplot.title', 'plt.title', (['"""Gaussian Histogram"""'], {}), "('Gaussian Histogram')\n", (2033, 2055), True, 'import matplotlib.pyplot as plt\n'), ((2064, 2083), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Value"""'], {}), "('Value')\n", (2074, 2083), True, 'import matplotlib.pyplot as plt\n'), ((2092, 2115), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency"""'], {}), "('Frequency')\n", (2102, 2115), True, 'import matplotlib.pyplot as plt\n'), ((2124, 2134), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2132, 2134), True, 'import matplotlib.pyplot as plt\n'), ((2199, 2220), 'matplotlib.pyplot.hist', 'plt.hist', (['self.c_body'], {}), '(self.c_body)\n', (2207, 2220), True, 'import matplotlib.pyplot as plt\n'), ((2229, 2260), 'matplotlib.pyplot.title', 'plt.title', (['"""Gaussian Histogram"""'], {}), "('Gaussian Histogram')\n", (2238, 2260), True, 'import matplotlib.pyplot as plt\n'), ((2269, 2288), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Value"""'], {}), "('Value')\n", (2279, 2288), True, 'import matplotlib.pyplot as plt\n'), ((2297, 2320), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency"""'], {}), "('Frequency')\n", (2307, 2320), True, 'import matplotlib.pyplot as plt\n'), ((2329, 2339), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2337, 2339), True, 'import matplotlib.pyplot as plt\n'), ((1942, 1983), 'scipy.stats.scoreatpercentile', 'stats.scoreatpercentile', (['self.c_title', '(95)'], {}), '(self.c_title, 95)\n', (1965, 1983), False, 'from scipy import stats\n'), ((2149, 2189), 'scipy.stats.scoreatpercentile', 'stats.scoreatpercentile', (['self.c_body', '(95)'], {}), '(self.c_body, 95)\n', (2172, 2189), False, 'from scipy import stats\n'), ((1563, 1575), 'progressbar.Percentage', 'Percentage', ([], {}), '()\n', (1573, 1575), False, 'from progressbar import AnimatedMarker, Bar, 
BouncingBar, Counter, ETA, AdaptiveETA, FileTransferSpeed, FormatLabel, Percentage, ProgressBar, ReverseBar, RotatingMarker, SimpleProgress, Timer\n'), ((1577, 1582), 'progressbar.Bar', 'Bar', ([], {}), '()\n', (1580, 1582), False, 'from progressbar import AnimatedMarker, Bar, BouncingBar, Counter, ETA, AdaptiveETA, FileTransferSpeed, FormatLabel, Percentage, ProgressBar, ReverseBar, RotatingMarker, SimpleProgress, Timer\n'), ((1584, 1589), 'progressbar.ETA', 'ETA', ([], {}), '()\n', (1587, 1589), False, 'from progressbar import AnimatedMarker, Bar, BouncingBar, Counter, ETA, AdaptiveETA, FileTransferSpeed, FormatLabel, Percentage, ProgressBar, ReverseBar, RotatingMarker, SimpleProgress, Timer\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RuleGroupNamespaceArgs', 'RuleGroupNamespace']
@pulumi.input_type
class RuleGroupNamespaceArgs:
def __init__(__self__, *,
data: pulumi.Input[str],
workspace_id: pulumi.Input[str],
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a RuleGroupNamespace resource.
:param pulumi.Input[str] data: the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
:param pulumi.Input[str] workspace_id: The id of the prometheus workspace the rule group namespace should be linked to
:param pulumi.Input[str] name: The name of the rule group namespace
"""
pulumi.set(__self__, "data", data)
pulumi.set(__self__, "workspace_id", workspace_id)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def data(self) -> pulumi.Input[str]:
"""
the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
"""
return pulumi.get(self, "data")
@data.setter
def data(self, value: pulumi.Input[str]):
pulumi.set(self, "data", value)
@property
@pulumi.getter(name="workspaceId")
def workspace_id(self) -> pulumi.Input[str]:
"""
The id of the prometheus workspace the rule group namespace should be linked to
"""
return pulumi.get(self, "workspace_id")
@workspace_id.setter
def workspace_id(self, value: pulumi.Input[str]):
pulumi.set(self, "workspace_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the rule group namespace
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _RuleGroupNamespaceState:
def __init__(__self__, *,
data: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering RuleGroupNamespace resources.
:param pulumi.Input[str] data: the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
:param pulumi.Input[str] name: The name of the rule group namespace
:param pulumi.Input[str] workspace_id: The id of the prometheus workspace the rule group namespace should be linked to
"""
if data is not None:
pulumi.set(__self__, "data", data)
if name is not None:
pulumi.set(__self__, "name", name)
if workspace_id is not None:
pulumi.set(__self__, "workspace_id", workspace_id)
@property
@pulumi.getter
def data(self) -> Optional[pulumi.Input[str]]:
"""
the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
"""
return pulumi.get(self, "data")
@data.setter
def data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "data", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the rule group namespace
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="workspaceId")
def workspace_id(self) -> Optional[pulumi.Input[str]]:
"""
The id of the prometheus workspace the rule group namespace should be linked to
"""
return pulumi.get(self, "workspace_id")
@workspace_id.setter
def workspace_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "workspace_id", value)
class RuleGroupNamespace(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
data: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages an Amazon Managed Service for Prometheus (AMP) Rule Group Namespace
## Example Usage
```python
import pulumi
import pulumi_aws as aws
demo_workspace = aws.amp.Workspace("demoWorkspace")
demo_rule_group_namespace = aws.amp.RuleGroupNamespace("demoRuleGroupNamespace",
workspace_id=demo_workspace.id,
data=\"\"\"groups:
- name: test
rules:
- record: metric:recording_rule
expr: avg(rate(container_cpu_usage_seconds_total[5m]))
\"\"\")
```
## Import
The prometheus rule group namespace can be imported using the arn, e.g.,
```sh
$ pulumi import aws:amp/ruleGroupNamespace:RuleGroupNamespace demo arn:aws:aps:us-west-2:123456789012:rulegroupsnamespace/IDstring/namespace_name
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] data: the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
:param pulumi.Input[str] name: The name of the rule group namespace
:param pulumi.Input[str] workspace_id: The id of the prometheus workspace the rule group namespace should be linked to
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RuleGroupNamespaceArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an Amazon Managed Service for Prometheus (AMP) Rule Group Namespace
## Example Usage
```python
import pulumi
import pulumi_aws as aws
demo_workspace = aws.amp.Workspace("demoWorkspace")
demo_rule_group_namespace = aws.amp.RuleGroupNamespace("demoRuleGroupNamespace",
workspace_id=demo_workspace.id,
data=\"\"\"groups:
- name: test
rules:
- record: metric:recording_rule
expr: avg(rate(container_cpu_usage_seconds_total[5m]))
\"\"\")
```
## Import
The prometheus rule group namespace can be imported using the arn, e.g.,
```sh
$ pulumi import aws:amp/ruleGroupNamespace:RuleGroupNamespace demo arn:aws:aps:us-west-2:123456789012:rulegroupsnamespace/IDstring/namespace_name
```
:param str resource_name: The name of the resource.
:param RuleGroupNamespaceArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RuleGroupNamespaceArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
data: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RuleGroupNamespaceArgs.__new__(RuleGroupNamespaceArgs)
if data is None and not opts.urn:
raise TypeError("Missing required property 'data'")
__props__.__dict__["data"] = data
__props__.__dict__["name"] = name
if workspace_id is None and not opts.urn:
raise TypeError("Missing required property 'workspace_id'")
__props__.__dict__["workspace_id"] = workspace_id
super(RuleGroupNamespace, __self__).__init__(
'aws:amp/ruleGroupNamespace:RuleGroupNamespace',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
data: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None) -> 'RuleGroupNamespace':
"""
Get an existing RuleGroupNamespace resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] data: the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
:param pulumi.Input[str] name: The name of the rule group namespace
:param pulumi.Input[str] workspace_id: The id of the prometheus workspace the rule group namespace should be linked to
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RuleGroupNamespaceState.__new__(_RuleGroupNamespaceState)
__props__.__dict__["data"] = data
__props__.__dict__["name"] = name
__props__.__dict__["workspace_id"] = workspace_id
return RuleGroupNamespace(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def data(self) -> pulumi.Output[str]:
"""
the rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
"""
return pulumi.get(self, "data")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the rule group namespace
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="workspaceId")
def workspace_id(self) -> pulumi.Output[str]:
"""
The id of the prometheus workspace the rule group namespace should be linked to
"""
return pulumi.get(self, "workspace_id")
| [
"pulumi.getter",
"pulumi.set",
"pulumi.ResourceOptions",
"pulumi.get"
] | [((1726, 1759), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""workspaceId"""'}), "(name='workspaceId')\n", (1739, 1759), False, 'import pulumi\n'), ((4190, 4223), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""workspaceId"""'}), "(name='workspaceId')\n", (4203, 4223), False, 'import pulumi\n'), ((11723, 11756), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""workspaceId"""'}), "(name='workspaceId')\n", (11736, 11756), False, 'import pulumi\n'), ((1130, 1164), 'pulumi.set', 'pulumi.set', (['__self__', '"""data"""', 'data'], {}), "(__self__, 'data', data)\n", (1140, 1164), False, 'import pulumi\n'), ((1173, 1223), 'pulumi.set', 'pulumi.set', (['__self__', '"""workspace_id"""', 'workspace_id'], {}), "(__self__, 'workspace_id', workspace_id)\n", (1183, 1223), False, 'import pulumi\n'), ((1577, 1601), 'pulumi.get', 'pulumi.get', (['self', '"""data"""'], {}), "(self, 'data')\n", (1587, 1601), False, 'import pulumi\n'), ((1674, 1705), 'pulumi.set', 'pulumi.set', (['self', '"""data"""', 'value'], {}), "(self, 'data', value)\n", (1684, 1705), False, 'import pulumi\n'), ((1936, 1968), 'pulumi.get', 'pulumi.get', (['self', '"""workspace_id"""'], {}), "(self, 'workspace_id')\n", (1946, 1968), False, 'import pulumi\n'), ((2057, 2096), 'pulumi.set', 'pulumi.set', (['self', '"""workspace_id"""', 'value'], {}), "(self, 'workspace_id', value)\n", (2067, 2096), False, 'import pulumi\n'), ((2266, 2290), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (2276, 2290), False, 'import pulumi\n'), ((2373, 2404), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (2383, 2404), False, 'import pulumi\n'), ((3723, 3747), 'pulumi.get', 'pulumi.get', (['self', '"""data"""'], {}), "(self, 'data')\n", (3733, 3747), False, 'import pulumi\n'), ((3830, 3861), 'pulumi.set', 'pulumi.set', (['self', '"""data"""', 'value'], {}), "(self, 'data', value)\n", (3840, 3861), False, 'import pulumi\n'), ((4031, 4055), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (4041, 4055), False, 'import pulumi\n'), ((4138, 4169), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (4148, 4169), False, 'import pulumi\n'), ((4410, 4442), 'pulumi.get', 'pulumi.get', (['self', '"""workspace_id"""'], {}), "(self, 'workspace_id')\n", (4420, 4442), False, 'import pulumi\n'), ((4541, 4580), 'pulumi.set', 'pulumi.set', (['self', '"""workspace_id"""', 'value'], {}), "(self, 'workspace_id', value)\n", (4551, 4580), False, 'import pulumi\n'), ((11493, 11517), 'pulumi.get', 'pulumi.get', (['self', '"""data"""'], {}), "(self, 'data')\n", (11503, 11517), False, 'import pulumi\n'), ((11678, 11702), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (11688, 11702), False, 'import pulumi\n'), ((11934, 11966), 'pulumi.get', 'pulumi.get', (['self', '"""workspace_id"""'], {}), "(self, 'workspace_id')\n", (11944, 11966), False, 'import pulumi\n'), ((1265, 1299), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (1275, 1299), False, 'import pulumi\n'), ((3225, 3259), 'pulumi.set', 'pulumi.set', (['__self__', '"""data"""', 'data'], {}), "(__self__, 'data', data)\n", (3235, 3259), False, 'import pulumi\n'), ((3301, 3335), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (3311, 3335), False, 'import pulumi\n'), ((3385, 3435), 'pulumi.set', 'pulumi.set', (['__self__', 
'"""workspace_id"""', 'workspace_id'], {}), "(__self__, 'workspace_id', workspace_id)\n", (3395, 3435), False, 'import pulumi\n'), ((8564, 8588), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (8586, 8588), False, 'import pulumi\n'), ((10880, 10909), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (10902, 10909), False, 'import pulumi\n')] |
import numpy as np
import tensorflow as tf
from baselines.common import tf_util as U
def shift_up(x):
return x-np.min(x)
class MMD_Critic(object):
def __init__(self, ob_size, ac_size, expert_data, reward_scale=1):
self.expert_data = expert_data
self.b1 = np.median(self._l2_distance(expert_data))
self.expert_tensor = tf.convert_to_tensor(expert_data, tf.float32)
self.ob = tf.placeholder(tf.float32, shape=[None, expert_data.shape[1]])
self.b2 = None
self.b2_tf = tf.placeholder(tf.float32)
self.reward_scale = reward_scale
ob_tf = tf.placeholder(tf.float32, shape=[None, ob_size])
ac_tf = tf.placeholder(tf.float32, shape=[None, ac_size])
in_tf = tf.concat([ob_tf, ac_tf], axis=1)
reward = self.build_reward_op(in_tf, self.b2_tf)
self.reward_func = U.function([ob_tf, ac_tf, self.b2_tf], reward)
def set_b2(self, obs, acs):
if self.b2 is None:
rl_data = np.concatenate([obs, acs], axis=1)
self.b2 = np.median(self._l2_distance(rl_data, base=self.expert_data))
def _l2_distance(self, data, base=None):
if base is None:
base = data
n = data.shape[0]
a = np.expand_dims(data, axis=1) #nx1xk
b = np.expand_dims(base, axis=0) #1xmxk
l2_dist = np.sum(np.square(a-b), axis=-1)
return l2_dist
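    # Shape sketch for the broadcasting trick above (hypothetical sizes):
    # data (2, 3) and base (4, 3) give a (2, 1, 3), b (1, 4, 3) and
    # l2_dist (2, 4), with l2_dist[i, j] == sum((data[i] - base[j]) ** 2).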
def _l2_distance_tf(self, data, base=None):
if base is None:
base = data
n = data.shape[0]
a = tf.expand_dims(data, axis=1) #nx1xk
b = tf.expand_dims(base, axis=0) #1xmxk
l2_dist = tf.reduce_sum(tf.square(a-b), axis=-1)
return l2_dist
def get_reward(self, obs, acs, verbose=False):
if obs.ndim == 1:
            return 0  # shortcut: a single reward would be zeroed by shift_up anyway
# obs = np.expand_dims(obs, axis=0)
# acs = np.expand_dims(acs, axis=0)
if self.b2 is not None:
reward = self.reward_func(obs, acs, self.b2)
return np.squeeze(shift_up(reward))
else:
return 0
def build_reward_op(self, ob, mmd_b2):
expert_l2 = self._l2_distance_tf(ob, self.expert_tensor)
rl_l2 = self._l2_distance_tf(ob)
expert_exp = tf.exp(-expert_l2 / self.b1) + tf.exp(-expert_l2 / mmd_b2)
rl_exp = tf.exp(-rl_l2 / mmd_b2) + tf.exp(-rl_l2 / self.b1)
reward = tf.reduce_mean(expert_exp, axis=-1) - tf.reduce_mean(rl_exp, axis=-1)
return reward*self.reward_scale | [
"tensorflow.placeholder",
"numpy.square",
"tensorflow.concat",
"tensorflow.exp",
"numpy.expand_dims",
"numpy.min",
"tensorflow.convert_to_tensor",
"tensorflow.expand_dims",
"numpy.concatenate",
"tensorflow.reduce_mean",
"tensorflow.square",
"baselines.common.tf_util.function"
] | [((118, 127), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (124, 127), True, 'import numpy as np\n'), ((355, 400), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['expert_data', 'tf.float32'], {}), '(expert_data, tf.float32)\n', (375, 400), True, 'import tensorflow as tf\n'), ((419, 481), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, expert_data.shape[1]]'}), '(tf.float32, shape=[None, expert_data.shape[1]])\n', (433, 481), True, 'import tensorflow as tf\n'), ((526, 552), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (540, 552), True, 'import tensorflow as tf\n'), ((611, 660), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, ob_size]'}), '(tf.float32, shape=[None, ob_size])\n', (625, 660), True, 'import tensorflow as tf\n'), ((677, 726), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, ac_size]'}), '(tf.float32, shape=[None, ac_size])\n', (691, 726), True, 'import tensorflow as tf\n'), ((743, 776), 'tensorflow.concat', 'tf.concat', (['[ob_tf, ac_tf]'], {'axis': '(1)'}), '([ob_tf, ac_tf], axis=1)\n', (752, 776), True, 'import tensorflow as tf\n'), ((862, 908), 'baselines.common.tf_util.function', 'U.function', (['[ob_tf, ac_tf, self.b2_tf]', 'reward'], {}), '([ob_tf, ac_tf, self.b2_tf], reward)\n', (872, 908), True, 'from baselines.common import tf_util as U\n'), ((1245, 1273), 'numpy.expand_dims', 'np.expand_dims', (['data'], {'axis': '(1)'}), '(data, axis=1)\n', (1259, 1273), True, 'import numpy as np\n'), ((1293, 1321), 'numpy.expand_dims', 'np.expand_dims', (['base'], {'axis': '(0)'}), '(base, axis=0)\n', (1307, 1321), True, 'import numpy as np\n'), ((1538, 1566), 'tensorflow.expand_dims', 'tf.expand_dims', (['data'], {'axis': '(1)'}), '(data, axis=1)\n', (1552, 1566), True, 'import tensorflow as tf\n'), ((1586, 1614), 'tensorflow.expand_dims', 'tf.expand_dims', (['base'], {'axis': '(0)'}), '(base, axis=0)\n', (1600, 1614), True, 'import tensorflow as tf\n'), ((993, 1027), 'numpy.concatenate', 'np.concatenate', (['[obs, acs]'], {'axis': '(1)'}), '([obs, acs], axis=1)\n', (1007, 1027), True, 'import numpy as np\n'), ((1354, 1370), 'numpy.square', 'np.square', (['(a - b)'], {}), '(a - b)\n', (1363, 1370), True, 'import numpy as np\n'), ((1654, 1670), 'tensorflow.square', 'tf.square', (['(a - b)'], {}), '(a - b)\n', (1663, 1670), True, 'import tensorflow as tf\n'), ((2307, 2335), 'tensorflow.exp', 'tf.exp', (['(-expert_l2 / self.b1)'], {}), '(-expert_l2 / self.b1)\n', (2313, 2335), True, 'import tensorflow as tf\n'), ((2338, 2365), 'tensorflow.exp', 'tf.exp', (['(-expert_l2 / mmd_b2)'], {}), '(-expert_l2 / mmd_b2)\n', (2344, 2365), True, 'import tensorflow as tf\n'), ((2383, 2406), 'tensorflow.exp', 'tf.exp', (['(-rl_l2 / mmd_b2)'], {}), '(-rl_l2 / mmd_b2)\n', (2389, 2406), True, 'import tensorflow as tf\n'), ((2409, 2433), 'tensorflow.exp', 'tf.exp', (['(-rl_l2 / self.b1)'], {}), '(-rl_l2 / self.b1)\n', (2415, 2433), True, 'import tensorflow as tf\n'), ((2451, 2486), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['expert_exp'], {'axis': '(-1)'}), '(expert_exp, axis=-1)\n', (2465, 2486), True, 'import tensorflow as tf\n'), ((2489, 2520), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['rl_exp'], {'axis': '(-1)'}), '(rl_exp, axis=-1)\n', (2503, 2520), True, 'import tensorflow as tf\n')] |
from collections import namedtuple
import os
import re
from typing import Iterator
import sqlite3
from tqdm import tqdm
# A 'transaction' that was proposed by c-lightning and that needs saving to the
# backup. `version` is the `data_version` of the database **after** `transaction`
# has been applied. A 'snapshot' represents a complete copy of the database.
# This is used by the plugin from time to time to allow the backend to compress
# the changelog and forms a new basis for the backup.
# If `Change` contains a snapshot and a transaction, they apply in that order.
Change = namedtuple('Change', ['version', 'snapshot', 'transaction'])
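# For example (illustrative values only): an incremental change is
#   Change(version=42, snapshot=None, transaction=("UPDATE vars SET ...",))
# while a snapshot change also carries the serialized database bytes in `snapshot`.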
class Backend(object):
def __init__(self, destination: str):
"""Read the metadata from the destination and prepare any necessary resources.
After this call the following members must be initialized:
- backend.version: the last data version we wrote to the backend
- backend.prev_version: the previous data version in case we need to
roll back the last one
"""
self.version = None
self.prev_version = None
raise NotImplementedError
def add_change(self, change: Change) -> bool:
"""Add a single change to the backend.
This call should always make sure that the change has been correctly
written and flushed before returning.
"""
raise NotImplementedError
def initialize(self) -> bool:
"""Set up any resources needed by this backend.
"""
raise NotImplementedError
def stream_changes(self) -> Iterator[Change]:
"""Retrieve changes from the backend in order to perform a restore.
"""
raise NotImplementedError
def rewind(self) -> bool:
"""Remove the last change that was added to the backup
Because the transaction is reported to the backup plugin before it is
being committed to the database it can happen that we get notified
about a transaction but then `lightningd` is stopped and the
transaction is not committed. This means the backup includes an
extraneous transaction which needs to be removed. A backend must allow
a single rewind operation, and should fail additional calls to rewind
(we may have at most one pending transaction not being committed at
any time).
"""
raise NotImplementedError
def compact(self):
"""Apply some incremental changes to the snapshot to reduce our size.
"""
raise NotImplementedError
def _db_open(self, dest: str) -> sqlite3.Connection:
db = sqlite3.connect(dest)
db.execute("PRAGMA foreign_keys = 1")
return db
def _restore_snapshot(self, snapshot: bytes, dest: str):
if os.path.exists(dest):
os.unlink(dest)
with open(dest, 'wb') as f:
f.write(snapshot)
self.db = self._db_open(dest)
def _rewrite_stmt(self, stmt: str) -> str:
"""We had a stmt expansion bug in c-lightning, this replicates the fix.
We were expanding statements incorrectly, missing some
whitespace between a param and the `WHERE` keyword. This
re-inserts the space.
"""
stmt = re.sub(r'reserved_til=([0-9]+)WHERE', r'reserved_til=\1 WHERE', stmt)
stmt = re.sub(r'peer_id=([0-9]+)WHERE channels.id=', r'peer_id=\1 WHERE channels.id=', stmt)
return stmt
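    # Illustrative before/after for the fix above (made-up statement):
    #   "UPDATE outputs SET reserved_til=123WHERE prev_out_tx=?"
    #   becomes "UPDATE outputs SET reserved_til=123 WHERE prev_out_tx=?"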
def _restore_transaction(self, tx: Iterator[str]):
assert(self.db)
cur = self.db.cursor()
for q in tx:
q = self._rewrite_stmt(q)
cur.execute(q)
def restore(self, dest: str, remove_existing: bool = False):
"""Restore the backup in this backend to its former glory.
If `dest` is a directory, we assume the default database filename:
lightningd.sqlite3
"""
if os.path.isdir(dest):
dest = os.path.join(dest, "lightningd.sqlite3")
if os.path.exists(dest):
if not remove_existing:
raise ValueError(
"Destination for backup restore exists: {dest}".format(
dest=dest
)
)
os.unlink(dest)
self.db = self._db_open(dest)
for c in tqdm(self.stream_changes(), total=self.version_count):
if c.snapshot is not None:
self._restore_snapshot(c.snapshot, dest)
if c.transaction is not None:
self._restore_transaction(c.transaction)
self.db.commit()
| [
"os.path.exists",
"collections.namedtuple",
"sqlite3.connect",
"os.path.join",
"os.path.isdir",
"os.unlink",
"re.sub"
] | [((583, 643), 'collections.namedtuple', 'namedtuple', (['"""Change"""', "['version', 'snapshot', 'transaction']"], {}), "('Change', ['version', 'snapshot', 'transaction'])\n", (593, 643), False, 'from collections import namedtuple\n'), ((2646, 2667), 'sqlite3.connect', 'sqlite3.connect', (['dest'], {}), '(dest)\n', (2661, 2667), False, 'import sqlite3\n'), ((2805, 2825), 'os.path.exists', 'os.path.exists', (['dest'], {}), '(dest)\n', (2819, 2825), False, 'import os\n'), ((3274, 3342), 're.sub', 're.sub', (['"""reserved_til=([0-9]+)WHERE"""', '"""reserved_til=\\\\1 WHERE"""', 'stmt'], {}), "('reserved_til=([0-9]+)WHERE', 'reserved_til=\\\\1 WHERE', stmt)\n", (3280, 3342), False, 'import re\n'), ((3359, 3447), 're.sub', 're.sub', (['"""peer_id=([0-9]+)WHERE channels.id="""', '"""peer_id=\\\\1 WHERE channels.id="""', 'stmt'], {}), "('peer_id=([0-9]+)WHERE channels.id=',\n 'peer_id=\\\\1 WHERE channels.id=', stmt)\n", (3365, 3447), False, 'import re\n'), ((3921, 3940), 'os.path.isdir', 'os.path.isdir', (['dest'], {}), '(dest)\n', (3934, 3940), False, 'import os\n'), ((4013, 4033), 'os.path.exists', 'os.path.exists', (['dest'], {}), '(dest)\n', (4027, 4033), False, 'import os\n'), ((2839, 2854), 'os.unlink', 'os.unlink', (['dest'], {}), '(dest)\n', (2848, 2854), False, 'import os\n'), ((3961, 4001), 'os.path.join', 'os.path.join', (['dest', '"""lightningd.sqlite3"""'], {}), "(dest, 'lightningd.sqlite3')\n", (3973, 4001), False, 'import os\n'), ((4267, 4282), 'os.unlink', 'os.unlink', (['dest'], {}), '(dest)\n', (4276, 4282), False, 'import os\n')] |
import pendulum
ics_files = (
'_site/courses/web-dev-1/calendar-feed-010.ics',
'_site/courses/web-dev-1/calendar-feed-020.ics',
'_site/courses/web-dev-1/calendar-feed-030.ics',
'_site/courses/web-dev-1/calendar-feed-040.ics',
'_site/courses/web-dev-2/calendar-feed-010.ics',
'_site/courses/web-dev-2/calendar-feed-020.ics',
'_site/courses/web-dev-2/calendar-feed-030.ics',
'_site/courses/web-dev-2/calendar-feed-040.ics',
'_site/courses/web-dev-3/calendar-feed-010.ics',
'_site/courses/web-dev-3/calendar-feed-020.ics',
'_site/courses/web-dev-3/calendar-feed-030.ics',
'_site/courses/web-dev-4/calendar-feed-010.ics',
'_site/courses/web-dev-4/calendar-feed-020.ics',
'_site/courses/web-dev-4/calendar-feed-030.ics',
'_site/courses/web-dev-5/calendar-feed-010.ics',
'_site/courses/web-dev-5/calendar-feed-020.ics',
'_site/courses/web-dev-6/calendar-feed-010.ics',
'_site/courses/web-dev-6/calendar-feed-020.ics',
)
def fix_line_dt(line):
for pref in ('DTSTART', 'DTEND'):
dt_prefix = f'{pref};TZID=America/Toronto:'
if dt_prefix in line:
dt = line.replace(f'{pref};TZID=America/Toronto:', '').strip()
if dt:
dtstart = pendulum.parse(dt, tz='America/Toronto')
line = dt_prefix + dtstart.format('YYYYMMDD[T]HHmmss')
return line
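# Example of the rewrite above (hypothetical feed line):
#   fix_line_dt('DTSTART;TZID=America/Toronto:2020-09-08 13:00:00')
#   returns 'DTSTART;TZID=America/Toronto:20200908T130000'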
for ics_file in ics_files:
lines = []
with open(ics_file, mode='r', encoding='utf-8') as file:
for line in file:
line = line.strip()
if line:
lines.append(fix_line_dt(line))
with open(ics_file, mode='w', encoding='utf-8') as file:
file.write('\n'.join(lines) + '\n')
| [
"pendulum.parse"
] | [((1220, 1260), 'pendulum.parse', 'pendulum.parse', (['dt'], {'tz': '"""America/Toronto"""'}), "(dt, tz='America/Toronto')\n", (1234, 1260), False, 'import pendulum\n')] |
#!/usr/bin/python
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from google.datacatalog_connectors.commons import utils
class RegionTagHelperTestCase(unittest.TestCase):
def test_extract_tag_content_should_return_correct_content(self):
region_tag_helper = utils.RegionTagHelper()
expected_tag_content = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
content_string = \
'[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_START] \n' + \
expected_tag_content + \
'\n[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_END] \n'
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertEqual(extracted_tag_content, expected_tag_content)
def test_extract_multiple_tag_content_should_return_correct_content(self):
region_tag_helper = utils.RegionTagHelper()
expected_tag_content = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
tags_with_content = \
'[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_START] \n' + \
expected_tag_content + \
'\n[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_END] \n'
expected_tag_content_2 = '''
metadata_definition:
- name: 'sp_other_cloud'
purpose: 'This stored procedure run in another cloud.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
tags_with_content_2 = \
'[OTHER_CLOUD_DATA_CATALOG_METADATA_DEFINITION_START] \n' + \
expected_tag_content_2 + \
'\n[OTHER_CLOUD_DATA_CATALOG_METADATA_DEFINITION_END] \n'
content_string = tags_with_content + '\n' + tags_with_content_2
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertEqual(extracted_tag_content, expected_tag_content)
extracted_tag_content_2 = region_tag_helper.extract_content(
'OTHER_CLOUD_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertEqual(extracted_tag_content_2, expected_tag_content_2)
def test_extract_repeated_tag_should_return_last_content(self):
region_tag_helper = utils.RegionTagHelper()
expected_tag_content = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
tags_with_content = \
'[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_START] \n' + \
expected_tag_content + \
'\n[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_END] \n'
expected_tag_content_2 = '''
metadata_definition:
- name: 'sp_other_cloud'
purpose: 'This stored procedure run in another cloud.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
tags_with_content_2 = \
'[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_START] \n' + \
expected_tag_content_2 + \
'\n[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_END] \n'
content_string = tags_with_content + '\n' + tags_with_content_2
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertEqual(extracted_tag_content, expected_tag_content_2)
def test_extract_tag_content_no_end_region_tag_should_return_none(self):
region_tag_helper = utils.RegionTagHelper()
expected_tag_content = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
content_string = \
'[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_START] \n' + \
expected_tag_content + '\n'
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertIsNone(extracted_tag_content)
def test_extract_tag_content_no_start_region_tag_should_return_none(self):
region_tag_helper = utils.RegionTagHelper()
expected_tag_content = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
content_string = expected_tag_content + \
'\n[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_END] \n'
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertIsNone(extracted_tag_content)
def test_extract_tag_content_no_region_tags_should_return_none(self):
region_tag_helper = utils.RegionTagHelper()
content_string = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertIsNone(extracted_tag_content)
def test_extract_tag_content_invalid_region_tags_should_return_none(self):
region_tag_helper = utils.RegionTagHelper()
expected_tag_content = '''
metadata_definition:
- name: 'sp_calculateOrder'
purpose: 'This stored procedure will calculate orders.'
inputs:
- name: 'in1'
type: 'string'
outputs:
- name: 'out1'
type: 'int'
'''.strip()
content_string = \
'[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_STARX] \n' + \
expected_tag_content + \
'\n[GOOGLE_DATA_CATALOG_METADATA_DEFINITION_ENX] \n'
extracted_tag_content = region_tag_helper.extract_content(
'GOOGLE_DATA_CATALOG_METADATA_DEFINITION', content_string)
self.assertIsNone(extracted_tag_content)
| [
"google.datacatalog_connectors.commons.utils.RegionTagHelper"
] | [((820, 843), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (841, 843), False, 'from google.datacatalog_connectors.commons import utils\n'), ((1709, 1732), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (1730, 1732), False, 'from google.datacatalog_connectors.commons import utils\n'), ((3446, 3469), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (3467, 3469), False, 'from google.datacatalog_connectors.commons import utils\n'), ((4963, 4986), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (4984, 4986), False, 'from google.datacatalog_connectors.commons import utils\n'), ((5769, 5792), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (5790, 5792), False, 'from google.datacatalog_connectors.commons import utils\n'), ((6549, 6572), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (6570, 6572), False, 'from google.datacatalog_connectors.commons import utils\n'), ((7212, 7235), 'google.datacatalog_connectors.commons.utils.RegionTagHelper', 'utils.RegionTagHelper', ([], {}), '()\n', (7233, 7235), False, 'from google.datacatalog_connectors.commons import utils\n')] |
from typing import Callable, List
import numpy as np
from decimal import Decimal
MAX_NEWTON_ITERATIONS = 7
phiType = Callable[[float, np.ndarray, float], np.ndarray]
fType = Callable[[float, np.ndarray], np.ndarray]
jacobianType = List[List[Callable[[float, float, np.ndarray], float]]]
matrixType = np.ndarray
class Function:
def __init__(self, fArray: List[fType]):
self.fArray = fArray
for i in range(len(fArray)):
print(fArray[i](0, np.array([1, 1, 1, -1])))
def __call__(self, time: float, U: np.ndarray) -> np.ndarray:
result = np.empty(len(self.fArray))
for i in range(len(self.fArray)):
result[i] = self.fArray[i](time, U)
return result
def solveEDO(u0: np.ndarray, phi: phiType, interval: np.ndarray, discretization: int):
U = np.empty((discretization + 1, len(u0)))
U[0] = u0
step = (interval[1] - interval[0]) / discretization
for iterations in range(discretization):
U[iterations + 1] = U[iterations] + \
step * phi(interval[0] + step * iterations, U[iterations], step)
return U
def getSolution(exactF: fType, interval: np.ndarray, discretization: int):
X = np.empty((discretization + 1, len(exactF)))
step = (interval[1] - interval[0]) / discretization
for iterations in range(discretization + 1):
for i in range(len(exactF)):
X[iterations, i] = exactF[i](interval[0] + iterations * step)
return X
def generateRK44Phi(f: Function) -> phiType:
def phi(time: float, currentU: np.ndarray, step: float) -> np.ndarray:
kappa1 = f(time, currentU)
kappa2 = f(time + step / 2, currentU + step * kappa1 / 2)
kappa3 = f(time + step / 2, currentU + step * kappa2 / 2)
kappa4 = f(time + step, currentU + step * kappa3)
return (kappa1 + 2 * kappa2 + 2 * kappa3 + kappa4) / 6
return phi
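# Minimal usage sketch (hypothetical scalar ODE u' = -u with u(0) = 1):
#   f = Function([lambda t, U: -U[0]])
#   U = solveEDO(np.array([1.0]), generateRK44Phi(f), np.array([0.0, 1.0]), 100)
#   # U[-1][0] approximates exp(-1) ~= 0.3679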
def generateImplicitEulerPhi(f: Function, J: jacobianType) -> phiType:
def phi(time: float, currentU: np.ndarray, step: float) -> np.ndarray:
def generateG(currentU: np.ndarray) -> Function:
def g(time: float, nextU: np.ndarray):
return nextU - step * f(time, nextU) - currentU
return g
def generateJacobian(time: float, nextU: np.ndarray, currentU: np.ndarray) -> matrixType:
jacobian = np.empty((len(J), len(J)))
for i in range(len(J)):
line = np.empty(len(J[i]))
for j in range(len(J[i])):
line[j] = J[i][j](time, step, *nextU)
jacobian[i] = line
return jacobian
def inverseJacobian(time: float, nextU: np.ndarray, currentU: np.ndarray) -> matrixType:
return np.linalg.inv(generateJacobian(time, nextU, currentU))
def newtonIteration(currentNextUAproximation: np.ndarray, previousNextUAproximation: np.ndarray):
g = generateG(previousNextUAproximation)
            # Newton step: x_{k+1} = x_k - J^{-1} g(x_k); this needs a matrix-vector
            # product (@), not elementwise multiplication.
            return currentNextUAproximation - inverseJacobian(time, currentNextUAproximation, previousNextUAproximation) @ g(time + step, currentNextUAproximation)
nextU = previousNextUAproximation = currentNextUAproximation = currentU
for dummyIterationCounter in range(MAX_NEWTON_ITERATIONS):
previousNextUAproximation = currentNextUAproximation
currentNextUAproximation = nextU
nextU = newtonIteration(currentNextUAproximation,
previousNextUAproximation)
return nextU
return phi
| [
"numpy.array"
] | [((472, 495), 'numpy.array', 'np.array', (['[1, 1, 1, -1]'], {}), '([1, 1, 1, -1])\n', (480, 495), True, 'import numpy as np\n')] |
import datetime
import pandas as pd
import numpy as np
def make_date_map(df, last_day_column) -> dict:
    '''Return a dict mapping the integer 'DAY' index to its datetime.'''
# 'DAY' 1 == 2004-03-23
day1 = datetime.datetime(2004, 3, 23) # as derived in transactions notebook; datetime for 'DAY' == 1
ineedthismany = df[last_day_column].max()
last = day1 + datetime.timedelta(days=int(ineedthismany)- 1)
date_range = pd.date_range(day1, last) # date range for our data
    # map DAY to datetime; enumerate() indexes from 0, so we add 1
date_map = {i+1:x for i, x in enumerate(date_range)}
output = df[last_day_column].map(date_map)
output = pd.to_datetime(output)
return date_map
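# Illustrative call (hypothetical frame with a 'LAST_DAY' column):
#   date_map = make_date_map(df, 'LAST_DAY')
#   date_map[1] == pd.Timestamp('2004-03-23')  # 'DAY' 1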
| [
"datetime.datetime",
"pandas.to_datetime",
"pandas.date_range"
] | [((174, 204), 'datetime.datetime', 'datetime.datetime', (['(2004)', '(3)', '(23)'], {}), '(2004, 3, 23)\n', (191, 204), False, 'import datetime\n'), ((399, 424), 'pandas.date_range', 'pd.date_range', (['day1', 'last'], {}), '(day1, last)\n', (412, 424), True, 'import pandas as pd\n'), ((642, 664), 'pandas.to_datetime', 'pd.to_datetime', (['output'], {}), '(output)\n', (656, 664), True, 'import pandas as pd\n')] |
from django.contrib import admin
from .models import SessionLog
admin.site.register(SessionLog)
| [
"django.contrib.admin.site.register"
] | [((66, 97), 'django.contrib.admin.site.register', 'admin.site.register', (['SessionLog'], {}), '(SessionLog)\n', (85, 97), False, 'from django.contrib import admin\n')] |
import numpy as np
from scipy.special import gamma
import bisect
def vTmv(vec, mat=None, vec2=None):
"""Multiply a vector transpose times a matrix times a vector.
@param vec The first vector (will be transposed).
@param mat The matrix in the middle. Identity by default.
@param vec2 The second vector (will not be transposed.) By default, the same as the vec.
@returns Product. Could be a scalar or a matrix depending on whether vec is a row or column
vector.
"""
if len(vec.shape) == 1:
vec = np.reshape(vec, [vec.shape[0], 1])
if mat is None:
mat = np.eye(len(vec))
if vec2 is None:
vec2 = vec
return np.dot(vec.T, np.dot(mat, vec2))
def gammad(d, nu_over_2):
"""D-dimensional gamma function."""
nu = 2.0 * nu_over_2
return np.pi**(d*(d-1.)/4)*np.multiply.reduce([gamma(0.5*(nu+1-i)) for i in range(d)])
def random_wish(dof, S, size=None):
dim = S.shape[0]
if size is None:
x = np.random.multivariate_normal(np.zeros(dim), S, size=dof)
return np.dot(x.T, x)
else:
if isinstance(size, int):
size = (size,)
out = np.empty(size+(dim, dim), dtype=np.float64)
for ind in np.ndindex(size):
x = np.random.multivariate_normal(np.zeros(dim), S, size=dof)
out[ind] = np.dot(x.T, x)
return out
def random_invwish(dof, invS, size=None):
return np.linalg.inv(random_wish(dof, invS, size=size))
def pick_discrete(p):
"""Pick a discrete integer between 0 and len(p) - 1 with probability given by (normalized) p
array. Note that p array will be normalized here."""
c = np.cumsum(p)
c /= c[-1] # Normalize
u = np.random.uniform()
return bisect.bisect(c, u)
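# Sketch of the cumulative-sum sampling above: for p = [0.2, 0.8] we get
# c = [0.2, 1.0], and bisect returns 0 when u < 0.2, else 1, so index 1
# is drawn with probability 0.8.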
# Modified code from http://stackoverflow.com/questions/9081553/python-scatter-plot-size-and-style-of-the-marker/24567352#24567352
def ellipses(x, y, s, q, pa, c='b', ax=None, vmin=None, vmax=None, **kwargs):
"""Scatter plot of ellipses.
(x, y) duh.
s size.
q minor-to-major axes ratio b/a
pa position angle in deg, CCW from +y.
"""
from matplotlib.patches import Ellipse
from matplotlib.collections import PatchCollection
import matplotlib.pyplot as plt
if ax is None:
ax = plt.gca()
    if isinstance(c, str):  # basestring is Python 2-only; str works on Python 3
color = c # ie. use colors.colorConverter.to_rgba_array(c)
else:
color = None # use cmap, norm after collection is created
kwargs.update(color=color)
w, h = s*np.sqrt(q), s/np.sqrt(q)
if np.isscalar(x):
patches = [Ellipse((x, y), w, h, pa), ]
else:
patches = [Ellipse((x_, y_), w_, h_, pa_) for x_, y_, w_, h_, pa_ in zip(x, y, w, h, pa)]
collection = PatchCollection(patches, **kwargs)
if color is None:
collection.set_array(np.asarray(c))
if vmin is not None or vmax is not None:
collection.set_clim(vmin, vmax)
ax.add_collection(collection)
ax.autoscale_view()
return collection
def plot_ellipse(mu, Sig, ax=None, **kwargs):
import matplotlib.pyplot as plt
if ax is None:
ax = plt.gca()
val, vec = np.linalg.eigh(Sig)
# 5.991 gives 95% ellipses
s = np.sqrt(np.sqrt(5.991*val[0]*val[1]))
q = np.sqrt(val[0]/val[1])
pa = np.arctan2(vec[0, 1], vec[0, 0])*180/np.pi
ellipses(mu[0], mu[1], s, q, pa, ax=ax, **kwargs)
| [
"numpy.sqrt",
"numpy.isscalar",
"numpy.reshape",
"matplotlib.pyplot.gca",
"numpy.asarray",
"numpy.ndindex",
"matplotlib.collections.PatchCollection",
"bisect.bisect",
"numpy.dot",
"numpy.zeros",
"numpy.empty",
"numpy.arctan2",
"scipy.special.gamma",
"numpy.random.uniform",
"numpy.linalg.eigh",
"numpy.cumsum",
"matplotlib.patches.Ellipse"
] | [((1679, 1691), 'numpy.cumsum', 'np.cumsum', (['p'], {}), '(p)\n', (1688, 1691), True, 'import numpy as np\n'), ((1727, 1746), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (1744, 1746), True, 'import numpy as np\n'), ((1758, 1777), 'bisect.bisect', 'bisect.bisect', (['c', 'u'], {}), '(c, u)\n', (1771, 1777), False, 'import bisect\n'), ((2591, 2605), 'numpy.isscalar', 'np.isscalar', (['x'], {}), '(x)\n', (2602, 2605), True, 'import numpy as np\n'), ((2780, 2814), 'matplotlib.collections.PatchCollection', 'PatchCollection', (['patches'], {}), '(patches, **kwargs)\n', (2795, 2814), False, 'from matplotlib.collections import PatchCollection\n'), ((3197, 3216), 'numpy.linalg.eigh', 'np.linalg.eigh', (['Sig'], {}), '(Sig)\n', (3211, 3216), True, 'import numpy as np\n'), ((3302, 3326), 'numpy.sqrt', 'np.sqrt', (['(val[0] / val[1])'], {}), '(val[0] / val[1])\n', (3309, 3326), True, 'import numpy as np\n'), ((557, 591), 'numpy.reshape', 'np.reshape', (['vec', '[vec.shape[0], 1]'], {}), '(vec, [vec.shape[0], 1])\n', (567, 591), True, 'import numpy as np\n'), ((708, 725), 'numpy.dot', 'np.dot', (['mat', 'vec2'], {}), '(mat, vec2)\n', (714, 725), True, 'import numpy as np\n'), ((1076, 1090), 'numpy.dot', 'np.dot', (['x.T', 'x'], {}), '(x.T, x)\n', (1082, 1090), True, 'import numpy as np\n'), ((1176, 1221), 'numpy.empty', 'np.empty', (['(size + (dim, dim))'], {'dtype': 'np.float64'}), '(size + (dim, dim), dtype=np.float64)\n', (1184, 1221), True, 'import numpy as np\n'), ((1239, 1255), 'numpy.ndindex', 'np.ndindex', (['size'], {}), '(size)\n', (1249, 1255), True, 'import numpy as np\n'), ((2320, 2329), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2327, 2329), True, 'import matplotlib.pyplot as plt\n'), ((3172, 3181), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3179, 3181), True, 'import matplotlib.pyplot as plt\n'), ((3264, 3296), 'numpy.sqrt', 'np.sqrt', (['(5.991 * val[0] * val[1])'], {}), '(5.991 * val[0] * val[1])\n', (3271, 3296), True, 'import numpy as np\n'), ((1033, 1046), 'numpy.zeros', 'np.zeros', (['dim'], {}), '(dim)\n', (1041, 1046), True, 'import numpy as np\n'), ((1354, 1368), 'numpy.dot', 'np.dot', (['x.T', 'x'], {}), '(x.T, x)\n', (1360, 1368), True, 'import numpy as np\n'), ((2558, 2568), 'numpy.sqrt', 'np.sqrt', (['q'], {}), '(q)\n', (2565, 2568), True, 'import numpy as np\n'), ((2572, 2582), 'numpy.sqrt', 'np.sqrt', (['q'], {}), '(q)\n', (2579, 2582), True, 'import numpy as np\n'), ((2626, 2651), 'matplotlib.patches.Ellipse', 'Ellipse', (['(x, y)', 'w', 'h', 'pa'], {}), '((x, y), w, h, pa)\n', (2633, 2651), False, 'from matplotlib.patches import Ellipse\n'), ((2684, 2714), 'matplotlib.patches.Ellipse', 'Ellipse', (['(x_, y_)', 'w_', 'h_', 'pa_'], {}), '((x_, y_), w_, h_, pa_)\n', (2691, 2714), False, 'from matplotlib.patches import Ellipse\n'), ((2867, 2880), 'numpy.asarray', 'np.asarray', (['c'], {}), '(c)\n', (2877, 2880), True, 'import numpy as np\n'), ((3334, 3366), 'numpy.arctan2', 'np.arctan2', (['vec[0, 1]', 'vec[0, 0]'], {}), '(vec[0, 1], vec[0, 0])\n', (3344, 3366), True, 'import numpy as np\n'), ((871, 896), 'scipy.special.gamma', 'gamma', (['(0.5 * (nu + 1 - i))'], {}), '(0.5 * (nu + 1 - i))\n', (876, 896), False, 'from scipy.special import gamma\n'), ((1303, 1316), 'numpy.zeros', 'np.zeros', (['dim'], {}), '(dim)\n', (1311, 1316), True, 'import numpy as np\n')] |
"""
Viewer for optical flow data.
"""
import sys
import cv2
import keyboard
import numpy as np
import os.path as osp
import matplotlib.pyplot as plt
from glob import glob
from viflow.utils import load_config, to_filename, load_optical_flow
def flow2image(flow):
flow = np.float32(flow)
mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
h, w, _ = flow.shape
hsv = np.zeros((h, w, 3), dtype=np.float32)
hsv[..., 1] = 255
hsv[..., 0] = ang * 180 / np.pi / 2
hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
return cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
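# Sketch of the mapping above: hue encodes flow direction (OpenCV hue spans
# 0-179, hence angle / 2) and value encodes the normalized magnitude, so a
# uniform rightward flow renders as one saturated color.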
def view_optical_flow(filepath, cfg):
flows = load_optical_flow(filepath)
ds = cfg.view_downsample
plt.ion()
fig = plt.figure(figsize=(6, 8))
ax = fig.add_subplot(111)
ax.set_axis_off()
ax.set_title(to_filename(filepath))
fig.show()
for i in range(flows.shape[0]):
flow = flows[i][::ds, ::ds, :]
r, c, _ = flow.shape
X, Y = np.arange(0, c, 1), np.arange(r, 0, -1)
U, V = flow[:, :, 0], flow[:, :, 1]
if cfg.view_mode == 'arrow':
plot = ax.quiver(X, Y, U, V, scale=100, color=(0, 0, 0, 0.5))
else:
plot = ax.imshow(flow2image(flow), alpha=1)
plt.draw()
plt.pause(cfg.view_pause)
plot.remove()
if keyboard.is_pressed('q'):
return False
return True
def main(cfg_filepath):
cfg = load_config(cfg_filepath)
inpath = osp.join(cfg.outdir, '*.npz')
for filepath in glob(inpath):
if not view_optical_flow(filepath, cfg):
break
if __name__ == '__main__':
args = sys.argv
cfg_filepath = '../config.json' if len(args) == 1 else args[1]
main(cfg_filepath)
| [
"matplotlib.pyplot.draw",
"cv2.normalize",
"numpy.arange",
"cv2.cartToPolar",
"viflow.utils.load_optical_flow",
"os.path.join",
"keyboard.is_pressed",
"numpy.zeros",
"matplotlib.pyplot.figure",
"cv2.cvtColor",
"matplotlib.pyplot.ion",
"viflow.utils.to_filename",
"viflow.utils.load_config",
"matplotlib.pyplot.pause",
"numpy.float32",
"glob.glob"
] | [((278, 294), 'numpy.float32', 'np.float32', (['flow'], {}), '(flow)\n', (288, 294), True, 'import numpy as np\n'), ((310, 353), 'cv2.cartToPolar', 'cv2.cartToPolar', (['flow[..., 0]', 'flow[..., 1]'], {}), '(flow[..., 0], flow[..., 1])\n', (325, 353), False, 'import cv2\n'), ((389, 426), 'numpy.zeros', 'np.zeros', (['(h, w, 3)'], {'dtype': 'np.float32'}), '((h, w, 3), dtype=np.float32)\n', (397, 426), True, 'import numpy as np\n'), ((507, 556), 'cv2.normalize', 'cv2.normalize', (['mag', 'None', '(0)', '(255)', 'cv2.NORM_MINMAX'], {}), '(mag, None, 0, 255, cv2.NORM_MINMAX)\n', (520, 556), False, 'import cv2\n'), ((568, 604), 'cv2.cvtColor', 'cv2.cvtColor', (['hsv', 'cv2.COLOR_HSV2BGR'], {}), '(hsv, cv2.COLOR_HSV2BGR)\n', (580, 604), False, 'import cv2\n'), ((657, 684), 'viflow.utils.load_optical_flow', 'load_optical_flow', (['filepath'], {}), '(filepath)\n', (674, 684), False, 'from viflow.utils import load_config, to_filename, load_optical_flow\n'), ((719, 728), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (726, 728), True, 'import matplotlib.pyplot as plt\n'), ((739, 765), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 8)'}), '(figsize=(6, 8))\n', (749, 765), True, 'import matplotlib.pyplot as plt\n'), ((1452, 1477), 'viflow.utils.load_config', 'load_config', (['cfg_filepath'], {}), '(cfg_filepath)\n', (1463, 1477), False, 'from viflow.utils import load_config, to_filename, load_optical_flow\n'), ((1491, 1520), 'os.path.join', 'osp.join', (['cfg.outdir', '"""*.npz"""'], {}), "(cfg.outdir, '*.npz')\n", (1499, 1520), True, 'import os.path as osp\n'), ((1541, 1553), 'glob.glob', 'glob', (['inpath'], {}), '(inpath)\n', (1545, 1553), False, 'from glob import glob\n'), ((835, 856), 'viflow.utils.to_filename', 'to_filename', (['filepath'], {}), '(filepath)\n', (846, 856), False, 'from viflow.utils import load_config, to_filename, load_optical_flow\n'), ((1268, 1278), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (1276, 1278), True, 'import matplotlib.pyplot as plt\n'), ((1287, 1312), 'matplotlib.pyplot.pause', 'plt.pause', (['cfg.view_pause'], {}), '(cfg.view_pause)\n', (1296, 1312), True, 'import matplotlib.pyplot as plt\n'), ((1348, 1372), 'keyboard.is_pressed', 'keyboard.is_pressed', (['"""q"""'], {}), "('q')\n", (1367, 1372), False, 'import keyboard\n'), ((993, 1011), 'numpy.arange', 'np.arange', (['(0)', 'c', '(1)'], {}), '(0, c, 1)\n', (1002, 1011), True, 'import numpy as np\n'), ((1013, 1032), 'numpy.arange', 'np.arange', (['r', '(0)', '(-1)'], {}), '(r, 0, -1)\n', (1022, 1032), True, 'import numpy as np\n')] |
# coding=utf-8
from ovs.utils import execute
from ovs.utils import decorator
class IFace():
@decorator.check_cmd(['ip -V'])
def __init__(self, netns = None):
self.netns = netns if netns else ''
@decorator.check_arg
def add_if(self, if_name, if_type = None, mtu = 1500, args = None):
if if_name:
if_type = ' type ' + if_type if if_type else ''
_, error = execute.exec_cmd('{0} ip link add name {1} mtu {2} {3} {4}'.format(self.netns, if_name, mtu, if_type, args if args else ''))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def add_veth_peer_if(self, local_if, guest_if, mtu = 1500):
return self.add_if(local_if, 'veth', mtu, 'peer name {0} mtu {1}'.format(guest_if, mtu))
@decorator.check_arg
def del_if(self, if_name):
if if_name:
_, error = execute.exec_cmd('{0} ip link delete {1}'.format(self.netns, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def exist_if(self, if_name):
if if_name:
_, error = execute.exec_cmd('{0} ip link show {1}'.format(self.netns, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def set_if(self, if_name, key, value):
if if_name:
if not key or not value:
raise ValueError('Key or Value is None')
_, error = execute.exec_cmd('{0} ip link set {1} {2} {3}'.format(self.netns, if_name, key, value))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def startup(self, if_name):
if if_name:
_, error = execute.exec_cmd('{0} ip link set {1} up'.format(self.netns, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def shutdown(self, if_name):
if if_name:
_, error = execute.exec_cmd('{0} ip link set {1} down'.format(self.netns, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
class Address():
@decorator.check_cmd(['ip -V'])
def __init__(self, netns = None):
self.netns = netns if netns else ''
@decorator.check_arg
def add_addr(self, if_name, ip_addr):
if if_name:
if not ip_addr:
raise ValueError('Ip Address is None')
_, error = execute.exec_cmd('{0} ip addr add {1} dev {2}'.format(self.netns, ip_addr, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def del_addr(self, if_name, ip_addr):
if if_name:
if not ip_addr:
raise ValueError('Ip Address is None')
_, error = execute.exec_cmd('{0} ip addr del {1} dev {2}'.format(self.netns, ip_addr, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
@decorator.check_arg
def flush(self, if_name):
if if_name:
_, error = execute.exec_cmd('{0} ip addr flush dev {1}'.format(self.netns, if_name))
return False if error else True
else:
raise ValueError('Interface name is None')
class Route():
@decorator.check_cmd(['ip -V'])
def __init__(self, netns = None):
self.netns = netns if netns else ''
@decorator.check_arg
def add_route(self, gw, dst_ip = None, if_name = None):
if gw:
dst_ip = 'default' if not dst_ip else dst_ip
if_name = 'dev ' + if_name if if_name else ''
_, error = execute.exec_cmd('{0} ip route replace {1} via {2} {3}'.format(self.netns, dst_ip, gw, if_name))
return False if error else True
else:
raise ValueError('Gateway is None')
@decorator.check_arg
def del_route(self, gw, dst_ip = None, if_name = None):
if gw:
dst_ip = 'default' if not dst_ip else dst_ip
if_name = 'dev ' + if_name if if_name else ''
if gw:
_, error = execute.exec_cmd('{0} ip route del {1} via {2} {3}'.format(self.netns, dst_ip, gw, if_name))
else:
_, error = execute.exec_cmd('{0} ip route del {1}'.format(self.netns, dst_ip))
return False if error else True
else:
raise ValueError('Gateway is None')
@decorator.check_arg
def flush(self):
_, error = execute.exec_cmd('{0} ip route flush cache')
return False if error else True
class Netns():
def __init__(self):
pass
def add_ns(self, ns_name):
if ns_name:
_, error = execute.exec_cmd('ip netns add {0}'.format(ns_name))
return False if error else True
else:
            raise ValueError('Namespace name is None')
def del_ns(self, ns_name):
if ns_name:
_, error = execute.exec_cmd('ip netns del {0}'.format(ns_name))
return False if error else True
else:
            raise ValueError('Namespace name is None')
def get_exec(self, pid):
return 'ip netns exec {0}'.format(pid) if pid else None
def exec_ns(self, pid, cmd):
return execute.exec_cmd('ip netns exec {0} {1}'.format(pid, cmd)) if pid and cmd else None
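# Illustrative usage sketch (assumes iproute2 is installed and sufficient privileges):
#   iface = IFace()
#   iface.add_veth_peer_if('veth-local', 'veth-guest', mtu=1450)
#   Address().add_addr('veth-local', '10.0.0.1/24')
#   Route().add_route('10.0.0.254', if_name='veth-local')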
| [
"ovs.utils.decorator.check_cmd",
"ovs.utils.execute.exec_cmd"
] | [((104, 134), 'ovs.utils.decorator.check_cmd', 'decorator.check_cmd', (["['ip -V']"], {}), "(['ip -V'])\n", (123, 134), False, 'from ovs.utils import decorator\n'), ((2447, 2477), 'ovs.utils.decorator.check_cmd', 'decorator.check_cmd', (["['ip -V']"], {}), "(['ip -V'])\n", (2466, 2477), False, 'from ovs.utils import decorator\n'), ((3676, 3706), 'ovs.utils.decorator.check_cmd', 'decorator.check_cmd', (["['ip -V']"], {}), "(['ip -V'])\n", (3695, 3706), False, 'from ovs.utils import decorator\n'), ((4883, 4927), 'ovs.utils.execute.exec_cmd', 'execute.exec_cmd', (['"""{0} ip route flush cache"""'], {}), "('{0} ip route flush cache')\n", (4899, 4927), False, 'from ovs.utils import execute\n')] |
"""
Closes a session intelligently, for vim/zsh
"""
import os
import re
import sys
from tmux_session_utils import tmux_utils
class Closer:
"""
Class with logic to close session
"""
def __init__(self, session: str = None):
"""
.
"""
self.tmux_session = session if session else get_session(session)
self.pane_count_by_window = {}
self.__get_pane_counts()
self.close_commands = []
self.analyze()
def __get_pane_counts(self):
"""
Gets number of panes in each window
"""
windows = tmux_utils.get_window_list(self.tmux_session)
for window in windows.split("\n"):
win_num_match = re.match(r"^([0-9]+):", window)
pane_match = re.match(r".*([0-9]+) panes?", window)
self.pane_count_by_window[win_num_match.group(1)] = int(pane_match.group(1))
def analyze(self):
"""
Identifies windows and panes, and figures out what commands to use to close them
"""
for window, pane_count in self.pane_count_by_window.items():
for pane in range(pane_count):
pane_command = tmux_utils.get_pane_command(
self.tmux_session, window, pane
)
for run_command in close_program(pane_command):
self.close_commands.append(
'tmux send-keys -t {0}:{1}.{2} {3} "C-m"'.format(
self.tmux_session, window, pane, run_command
)
)
def close(self):
"""
Actually close the session
"""
for command in self.close_commands:
os.system(command)
def close_program(program: str) -> list:
"""
Returns commands needed to close a pane with a given command
"""
commands = []
if program.startswith("vi"):
commands.append(":q!")
commands.append("exit")
# pylint: disable=bad-continuation
elif (
any([program.startswith(shell) for shell in ["zsh", "sh", "bash"]])
or not program
):
commands.append("exit")
else:
print("Unknown command! '{0}'".format(program))
return commands
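# For example: close_program('vim') -> [':q!', 'exit'], close_program('zsh') -> ['exit'].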
def get_session(session: str = None) -> str:
"""
Gets session name from command line, with injectable option
"""
return session if session else sys.argv[1]
if __name__ == "__main__":
Closer(get_session()).close()
| [
"os.system",
"re.match",
"tmux_session_utils.tmux_utils.get_window_list",
"tmux_session_utils.tmux_utils.get_pane_command"
] | [((597, 642), 'tmux_session_utils.tmux_utils.get_window_list', 'tmux_utils.get_window_list', (['self.tmux_session'], {}), '(self.tmux_session)\n', (623, 642), False, 'from tmux_session_utils import tmux_utils\n'), ((714, 744), 're.match', 're.match', (['"""^([0-9]+):"""', 'window'], {}), "('^([0-9]+):', window)\n", (722, 744), False, 'import re\n'), ((771, 808), 're.match', 're.match', (['""".*([0-9]+) panes?"""', 'window'], {}), "('.*([0-9]+) panes?', window)\n", (779, 808), False, 'import re\n'), ((1723, 1741), 'os.system', 'os.system', (['command'], {}), '(command)\n', (1732, 1741), False, 'import os\n'), ((1179, 1239), 'tmux_session_utils.tmux_utils.get_pane_command', 'tmux_utils.get_pane_command', (['self.tmux_session', 'window', 'pane'], {}), '(self.tmux_session, window, pane)\n', (1206, 1239), False, 'from tmux_session_utils import tmux_utils\n')] |
"""
This module contains utilities for testing event schemas.
"""
from datetime import timedelta
from hypothesis import strategies as st
__all__ = [
"VALID_PRIORITIES",
"VALID_MISSION_TIMES",
"INVALID_PRIORITIES",
"EVENT_VALID_MAP",
"EVENT_INVALID_MAP",
]
VALID_PRIORITIES = st.integers(min_value=1, max_value=5)
VALID_MISSION_TIMES = st.timedeltas(min_value=timedelta(0))
INVALID_PRIORITIES = st.one_of(st.integers(max_value=0), st.integers(min_value=6))
EVENT_VALID_MAP = {
"name": st.text(),
"priority": VALID_PRIORITIES,
"mission_time": VALID_MISSION_TIMES,
"type": st.text(),
}
EVENT_INVALID_MAP = {
"priority": INVALID_PRIORITIES,
}
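# Illustrative property-based test (validate_event is assumed, not defined here):
#   from hypothesis import given
#   @given(st.fixed_dictionaries(EVENT_VALID_MAP))
#   def test_accepts_valid_event(event):
#       assert validate_event(event)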
| [
"hypothesis.strategies.text",
"datetime.timedelta",
"hypothesis.strategies.integers"
] | [((297, 334), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(1)', 'max_value': '(5)'}), '(min_value=1, max_value=5)\n', (308, 334), True, 'from hypothesis import strategies as st\n'), ((426, 450), 'hypothesis.strategies.integers', 'st.integers', ([], {'max_value': '(0)'}), '(max_value=0)\n', (437, 450), True, 'from hypothesis import strategies as st\n'), ((452, 476), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(6)'}), '(min_value=6)\n', (463, 476), True, 'from hypothesis import strategies as st\n'), ((512, 521), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (519, 521), True, 'from hypothesis import strategies as st\n'), ((610, 619), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (617, 619), True, 'from hypothesis import strategies as st\n'), ((381, 393), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (390, 393), False, 'from datetime import timedelta\n')] |
# -*- encoding: utf-8 -*-
from __future__ import division
try:
import cPickle as pickle
except ImportError:
import pickle
import sys
import random
import os
import re
import datetime
# TODO: memorize the previous tweet corpus
class MarkovChain(object):
def __init__(self, db_file_path=None):
self.db_file_path = db_file_path
if not db_file_path:
directory = "db"
filename = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
if not os.path.exists(directory):
os.makedirs(directory)
self.db_file_path = os.path.join(directory, filename)
try:
with open(self.db_file_path, 'rb') as dbfile:
self.db = pickle.load(dbfile)
except IOError:
sys.stdout.write('Database file not found, using empty database')
self.db = {}
except ValueError:
sys.stdout.write('Database corrupt or unreadable, using empty database')
self.db = {}
def generate_database(self, text_sample, sentence_sep='[.!?\n]'):
"""Generate word probability database from raw content string."""
# Get an iterator for the 'sentences'
text_sample = self._word_iter(text_sample, sentence_sep)
# We're using '' as special symbol for the beginning
# of a sentence
self.db = {"": {"": 0.0}}
for line in text_sample:
words = line.strip().split() # split words in line
if len(words) == 0:
continue
# first word follows a sentence end
if words[0] in self.db[""]:
self.db[""][words[0]] += 1
else:
self.db[""][words[0]] = 1.0
for i in range(len(words) - 1):
if words[i] in self.db:
# the current word has been found at least once
# increment parametrized wordcounts
if words[i + 1] in self.db[words[i]]:
self.db[words[i]][words[i + 1]] += 1
else:
self.db[words[i]][words[i + 1]] = 1.0
else:
# word has been found for the first time
self.db[words[i]] = {words[i + 1]: 1.0}
# last word precedes a sentence end
if words[len(words) - 1] in self.db:
if "" in self.db[words[len(words) - 1]]:
self.db[words[len(words) - 1]][""] += 1
else:
self.db[words[len(words) - 1]][""] = 1.0
else:
self.db[words[len(words) - 1]] = {"": 1.0}
# We've now got the db filled with parametrized word counts
# We still need to normalize this to represent probabilities
for word in self.db:
wordsum = 0
for nextword in self.db[word]:
wordsum += self.db[word][nextword]
if wordsum != 0:
for nextword in self.db[word]:
self.db[word][nextword] /= wordsum
# Now we dump the db to disk
return self.dumpdb()
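    # Illustrative result for the sample text "a b. a c.":
    #   {'': {'': 0.0, 'a': 1.0}, 'a': {'b': 0.5, 'c': 0.5},
    #    'b': {'': 1.0}, 'c': {'': 1.0}}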
def dumpdb(self):
try:
with open(self.db_file_path, 'wb') as dbfile:
pickle.dump(self.db, dbfile)
# It looks like db was written successfully
return True
except IOError:
sys.stderr.write('Database file could not be written')
return False
def generate_string(self):
""".Generate a "sentence" with the database of known text."""
return self._accumulate_with_seed('')
def generate_string_with_seed(self, seed):
"""Generate a "sentence" with the database and a given word."""
# using str.split here means we're contructing the list in memory
# but as the generated sentence only depends on the last word of the seed
# I'm assuming seeds tend to be rather short.
words = seed.split()
if len(words) > 0 and words[len(words) - 1] in self.db:
sen = ''
if len(words) > 1:
sen = words[0]
for i in range(1, len(words) - 1):
sen = sen + ' ' + words[i]
sen += ' '
return sen + self._accumulate_with_seed(words[len(words) - 1])
# Just pretend we've managed to generate a sentence.
sep = ' '
if seed == '':
sep = ''
return seed + sep + self.generate_string()
@staticmethod
def _word_iter(text, separator='.'):
"""
An iterator over the "words" in the given text, as defined by
the regular expression given as separator.
"""
exp = re.compile(separator)
pos = 0
for occ in exp.finditer(text):
sub = text[pos:occ.start()].strip()
if sub:
yield sub
pos = occ.start() + 1
if pos < len(text):
# take case of the last part
sub = text[pos:].strip()
if sub:
yield sub
def _accumulate_with_seed(self, seed):
"""
Accumulate the generated sentence with a given single word as a seed.
"""
next_word = self._next_word(seed)
sentence = [seed] if seed else []
while next_word:
sentence.append(next_word)
next_word = self._next_word(next_word)
return ' '.join(sentence)
def _next_word(self, lastword):
probmap = self.db[lastword]
sample = random.random()
# since rounding errors might make us miss out on some words
maxprob = 0.0
maxprobword = ""
for candidate in probmap:
# remember which word had the highest probability
            # this is the word we'll default to if we can't find anything else
if probmap[candidate] > maxprob:
maxprob = probmap[candidate]
maxprobword = candidate
if sample > probmap[candidate]:
sample -= probmap[candidate]
else:
return candidate
return maxprobword
| [
"os.path.exists",
"pickle.dump",
"os.makedirs",
"re.compile",
"os.path.join",
"pickle.load",
"sys.stderr.write",
"datetime.datetime.now",
"random.random",
"sys.stdout.write"
] | [((4728, 4749), 're.compile', 're.compile', (['separator'], {}), '(separator)\n', (4738, 4749), False, 'import re\n'), ((5554, 5569), 'random.random', 'random.random', ([], {}), '()\n', (5567, 5569), False, 'import random\n'), ((595, 628), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (607, 628), False, 'import os\n'), ((497, 522), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (511, 522), False, 'import os\n'), ((540, 562), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (551, 562), False, 'import os\n'), ((726, 745), 'pickle.load', 'pickle.load', (['dbfile'], {}), '(dbfile)\n', (737, 745), False, 'import pickle\n'), ((782, 847), 'sys.stdout.write', 'sys.stdout.write', (['"""Database file not found, using empty database"""'], {}), "('Database file not found, using empty database')\n", (798, 847), False, 'import sys\n'), ((912, 984), 'sys.stdout.write', 'sys.stdout.write', (['"""Database corrupt or unreadable, using empty database"""'], {}), "('Database corrupt or unreadable, using empty database')\n", (928, 984), False, 'import sys\n'), ((3256, 3284), 'pickle.dump', 'pickle.dump', (['self.db', 'dbfile'], {}), '(self.db, dbfile)\n', (3267, 3284), False, 'import pickle\n'), ((3401, 3455), 'sys.stderr.write', 'sys.stderr.write', (['"""Database file could not be written"""'], {}), "('Database file could not be written')\n", (3417, 3455), False, 'import sys\n'), ((428, 451), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (449, 451), False, 'import datetime\n')] |
import os
import sys
import socket
import jinja2
backend_nodes = []
existing_entries = []
localhost = socket.gethostbyname(socket.gethostname())
try:
    service_name = os.environ['HAPROXY_SERVICE_NAME_TO_PROXY']
except KeyError:
    sys.exit("Could not find service name to proxy. Make sure you set HAPROXY_SERVICE_NAME_TO_PROXY in ENV.")
try:
    hosts = open("/etc/hosts")
except IOError:
    sys.exit("Could not open /etc/hosts to check dynamic hosts.")
for host in hosts:
host_entry = host.split()
if len(host_entry) > 2:
(host_ip, host_name) = host_entry[0:2]
if (host_ip not in existing_entries) and (host_name.startswith(service_name)) and (host_ip not in ["0.0.0.0", "127.0.0.1", localhost]):
existing_entries.append(host_ip)
backend_nodes.append({'name' : host_name, 'ip' : host_ip})
view_vars = {
'backend_nodes' : backend_nodes
}
sys.stdout.write(jinja2.Template(sys.stdin.read()).render(view_vars, env=os.environ))
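# Illustrative template consumed on stdin (hypothetical):
#   backend app
#   {% for node in backend_nodes %}server {{ node['name'] }} {{ node['ip'] }}:80 check
#   {% endfor %}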
| [
"sys.stdin.read",
"socket.gethostname",
"sys.exit"
] | [((127, 147), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (145, 147), False, 'import socket\n'), ((226, 342), 'sys.exit', 'sys.exit', (['"""Could not find service name to proxy. Make sure you set HAPROXY_SERVICE_NAMES_TO_PROXY in ENV."""'], {}), "(\n 'Could not find service name to proxy. Make sure you set HAPROXY_SERVICE_NAMES_TO_PROXY in ENV.'\n )\n", (234, 342), False, 'import sys\n'), ((378, 439), 'sys.exit', 'sys.exit', (['"""Could not open /etc/hosts to check dynamic hosts."""'], {}), "('Could not open /etc/hosts to check dynamic hosts.')\n", (386, 439), False, 'import sys\n'), ((886, 902), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (900, 902), False, 'import sys\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the PsychoPy library
# Copyright (C) 2002-2018 <NAME> (C) 2019-2021 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
from __future__ import absolute_import, print_function
from builtins import str
from os import path
from pathlib import Path
from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate
from psychopy import logging
from psychopy.localization import _localized as __localized
_localized = __localized.copy()
# only use _localized values for label values, nothing functional:
_localized.update({'lineWidth': _translate('Brush Size'),
'lineColor': _translate('Brush Color'),
'lineColorSpace': _translate('Brush Color Space'),
                   'buttonRequired': _translate('Press Button')})
class BrushComponent(BaseVisualComponent):
"""A class for drawing freehand responses"""
categories = ['Responses']
targets = ['PsychoPy', 'PsychoJS']
iconFile = Path(__file__).parent / 'brush.png'
tooltip = _translate('Brush: a drawing tool')
def __init__(self, exp, parentName, name='brush',
lineColor='$[1,1,1]', lineColorSpace='rgb',
lineWidth=1.5, opacity=1,
buttonRequired=True,
startType='time (s)', startVal=0.0,
stopType='duration (s)', stopVal=1.0,
startEstim='', durationEstim=''):
super(BrushComponent, self).__init__(
exp, parentName, name=name,
startType=startType, startVal=startVal,
stopType=stopType, stopVal=stopVal,
startEstim=startEstim, durationEstim=durationEstim)
self.type = 'Brush'
self.url = "https://www.psychopy.org/builder/components/brush.html"
self.exp.requirePsychopyLibs(['visual'])
self.order.remove("opacity") # Move opacity to the end
self.order += [
"lineWidth", "lineColor", "lineColorSpace", "opacity" # Appearance tab
]
# params
msg = _translate("Fill color of this brush")
self.params['lineColor'] = Param(
lineColor, valType='color', inputType="color", allowedTypes=[], categ='Appearance',
updates='constant',
allowedUpdates=['constant', 'set every repeat'],
hint=msg,
label=_localized['lineColor'])
msg = _translate("Width of the brush's line (always in pixels and limited to 10px max width)")
self.params['lineWidth'] = Param(
lineWidth, valType='num', inputType="spin", allowedTypes=[], categ='Appearance',
updates='constant',
allowedUpdates=['constant', 'set every repeat'],
hint=msg,
label=_localized['lineWidth'])
self.params['lineColorSpace'] = self.params['colorSpace']
del self.params['colorSpace']
msg = _translate("The line opacity")
self.params['opacity'].hint=msg
msg = _translate("Whether a button needs to be pressed to draw (True/False)")
self.params['buttonRequired'] = Param(
buttonRequired, valType='bool', inputType="bool", allowedTypes=[], categ='Basic',
updates='constant',
allowedUpdates=['constant', 'set every repeat'],
hint=msg,
label=_localized['buttonRequired'])
# Remove BaseVisual params which are not needed
del self.params['color'] # because color is defined by lineColor
del self.params['fillColor']
del self.params['borderColor']
del self.params['size'] # because size determined by lineWidth
del self.params['ori']
del self.params['pos']
del self.params['units'] # always in pix
def writeInitCode(self, buff):
params = getInitVals(self.params)
code = ("{name} = visual.Brush(win=win, name='{name}',\n"
" lineWidth={lineWidth},\n"
" lineColor={lineColor},\n"
" lineColorSpace={lineColorSpace},\n"
" opacity={opacity},\n"
" buttonRequired={buttonRequired})").format(name=params['name'],
lineWidth=params['lineWidth'],
lineColor=params['lineColor'],
lineColorSpace=params['lineColorSpace'],
opacity=params['opacity'],
buttonRequired=params['buttonRequired'])
buff.writeIndentedLines(code)
def writeInitCodeJS(self, buff):
# JS code does not use Brush class
params = getInitVals(self.params)
code = ("{name} = {{}};\n"
"get{name} = function() {{\n"
" return ( new visual.ShapeStim({{\n"
" win: psychoJS.window,\n"
" vertices: [[0, 0]],\n"
" lineWidth: {lineWidth},\n"
" lineColor: new util.Color({lineColor}),\n"
" opacity: {opacity},\n"
" closeShape: false,\n"
" autoLog: false\n"
" }}))\n"
"}}\n\n").format(name=params['name'],
lineWidth=params['lineWidth'],
lineColor=params['lineColor'],
opacity=params['opacity'])
buff.writeIndentedLines(code)
# add reset function
code = ("{name}Reset = {name}.reset = function() {{\n"
" if ({name}Shapes.length > 0) {{\n"
" for (let shape of {name}Shapes) {{\n"
" shape.setAutoDraw(false);\n"
" }}\n"
" }}\n"
" {name}AtStartPoint = false;\n"
" {name}Shapes = [];\n"
" {name}CurrentShape = -1;\n"
"}}\n\n").format(name=params['name'])
buff.writeIndentedLines(code)
# Define vars for drawing
code = ("{name}CurrentShape = -1;\n"
"{name}BrushPos = [];\n"
"{name}Pointer = new core.Mouse({{win: psychoJS.window}});\n"
"{name}AtStartPoint = false;\n"
"{name}Shapes = [];\n").format(name=params['name'])
buff.writeIndentedLines(code)
def writeRoutineStartCode(self, buff):
# Write update code
super(BrushComponent, self).writeRoutineStartCode(buff)
# Reset shapes for each trial
buff.writeIndented("{}.reset()\n".format(self.params['name']))
def writeRoutineStartCodeJS(self, buff):
# Write update code
# super(BrushComponent, self).writeRoutineStartCodeJS(buff)
# Reset shapes for each trial
buff.writeIndented("{}Reset();\n".format(self.params['name']))
def writeFrameCodeJS(self, buff):
code = ("if ({name}Pointer.getPressed()[0] === 1 && {name}AtStartPoint != true) {{\n"
" {name}AtStartPoint = true;\n"
" {name}BrushPos = [];\n"
" {name}Shapes.push(get{name}());\n"
" {name}CurrentShape += 1;\n"
" {name}Shapes[{name}CurrentShape].setAutoDraw(true);\n"
"}}\n"
"if ({name}Pointer.getPressed()[0] === 1) {{\n"
" {name}BrushPos.push({name}Pointer.getPos());\n"
" {name}Shapes[{name}CurrentShape].setVertices({name}BrushPos);\n"
"}} else {{\n"
" {name}AtStartPoint = false;\n"
"}}\n".format(name=self.params['name']))
buff.writeIndentedLines(code)
| [
"pathlib.Path",
"psychopy.experiment.components.Param",
"psychopy.experiment.components.getInitVals",
"psychopy.localization._localized.copy",
"psychopy.experiment.components._translate"
] | [((544, 562), 'psychopy.localization._localized.copy', '__localized.copy', ([], {}), '()\n', (560, 562), True, 'from psychopy.localization import _localized as __localized\n'), ((1112, 1147), 'psychopy.experiment.components._translate', '_translate', (['"""Brush: a drawing tool"""'], {}), "('Brush: a drawing tool')\n", (1122, 1147), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((663, 687), 'psychopy.experiment.components._translate', '_translate', (['"""Brush Size"""'], {}), "('Brush Size')\n", (673, 687), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((721, 746), 'psychopy.experiment.components._translate', '_translate', (['"""Brush Color"""'], {}), "('Brush Color')\n", (731, 746), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((785, 816), 'psychopy.experiment.components._translate', '_translate', (['"""Brush Color Space"""'], {}), "('Brush Color Space')\n", (795, 816), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((854, 880), 'psychopy.experiment.components._translate', '_translate', (['"""Press Button"""'], {}), "('Press Button')\n", (864, 880), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((2122, 2160), 'psychopy.experiment.components._translate', '_translate', (['"""Fill color of this brush"""'], {}), "('Fill color of this brush')\n", (2132, 2160), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((2196, 2404), 'psychopy.experiment.components.Param', 'Param', (['lineColor'], {'valType': '"""color"""', 'inputType': '"""color"""', 'allowedTypes': '[]', 'categ': '"""Appearance"""', 'updates': '"""constant"""', 'allowedUpdates': "['constant', 'set every repeat']", 'hint': 'msg', 'label': "_localized['lineColor']"}), "(lineColor, valType='color', inputType='color', allowedTypes=[], categ\n ='Appearance', updates='constant', allowedUpdates=['constant',\n 'set every repeat'], hint=msg, label=_localized['lineColor'])\n", (2201, 2404), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((2472, 2570), 'psychopy.experiment.components._translate', '_translate', (['"""Width of the brush\'s line (always in pixels and limited to 10px max width)"""'], {}), '(\n "Width of the brush\'s line (always in pixels and limited to 10px max width)"\n )\n', (2482, 2570), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((2596, 2801), 'psychopy.experiment.components.Param', 'Param', (['lineWidth'], {'valType': '"""num"""', 'inputType': '"""spin"""', 'allowedTypes': '[]', 'categ': '"""Appearance"""', 'updates': '"""constant"""', 'allowedUpdates': "['constant', 'set every repeat']", 'hint': 'msg', 'label': "_localized['lineWidth']"}), "(lineWidth, valType='num', inputType='spin', allowedTypes=[], categ=\n 'Appearance', updates='constant', allowedUpdates=['constant',\n 'set every repeat'], hint=msg, label=_localized['lineWidth'])\n", (2601, 2801), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((2974, 3004), 'psychopy.experiment.components._translate', '_translate', (['"""The line opacity"""'], {}), "('The line opacity')\n", (2984, 3004), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((3060, 3131), 'psychopy.experiment.components._translate', '_translate', (['"""Whether a button needs to be pressed to draw (True/False)"""'], {}), "('Whether a button needs to be pressed to draw (True/False)')\n", (3070, 3131), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((3172, 3382), 'psychopy.experiment.components.Param', 'Param', (['buttonRequired'], {'valType': '"""bool"""', 'inputType': '"""bool"""', 'allowedTypes': '[]', 'categ': '"""Basic"""', 'updates': '"""constant"""', 'allowedUpdates': "['constant', 'set every repeat']", 'hint': 'msg', 'label': "_localized['buttonRequired']"}), "(buttonRequired, valType='bool', inputType='bool', allowedTypes=[],\n categ='Basic', updates='constant', allowedUpdates=['constant',\n 'set every repeat'], hint=msg, label=_localized['buttonRequired'])\n", (3177, 3382), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((3880, 3904), 'psychopy.experiment.components.getInitVals', 'getInitVals', (['self.params'], {}), '(self.params)\n', (3891, 3904), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((4791, 4815), 'psychopy.experiment.components.getInitVals', 'getInitVals', (['self.params'], {}), '(self.params)\n', (4802, 4815), False, 'from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate\n'), ((1062, 1076), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1066, 1076), False, 'from pathlib import Path\n')]
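writeInitCode and writeInitCodeJS above generate Builder script code by filling a str.format template with the component's parameters. A minimal standalone sketch of that code-generation pattern (the parameter values are illustrative, not PsychoPy defaults):

params = {'name': 'brush', 'lineWidth': 1.5, 'lineColor': '[1,1,1]'}
code = ("{name} = visual.Brush(win=win, name='{name}',\n"
        "   lineWidth={lineWidth},\n"
        "   lineColor={lineColor})").format(**params)
print(code)
# brush = visual.Brush(win=win, name='brush',
#    lineWidth=1.5,
#    lineColor=[1,1,1])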
import tensorflow as tf
import time
from train_monitor import latet_parameters, generate_and_save_images
import numpy as np
# This annotation causes the function to be "compiled".
@tf.function
def train_step(images, epoch,BATCH_SIZE, noise_dim,generator,discriminator,generator_loss,discriminator_loss,generator_optimizer,discriminator_optimizer):
"""
    This is the fundamental training step for a generative adversarial network,
    decorated with @tf.function to compile the function for parallel
    processing.
"""
# define noise vector to be fed into generator
noise = tf.random.normal([BATCH_SIZE, noise_dim])
lat_params = latet_parameters(BATCH_SIZE)
with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
# generate fake images from latent vector and random cosmological
# parameters in the same range as that of the simulation data
generated_images = generator(noise, lat_params, training=True)
# get discriminator predictions on both the fake images and a batch
# of real images from the dataset
fake_output = discriminator((generated_images, lat_params), training=True)
real_output = discriminator(images, training=True)
# calculate the losses based off the chosen loss function for both
# the discriminator and generator
gen_loss = generator_loss(fake_output)
        disc_loss, real_loss, fake_loss = discriminator_loss(real_output,
                                                              fake_output)
# Calculate the gradient steps for the models from the losses calculated
# previously.
gradients_of_generator = gen_tape.gradient(gen_loss,
generator.trainable_variables)
gradients_of_discriminator = disc_tape.gradient(disc_loss,
discriminator.trainable_variables)
# Apply the gradient steps to the generator and discriminator
generator_optimizer.apply_gradients(zip(gradients_of_generator,
generator.trainable_variables))
discriminator_optimizer.apply_gradients(zip(gradients_of_discriminator,
discriminator.trainable_variables))
    # returning these parameters purely for the loss history
return disc_loss, gen_loss, real_loss, fake_loss
def train(dataset, epochs,BATCH_SIZE, noise_dim,generator,discriminator,
generator_loss,discriminator_loss,generator_optimizer,discriminator_optimizer,
seed,comparison,checkpoint,checkpoint_prefix, img_save_freq, ckpt_save_freq):
"""
    This is the high-level training function; it loops the training step
    (defined above). It saves images to a directory every img_save_freq and
    saves checkpoints every ckpt_save_freq. Note that the save frequencies are
    in batches, not epochs! This is because the training datasets can be
    enormous and the epochs too long.
    seed is a random distribution of shape [num_examples_to_generate, noise_dim]
"""
for epoch in range(epochs):
        # start timer for each epoch
        start = time.time()
        i = 0
# do one training step for each image batch
for batch in dataset:
disc_loss, gen_loss, real_loss, fake_loss = train_step(batch,
epoch,BATCH_SIZE, noise_dim,generator,discriminator,
generator_loss,discriminator_loss,generator_optimizer,
discriminator_optimizer)
            i += 1
            if i % img_save_freq == 0:
generate_and_save_images(generator,epoch,i,seed,comparison)
print(f"batch: {i}")
# print the losses for the final image batch in this epoch
print(f"d total: {disc_loss.numpy()}")
print(f"d real: {real_loss.numpy()}")
print(f"d fake: {fake_loss.numpy()}")
print(f"g loss: {gen_loss.numpy()}")
# collect and save losses for future reference
losses = disc_loss, gen_loss, real_loss, fake_loss
np.save("losses/loss at end of epoch_"+str(epoch+1)+"_step_"+str(i),losses)
if i % ckpt_save_freq == 0:
print("saving checkpoint")
checkpoint.save(file_prefix = checkpoint_prefix)
print("checkpoint saved")
print ('Time for epoch {} is {} sec'.format(epoch + 1, time.time()-start))
| [
"tensorflow.random.normal",
"train_monitor.generate_and_save_images",
"train_monitor.latet_parameters",
"tensorflow.GradientTape",
"time.time"
] | [((618, 659), 'tensorflow.random.normal', 'tf.random.normal', (['[BATCH_SIZE, noise_dim]'], {}), '([BATCH_SIZE, noise_dim])\n', (634, 659), True, 'import tensorflow as tf\n'), ((678, 706), 'train_monitor.latet_parameters', 'latet_parameters', (['BATCH_SIZE'], {}), '(BATCH_SIZE)\n', (694, 706), False, 'from train_monitor import latet_parameters, generate_and_save_images\n'), ((735, 752), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (750, 752), True, 'import tensorflow as tf\n'), ((766, 783), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (781, 783), True, 'import tensorflow as tf\n'), ((3300, 3311), 'time.time', 'time.time', ([], {}), '()\n', (3309, 3311), False, 'import time\n'), ((3806, 3869), 'train_monitor.generate_and_save_images', 'generate_and_save_images', (['generator', 'epoch', 'i', 'seed', 'comparison'], {}), '(generator, epoch, i, seed, comparison)\n', (3830, 3869), False, 'from train_monitor import latet_parameters, generate_and_save_images\n'), ((4769, 4780), 'time.time', 'time.time', ([], {}), '()\n', (4778, 4780), False, 'import time\n')] |
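train_step above expects loss functions and optimizers with specific shapes: generator_loss takes the discriminator's output on fakes, and discriminator_loss returns a (total, real, fake) triple. A hedged sketch of compatible definitions (standard non-saturating GAN losses; not necessarily the original project's choices):

import tensorflow as tf

cross_entropy = tf.keras.losses.BinaryCrossentropy(from_logits=True)

def generator_loss(fake_output):
    # the generator wants fakes to be classified as real (label 1)
    return cross_entropy(tf.ones_like(fake_output), fake_output)

def discriminator_loss(real_output, fake_output):
    real_loss = cross_entropy(tf.ones_like(real_output), real_output)
    fake_loss = cross_entropy(tf.zeros_like(fake_output), fake_output)
    return real_loss + fake_loss, real_loss, fake_loss

generator_optimizer = tf.keras.optimizers.Adam(1e-4)
discriminator_optimizer = tf.keras.optimizers.Adam(1e-4)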
"""Add event state and timestamp
Revision ID: 90f8444d5ab7
Revises: 2<PASSWORD>
Create Date: 2017-11-20 23:16:44.079911
"""
# revision identifiers, used by Alembic.
revision = '90f8444d5ab7'
down_revision = '2<PASSWORD>'
from alembic import op
import sqlalchemy as sa
from freshmaker.models import Event
from freshmaker.types import EventState
def upgrade():
with op.batch_alter_table('events', schema=None) as batch_op:
batch_op.add_column(sa.Column('state', sa.Integer(), server_default=str(EventState.INITIALIZED.value), nullable=False))
batch_op.add_column(sa.Column('state_reason', sa.String(), nullable=True))
batch_op.add_column(sa.Column('time_created', sa.DateTime(), nullable=True))
# update state to 'COMPLETE' for historical events
op.execute(
sa.update(Event).values({
'state': op.inline_literal(EventState.COMPLETE.value)
})
)
def downgrade():
with op.batch_alter_table('events', schema=None) as batch_op:
batch_op.drop_column('state')
batch_op.drop_column('state_reason')
batch_op.drop_column('time_created')
| [
"sqlalchemy.DateTime",
"alembic.op.batch_alter_table",
"sqlalchemy.Integer",
"sqlalchemy.String",
"alembic.op.inline_literal",
"sqlalchemy.update"
] | [((375, 418), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""events"""'], {'schema': 'None'}), "('events', schema=None)\n", (395, 418), False, 'from alembic import op\n'), ((945, 988), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""events"""'], {'schema': 'None'}), "('events', schema=None)\n", (965, 988), False, 'from alembic import op\n'), ((479, 491), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (489, 491), True, 'import sqlalchemy as sa\n'), ((614, 625), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (623, 625), True, 'import sqlalchemy as sa\n'), ((697, 710), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (708, 710), True, 'import sqlalchemy as sa\n'), ((808, 824), 'sqlalchemy.update', 'sa.update', (['Event'], {}), '(Event)\n', (817, 824), True, 'import sqlalchemy as sa\n'), ((855, 899), 'alembic.op.inline_literal', 'op.inline_literal', (['EventState.COMPLETE.value'], {}), '(EventState.COMPLETE.value)\n', (872, 899), False, 'from alembic import op\n')] |
import argparse
import json
import multiprocessing
import sys
import traceback
from concurrent.futures._base import as_completed
from concurrent.futures.process import ProcessPoolExecutor
from pathlib import Path
import chainer
from tqdm import tqdm
from analysis.analyzer import is_image, Analyzer
def init_process(model_path, needs_patches, device):
global analyzer
current_process = multiprocessing.current_process()
process_name = current_process.name
if device == "@numpy":
device_id = -1
else:
device_id = int(process_name.split('-')[-1]) - 1
analyzer = Analyzer(model_path, device_id, needs_patches=needs_patches)
def consumer(image_path, file_name):
return analyzer.analyse_path(image_path, file_name)
def main(args, device, num_available_devices):
model_path = Path(args.model)
root_dir = Path(args.root_dir)
image_paths = [file_name for file_name in root_dir.glob('**/*') if is_image(file_name)]
analyzed_images = []
ctx = multiprocessing.get_context('forkserver')
executor = ProcessPoolExecutor(max_workers=num_available_devices, mp_context=ctx, initializer=init_process, initargs=(model_path, not args.no_split, device))
try:
with executor:
            current_jobs = []
            job_paths = {}
            for i, image_path in enumerate(image_paths):
                submitted_job = executor.submit(consumer, image_path, str(image_path.relative_to(root_dir)))
                job_paths[submitted_job] = image_path
                current_jobs.append(submitted_job)
            for job in tqdm(as_completed(current_jobs), total=len(current_jobs)):
                try:
                    result = job.result()
                    analyzed_images.append(result)
                except Exception as e:
                    # report the failing job's own path; the loop variable image_path
                    # would always name the last file submitted
                    print(f"Could not process {job_paths[job]}, reason: {e}")
                    traceback.print_exc(file=sys.stdout)
except KeyboardInterrupt:
pass
with (root_dir / 'handwriting_analysis.json').open('w') as f:
json.dump(analyzed_images, f, indent='\t')
num_has_handwriting = len([im for im in analyzed_images if im['has_handwriting']])
print(f"Handwriting to no handwriting ratio: {num_has_handwriting / len(analyzed_images)}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Provided a dir with images, create a json with info if an image contains handwriting or not")
parser.add_argument("root_dir", help="path to dir to analyse")
parser.add_argument('model', help="model to load")
parser.add_argument("--max-size", type=int, default=2000, help="max size of input before splitting into patches")
parser.add_argument("--no-split", action='store_true', default=False, help="do not split input image into individual patches")
num_available_devices = chainer.backends.cuda.cupy.cuda.runtime.getDeviceCount()
if num_available_devices == 0:
num_available_devices = 1
device = "@numpy"
else:
device = "@cuda"
main(parser.parse_args(), device, num_available_devices)
| [
"argparse.ArgumentParser",
"pathlib.Path",
"json.dump",
"multiprocessing.get_context",
"concurrent.futures.process.ProcessPoolExecutor",
"concurrent.futures._base.as_completed",
"analysis.analyzer.Analyzer",
"analysis.analyzer.is_image",
"chainer.backends.cuda.cupy.cuda.runtime.getDeviceCount",
"traceback.print_exc",
"multiprocessing.current_process"
] | [((398, 431), 'multiprocessing.current_process', 'multiprocessing.current_process', ([], {}), '()\n', (429, 431), False, 'import multiprocessing\n'), ((604, 664), 'analysis.analyzer.Analyzer', 'Analyzer', (['model_path', 'device_id'], {'needs_patches': 'needs_patches'}), '(model_path, device_id, needs_patches=needs_patches)\n', (612, 664), False, 'from analysis.analyzer import is_image, Analyzer\n'), ((826, 842), 'pathlib.Path', 'Path', (['args.model'], {}), '(args.model)\n', (830, 842), False, 'from pathlib import Path\n'), ((858, 877), 'pathlib.Path', 'Path', (['args.root_dir'], {}), '(args.root_dir)\n', (862, 877), False, 'from pathlib import Path\n'), ((1007, 1048), 'multiprocessing.get_context', 'multiprocessing.get_context', (['"""forkserver"""'], {}), "('forkserver')\n", (1034, 1048), False, 'import multiprocessing\n'), ((1064, 1214), 'concurrent.futures.process.ProcessPoolExecutor', 'ProcessPoolExecutor', ([], {'max_workers': 'num_available_devices', 'mp_context': 'ctx', 'initializer': 'init_process', 'initargs': '(model_path, not args.no_split, device)'}), '(max_workers=num_available_devices, mp_context=ctx,\n initializer=init_process, initargs=(model_path, not args.no_split, device))\n', (1083, 1214), False, 'from concurrent.futures.process import ProcessPoolExecutor\n'), ((2250, 2390), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Provided a dir with images, create a json with info if an image contains handwriting or not"""'}), "(description=\n 'Provided a dir with images, create a json with info if an image contains handwriting or not'\n )\n", (2273, 2390), False, 'import argparse\n'), ((2781, 2837), 'chainer.backends.cuda.cupy.cuda.runtime.getDeviceCount', 'chainer.backends.cuda.cupy.cuda.runtime.getDeviceCount', ([], {}), '()\n', (2835, 2837), False, 'import chainer\n'), ((1981, 2023), 'json.dump', 'json.dump', (['analyzed_images', 'f'], {'indent': '"""\t"""'}), "(analyzed_images, f, indent='\\t')\n", (1990, 2023), False, 'import json\n'), ((950, 969), 'analysis.analyzer.is_image', 'is_image', (['file_name'], {}), '(file_name)\n', (958, 969), False, 'from analysis.analyzer import is_image, Analyzer\n'), ((1520, 1546), 'concurrent.futures._base.as_completed', 'as_completed', (['current_jobs'], {}), '(current_jobs)\n', (1532, 1546), False, 'from concurrent.futures._base import as_completed\n'), ((1826, 1862), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (1845, 1862), False, 'import traceback\n')] |
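init_process above pins each pool worker to its own GPU by parsing the worker's process name. A small sketch of just that mapping (the process name is illustrative; forkserver workers are typically named ForkServerProcess-N):

process_name = "ForkServerProcess-3"              # e.g. the third pool worker
device_id = int(process_name.split('-')[-1]) - 1  # worker N -> device N-1
assert device_id == 2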
# @filename: ancensored_downloader.py
# @usage: python ancensored_downloader.py *url to image gallery*
# @author: YedaAnna
# @description: Downloads images from ancensored.com
# @version: 1.0
# @date: Wednesday 3rd November 2015
import os
from bs4 import BeautifulSoup
import urllib.request
import urllib.parse
import time
import sys
import re
import random
import datetime
global base_link, site_link, url
start = time.time()
site_link = "http://ancensored.com/"
url = newurl = []
if len(sys.argv) > 2:
base_link = sys.argv[1]
folder_name = sys.argv[2]
subfolder_name = str(datetime.date.today())
else:
base_link = sys.argv[1]
parsed = urllib.parse.urlparse(sys.argv[1])
folder_name = parsed.path.split('/')[3]
subfolder_name = parsed.path.split('/')[2]
def list_images():
global name, url, img, nextpage_link, newlist, newurl, thumbnail
base_contents = urllib.request.urlopen(base_link).read()
parsed_html = BeautifulSoup(base_contents)
img = parsed_html.find_all(src=re.compile("jpg"))
url = []
newurl = []
for link in img:
url.append(link.get('src'))
avoidthumbnails()
def avoidthumbnails():
global size, newurl
for i in range(len(url)):
try:
size = urllib.request.urlopen(url[i]).info()['Content-Length']
        except (ValueError, urllib.error.URLError) as e:
            print(e)
            continue
if int(size) < 50000: # if size is <50kb it is a thumbnail
fullimages()
else:
newurl = url
download_images()
def fullimages():
global newurl
newurl = []
for i in range(len(url)):
        thumbnail_url_split = urllib.parse.urlparse(url[i])
        if "vthumbs" in thumbnail_url_split.path.split('/'):
            thumbnail = os.path.splitext(url[i])[0]
            newurl.append(thumbnail + '_full.jpg')
        elif "gallery_thumb" in thumbnail_url_split.path.split('/'):
            thumbnail_split_array = thumbnail_url_split.path.split('/')
thumbnail_split_array.pop(4)
thumbnail_split_array.pop(4)
thumbnail = '/'.join(thumbnail_split_array)
newurl.append(site_link + thumbnail)
else:
continue
download_images()
def download_images():
for i in range(len(newurl)):
try:
urllib.request.urlretrieve(
newurl[i], folder_name + str(random.randrange(1000)) + ".jpg")
except urllib.error.URLError as e:
print(e.reason)
if not os.path.exists(os.getcwd() + '/' + folder_name + '/' + subfolder_name):
os.makedirs(os.getcwd() + '/' + folder_name + '/' + subfolder_name)
os.chdir(os.getcwd() + '/' + folder_name + '/' + subfolder_name)
list_images()
print("End of Program :)")
print("Time taken: " + str(time.time() - start) + " seconds")
| [
"re.compile",
"random.randrange",
"os.path.splitext",
"os.getcwd",
"bs4.BeautifulSoup",
"datetime.date.today",
"time.time"
] | [((415, 426), 'time.time', 'time.time', ([], {}), '()\n', (424, 426), False, 'import time\n'), ((953, 981), 'bs4.BeautifulSoup', 'BeautifulSoup', (['base_contents'], {}), '(base_contents)\n', (966, 981), False, 'from bs4 import BeautifulSoup\n'), ((588, 609), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (607, 609), False, 'import datetime\n'), ((1017, 1034), 're.compile', 're.compile', (['"""jpg"""'], {}), "('jpg')\n", (1027, 1034), False, 'import re\n'), ((1737, 1761), 'os.path.splitext', 'os.path.splitext', (['url[i]'], {}), '(url[i])\n', (1753, 1761), False, 'import os\n'), ((2664, 2675), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2673, 2675), False, 'import os\n'), ((2788, 2799), 'time.time', 'time.time', ([], {}), '()\n', (2797, 2799), False, 'import time\n'), ((2526, 2537), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2535, 2537), False, 'import os\n'), ((2599, 2610), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2608, 2610), False, 'import os\n'), ((2398, 2420), 'random.randrange', 'random.randrange', (['(1000)'], {}), '(1000)\n', (2414, 2420), False, 'import random\n')] |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import (absolute_import, division, print_function, unicode_literals)
import os
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ["*"]
TIME_ZONE = 'Europe/Prague'
LANGUAGE_CODE = 'en-us'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = (
'activeview',
)
ROOT_URLCONF = 'urls'
SECRET_KEY = '42'
# - - - - - - - - - - - - - - - - - - -
# TEMPLATES settings for older Django
# - - - - - - - - - - - - - - - - - - -
TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), "templates"),
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
)
# - - - - - - - - - - - - - - - - -
# TEMPLATES settings for new Django
# - - - - - - - - - - - - - - - - -
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(os.path.dirname(__file__), "templates"),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request'
],
},
},
]
| [
"os.path.dirname"
] | [((607, 632), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (622, 632), False, 'import os\n'), ((973, 998), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (988, 998), False, 'import os\n')] |
# -*- coding: utf-8 -*-
__author__ = "苦叶子"
from time import sleep
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.support.select import Select
if __name__ == "__main__":
driver = webdriver.Chrome()
action_chains = ActionChains(driver)
driver.get("file:///Users/lyy/Documents/project/DevAuto/selenium_python/4/demo.html")
ele = driver.find_element_by_id("select_id")
# 通过索引选中第一个
Select(ele).select_by_index(0)
sleep(2)
# 通过value选中第2个
Select(ele).select_by_value("value_2")
sleep(2)
# 通过文本选中第3个
Select(ele).select_by_visible_text("测试数据3")
sleep(2)
driver.quit()
| [
"selenium.webdriver.Chrome",
"time.sleep",
"selenium.webdriver.support.select.Select",
"selenium.webdriver.ActionChains"
] | [((240, 258), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (256, 258), False, 'from selenium import webdriver\n'), ((279, 299), 'selenium.webdriver.ActionChains', 'ActionChains', (['driver'], {}), '(driver)\n', (291, 299), False, 'from selenium.webdriver import ActionChains\n'), ((496, 504), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (501, 504), False, 'from time import sleep\n'), ((572, 580), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (577, 580), False, 'from time import sleep\n'), ((650, 658), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (655, 658), False, 'from time import sleep\n'), ((461, 472), 'selenium.webdriver.support.select.Select', 'Select', (['ele'], {}), '(ele)\n', (467, 472), False, 'from selenium.webdriver.support.select import Select\n'), ((529, 540), 'selenium.webdriver.support.select.Select', 'Select', (['ele'], {}), '(ele)\n', (535, 540), False, 'from selenium.webdriver.support.select import Select\n'), ((602, 613), 'selenium.webdriver.support.select.Select', 'Select', (['ele'], {}), '(ele)\n', (608, 613), False, 'from selenium.webdriver.support.select import Select\n')] |
import logging
log = logging.getLogger(__name__)
if __name__ == '__main__':
print('this is just a tribute')
| [
"logging.getLogger"
] | [((22, 49), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (39, 49), False, 'import logging\n')] |
# Generated by Django 3.2.3 on 2021-05-20 10:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('travello', '0002_destination_subs'),
]
operations = [
migrations.RemoveField(
model_name='destination',
name='name',
),
migrations.RemoveField(
model_name='destination',
name='subs',
),
]
| [
"django.db.migrations.RemoveField"
] | [((226, 287), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""destination"""', 'name': '"""name"""'}), "(model_name='destination', name='name')\n", (248, 287), False, 'from django.db import migrations\n'), ((332, 393), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""destination"""', 'name': '"""subs"""'}), "(model_name='destination', name='subs')\n", (354, 393), False, 'from django.db import migrations\n')] |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2018-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
import asyncio
import itertools
from . import taskgroup
class Supervisor:
def __init__(self, *, _name, _loop, _private):
if _name is None:
self._name = f'sup#{_name_counter()}'
else:
self._name = str(_name)
self._loop = _loop
self._unfinished_tasks = 0
self._cancelled = False
self._tasks = set()
self._errors = []
self._base_error = None
self._on_completed_fut = None
@classmethod
async def create(cls, *, name: str=None):
loop = asyncio.get_running_loop()
return cls(_loop=loop, _name=name, _private=True)
def __repr__(self):
msg = f'<Supervisor {self._name!r}'
if self._tasks:
msg += f' tasks:{len(self._tasks)}'
if self._unfinished_tasks:
msg += f' unfinished:{self._unfinished_tasks}'
if self._errors:
msg += f' errors:{len(self._errors)}'
if self._cancelled:
msg += ' cancelling'
msg += '>'
return msg
def create_task(self, coro):
if self._cancelled:
raise RuntimeError(
f'supervisor {self!r} has already been cancelled')
task = self._loop.create_task(coro)
task.add_done_callback(self._on_task_done)
self._unfinished_tasks += 1
self._tasks.add(task)
return task
async def cancel(self):
self._cancel()
if self._unfinished_tasks:
was_cancelled = await self._wait()
if was_cancelled:
raise asyncio.CancelledError()
async def wait(self):
if self._unfinished_tasks:
was_cancelled = await self._wait()
if was_cancelled:
raise asyncio.CancelledError()
if self._base_error is not None:
raise self._base_error
if self._errors:
# Exceptions are heavy objects that can have object
# cycles (bad for GC); let's not keep a reference to
# a bunch of them.
errors = self._errors
self._errors = None
me = taskgroup.TaskGroupError('unhandled errors in a Supervisor',
errors=errors)
raise me from None
async def _wait(self):
was_cancelled = False
# We use while-loop here because "self._on_completed_fut"
# can be cancelled multiple times if our parent task
# is being cancelled repeatedly (or even once, when
# our own cancellation is already in progress)
while self._unfinished_tasks:
if self._on_completed_fut is None:
self._on_completed_fut = self._loop.create_future()
try:
await self._on_completed_fut
except asyncio.CancelledError:
was_cancelled = True
self._cancel()
self._on_completed_fut = None
assert self._unfinished_tasks == 0
self._on_completed_fut = None # no longer needed
return was_cancelled
def _on_task_done(self, task):
self._unfinished_tasks -= 1
assert self._unfinished_tasks >= 0
if self._on_completed_fut is not None and not self._unfinished_tasks:
if not self._on_completed_fut.done():
self._on_completed_fut.set_result(True)
if task.cancelled():
return
exc = task.exception()
if exc is None:
return
self._errors.append(exc)
if self._is_base_error(exc) and self._base_error is None:
self._base_error = exc
self._cancel()
def _cancel(self):
self._cancelled = True
for t in self._tasks:
if not t.done():
t.cancel()
def _is_base_error(self, exc):
assert isinstance(exc, BaseException)
return not isinstance(exc, Exception)
_name_counter = itertools.count(1).__next__
| [
"asyncio.CancelledError",
"itertools.count",
"asyncio.get_running_loop"
] | [((4678, 4696), 'itertools.count', 'itertools.count', (['(1)'], {}), '(1)\n', (4693, 4696), False, 'import itertools\n'), ((1270, 1296), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (1294, 1296), False, 'import asyncio\n'), ((2296, 2320), 'asyncio.CancelledError', 'asyncio.CancelledError', ([], {}), '()\n', (2318, 2320), False, 'import asyncio\n'), ((2482, 2506), 'asyncio.CancelledError', 'asyncio.CancelledError', ([], {}), '()\n', (2504, 2506), False, 'import asyncio\n')] |
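A minimal usage sketch for the Supervisor above (the worker coroutine is hypothetical): tasks are registered with create_task, and wait() re-raises collected failures as a taskgroup.TaskGroupError.

import asyncio

async def worker(n):
    await asyncio.sleep(0.01)
    return n

async def main():
    sup = await Supervisor.create(name='demo')
    for i in range(3):
        sup.create_task(worker(i))
    await sup.wait()  # raises taskgroup.TaskGroupError if any task failed

asyncio.run(main())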
import smtplib
import ssl
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from api.models import SubmissionAssignmentOne, SubmissionAssignmentTwo, SubmissionAssignmentThree
def send_mail(submission_id, assignment_no):
print(" In send_mail ")
if assignment_no == 1:
sub = SubmissionAssignmentOne.objects.get(id=submission_id)
if assignment_no == 2:
sub = SubmissionAssignmentTwo.objects.get(id=submission_id)
if assignment_no == 3:
sub = SubmissionAssignmentThree.objects.get(id=submission_id)
team = sub.team
message = MIMEMultipart("alternative")
message["Subject"] = "Big Data Assignment Result"
message["From"] = "<EMAIL>"
html = "Hi \n Your bigdata submission with submission id " + str(submission_id) + " has been evaluated \n" \
"<html> " \
"<body> " \
"<h3> Scores </h3> " \
"<p> Task 1: " + str(sub.score_1) + "</p>" + "<p> Task 2: " + str(sub.score_2) + "</p>" \
"<h3> Remarks </h3> " + str(sub.remarks) + "<br>" \
"</body> " \
"</html>"
part = MIMEText(html, "html")
message.attach(part)
emails = [team.member_1, team.member_2, team.member_3, team.member_4]
for email in emails:
if email != 'nan':
_send(email, message)
def _send(receiver_email, message):
print(" Sending mail ")
port = 465 # For SSL
smtp_server = "smtp.gmail.com"
sender_email = "<EMAIL>" # Enter your address
password = "<PASSWORD>" # Enter correct password
context = ssl.create_default_context()
with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
server.login(sender_email, password)
server.sendmail(sender_email, receiver_email, message.as_string())
| [
"smtplib.SMTP_SSL",
"ssl.create_default_context",
"api.models.SubmissionAssignmentTwo.objects.get",
"api.models.SubmissionAssignmentOne.objects.get",
"email.mime.multipart.MIMEMultipart",
"api.models.SubmissionAssignmentThree.objects.get",
"email.mime.text.MIMEText"
] | [((605, 633), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', (['"""alternative"""'], {}), "('alternative')\n", (618, 633), False, 'from email.mime.multipart import MIMEMultipart\n'), ((1116, 1138), 'email.mime.text.MIMEText', 'MIMEText', (['html', '"""html"""'], {}), "(html, 'html')\n", (1124, 1138), False, 'from email.mime.text import MIMEText\n'), ((1572, 1600), 'ssl.create_default_context', 'ssl.create_default_context', ([], {}), '()\n', (1598, 1600), False, 'import ssl\n'), ((325, 378), 'api.models.SubmissionAssignmentOne.objects.get', 'SubmissionAssignmentOne.objects.get', ([], {'id': 'submission_id'}), '(id=submission_id)\n', (360, 378), False, 'from api.models import SubmissionAssignmentOne, SubmissionAssignmentTwo, SubmissionAssignmentThree\n'), ((420, 473), 'api.models.SubmissionAssignmentTwo.objects.get', 'SubmissionAssignmentTwo.objects.get', ([], {'id': 'submission_id'}), '(id=submission_id)\n', (455, 473), False, 'from api.models import SubmissionAssignmentOne, SubmissionAssignmentTwo, SubmissionAssignmentThree\n'), ((515, 570), 'api.models.SubmissionAssignmentThree.objects.get', 'SubmissionAssignmentThree.objects.get', ([], {'id': 'submission_id'}), '(id=submission_id)\n', (552, 570), False, 'from api.models import SubmissionAssignmentOne, SubmissionAssignmentTwo, SubmissionAssignmentThree\n'), ((1610, 1662), 'smtplib.SMTP_SSL', 'smtplib.SMTP_SSL', (['smtp_server', 'port'], {'context': 'context'}), '(smtp_server, port, context=context)\n', (1626, 1662), False, 'import smtplib\n')] |
from django.views.generic import dates
from scrivo.models import Post
from scrivo.settings import DEFAULT_PAGINATE_BY, INDEX_POST_COUNT
class PostArchiveMixin(object):
"""
Mixin to add common archive view attributes
"""
date_field = "published"
queryset = Post.objects.public()
paginate_by = DEFAULT_PAGINATE_BY
class PostArchive(PostArchiveMixin, dates.ArchiveIndexView):
paginate_by = INDEX_POST_COUNT
class PostYearArchive(PostArchiveMixin, dates.YearArchiveView):
make_object_list = True
class PostMonthArchive(PostArchiveMixin, dates.MonthArchiveView):
pass
class PostDayArchive(PostArchiveMixin, dates.DayArchiveView):
pass
class PostDetail(PostArchiveMixin, dates.DateDetailView):
    pass
| [
"scrivo.models.Post.objects.public"
] | [((277, 298), 'scrivo.models.Post.objects.public', 'Post.objects.public', ([], {}), '()\n', (296, 298), False, 'from scrivo.models import Post\n')] |
from typing import Tuple
import torch
import torch.nn as nn
from torch.nn.functional import conv2d
def _get_box_filter(kernel_size: Tuple[int, int]) -> torch.Tensor:
r"""Utility function that returns a box filter."""
kx: float = float(kernel_size[0])
ky: float = float(kernel_size[1])
scale: torch.Tensor = torch.tensor(1.) / torch.tensor([kx * ky])
tmp_kernel: torch.Tensor = torch.ones(1, 1, kernel_size[0], kernel_size[1])
return scale.to(tmp_kernel.dtype) * tmp_kernel
def _compute_zero_padding(kernel_size: Tuple[int, int]) -> Tuple[int, int]:
r"""Utility function that computes zero padding tuple."""
computed: Tuple[int, ...] = tuple([(k - 1) // 2 for k in kernel_size])
return computed[0], computed[1]
class BoxBlur(nn.Module):
r"""Blurs an image using the box filter.
The function smooths an image using the kernel:
.. math::
K = \frac{1}{\text{kernel_size}_x * \text{kernel_size}_y}
\begin{bmatrix}
1 & 1 & 1 & \cdots & 1 & 1 \\
1 & 1 & 1 & \cdots & 1 & 1 \\
\vdots & \vdots & \vdots & \ddots & \vdots & \vdots \\
1 & 1 & 1 & \cdots & 1 & 1 \\
\end{bmatrix}
Args:
kernel_size (Tuple[int, int]): the blurring kernel size.
Returns:
torch.Tensor: the blurred input tensor.
Shape:
- Input: :math:`(B, C, H, W)`
- Output: :math:`(B, C, H, W)`
Example:
>>> input = torch.rand(2, 4, 5, 7)
>>> blur = kornia.filters.BoxBlur((3, 3))
>>> output = blur(input) # 2x4x5x7
"""
def __init__(self, kernel_size: Tuple[int, int]) -> None:
super(BoxBlur, self).__init__()
self.kernel: torch.Tensor = _get_box_filter(kernel_size)
self.padding: Tuple[int, int] = _compute_zero_padding(kernel_size)
def forward(self, input: torch.Tensor): # type: ignore
if not torch.is_tensor(input):
raise TypeError("Input type is not a torch.Tensor. Got {}"
.format(type(input)))
if not len(input.shape) == 4:
raise ValueError("Invalid input shape, we expect BxCxHxW. Got: {}"
.format(input.shape))
# prepare kernel
b, c, h, w = input.shape
tmp_kernel: torch.Tensor = self.kernel.to(input.device).to(input.dtype)
kernel: torch.Tensor = tmp_kernel.repeat(c, 1, 1, 1)
return conv2d(input, kernel, padding=self.padding, stride=1, groups=c)
# functional api
def box_blur(input: torch.Tensor, kernel_size: Tuple[int, int]) -> torch.Tensor:
r"""Blurs an image using the box filter.
See :class:`~kornia.filters.BoxBlur` for details.
"""
return BoxBlur(kernel_size)(input)
| [
"torch.tensor",
"torch.nn.functional.conv2d",
"torch.is_tensor",
"torch.ones"
] | [((400, 448), 'torch.ones', 'torch.ones', (['(1)', '(1)', 'kernel_size[0]', 'kernel_size[1]'], {}), '(1, 1, kernel_size[0], kernel_size[1])\n', (410, 448), False, 'import torch\n'), ((326, 343), 'torch.tensor', 'torch.tensor', (['(1.0)'], {}), '(1.0)\n', (338, 343), False, 'import torch\n'), ((345, 368), 'torch.tensor', 'torch.tensor', (['[kx * ky]'], {}), '([kx * ky])\n', (357, 368), False, 'import torch\n'), ((2430, 2493), 'torch.nn.functional.conv2d', 'conv2d', (['input', 'kernel'], {'padding': 'self.padding', 'stride': '(1)', 'groups': 'c'}), '(input, kernel, padding=self.padding, stride=1, groups=c)\n', (2436, 2493), False, 'from torch.nn.functional import conv2d\n'), ((1902, 1924), 'torch.is_tensor', 'torch.is_tensor', (['input'], {}), '(input)\n', (1917, 1924), False, 'import torch\n')] |
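A quick runnable check of the functional API defined above; shapes follow the documented (B, C, H, W) contract:

import torch

x = torch.rand(2, 4, 5, 7)
y = box_blur(x, (3, 3))  # zero padding keeps the spatial size
assert y.shape == x.shape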
from Jumpscale import j
def load_wiki(**kwargs):
wiki = j.tools.markdowndocs.load(path=kwargs["url"], name=kwargs["repo"])
wiki.write()
class Package(j.baseclasses.threebot_package):
def _init(self, **kwargs):
if "branch" in kwargs.keys():
self.branch = kwargs["branch"]
else:
self.branch = "master"
@property
def bcdb(self):
return j.data.bcdb.system
def load(self):
# TODO: load only relevant wikis
j.servers.myjobs.schedule(
load_wiki,
repo="tokens",
url="https://github.com/threefoldfoundation/info_tokens/tree/%s/docs" % self.branch,
)
j.servers.myjobs.schedule(
load_wiki,
repo="foundation",
url="https://github.com/threefoldfoundation/info_foundation/tree/%s/docs" % self.branch,
)
j.servers.myjobs.schedule(
load_wiki, repo="grid", url="https://github.com/threefoldfoundation/info_grid/tree/%s/docs" % self.branch
)
def prepare(self):
j.threebot.package.wikis.install()
j.servers.myjobs.workers_tmux_start()
self.load()
| [
"Jumpscale.j.servers.myjobs.workers_tmux_start",
"Jumpscale.j.threebot.package.wikis.install",
"Jumpscale.j.servers.myjobs.schedule",
"Jumpscale.j.tools.markdowndocs.load"
] | [((62, 128), 'Jumpscale.j.tools.markdowndocs.load', 'j.tools.markdowndocs.load', ([], {'path': "kwargs['url']", 'name': "kwargs['repo']"}), "(path=kwargs['url'], name=kwargs['repo'])\n", (87, 128), False, 'from Jumpscale import j\n'), ((496, 641), 'Jumpscale.j.servers.myjobs.schedule', 'j.servers.myjobs.schedule', (['load_wiki'], {'repo': '"""tokens"""', 'url': "('https://github.com/threefoldfoundation/info_tokens/tree/%s/docs' % self.\n branch)"}), "(load_wiki, repo='tokens', url=\n 'https://github.com/threefoldfoundation/info_tokens/tree/%s/docs' %\n self.branch)\n", (521, 641), False, 'from Jumpscale import j\n'), ((688, 841), 'Jumpscale.j.servers.myjobs.schedule', 'j.servers.myjobs.schedule', (['load_wiki'], {'repo': '"""foundation"""', 'url': "('https://github.com/threefoldfoundation/info_foundation/tree/%s/docs' %\n self.branch)"}), "(load_wiki, repo='foundation', url=\n 'https://github.com/threefoldfoundation/info_foundation/tree/%s/docs' %\n self.branch)\n", (713, 841), False, 'from Jumpscale import j\n'), ((888, 1030), 'Jumpscale.j.servers.myjobs.schedule', 'j.servers.myjobs.schedule', (['load_wiki'], {'repo': '"""grid"""', 'url': "('https://github.com/threefoldfoundation/info_grid/tree/%s/docs' % self.branch)"}), "(load_wiki, repo='grid', url=\n 'https://github.com/threefoldfoundation/info_grid/tree/%s/docs' % self.\n branch)\n", (913, 1030), False, 'from Jumpscale import j\n'), ((1075, 1109), 'Jumpscale.j.threebot.package.wikis.install', 'j.threebot.package.wikis.install', ([], {}), '()\n', (1107, 1109), False, 'from Jumpscale import j\n'), ((1118, 1155), 'Jumpscale.j.servers.myjobs.workers_tmux_start', 'j.servers.myjobs.workers_tmux_start', ([], {}), '()\n', (1153, 1155), False, 'from Jumpscale import j\n')] |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# *****************************************************************************/
# * Authors: <NAME>, <NAME>
# *****************************************************************************/
from __future__ import absolute_import, division, print_function, unicode_literals # , nested_scopes, generators, generator_stop, with_statement, annotations
# General Python module imports
import sys
from collections import deque
class tokenList(object):
"""
Token list handler
"""
DELIMITER_TOKEN = 0 # Chars such as the following ; () [] ::
OPERATOR_TOKEN = 1 # Numerical Operator + - / *
IDENTIFIER_TOKEN = 2 # Other bucket
ML_COMMENT_TOKEN = 3 # Multi-lines such as /* */
SL_COMMENT_TOKEN = 4 # Single line such as //
STRING_TOKEN = 5 # Following a comment token or follow a quote ("") ('')
WHITESPACE_TOKEN = 6 # Space
EOL_TOKEN = 7 # New line \n
def __init__(self):
self.tokenList = []
self.currentTokenIndex = 0
pass
def clear(self):
del self.tokenList[0:]
self.currentTokenIndex = 0
def resetTokenPull(self):
self.currentTokenIndex = 0
def addToken(self, tokenType, tokenValue):
self.tokenList.append((tokenType, tokenValue))
def getNextToken(self):
"""
Read the next token from the list of tokens generated by the input stream
"""
if (self.currentTokenIndex < len(self.tokenList)):
tokenType, token = self.tokenList[self.currentTokenIndex]
self.currentTokenIndex += 1
else:
token = ""
tokenType = tokenList.EOL_TOKEN
return tokenType, token
def getPreviewToken(self):
"""
Read the next token from the list of tokens generated by the input stream without moving index.
"""
if (self.currentTokenIndex < len(self.tokenList)):
tokenType, token = self.tokenList[self.currentTokenIndex]
else:
token = ""
tokenType = tokenList.EOL_TOKEN
return tokenType, token
def putToken(self):
"""
Adjust the token list pointer back one token
"""
if (self.currentTokenIndex > 0): self.currentTokenIndex -= 1
def isEndofList(self):
"""
Check if we have reached the end of the token list
"""
if (self.currentTokenIndex < len(self.tokenList)): return False
else: return True
class fileTokenizer(tokenList):
"""
Tokenize structure definition file on a character-by-character level
"""
def resetTokenizer(self):
self.nextChar = None
self.currentToken = None
self.currentTokenType = tokenList.IDENTIFIER_TOKEN
del self.tokenList[0:]
self.resetTokenPull()
self.mlStartToken = ""
self.startofLine = True
def __init__(self, operatorList = None, delimiterList = None, stringOpList = None, slCommentStartList = None, mlCommentStartList = None, mlCommentEnd = None, continuationCharacter = '\\', stripWhite = True):
self.inputStream = None
self.operatorList = operatorList
        self.linebreakList = ['\n', '\v', '\r', '\f']  # '\v' (vertical tab) assumed; '\l' is not a valid escape
self.whiteList = [' ', '\t']
self.escapeList = ['\0', '\a']
self.stripWhite = stripWhite
self.continuationCharacter = continuationCharacter
self.delimiterList = delimiterList
self.stringOperatorList = stringOpList
if (mlCommentStartList is not None):
self.mlCommentStartList = mlCommentStartList
self.mlCommentEndToken = mlCommentEnd
self.mlCommentEnabled = True
else:
self.mlCommentStartList = []
self.mlCommentEndToken = None
self.mlCommentEnabled = False
if (slCommentStartList is not None):
self.slCommentStartList = slCommentStartList
self.slCommentEnabled = True
else:
self.slCommentStartList = []
self.slCommentEnabled = False
self.nextChar = None
self.currentToken = None
self.currentTokenType = tokenList.IDENTIFIER_TOKEN
self.tokenList = []
self.resetTokenizer()
#================================================================
#================================================================
# Character operations
#================================================================
#================================================================
def __isDelimiter(self):
if(self.nextChar is not None):
if (self.nextChar in self.delimiterList or self.nextChar in self.stringOperatorList): return True
else: return False
else: return False
def __isWhiteSpace(self):
if(self.nextChar is not None):
if (self.nextChar in self.whiteList): return True
else: return False
else: return False
def __isEscapeChar(self):
if(self.nextChar is not None):
if (self.nextChar in self.escapeList): return True
else: return False
else: return False
def __isLineBreak(self):
if(self.nextChar is not None):
if (self.nextChar in self.linebreakList): return True
else: return False
else: return False
def __isOperator(self):
if(self.nextChar is not None):
# String Operator should not be grouped with isOperator
#if ((self.nextChar in self.operatorList) or (self.nextChar in self.stringOperatorList)): return True
if ((self.nextChar in self.operatorList)): return True
else: return False
else: return False
def __isIdentifier(self):
if(self.nextChar is not None):
if ((self.nextChar in self.operatorList) or \
(self.nextChar in self.stringOperatorList) or \
(self.nextChar in self.whiteList) or \
(self.nextChar in self.linebreakList) or \
(self.nextChar in self.delimiterList)): return False
else: return True
else: return False
def __getNextChar(self):
self.nextChar = self.inputStream.read(1)
if (self.nextChar == ''): self.nextChar = None
def __putChar(self):
self.inputStream.seek(self.inputStream.tell()-1, 0)
#================================================================
#================================================================
# Stream operations
#================================================================
#================================================================
    def __stripWhitespace(self):
token = ""
# strip white space
while ((self.nextChar is not None) and (self.__isWhiteSpace())):
token += self.nextChar
self.__getNextChar()
# if the first whitespace on the line and not an empty line and strip is false
if ((self.startofLine) and (False == self.stripWhite) and (False == self.__isLineBreak())):
# First white space on the line, add it to the token and clear the start of line flag
self.currentToken = token
self.startofLine = False
def __stripContinuation(self):
        # strip to the end of the line, then strip leading white space
while ((self.nextChar is not None) and (False == self.__isLineBreak())): self.__getNextChar()
while ((self.nextChar is not None) and (self.__isWhiteSpace())): self.__getNextChar()
def __pullToken(self):
if (self.nextChar is not None):
if (self.nextChar == self.continuationCharacter): self.__stripContinuation()
            # Get the next character for the token
if(self.currentToken is None): self.currentToken = self.nextChar
else: self.currentToken += self.nextChar
self.__getNextChar()
if (self.__isLineBreak()): self.startofLine = True
#================================================================
#================================================================
# Token operations
#================================================================
#================================================================
def __pullOperator(self):
"""
Pull an operator token from the input stream
"""
self.currentTokenType = fileTokenizer.OPERATOR_TOKEN
while ((self.nextChar is not None) and (True == self.__isOperator())):
self.__pullToken()
def __pullIdentifier(self):
"""
Pull an identifier token from the input stream
"""
self.currentTokenType = fileTokenizer.IDENTIFIER_TOKEN
while ((self.nextChar is not None) and (True == self.__isIdentifier())):
self.__pullToken()
def __isMultiCommentStartDelimiter(self, token):
comment = False
self.mlStartToken = ""
if(self.mlCommentEnabled):
for commentStart in self.mlCommentStartList:
cmpLength = len(commentStart)
if (len(token) >= cmpLength):
if (token[:cmpLength] == commentStart):
comment = True
self.mlStartToken = token[:cmpLength]
break
return comment
def __isSingleCommentStartDelimiter(self, token):
comment = False
if(self.slCommentEnabled):
for commentStart in self.slCommentStartList:
cmpLength = len(commentStart)
if (len(token) >= cmpLength):
if (token[:cmpLength] == commentStart):
comment = True
break
return comment
def __pullMultiLineComment(self):
"""
Pull a multiline comment token from the input stream
"""
self.currentTokenType = tokenList.ML_COMMENT_TOKEN
foundEnd = False
if (self.mlCommentEndToken is None):
mlCommentEndToken = self.mlStartToken
mlCommentEndTokenLen = len(mlCommentEndToken)
mlCommentMinLen = mlCommentEndTokenLen * 2
        else:
            mlCommentEndToken = self.mlCommentEndToken
            mlCommentEndTokenLen = len(mlCommentEndToken)
            mlCommentMinLen = mlCommentEndTokenLen + len(self.currentToken)
tokencmpIndex = 0 - mlCommentEndTokenLen
while ((self.nextChar is not None) and (False == foundEnd)):
self.__pullToken()
if (len(self.currentToken) >= mlCommentMinLen):
if (self.currentToken[tokencmpIndex:] == mlCommentEndToken):
foundEnd = True
def __pullSingleLineComment(self):
"""
Pull a single line comment token from the input stream
"""
self.currentTokenType = tokenList.SL_COMMENT_TOKEN
while ((self.nextChar is not None) and (False == self.__isLineBreak())):
self.__pullToken()
def __pullString(self):
"""
Pull a string token from the input stream
"""
self.currentTokenType = tokenList.STRING_TOKEN
self.currentToken = self.nextChar
endChar = self.nextChar
# Ensure not looking at same char twice
self.__getNextChar()
# While not end of string. Ignore the continuation char and let the parser deal with it
while ((self.nextChar is not None) and (self.nextChar != endChar) and (False == self.__isLineBreak())):
self.currentToken += self.nextChar
self.__getNextChar()
# put the closing token onto the string
if(False == self.__isLineBreak()):
self.currentToken += self.nextChar
self.__getNextChar()
#================================================================
#================================================================
# Token generation
#================================================================
#================================================================
def __tokenize(self):
"""
Generate a token from the input stream
"""
self.currentToken = None
        # strip white space and put the first non-whitespace character into the token
        self.__stripWhitespace()
if(self.nextChar is not None):
# get the token and ending delimiter
if (self.__isLineBreak()):
self.startofLine = True
self.__getNextChar()
elif (self.__isDelimiter()):
self.currentToken = self.nextChar
self.currentTokenType = tokenList.DELIMITER_TOKEN
# Check for string delimiter
if (self.currentToken in self.stringOperatorList): self.__pullString()
else: self.__getNextChar()
elif (self.__isOperator()):
# get the full operator
self.__pullOperator()
if (self.__isMultiCommentStartDelimiter(self.currentToken)):
self.__pullMultiLineComment()
elif (self.__isSingleCommentStartDelimiter(self.currentToken)):
self.__pullSingleLineComment()
else:
# get identifier
self.__pullIdentifier()
def parseStream(self, inputStream):
"""
Generate the token list from the input file
"""
self.resetTokenizer()
self.inputStream = inputStream
self.__getNextChar()
while (self.nextChar is not None):
self.__tokenize()
if (self.currentToken is not None):
self.addToken(self.currentTokenType, self.currentToken)
self.tokenCount = len(self.tokenList)
class numericParser(object):
"""
Parse and validate numeric token
"""
DigitList = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
OctalDigitList = ['0', '1', '2', '3', '4', '5', '6', '7']
BinDigitList = ['0', '1']
HexDigitList = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']
def __init__(self):
self.tokenNumericValue = 0
return super(numericParser, self).__init__()
#================================================================
#================================================================
# Token operations
#================================================================
#================================================================
def __validateAndConvertNumber(self, number, digitList, radix):
isNumber = True
self.tokenNumericValue = 0
for c in number:
if(c not in digitList):
isNumber = False
break
# convert the data
if(isNumber): self.tokenNumericValue = int(number, radix)
return isNumber
def isNumber(self, token):
if (token is None):
isNumericValue = False
elif (token[0] in numericParser.DigitList):
isNumericValue = True
number = token.upper()
# Determine radix
            if ((len(number) > 2) and (number[0] == '0')):
                if (number[1].lower() in ['x', 'o', 'b', 'd']):
                    base = number[1].lower()
                    number = number[2:]
                else:
                    # leading zero but no radix prefix; assume decimal
                    base = 'd'
            elif (number[-1:].lower() in ['o', 'b', 'd']):
                # check last character for a radix suffix
                base = number[-1:].lower()
                number = number[0:-1]
            else:
                # assume decimal number for now
                base = 'd'
                isNumericValue = True
# validate and convert
if(isNumericValue):
if(base == 'x'):
#print ("is number! %s\n"% number) #@remove
isNumericValue = self.__validateAndConvertNumber(number, numericParser.HexDigitList, 16)
elif (base == 'o'):
isNumericValue = self.__validateAndConvertNumber(number, numericParser.OctalDigitList, 8)
elif (base == 'b'):
isNumericValue = self.__validateAndConvertNumber(number, numericParser.BinDigitList, 2)
elif (base == 'd'):
isNumericValue = self.__validateAndConvertNumber(number, numericParser.DigitList, 10)
else:
isNumericValue = False
else:
# First character must be digit
isNumericValue = False
return isNumericValue
def getValue(self, token):
if (self.isNumber(token)): return self.tokenNumericValue
else: return 0
class parserHelper(numericParser):
"""
File parser helper
"""
    DEFAULT_TOKEN = 0       # unclassified token
    IDENTIFIER_TOKEN = 1    # identifier (name)
    KEYWORD_TOKEN = 2       # reserved keyword
    OPERATOR_TOKEN = 3      # operator symbol
    PREPROCESSOR_TOKEN = 4  # preprocessor directive
    COMMENT_TOKEN = 5       # single- or multi-line comment
    NUMERIC_TOKEN = 6       # numeric literal
    STRING_TOKEN = 7        # string literal
    DELIMITER_TOKEN = 8     # delimiter character
    END_OF_LIST = 9         # end of token list reached
def __init__(self, tokenizer, keywordList = None, preprocessorKey = None):
self.errorCount = 0
self.errorAbortCount = 1
self.deferredNodeQueue = deque()
self.previousToken = ""
self.currentToken = ""
self.endOfListFound = False
self.tokenType = parserHelper.DEFAULT_TOKEN
self.tokenizer = tokenizer
self.keyWordList = keywordList
self.preprocessorKey = preprocessorKey
self.printWarning = False
return super(parserHelper, self).__init__()
def resetParser(self):
self.errorCount = 0
self.errorAbortCount = 1
self.previousToken = ""
self.currentToken = ""
self.endOfListFound = False
self.tokenType = parserHelper.DEFAULT_TOKEN
def parseError(self, formatStr, args = None):
self.errorCount += 1
if (self.tokenizer.isEndofList()):
sys.stderr.write("Unexpected end of token list reached\n")
errorStr = format(formatStr % args)
sys.stderr.write(errorStr)
sys.stderr.write("\n")
def parseWarning(self, formatStr, args = None):
warningStr = format(formatStr % args)
if (self.printWarning):
sys.stdout.write(warningStr)
sys.stdout.write("\n")
def parseInformation(self, formatStr, args = None):
informationStr = format(formatStr % args)
sys.stdout.write(informationStr)
sys.stdout.write("\n")
def continueParse(self):
if ((self.errorCount < self.errorAbortCount) and (False == self.tokenizer.isEndofList())): return True
else: return False
#================================================================
#================================================================
# Token operations
#================================================================
#================================================================
def __isKeyWord(self):
# Compare identifier against keyword list
if (self.keyWordList is not None):
if (self.currentToken in self.keyWordList): return True
else: return False
else: return False
def __isPreprocessorKey(self):
        # Compare the token's first character against the preprocessor key
if (self.preprocessorKey is not None):
if (self.currentToken[0] == self.preprocessorKey): return True
else: return False
else: return False
def isValidIdentifier(self):
return True
def putToken(self):
self.tokenizer.putToken()
def getNextToken(self):
# get the next token from the tokenizer
self.previousToken = self.currentToken
tokenType, self.currentToken = self.tokenizer.getNextToken()
# Determine the token type
if (tokenType == fileTokenizer.DELIMITER_TOKEN):
self.tokenType = parserHelper.DELIMITER_TOKEN
elif (tokenType == fileTokenizer.IDENTIFIER_TOKEN):
if (self.__isKeyWord()): self.tokenType = parserHelper.KEYWORD_TOKEN
elif (self.currentToken[0] in numericParser.DigitList): self.tokenType = parserHelper.NUMERIC_TOKEN
elif (self.isValidIdentifier()): self.tokenType = parserHelper.IDENTIFIER_TOKEN
else: self.tokenType = parserHelper.DEFAULT_TOKEN
elif ((tokenType == fileTokenizer.ML_COMMENT_TOKEN) or (tokenType == fileTokenizer.SL_COMMENT_TOKEN)):
self.tokenType = parserHelper.COMMENT_TOKEN
elif (tokenType == fileTokenizer.OPERATOR_TOKEN):
if (self.__isPreprocessorKey()): self.tokenType = parserHelper.PREPROCESSOR_TOKEN
else: self.tokenType = parserHelper.OPERATOR_TOKEN
elif (tokenType == fileTokenizer.STRING_TOKEN):
self.tokenType = parserHelper.STRING_TOKEN
elif (tokenType == fileTokenizer.EOL_TOKEN):
self.tokenType = parserHelper.END_OF_LIST
else: self.tokenType = parserHelper.DEFAULT_TOKEN
def getIgnoreToken(self):
        # get the next token from the tokenizer and don't update variables used for function pointers!
tokenType, currentToken = self.tokenizer.getNextToken()
return tokenType, currentToken
def getPreviewToken(self):
# get the next token from the tokenizer without change
        tokenType, currentToken = self.tokenizer.getPreviewToken() # Don't move the index!
return tokenType, currentToken
def createNode(self):
newNode = tokenList()
self.deferredNodeQueue.append(newNode)
return newNode
def getNextNode(self):
try:
node = self.deferredNodeQueue.popleft()
except:
node = None
return node
| [
"sys.stderr.write",
"collections.deque",
"sys.stdout.write"
] | [((17842, 17849), 'collections.deque', 'deque', ([], {}), '()\n', (17847, 17849), False, 'from collections import deque\n'), ((18730, 18756), 'sys.stderr.write', 'sys.stderr.write', (['errorStr'], {}), '(errorStr)\n', (18746, 18756), False, 'import sys\n'), ((18766, 18788), 'sys.stderr.write', 'sys.stderr.write', (['"""\n"""'], {}), "('\\n')\n", (18782, 18788), False, 'import sys\n'), ((19121, 19153), 'sys.stdout.write', 'sys.stdout.write', (['informationStr'], {}), '(informationStr)\n', (19137, 19153), False, 'import sys\n'), ((19163, 19185), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (19179, 19185), False, 'import sys\n'), ((18615, 18673), 'sys.stderr.write', 'sys.stderr.write', (['"""Unexpected end of token list reached\n"""'], {}), "('Unexpected end of token list reached\\n')\n", (18631, 18673), False, 'import sys\n'), ((18937, 18965), 'sys.stdout.write', 'sys.stdout.write', (['warningStr'], {}), '(warningStr)\n', (18953, 18965), False, 'import sys\n'), ((18979, 19001), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (18995, 19001), False, 'import sys\n')] |
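A quick, hedged illustration of the radix handling in numericParser above — a
standalone sketch that assumes the class is in scope (e.g. same module); the
sample literals are invented:

# Each literal exercises one branch: '0x'/'0o'/'0b' prefixes, trailing
# 'b'/'o' suffixes, plain decimal, and a rejected token.
parser = numericParser()
for literal in ['0x1F', '0o17', '0b101', '101b', '17o', '42', '9Z']:
    if parser.isNumber(literal):
        print(literal, '->', parser.getValue(literal))   # e.g. 0x1F -> 31
    else:
        print(literal, '-> not a number')                # 9Z is rejected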
#! /usr/bin/python
import numpy as np
import cv2
import toml
import click
from fitter.pcl_result_2d_image_writer import PCLResult2DImageWriter
from merged_wdr_manager import MergedWDRImageManager
#from fitter.pcl_visualizer import PCLVisualizer
from fitter.pcl_fitter import PCLFitter
from fitter.cameraparam import CameraParam
WINDOW_NAME = "gRPC Test"
IMAGE_WIDTH = 640
IMAGE_HEIGHT = 480
options = [('grpc.max_send_message_length', 10 * 1024 * 1024),
('grpc.max_receive_message_length', 10 * 1024 * 1024)
]
NAMESPACE = 'single_zense_fitter'
UPDATE_FREQ = 10. # Hz
SCALE = 1e-3
class SingleZenseFitter(object):
def __init__(self, toml_path, eps_default=0.016, min_point_default=10):
self.toml_path = toml_path
self._load_toml(toml_path)
self.pub_setting_string()
self.zense_mng = MergedWDRImageManager(options)
self.eps_default = eps_default
self.min_point_default = min_point_default
self.result_img_writer = PCLResult2DImageWriter()
def _load_toml(self, toml_file_path):
        with open(toml_file_path) as toml_file:
            sensor_toml = toml.load(toml_file)
        self.sensor_toml_path = toml_file_path
selection_keys = ['pcl_cutoff_dist',
'target_max_dist',
'target_min_dist',
'target_max_len',
'target_min_len',
'target_max_tilt']
if not all([k in sensor_toml['Selection'] for k in selection_keys]):
print('Missing parameter in "[Selection]" section of TOML')
print('Expected: {}, Specified: {}'.format(
selection_keys, sensor_toml['Selection'].keys())
)
self.camera_param = self.setup_camera_param(sensor_toml)
self.pcl_fitter = self.setup_pcl_fitter(sensor_toml)
def setup_camera_param(self, dict_toml):
key = 'Camera0'
height = int(dict_toml[key]['height'])
width = int(dict_toml[key]['width'])
fx = float(dict_toml[key]['fx'])
fy = float(dict_toml[key]['fy'])
cx = float(dict_toml[key]['cx'])
cy = float(dict_toml[key]['cy'])
roll = float(dict_toml[key]['rot_angle_roll'])
pitch = float(dict_toml[key]['rot_angle_pitch'])
yaw = float(dict_toml[key]['rot_angle_yaw'])
tx = float(dict_toml[key]['translation_x'])
ty = float(dict_toml[key]['translation_y'])
tz = float(dict_toml[key]['translation_z'])
K = (fx, 0., cx, 0., fy, cy, 0., 0., 1.)
R = (1., 0., 0., 0., 1., 0., 0., 0., 1.)
P = (fx, 0., cx, 0., 0., fy, cy, 0., 0., 0., 1., 0.)
size = (height, width)
camera_param = CameraParam()
camera_param.set_camera_param(K, R, P, size)
camera_param.set_tf_rot_and_trans(
[roll, pitch, yaw], [tx, ty, tz])
return camera_param
def setup_pcl_fitter(self, dict_toml):
set_roll = float(dict_toml['General']['set_roll'])
set_pitch = float(dict_toml['General']['set_pitch'])
set_yaw = float(dict_toml['General']['set_yaw'])
camera_set_param = CameraParam()
camera_set_param.set_tf_rot_and_trans(
[set_roll, set_pitch, set_yaw], [0., 0., 0.])
pcl_fitter = PCLFitter(camera_set_param)
pcl_fitter.pcl_cutoff_dist = float(
dict_toml['Selection']['pcl_cutoff_dist'])
pcl_fitter.target_max_dist = float(
dict_toml['Selection']['target_max_dist'])
pcl_fitter.target_min_dist = float(
dict_toml['Selection']['target_min_dist'])
pcl_fitter.target_max_len = float(
dict_toml['Selection']['target_max_len'])
pcl_fitter.target_min_len = float(
dict_toml['Selection']['target_min_len'])
pcl_fitter.target_max_tilt = float(
dict_toml['Selection']['target_max_tilt'])
return pcl_fitter
def pub_setting_string(self):
cutoff_dist = self.pcl_fitter.pcl_cutoff_dist * 100.0 # [cm]
min_len = self.pcl_fitter.target_min_len * 100.0 # [cm]
max_len = self.pcl_fitter.target_max_len * 100.0 # [cm]
min_dist = self.pcl_fitter.target_min_dist * 100.0 # [cm]
max_dist = self.pcl_fitter.target_max_dist * 100.0 # [cm]
max_tilt = self.pcl_fitter.target_max_tilt # [deg]
setting_str = "cutoff: {:.0f} | len: {:.0f} - {:.0f} | dist: {:.0f} - {:.0f} | tilt: {:.0f}".format(
cutoff_dist, min_len, max_len, min_dist, max_dist, max_tilt
)
print(setting_str)
def terminate(self):
pass
def run(self):
self.fused_depth_image = self.zense_mng.filtered_fused_depth * SCALE
pcd = self.pcl_fitter.get_pcd_from_depth_img(
self.fused_depth_image, self.camera_param)
eps = self.eps_default
min_point = self.min_point_default
line_list, pcd_list, fitgeom_list, all_points_ary, ground_points_ary = self.pcl_fitter.fit_pcd(
pcd, eps, min_point)
res_img = self.result_img_writer.draw_img_with_fit_result(
self.fused_depth_image, line_list)
return res_img
@click.command()
@click.option("--toml-path", "-t", default="../cfg/camera.toml")
def main(toml_path):
fitter = SingleZenseFitter(toml_path)
image_scale = 0.5
key = cv2.waitKey(10)
    while (key & 0xFF) != 27:
res_img = fitter.run()
res_img = cv2.resize(res_img, (int(image_scale * res_img.shape[1]),
int(image_scale * res_img.shape[0])))
cv2.imshow("result", res_img)
key = cv2.waitKey(10)
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
| [
"click.option",
"fitter.pcl_fitter.PCLFitter",
"cv2.imshow",
"cv2.destroyAllWindows",
"fitter.cameraparam.CameraParam",
"click.command",
"cv2.waitKey",
"merged_wdr_manager.MergedWDRImageManager",
"fitter.pcl_result_2d_image_writer.PCLResult2DImageWriter"
] | [((5256, 5271), 'click.command', 'click.command', ([], {}), '()\n', (5269, 5271), False, 'import click\n'), ((5273, 5336), 'click.option', 'click.option', (['"""--toml-path"""', '"""-t"""'], {'default': '"""../cfg/camera.toml"""'}), "('--toml-path', '-t', default='../cfg/camera.toml')\n", (5285, 5336), False, 'import click\n'), ((5432, 5447), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (5443, 5447), False, 'import cv2\n'), ((5732, 5755), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5753, 5755), False, 'import cv2\n'), ((849, 879), 'merged_wdr_manager.MergedWDRImageManager', 'MergedWDRImageManager', (['options'], {}), '(options)\n', (870, 879), False, 'from merged_wdr_manager import MergedWDRImageManager\n'), ((1004, 1028), 'fitter.pcl_result_2d_image_writer.PCLResult2DImageWriter', 'PCLResult2DImageWriter', ([], {}), '()\n', (1026, 1028), False, 'from fitter.pcl_result_2d_image_writer import PCLResult2DImageWriter\n'), ((2738, 2751), 'fitter.cameraparam.CameraParam', 'CameraParam', ([], {}), '()\n', (2749, 2751), False, 'from fitter.cameraparam import CameraParam\n'), ((3171, 3184), 'fitter.cameraparam.CameraParam', 'CameraParam', ([], {}), '()\n', (3182, 3184), False, 'from fitter.cameraparam import CameraParam\n'), ((3311, 3338), 'fitter.pcl_fitter.PCLFitter', 'PCLFitter', (['camera_set_param'], {}), '(camera_set_param)\n', (3320, 3338), False, 'from fitter.pcl_fitter import PCLFitter\n'), ((5668, 5697), 'cv2.imshow', 'cv2.imshow', (['"""result"""', 'res_img'], {}), "('result', res_img)\n", (5678, 5697), False, 'import cv2\n'), ((5712, 5727), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (5723, 5727), False, 'import cv2\n')] |
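For context on setup_camera_param above: the flat K tuple is a row-major 3x3
pinhole intrinsic matrix. A minimal, hedged sketch (the intrinsics and test
point are invented values) of how such a matrix maps a camera-frame 3D point
to pixel coordinates:

import numpy as np

fx, fy, cx, cy = 525.0, 525.0, 319.5, 239.5   # illustrative intrinsics
K = np.array([[fx, 0., cx],
              [0., fy, cy],
              [0., 0., 1.]])
point_cam = np.array([0.1, -0.05, 1.2])   # meters, camera frame
uvw = K @ point_cam                        # homogeneous pixel coordinates
u, v = uvw[0] / uvw[2], uvw[1] / uvw[2]    # perspective divide
print(u, v)                                # ~ (363.25, 217.63)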
'''
CNN MNIST digits classification
Project: https://github.com/roatienza/dl-keras
Dependencies: keras
Usage: python3 <this file>
'''
# numpy
import numpy as np
from keras.models import Sequential
from keras.layers import Activation, Dense, Dropout
from keras.layers import Conv2D, MaxPooling2D, Flatten
from keras.datasets import mnist
from keras.utils import to_categorical
# load mnist dataset
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# compute the number of labels
num_labels = np.amax(y_train) + 1
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
# input image dimensions
image_size = x_train.shape[1]
x_train = np.reshape(x_train,[-1, image_size, image_size, 1])
x_test = np.reshape(x_test,[-1, image_size, image_size, 1])
# scale pixel values to float32 in [0, 1] before training
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# image is processed as is (square grayscale)
input_shape = (image_size, image_size, 1)
batch_size = 128
kernel_size = 3
pool_size = 2
filters = 64
dropout = 0.2
# model is a stack of CNN-ReLU-MaxPooling
model = Sequential()
model.add(Conv2D(filters=filters,
kernel_size=kernel_size, activation='relu',
input_shape=input_shape))
model.add(MaxPooling2D(pool_size))
model.add(Conv2D(filters=filters,
kernel_size=kernel_size, activation='relu'))
model.add(MaxPooling2D(pool_size))
model.add(Conv2D(filters=filters,
kernel_size=kernel_size, activation='relu'))
model.add(Flatten())
# dropout added as regularizer
model.add(Dropout(dropout))
# output layer is 10-dim one-hot vector
model.add(Dense(num_labels))
model.add(Activation('softmax'))
model.summary()
model.compile(loss='categorical_crossentropy',
optimizer='adam', metrics=['accuracy'])
model.fit(x_train, y_train, epochs=10, batch_size=batch_size)
score = model.evaluate(x_test, y_test, batch_size=batch_size)
print("\nTest accuracy: %.1f%%" % (100.0 * score[1]))
| [
"keras.layers.Conv2D",
"numpy.reshape",
"keras.layers.Flatten",
"keras.datasets.mnist.load_data",
"keras.layers.MaxPooling2D",
"keras.models.Sequential",
"keras.utils.to_categorical",
"keras.layers.Dropout",
"keras.layers.Activation",
"keras.layers.Dense",
"numpy.amax"
] | [((440, 457), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (455, 457), False, 'from keras.datasets import mnist\n'), ((534, 557), 'keras.utils.to_categorical', 'to_categorical', (['y_train'], {}), '(y_train)\n', (548, 557), False, 'from keras.utils import to_categorical\n'), ((567, 589), 'keras.utils.to_categorical', 'to_categorical', (['y_test'], {}), '(y_test)\n', (581, 589), False, 'from keras.utils import to_categorical\n'), ((656, 708), 'numpy.reshape', 'np.reshape', (['x_train', '[-1, image_size, image_size, 1]'], {}), '(x_train, [-1, image_size, image_size, 1])\n', (666, 708), True, 'import numpy as np\n'), ((717, 768), 'numpy.reshape', 'np.reshape', (['x_test', '[-1, image_size, image_size, 1]'], {}), '(x_test, [-1, image_size, image_size, 1])\n', (727, 768), True, 'import numpy as np\n'), ((1104, 1116), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1114, 1116), False, 'from keras.models import Sequential\n'), ((503, 519), 'numpy.amax', 'np.amax', (['y_train'], {}), '(y_train)\n', (510, 519), True, 'import numpy as np\n'), ((1127, 1223), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': 'filters', 'kernel_size': 'kernel_size', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(filters=filters, kernel_size=kernel_size, activation='relu',\n input_shape=input_shape)\n", (1133, 1223), False, 'from keras.layers import Conv2D, MaxPooling2D, Flatten\n'), ((1265, 1288), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['pool_size'], {}), '(pool_size)\n', (1277, 1288), False, 'from keras.layers import Conv2D, MaxPooling2D, Flatten\n'), ((1300, 1367), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': 'filters', 'kernel_size': 'kernel_size', 'activation': '"""relu"""'}), "(filters=filters, kernel_size=kernel_size, activation='relu')\n", (1306, 1367), False, 'from keras.layers import Conv2D, MaxPooling2D, Flatten\n'), ((1396, 1419), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['pool_size'], {}), '(pool_size)\n', (1408, 1419), False, 'from keras.layers import Conv2D, MaxPooling2D, Flatten\n'), ((1431, 1498), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': 'filters', 'kernel_size': 'kernel_size', 'activation': '"""relu"""'}), "(filters=filters, kernel_size=kernel_size, activation='relu')\n", (1437, 1498), False, 'from keras.layers import Conv2D, MaxPooling2D, Flatten\n'), ((1527, 1536), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1534, 1536), False, 'from keras.layers import Conv2D, MaxPooling2D, Flatten\n'), ((1579, 1595), 'keras.layers.Dropout', 'Dropout', (['dropout'], {}), '(dropout)\n', (1586, 1595), False, 'from keras.layers import Activation, Dense, Dropout\n'), ((1647, 1664), 'keras.layers.Dense', 'Dense', (['num_labels'], {}), '(num_labels)\n', (1652, 1664), False, 'from keras.layers import Activation, Dense, Dropout\n'), ((1676, 1697), 'keras.layers.Activation', 'Activation', (['"""softmax"""'], {}), "('softmax')\n", (1686, 1697), False, 'from keras.layers import Activation, Dense, Dropout\n')] |
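As a sanity check on the stack above (Keras Conv2D defaults to 'valid'
padding with stride 1; MaxPooling2D strides by its pool size), the spatial
dimensions shrink as follows — a hedged arithmetic sketch, not framework
output:

# 28 -> Conv(3): 26 -> Pool(2): 13 -> Conv(3): 11 -> Pool(2): 5 -> Conv(3): 3
size = 28
for layer in ('conv', 'pool', 'conv', 'pool', 'conv'):
    size = size - 3 + 1 if layer == 'conv' else size // 2
print(size * size * 64)  # 576 features after Flatten, feeding Dense(10)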
import crashstatsutils
import jydoop
import json
cutoff = 50 * 2**20
setupjob = crashstatsutils.dosetupjob([('meta_data', 'json'), ('processed_data', 'json')])
def map(k, meta_data, processed_data, context):
if processed_data is None:
return
meta = json.loads(meta_data)
processed = json.loads(processed_data)
def writeOOM():
tvm = meta.get("TotalVirtualMemory", None)
if tvm is None:
wintype = "Unknown"
else:
tvm = int(tvm)
if tvm > 3.5 * 2**30:
wintype = "win64"
else:
wintype = "win32"
context.write(("OOM", wintype), 1)
    if processed.get('signature', '').startswith('EMPTY'):
if 'TotalVirtualMemory' in meta:
writeOOM()
else:
context.write("notwindows", 1)
return
if processed.get('os_name', None) != 'Windows NT':
context.write("notwindows", 1)
return
if 'OOMAllocationSize' in meta:
writeOOM()
return
if meta.get('Notes', '').find("ABORT: OOM") != -1:
writeOOM()
return
if 'json_dump' not in processed:
context.write("unknown", 1)
return
blocksize = processed['json_dump'].get('largest_free_vm_block', None)
if blocksize is None:
context.write("unknown", 1)
return
blocksize = int(blocksize[2:], 16)
if blocksize < cutoff:
writeOOM()
return
context.write("probably-not-OOM", 1)
combine = jydoop.sumreducer
reduce = jydoop.sumreducer
| [
"crashstatsutils.dosetupjob",
"json.loads"
] | [((82, 161), 'crashstatsutils.dosetupjob', 'crashstatsutils.dosetupjob', (["[('meta_data', 'json'), ('processed_data', 'json')]"], {}), "([('meta_data', 'json'), ('processed_data', 'json')])\n", (108, 161), False, 'import crashstatsutils\n'), ((269, 290), 'json.loads', 'json.loads', (['meta_data'], {}), '(meta_data)\n', (279, 290), False, 'import json\n'), ((307, 333), 'json.loads', 'json.loads', (['processed_data'], {}), '(processed_data)\n', (317, 333), False, 'import json\n')] |
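The 'largest_free_vm_block' field handled above arrives as a hex string such
as '0x2f00000'; a hedged mini-example of the cutoff comparison (the sample
value is invented):

cutoff = 50 * 2**20                    # 50 MiB, same as above
blocksize = int('0x2f00000'[2:], 16)   # strip '0x' prefix -> 49283072 bytes
print(blocksize < cutoff)              # True: would be counted as an OOM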
import sqlalchemy
from sqlalchemy.ext.declarative import declarative_base
md = sqlalchemy.MetaData()
Base = declarative_base(metadata=md)
# Import model modules after Base is defined so their tables register
# against the shared metadata.
from LanguageDeck.models import associations
from LanguageDeck.models import Cards
from LanguageDeck.models import decks
| [
"sqlalchemy.MetaData",
"sqlalchemy.ext.declarative.declarative_base"
] | [((80, 101), 'sqlalchemy.MetaData', 'sqlalchemy.MetaData', ([], {}), '()\n', (99, 101), False, 'import sqlalchemy\n'), ((109, 138), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {'metadata': 'md'}), '(metadata=md)\n', (125, 138), False, 'from sqlalchemy.ext.declarative import declarative_base\n')] |
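A hedged sketch of how a model module under LanguageDeck.models might hang
off the Base declared above — the table and columns here are invented for
illustration, not the project's actual schema:

from sqlalchemy import Column, Integer, String

class Card(Base):
    __tablename__ = 'cards'
    id = Column(Integer, primary_key=True)
    front = Column(String)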
import argparse
import yaml
import sys
import os
def get_config_file():
    config_dir = os.path.dirname(__file__)
    config_file = os.path.join(config_dir, 'configs/warehourse_environment.yaml')
# parser.add_argument('--config', default=config_file, help='config file')
# args, _ = parser.parse_known_args()
# config_file = args.config
return config_file
def read_parameters(scope):
config_file = get_config_file()
with open(config_file) as file:
parameters = yaml.load(file, Loader=yaml.FullLoader)
return parameters[scope]
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--n_rows', type=int, default=25,
help='number of rows in the warehouse')
parser.add_argument('--n_columns', type=int, default=25,
help='number of columns in the warehouse')
parser.add_argument('--n_robots_row', type=int, default=6,
help='number of robots per row')
parser.add_argument('--n_robots_column', type=int, default=6,
help='number of robots per column')
parser.add_argument('--distance_between_shelves', type=int, default=4,
help='distance between two contiguous shelves')
parser.add_argument('--robot_domain_size', type=list, default=[5, 5],
help='size of the robots domain')
    parser.add_argument('--prob_item_appears', type=float, default=0.025,
help='probability of an item appearing at each location')
parser.add_argument('--learning_robot_id', type=int, default=20,
help='learning robot id')
parser.add_argument('--obs_type', type=str, default='image',
help='observation type: image or vector')
parser.add_argument('--n_steps_episode', type=int, default=100,
help='number of steps per episode')
parser.add_argument('--log_obs', type=bool, default=True,
help='wether or not to log the observations')
parser.add_argument('--log_file', type=str, default='./obs_data.csv',
help='path to the log file')
args = parser.parse_args()
return args
| [
"os.path.dirname",
"os.path.join",
"yaml.load",
"argparse.ArgumentParser"
] | [((99, 124), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (114, 124), False, 'import os\n'), ((143, 199), 'os.path.join', 'os.path.join', (['dir', '"""configs/warehourse_environment.yaml"""'], {}), "(dir, 'configs/warehourse_environment.yaml')\n", (155, 199), False, 'import os\n'), ((604, 629), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (627, 629), False, 'import argparse\n'), ((498, 537), 'yaml.load', 'yaml.load', (['file'], {'Loader': 'yaml.FullLoader'}), '(file, Loader=yaml.FullLoader)\n', (507, 537), False, 'import yaml\n')] |
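A hedged sketch of the scoped YAML layout read_parameters() above expects —
the 'robot' scope and its keys are invented for illustration:

import yaml

sample = """
robot:
  n_rows: 25
  n_columns: 25
"""
print(yaml.load(sample, Loader=yaml.FullLoader)['robot'])
# -> {'n_rows': 25, 'n_columns': 25}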
#!/usr/bin/env python3
import sys
import json
class Benchmark(object):
def __init__(self, name):
self.name = name
self.mean = 0
self.median = 0
self.stddev = 0
self.iterations = []
def parse(self, obj):
value = obj['cpu_time']
if obj['run_type'] == 'iteration':
self.iterations.append(value)
else:
self.__dict__[obj['aggregate_name']] = round(value, 2)
def __repr__(self):
return str(self.__dict__)
def get_benchmarks(bench_data):
benchmarks = {}
current_name = None
current_benchmark = None
for entry in bench_data['benchmarks']:
name = entry['run_name']
if current_name is None or not name == current_name:
current_name = name
current_benchmark = Benchmark(name)
benchmarks[name] = current_benchmark
current_benchmark.parse(entry)
return benchmarks
def get_chartjs_bar_chart(benchmarks):
labels = []
data = []
for b in benchmarks.values():
labels.append(b.name)
data.append(b.median)
obj = {
'labels': labels,
'datasets': [{
'label': 'Benchmark',
'data': data
}],
}
return json.dumps(obj, indent=2)
if __name__ == '__main__':
filename = sys.argv[1]
with open(filename) as f:
bench_data = json.load(f)
benchmarks = get_benchmarks(bench_data)
print(get_chartjs_bar_chart(benchmarks))
| [
"json.load",
"json.dumps"
] | [((1122, 1147), 'json.dumps', 'json.dumps', (['obj'], {'indent': '(2)'}), '(obj, indent=2)\n', (1132, 1147), False, 'import json\n'), ((1246, 1258), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1255, 1258), False, 'import json\n')] |
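A minimal, hedged stand-in for the JSON this script consumes, shaped after
the keys the parser reads (run_name, run_type, aggregate_name, cpu_time) and
assuming the functions above are in scope; the benchmark name and timings are
invented:

sample = {
    'benchmarks': [
        {'run_name': 'BM_copy', 'run_type': 'iteration', 'cpu_time': 101.4},
        {'run_name': 'BM_copy', 'run_type': 'iteration', 'cpu_time': 99.1},
        {'run_name': 'BM_copy', 'run_type': 'aggregate',
         'aggregate_name': 'median', 'cpu_time': 100.25},
    ]
}
print(get_chartjs_bar_chart(get_benchmarks(sample)))  # Chart.js bar config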
# -*- coding: utf-8 -*-
import logging
from datetime import datetime
from ipware import get_client_ip
from django.conf.urls import url, include
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth import authenticate, login, logout
from django.db import connection # Used for django tenants.
from django.http import Http404
from django.utils import timezone
from oauthlib.common import generate_token
from oauth2_provider.models import Application, AbstractApplication, AbstractAccessToken, AccessToken, RefreshToken
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
from rest_framework.views import APIView
from rest_framework import generics
from rest_framework import mixins # See: http://www.django-rest-framework.org/api-guide/generic-views/#mixins
from rest_framework import authentication, viewsets, permissions, status, parsers, renderers
from rest_framework.decorators import detail_route, list_route # See: http://www.django-rest-framework.org/api-guide/viewsets/#marking-extra-actions-for-routing
from rest_framework.response import Response
from account.serializers import ActivateSerializer, ProfileInfoRetrieveUpdateSerializer
class ActivateAPIView(APIView):
"""
    API endpoint that takes the submitted `pr_code` and validates that the
    code (1) has not expired and (2) exists. Once validated, this API
    endpoint acts like the "login" API by providing all the necessary items.
"""
throttle_classes = ()
permission_classes = ()
def post(self, request):
# Serializer to get our login details.
serializer = ActivateSerializer(data=request.data, context={
'request': request,
})
serializer.is_valid(raise_exception=True)
authenticated_user = serializer.save()
authenticated_user.refresh_from_db()
# Get our web application authorization.
application = Application.objects.filter(name=settings.MIKAPONICS_RESOURCE_SERVER_NAME).first()
        # Generate a "NEW" access token (it expires one day from now).
# We want to generate a new token every time because the user may be
# logging in from multiple locations and may log out from multiple
# locations so we don't want the user using the same token every time.
aware_dt = timezone.now()
expires_dt = aware_dt + timezone.timedelta(days=1)
access_token = AccessToken.objects.create(
application=application,
user=authenticated_user,
expires=expires_dt,
token=generate_token(),
scope='read,write,introspection'
)
refresh_token = RefreshToken.objects.create(
application = application,
user = authenticated_user,
access_token=access_token,
token=generate_token()
)
serializer = ProfileInfoRetrieveUpdateSerializer(authenticated_user, many=False, context={
'authenticated_by': authenticated_user,
'authenticated_from': request.client_ip,
'authenticated_from_is_public': request.client_ip_is_routable,
'access_token': access_token,
'refresh_token': refresh_token
})
return Response(serializer.data, status=status.HTTP_200_OK)
| [
"account.serializers.ProfileInfoRetrieveUpdateSerializer",
"account.serializers.ActivateSerializer",
"django.utils.timezone.now",
"rest_framework.response.Response",
"django.utils.timezone.timedelta",
"oauth2_provider.models.Application.objects.filter",
"oauthlib.common.generate_token"
] | [((1645, 1712), 'account.serializers.ActivateSerializer', 'ActivateSerializer', ([], {'data': 'request.data', 'context': "{'request': request}"}), "(data=request.data, context={'request': request})\n", (1663, 1712), False, 'from account.serializers import ActivateSerializer, ProfileInfoRetrieveUpdateSerializer\n'), ((2359, 2373), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2371, 2373), False, 'from django.utils import timezone\n'), ((2919, 3221), 'account.serializers.ProfileInfoRetrieveUpdateSerializer', 'ProfileInfoRetrieveUpdateSerializer', (['authenticated_user'], {'many': '(False)', 'context': "{'authenticated_by': authenticated_user, 'authenticated_from': request.\n client_ip, 'authenticated_from_is_public': request.\n client_ip_is_routable, 'access_token': access_token, 'refresh_token':\n refresh_token}"}), "(authenticated_user, many=False, context\n ={'authenticated_by': authenticated_user, 'authenticated_from': request\n .client_ip, 'authenticated_from_is_public': request.\n client_ip_is_routable, 'access_token': access_token, 'refresh_token':\n refresh_token})\n", (2954, 3221), False, 'from account.serializers import ActivateSerializer, ProfileInfoRetrieveUpdateSerializer\n'), ((3288, 3340), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (3296, 3340), False, 'from rest_framework.response import Response\n'), ((2406, 2432), 'django.utils.timezone.timedelta', 'timezone.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2424, 2432), False, 'from django.utils import timezone\n'), ((1950, 2023), 'oauth2_provider.models.Application.objects.filter', 'Application.objects.filter', ([], {'name': 'settings.MIKAPONICS_RESOURCE_SERVER_NAME'}), '(name=settings.MIKAPONICS_RESOURCE_SERVER_NAME)\n', (1976, 2023), False, 'from oauth2_provider.models import Application, AbstractApplication, AbstractAccessToken, AccessToken, RefreshToken\n'), ((2608, 2624), 'oauthlib.common.generate_token', 'generate_token', ([], {}), '()\n', (2622, 2624), False, 'from oauthlib.common import generate_token\n'), ((2870, 2886), 'oauthlib.common.generate_token', 'generate_token', ([], {}), '()\n', (2884, 2886), False, 'from oauthlib.common import generate_token\n')] |
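A hedged sketch of how a client might exercise this endpoint — the URL path
and code value are assumptions for illustration; the payload field follows
the `pr_code` mentioned in the docstring:

from rest_framework.test import APIClient

client = APIClient()
response = client.post('/api/activate/', {'pr_code': 'ABC123'}, format='json')
# On success (HTTP 200) the body mirrors ProfileInfoRetrieveUpdateSerializer,
# including the freshly minted access and refresh tokens.
print(response.status_code)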
"""add github field to user table
Revision ID: 6<PASSWORD>
Revises: <PASSWORD>
Create Date: 2020-10-28 17:42:52.455994
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<PASSWORD>'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.add_column(sa.Column('github', sa.String(length=128), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_column('github')
# ### end Alembic commands ###
| [
"sqlalchemy.String",
"alembic.op.batch_alter_table"
] | [((399, 440), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""user"""'], {'schema': 'None'}), "('user', schema=None)\n", (419, 440), False, 'from alembic import op\n'), ((671, 712), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""user"""'], {'schema': 'None'}), "('user', schema=None)\n", (691, 712), False, 'from alembic import op\n'), ((502, 523), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (511, 523), True, 'import sqlalchemy as sa\n')] |
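For reference, a migration like this one is applied or reverted from the
command line with the standard Alembic CLI — shown here as a hedged reminder:

# alembic upgrade head    # runs upgrade(), adding the user.github column
# alembic downgrade -1    # runs downgrade(), dropping it again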
import os
import pandas as pd
import models
import data
import generator as gen
import json
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint
import multiprocessing
from _common import utils
from _common import callbacks as cbs
import argparse
argparser = argparse.ArgumentParser(description='train and evaluate YOLOv3 model on any dataset')
argparser.add_argument('-c', '--conf', help='path to configuration file')
argparser.add_argument('-w', '--weights', help='path to trained model', default=None)
args = argparser.parse_args()
import neptune
neptune.init('kail4ek/sandbox')
def main():
config_path = args.conf
initial_weights = args.weights
with open(config_path) as config_buffer:
config = json.loads(config_buffer.read())
train_set, valid_set, classes = data.create_training_instances(config['train']['train_folder'],
None,
config['train']['cache_name'],
config['model']['labels'])
num_classes = len(classes)
print('Readed {} classes: {}'.format(num_classes, classes))
train_generator = gen.BatchGenerator(
instances=train_set,
labels=classes,
batch_size=config['train']['batch_size'],
input_sz=config['model']['infer_shape'],
shuffle=True,
norm=data.normalize
)
valid_generator = gen.BatchGenerator(
instances=valid_set,
labels=classes,
batch_size=config['train']['batch_size'],
input_sz=config['model']['infer_shape'],
norm=data.normalize,
infer=True
)
early_stop = EarlyStopping(
monitor='val_loss',
min_delta=0,
patience=20,
mode='min',
verbose=1
)
reduce_on_plateau = ReduceLROnPlateau(
monitor='val_loss',
factor=0.5,
patience=5,
verbose=1,
mode='min',
min_delta=0.01,
cooldown=0,
min_lr=0
)
net_input_shape = (config['model']['infer_shape'][0],
config['model']['infer_shape'][1],
3)
train_model = models.create(
base_name=config['model']['base'],
num_classes=num_classes,
input_shape=net_input_shape)
if initial_weights:
train_model.load_weights(initial_weights)
    train_model.summary()
# plot_model(train_model, to_file='images/MobileNetv2.png', show_shapes=True)
optimizer = Adam(lr=config['train']['learning_rate'], clipnorm=0.001)
train_model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
checkpoint_name = utils.get_checkpoint_name(config)
utils.makedirs_4_file(checkpoint_name)
static_chk_name = utils.get_static_checkpoint_name(config)
utils.makedirs_4_file(static_chk_name)
checkpoint_vloss = cbs.CustomModelCheckpoint(
model_to_save=train_model,
filepath=checkpoint_name,
monitor='val_loss',
verbose=1,
save_best_only=True,
mode='min',
period=1
)
neptune_mon = cbs.NeptuneMonitor(
monitoring=['loss', 'val_loss', 'accuracy', 'val_accuracy'],
neptune=neptune
)
chk_static = ModelCheckpoint(
filepath=static_chk_name,
monitor='val_loss',
verbose=1,
save_best_only=True,
mode='min',
period=1
)
callbacks = [early_stop, reduce_on_plateau, checkpoint_vloss, neptune_mon, chk_static]
### NEPTUNE ###
sources_to_upload = [
'models.py',
'config.json'
]
params = {
'infer_size': "H{}xW{}".format(*config['model']['infer_shape']),
'classes': config['model']['labels'],
}
neptune.create_experiment(
name=utils.get_neptune_name(config),
upload_stdout=False,
upload_source_files=sources_to_upload,
params=params
)
### NEPTUNE ###
hist = train_model.fit_generator(
generator=train_generator,
steps_per_epoch=len(train_generator) * config['train']['train_times'],
validation_data=valid_generator,
validation_steps=len(valid_generator) * config['valid']['valid_times'],
epochs=config['train']['nb_epochs'],
verbose=2 if config['train']['debug'] else 1,
callbacks=callbacks,
workers=multiprocessing.cpu_count(),
max_queue_size=100
)
neptune.send_artifact(static_chk_name)
neptune.send_artifact('config.json')
# Hand-made history
# if not os.path.exists('model'):
# os.makedirs('model')
# df = pd.DataFrame.from_dict(hist.history)
# df.to_csv('model/hist.csv', encoding='utf-8', index=False)
if __name__ == '__main__':
main()
| [
"_common.callbacks.NeptuneMonitor",
"argparse.ArgumentParser",
"tensorflow.keras.callbacks.ReduceLROnPlateau",
"_common.utils.get_static_checkpoint_name",
"generator.BatchGenerator",
"models.create",
"data.create_training_instances",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.callbacks.EarlyStopping",
"_common.callbacks.CustomModelCheckpoint",
"multiprocessing.cpu_count",
"_common.utils.get_checkpoint_name",
"neptune.send_artifact",
"tensorflow.keras.callbacks.ModelCheckpoint",
"neptune.init",
"_common.utils.get_neptune_name",
"_common.utils.makedirs_4_file"
] | [((345, 435), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""train and evaluate YOLOv3 model on any dataset"""'}), "(description=\n 'train and evaluate YOLOv3 model on any dataset')\n", (368, 435), False, 'import argparse\n'), ((638, 669), 'neptune.init', 'neptune.init', (['"""kail4ek/sandbox"""'], {}), "('kail4ek/sandbox')\n", (650, 669), False, 'import neptune\n'), ((879, 1010), 'data.create_training_instances', 'data.create_training_instances', (["config['train']['train_folder']", 'None', "config['train']['cache_name']", "config['model']['labels']"], {}), "(config['train']['train_folder'], None,\n config['train']['cache_name'], config['model']['labels'])\n", (909, 1010), False, 'import data\n'), ((1327, 1509), 'generator.BatchGenerator', 'gen.BatchGenerator', ([], {'instances': 'train_set', 'labels': 'classes', 'batch_size': "config['train']['batch_size']", 'input_sz': "config['model']['infer_shape']", 'shuffle': '(True)', 'norm': 'data.normalize'}), "(instances=train_set, labels=classes, batch_size=config[\n 'train']['batch_size'], input_sz=config['model']['infer_shape'],\n shuffle=True, norm=data.normalize)\n", (1345, 1509), True, 'import generator as gen\n'), ((1578, 1759), 'generator.BatchGenerator', 'gen.BatchGenerator', ([], {'instances': 'valid_set', 'labels': 'classes', 'batch_size': "config['train']['batch_size']", 'input_sz': "config['model']['infer_shape']", 'norm': 'data.normalize', 'infer': '(True)'}), "(instances=valid_set, labels=classes, batch_size=config[\n 'train']['batch_size'], input_sz=config['model']['infer_shape'], norm=\n data.normalize, infer=True)\n", (1596, 1759), True, 'import generator as gen\n'), ((1822, 1908), 'tensorflow.keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_loss"""', 'min_delta': '(0)', 'patience': '(20)', 'mode': '"""min"""', 'verbose': '(1)'}), "(monitor='val_loss', min_delta=0, patience=20, mode='min',\n verbose=1)\n", (1835, 1908), False, 'from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint\n'), ((1976, 2102), 'tensorflow.keras.callbacks.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'monitor': '"""val_loss"""', 'factor': '(0.5)', 'patience': '(5)', 'verbose': '(1)', 'mode': '"""min"""', 'min_delta': '(0.01)', 'cooldown': '(0)', 'min_lr': '(0)'}), "(monitor='val_loss', factor=0.5, patience=5, verbose=1,\n mode='min', min_delta=0.01, cooldown=0, min_lr=0)\n", (1993, 2102), False, 'from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint\n'), ((2331, 2437), 'models.create', 'models.create', ([], {'base_name': "config['model']['base']", 'num_classes': 'num_classes', 'input_shape': 'net_input_shape'}), "(base_name=config['model']['base'], num_classes=num_classes,\n input_shape=net_input_shape)\n", (2344, 2437), False, 'import models\n'), ((2667, 2724), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': "config['train']['learning_rate']", 'clipnorm': '(0.001)'}), "(lr=config['train']['learning_rate'], clipnorm=0.001)\n", (2671, 2724), False, 'from tensorflow.keras.optimizers import Adam\n'), ((2849, 2882), '_common.utils.get_checkpoint_name', 'utils.get_checkpoint_name', (['config'], {}), '(config)\n', (2874, 2882), False, 'from _common import utils\n'), ((2887, 2925), '_common.utils.makedirs_4_file', 'utils.makedirs_4_file', (['checkpoint_name'], {}), '(checkpoint_name)\n', (2908, 2925), False, 'from _common import utils\n'), ((2949, 2989), '_common.utils.get_static_checkpoint_name', 
'utils.get_static_checkpoint_name', (['config'], {}), '(config)\n', (2981, 2989), False, 'from _common import utils\n'), ((2994, 3032), '_common.utils.makedirs_4_file', 'utils.makedirs_4_file', (['static_chk_name'], {}), '(static_chk_name)\n', (3015, 3032), False, 'from _common import utils\n'), ((3057, 3218), '_common.callbacks.CustomModelCheckpoint', 'cbs.CustomModelCheckpoint', ([], {'model_to_save': 'train_model', 'filepath': 'checkpoint_name', 'monitor': '"""val_loss"""', 'verbose': '(1)', 'save_best_only': '(True)', 'mode': '"""min"""', 'period': '(1)'}), "(model_to_save=train_model, filepath=\n checkpoint_name, monitor='val_loss', verbose=1, save_best_only=True,\n mode='min', period=1)\n", (3082, 3218), True, 'from _common import callbacks as cbs\n'), ((3295, 3395), '_common.callbacks.NeptuneMonitor', 'cbs.NeptuneMonitor', ([], {'monitoring': "['loss', 'val_loss', 'accuracy', 'val_accuracy']", 'neptune': 'neptune'}), "(monitoring=['loss', 'val_loss', 'accuracy',\n 'val_accuracy'], neptune=neptune)\n", (3313, 3395), True, 'from _common import callbacks as cbs\n'), ((3432, 3551), 'tensorflow.keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', ([], {'filepath': 'static_chk_name', 'monitor': '"""val_loss"""', 'verbose': '(1)', 'save_best_only': '(True)', 'mode': '"""min"""', 'period': '(1)'}), "(filepath=static_chk_name, monitor='val_loss', verbose=1,\n save_best_only=True, mode='min', period=1)\n", (3447, 3551), False, 'from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint\n'), ((4627, 4665), 'neptune.send_artifact', 'neptune.send_artifact', (['static_chk_name'], {}), '(static_chk_name)\n', (4648, 4665), False, 'import neptune\n'), ((4670, 4706), 'neptune.send_artifact', 'neptune.send_artifact', (['"""config.json"""'], {}), "('config.json')\n", (4691, 4706), False, 'import neptune\n'), ((3976, 4006), '_common.utils.get_neptune_name', 'utils.get_neptune_name', (['config'], {}), '(config)\n', (3998, 4006), False, 'from _common import utils\n'), ((4556, 4583), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (4581, 4583), False, 'import multiprocessing\n')] |
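A hedged sketch of the config.json fields the training script above actually
dereferences — the values are invented placeholders; a real config may carry
more keys:

config = {
    "model": {
        "base": "MobileNetv2",
        "infer_shape": [224, 224],
        "labels": ["cat", "dog"]
    },
    "train": {
        "train_folder": "data/train",
        "cache_name": "train.pkl",
        "batch_size": 32,
        "learning_rate": 1e-4,
        "train_times": 1,
        "nb_epochs": 50,
        "debug": False
    },
    "valid": {
        "valid_times": 1
    }
}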
from unittest.mock import patch
from django.urls import reverse_lazy
from django.contrib.auth import get_user_model
from rest_framework.test import APIClient
from rest_framework.status import (
HTTP_200_OK,
HTTP_201_CREATED,
HTTP_405_METHOD_NOT_ALLOWED,
HTTP_400_BAD_REQUEST,
)
from model_mommy import mommy
import pytest
from django_cryptolock.models import Address, Challenge
from .helpers import (
VALID_BITCOIN_ADDRESS,
VALID_MONERO_ADDRESS,
gen_challenge,
set_bitcoin_settings,
set_monero_settings,
)
User = get_user_model()
pytestmark = pytest.mark.django_db
@pytest.fixture
def api_client():
return APIClient()
@pytest.mark.parametrize("method", ["put", "patch", "delete", "head", "options"])
def test_methods_not_allowed_for_token_login(api_client, method):
func = getattr(api_client, method)
response = func(reverse_lazy("api_token_login"))
assert response.status_code == HTTP_405_METHOD_NOT_ALLOWED
def test_generate_new_token_login_challenge(api_client):
response = api_client.get(reverse_lazy("api_token_login"))
assert response.status_code == HTTP_200_OK
assert "challenge" in response.json().keys()
assert "expires" in response.json().keys()
@pytest.mark.parametrize(
"addr,set_backend,network",
[
(VALID_MONERO_ADDRESS, set_monero_settings, "monero"),
(VALID_BITCOIN_ADDRESS, set_bitcoin_settings, "bitcoin"),
],
)
def test_token_login_fails_invalid_data(
api_client, settings, addr, set_backend, network
):
settings.DJCL_MONERO_NETWORK = "mainnet"
set_backend(settings)
net = Address.NETWORK_BITCOIN if network == "bitcoin" else Address.NETWORK_MONERO
user = mommy.make(User)
mommy.make(Address, user=user, address=addr, network=net)
challenge = gen_challenge()
with patch(f"django_cryptolock.backends.verify_{network}_signature") as sig_mock:
sig_mock.return_value = False
response = api_client.post(
reverse_lazy("api_token_login"),
{"challenge": challenge, "address": addr, "signature": "something"},
)
assert response.status_code == HTTP_400_BAD_REQUEST
errors = response.json()
assert "Please enter a correct address or signature." in errors["__all__"]
@pytest.mark.parametrize(
"addr,set_backend,network",
[
(VALID_MONERO_ADDRESS, set_monero_settings, "monero"),
(VALID_BITCOIN_ADDRESS, set_bitcoin_settings, "bitcoin"),
],
)
def test_token_login_succeeds(api_client, settings, addr, set_backend, network):
settings.DJCL_MONERO_NETWORK = "mainnet"
set_backend(settings)
net = Address.NETWORK_BITCOIN if network == "bitcoin" else Address.NETWORK_MONERO
user = mommy.make(User)
mommy.make(Address, user=user, address=addr, network=net)
challenge = gen_challenge()
with patch(f"django_cryptolock.backends.verify_{network}_signature") as sig_mock:
sig_mock.return_value = True
response = api_client.post(
reverse_lazy("api_token_login"),
{"challenge": challenge, "address": addr, "signature": "something"},
)
assert response.status_code == HTTP_200_OK
assert "token" in response.json().keys()
@pytest.mark.parametrize("method", ["put", "patch", "delete", "head", "options"])
def test_methods_not_allowed_for_sign_up(api_client, method):
func = getattr(api_client, method)
response = func(reverse_lazy("api_signup"))
assert response.status_code == HTTP_405_METHOD_NOT_ALLOWED
def test_generate_new_sign_up_challenge(api_client):
response = api_client.get(reverse_lazy("api_signup"))
assert response.status_code == HTTP_200_OK
assert "challenge" in response.json().keys()
assert "expires" in response.json().keys()
def test_sign_up_fails_no_input(api_client):
response = api_client.post(reverse_lazy("api_signup"))
errors = response.json()
assert response.status_code == HTTP_400_BAD_REQUEST
assert "This field is required." in errors["challenge"]
assert "This field is required." in errors["address"]
assert "This field is required." in errors["signature"]
assert "This field is required." in errors["username"]
@pytest.mark.parametrize(
"addr,set_backend",
[
(VALID_MONERO_ADDRESS, set_monero_settings),
(VALID_BITCOIN_ADDRESS, set_bitcoin_settings),
],
)
def test_sign_up_fails_duplicate_address(api_client, settings, addr, set_backend):
settings.DJCL_MONERO_NETWORK = "mainnet"
set_backend(settings)
challenge = gen_challenge()
mommy.make(Address, address=addr)
response = api_client.post(
reverse_lazy("api_signup"),
{
"challenge": challenge,
"address": addr,
"signature": "something",
"username": "user",
},
)
assert response.status_code == HTTP_400_BAD_REQUEST
errors = response.json()
assert "This address already exists" in errors["address"]
@pytest.mark.parametrize(
"addr,set_backend",
[
(VALID_MONERO_ADDRESS, set_monero_settings),
(VALID_BITCOIN_ADDRESS, set_bitcoin_settings),
],
)
def test_sign_up_fails_invalid_signature(api_client, settings, addr, set_backend):
settings.DJCL_MONERO_NETWORK = "mainnet"
set_backend(settings)
challenge = gen_challenge()
with patch("django_cryptolock.api_views.verify_signature") as sig_mock:
sig_mock.return_value = False
response = api_client.post(
reverse_lazy("api_signup"),
{
"challenge": challenge,
"address": addr,
"signature": "something",
"username": "user",
},
)
assert response.status_code == HTTP_400_BAD_REQUEST
errors = response.json()
assert "Invalid signature" in errors["signature"]
@pytest.mark.parametrize(
"addr,set_backend",
[
(VALID_MONERO_ADDRESS, set_monero_settings),
(VALID_BITCOIN_ADDRESS, set_bitcoin_settings),
],
)
def test_sign_up_succeeds(api_client, settings, addr, set_backend):
settings.DJCL_MONERO_NETWORK = "mainnet"
set_backend(settings)
challenge = gen_challenge()
with patch("django_cryptolock.api_views.verify_signature") as sig_mock:
sig_mock.return_value = True
response = api_client.post(
reverse_lazy("api_signup"),
{
"challenge": challenge,
"address": addr,
"signature": "something",
"username": "user",
},
)
assert response.status_code == HTTP_201_CREATED
| [
"django.contrib.auth.get_user_model",
"model_mommy.mommy.make",
"rest_framework.test.APIClient",
"pytest.mark.parametrize",
"django.urls.reverse_lazy",
"unittest.mock.patch"
] | [((553, 569), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (567, 569), False, 'from django.contrib.auth import get_user_model\n'), ((667, 752), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""method"""', "['put', 'patch', 'delete', 'head', 'options']"], {}), "('method', ['put', 'patch', 'delete', 'head', 'options']\n )\n", (690, 752), False, 'import pytest\n'), ((1237, 1411), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""addr,set_backend,network"""', "[(VALID_MONERO_ADDRESS, set_monero_settings, 'monero'), (\n VALID_BITCOIN_ADDRESS, set_bitcoin_settings, 'bitcoin')]"], {}), "('addr,set_backend,network', [(VALID_MONERO_ADDRESS,\n set_monero_settings, 'monero'), (VALID_BITCOIN_ADDRESS,\n set_bitcoin_settings, 'bitcoin')])\n", (1260, 1411), False, 'import pytest\n'), ((2280, 2454), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""addr,set_backend,network"""', "[(VALID_MONERO_ADDRESS, set_monero_settings, 'monero'), (\n VALID_BITCOIN_ADDRESS, set_bitcoin_settings, 'bitcoin')]"], {}), "('addr,set_backend,network', [(VALID_MONERO_ADDRESS,\n set_monero_settings, 'monero'), (VALID_BITCOIN_ADDRESS,\n set_bitcoin_settings, 'bitcoin')])\n", (2303, 2454), False, 'import pytest\n'), ((3234, 3319), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""method"""', "['put', 'patch', 'delete', 'head', 'options']"], {}), "('method', ['put', 'patch', 'delete', 'head', 'options']\n )\n", (3257, 3319), False, 'import pytest\n'), ((4214, 4355), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""addr,set_backend"""', '[(VALID_MONERO_ADDRESS, set_monero_settings), (VALID_BITCOIN_ADDRESS,\n set_bitcoin_settings)]'], {}), "('addr,set_backend', [(VALID_MONERO_ADDRESS,\n set_monero_settings), (VALID_BITCOIN_ADDRESS, set_bitcoin_settings)])\n", (4237, 4355), False, 'import pytest\n'), ((4991, 5132), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""addr,set_backend"""', '[(VALID_MONERO_ADDRESS, set_monero_settings), (VALID_BITCOIN_ADDRESS,\n set_bitcoin_settings)]'], {}), "('addr,set_backend', [(VALID_MONERO_ADDRESS,\n set_monero_settings), (VALID_BITCOIN_ADDRESS, set_bitcoin_settings)])\n", (5014, 5132), False, 'import pytest\n'), ((5873, 6014), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""addr,set_backend"""', '[(VALID_MONERO_ADDRESS, set_monero_settings), (VALID_BITCOIN_ADDRESS,\n set_bitcoin_settings)]'], {}), "('addr,set_backend', [(VALID_MONERO_ADDRESS,\n set_monero_settings), (VALID_BITCOIN_ADDRESS, set_bitcoin_settings)])\n", (5896, 6014), False, 'import pytest\n'), ((652, 663), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (661, 663), False, 'from rest_framework.test import APIClient\n'), ((1704, 1720), 'model_mommy.mommy.make', 'mommy.make', (['User'], {}), '(User)\n', (1714, 1720), False, 'from model_mommy import mommy\n'), ((1725, 1782), 'model_mommy.mommy.make', 'mommy.make', (['Address'], {'user': 'user', 'address': 'addr', 'network': 'net'}), '(Address, user=user, address=addr, network=net)\n', (1735, 1782), False, 'from model_mommy import mommy\n'), ((2731, 2747), 'model_mommy.mommy.make', 'mommy.make', (['User'], {}), '(User)\n', (2741, 2747), False, 'from model_mommy import mommy\n'), ((2752, 2809), 'model_mommy.mommy.make', 'mommy.make', (['Address'], {'user': 'user', 'address': 'addr', 'network': 'net'}), '(Address, user=user, address=addr, network=net)\n', (2762, 2809), False, 'from model_mommy import mommy\n'), ((4576, 4609), 'model_mommy.mommy.make', 'mommy.make', 
(['Address'], {'address': 'addr'}), '(Address, address=addr)\n', (4586, 4609), False, 'from model_mommy import mommy\n'), ((873, 904), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_token_login"""'], {}), "('api_token_login')\n", (885, 904), False, 'from django.urls import reverse_lazy\n'), ((1058, 1089), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_token_login"""'], {}), "('api_token_login')\n", (1070, 1089), False, 'from django.urls import reverse_lazy\n'), ((1825, 1888), 'unittest.mock.patch', 'patch', (['f"""django_cryptolock.backends.verify_{network}_signature"""'], {}), "(f'django_cryptolock.backends.verify_{network}_signature')\n", (1830, 1888), False, 'from unittest.mock import patch\n'), ((2852, 2915), 'unittest.mock.patch', 'patch', (['f"""django_cryptolock.backends.verify_{network}_signature"""'], {}), "(f'django_cryptolock.backends.verify_{network}_signature')\n", (2857, 2915), False, 'from unittest.mock import patch\n'), ((3436, 3462), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_signup"""'], {}), "('api_signup')\n", (3448, 3462), False, 'from django.urls import reverse_lazy\n'), ((3612, 3638), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_signup"""'], {}), "('api_signup')\n", (3624, 3638), False, 'from django.urls import reverse_lazy\n'), ((3861, 3887), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_signup"""'], {}), "('api_signup')\n", (3873, 3887), False, 'from django.urls import reverse_lazy\n'), ((4650, 4676), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_signup"""'], {}), "('api_signup')\n", (4662, 4676), False, 'from django.urls import reverse_lazy\n'), ((5359, 5412), 'unittest.mock.patch', 'patch', (['"""django_cryptolock.api_views.verify_signature"""'], {}), "('django_cryptolock.api_views.verify_signature')\n", (5364, 5412), False, 'from unittest.mock import patch\n'), ((6226, 6279), 'unittest.mock.patch', 'patch', (['"""django_cryptolock.api_views.verify_signature"""'], {}), "('django_cryptolock.api_views.verify_signature')\n", (6231, 6279), False, 'from unittest.mock import patch\n'), ((1988, 2019), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_token_login"""'], {}), "('api_token_login')\n", (2000, 2019), False, 'from django.urls import reverse_lazy\n'), ((3014, 3045), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_token_login"""'], {}), "('api_token_login')\n", (3026, 3045), False, 'from django.urls import reverse_lazy\n'), ((5512, 5538), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_signup"""'], {}), "('api_signup')\n", (5524, 5538), False, 'from django.urls import reverse_lazy\n'), ((6378, 6404), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""api_signup"""'], {}), "('api_signup')\n", (6390, 6404), False, 'from django.urls import reverse_lazy\n')] |
# -*- coding: utf-8 -*-
from models import User
from tests.test import AppengineTestCase
import constants
class UserTestCase(AppengineTestCase):
def test_has_profile(self):
self.assertFalse(User.has_profile(None, None),
'None user and profile should not have any profile')
self.assertFalse(User.has_profile('', []),
'Empty user and profile should not have any profile')
self.assertFalse(User.has_profile('', [constants.PROFILE_ADMIN]),
'Non existing user should not have profiles')
user = User(email='<EMAIL>', profiles=[
constants.PROFILE_ADMIN
])
user_key = user.put()
self.assertTrue(User.has_profile(
'<EMAIL>', [constants.PROFILE_ADMIN]
), 'Profiles have been assigned')
user_key.delete()
| [
"models.User.has_profile",
"models.User"
] | [((603, 660), 'models.User', 'User', ([], {'email': '"""<EMAIL>"""', 'profiles': '[constants.PROFILE_ADMIN]'}), "(email='<EMAIL>', profiles=[constants.PROFILE_ADMIN])\n", (607, 660), False, 'from models import User\n'), ((205, 233), 'models.User.has_profile', 'User.has_profile', (['None', 'None'], {}), '(None, None)\n', (221, 233), False, 'from models import User\n'), ((338, 362), 'models.User.has_profile', 'User.has_profile', (['""""""', '[]'], {}), "('', [])\n", (354, 362), False, 'from models import User\n'), ((468, 515), 'models.User.has_profile', 'User.has_profile', (['""""""', '[constants.PROFILE_ADMIN]'], {}), "('', [constants.PROFILE_ADMIN])\n", (484, 515), False, 'from models import User\n'), ((737, 791), 'models.User.has_profile', 'User.has_profile', (['"""<EMAIL>"""', '[constants.PROFILE_ADMIN]'], {}), "('<EMAIL>', [constants.PROFILE_ADMIN])\n", (753, 791), False, 'from models import User\n')] |
# ----------------------------------------------------------------------------
# Copyright (c) 2020, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import importlib
from qiime2.plugin import (
Choices,
Plugin,
Citations,
Range,
Int,
Str,
List,
Bool,
Float
)
from q2_types.feature_table import FeatureTable, PresenceAbsence
from q2_types.feature_data import FeatureData, Sequence
from q2_types.sample_data import SampleData
from q2_types.per_sample_sequences import (
SequencesWithQuality,
PairedEndSequencesWithQuality,
)
import q2_phylogenomics
import q2_phylogenomics._prinseq
import q2_phylogenomics._filter
import q2_phylogenomics._assemble
import q2_phylogenomics._pipelines
from q2_types.bowtie2 import Bowtie2Index
from q2_types.feature_data import DNASequencesDirectoryFormat
from q2_phylogenomics._format import (GenBankFormat, GenBankDirFmt,
BAMFormat, SAMFormat,
BAMFilesDirFmt, SAMFilesDirFmt,
PileUpTSVFormat, PileUpFilesDirFmt,
FASTAFilesDirFmt)
from q2_phylogenomics._types import (AlignmentMap, PileUp, ConsensusSequences,
ReferenceSequence)
citations = Citations.load('citations.bib', package='q2_phylogenomics')
plugin = Plugin(
name='phylogenomics',
version=q2_phylogenomics.__version__,
website='https://github.com/qiime2/q2-phylogenomics',
package='q2_phylogenomics',
description='A QIIME 2 plugin for phylogenomics analyses.',
short_description='A QIIME 2 plugin for phylogenomics analyses.',
)
plugin.register_formats(GenBankFormat, GenBankDirFmt, citations=[])
plugin.register_formats(BAMFormat, SAMFormat, BAMFilesDirFmt, SAMFilesDirFmt,
PileUpTSVFormat, PileUpFilesDirFmt,
citations=[])
plugin.register_formats(FASTAFilesDirFmt)
plugin.register_semantic_types(AlignmentMap, PileUp, ConsensusSequences,
ReferenceSequence)
# before release we want to use GenBank format for this,
# but I think it's broken with skbio < 0.5.6 - I get this
# error when trying to load a genbank file:
# ValueError: cannot set WRITEABLE flag to True of this array
# plugin.register_semantic_type_to_format(ReferenceSequence, GenBankDirFmt)
plugin.register_semantic_type_to_format(ReferenceSequence,
DNASequencesDirectoryFormat)
plugin.register_semantic_type_to_format(SampleData[PileUp], PileUpFilesDirFmt)
plugin.register_semantic_type_to_format(SampleData[AlignmentMap],
BAMFilesDirFmt)
plugin.register_semantic_type_to_format(SampleData[ConsensusSequences],
FASTAFilesDirFmt)
importlib.import_module('q2_phylogenomics._transformers')
prinseq_input = {'demultiplexed_sequences': 'The sequences to be trimmed.'}
prinseq_output = {'trimmed_sequences': 'The resulting trimmed sequences.'}
prinseq_parameters = {
'trim_qual_right': Int % Range(1, None),
'trim_qual_type': Str % Choices(['min', 'mean', 'max', 'sum']),
'trim_qual_window': Int % Range(1, None),
'min_qual_mean': Int % Range(1, None),
'min_len': Int % Range(1, None),
'lc_method': Str % Choices(['dust', 'entropy']),
'lc_threshold': Int % Range(0, 100),
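    # derep accepts any combination of the single-character codes '1'-'5'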
'derep': List[Str % Choices(list('12345'))]}
prinseq_parameter_descriptions = {
'trim_qual_right': 'Trim sequence by quality score from the 3\'-end with '
'this threshold score.',
'trim_qual_type': 'Type of quality score calculation to use. Allowed '
'options are min, mean, max and sum.',
'trim_qual_window': 'The sliding window size used to calculate quality '
'score by type. To stop at the first base that fails '
'the rule defined, use a window size of 1.',
'min_qual_mean': 'Filter sequence with quality score mean below '
'min_qual_mean.',
'min_len': 'Filter sequence shorter than min_len.',
'lc_method': 'Method to filter low complexity sequences.',
'lc_threshold': 'The threshold value used to filter sequences by sequence '
'complexity. The dust method uses this as maximum allowed '
'score and the entropy method as minimum allowed value.',
'derep': 'Type of duplicates to filter. Use integers for multiple '
'selections (e.g. 124 to use type 1, 2 and 4). The order does '
'not matter. Option 2 and 3 will set 1 and option 5 will set 4 '
'as these are subsets of the other option.\n\n1 (exact '
'duplicate), 2 (5\' duplicate), 3 (3\' duplicate), 4 (reverse '
'complement exact duplicate), 5 (reverse complement 5\'/3\' '
'duplicate).'
}
map_paired_reads_input_descriptions = {
'demux': 'The demultiplexed sequences to map to the reference.',
'database': 'The reference sequence(s).'
}
map_paired_reads_parameters = {
'mismatches_per_seed': Int % Range(0, 1, inclusive_end=True),
'ceil_coefficient': Float,
'n_threads': Int % Range(1, None),
'mapped_only': Bool}
map_paired_reads_parameter_descriptions = {
'mismatches_per_seed': 'Max mismatches allowed in seed alignment.',
'ceil_coefficient': 'Coefficient used to specify the bowtie '
'function L(0,x) for max number of non-A/C/G/T '
'characters allowed in an alignment.',
'n_threads': 'Number of alignment threads to launch.',
'mapped_only': 'Retain only records for reads that were mapped '
'to the database in the output files.'
}
map_paired_reads_output_descriptions = {
'alignment_maps': 'Results of mapping reads in each input sample '
'to the provided database.'
}
filter_input = {'demultiplexed_sequences': 'The sequences to be trimmed.',
'database': 'Bowtie2 indexed database.'}
filter_output = {'filtered_sequences': 'The resulting filtered sequences.'}
filter_parameters = {
'n_threads': Int % Range(1, None),
'mode': Str % Choices(['local', 'global']),
'sensitivity': Str % Choices([
'very-fast', 'fast', 'sensitive', 'very-sensitive']),
'ref_gap_open_penalty': Int % Range(1, None),
'ref_gap_ext_penalty': Int % Range(1, None),
'exclude_seqs': Bool,
}
filter_parameter_descriptions = {
'n_threads': 'Number of alignment threads to launch.',
'mode': 'Bowtie2 alignment settings. See bowtie2 manual for more details.',
'sensitivity': 'Bowtie2 alignment sensitivity. See bowtie2 manual for '
'details.',
'ref_gap_open_penalty': 'Reference gap open penalty.',
'ref_gap_ext_penalty': 'Reference gap extend penalty.',
'exclude_seqs': 'Exclude sequences that align to reference. Set this '
'option to False to exclude sequences that do not align '
'to the reference database.'
}
filter_citations = [citations['langmead2012fast'],
citations['heng2009samtools']]
filter_description = (
'Filter out (or keep) sequences that align to reference database, using '
'bowtie2 and samtools. This method can be used to filter out human DNA '
'sequences and other contaminant in any FASTQ sequence data (e.g., '
'shotgun genome or amplicon sequence data), or alternatively (when '
'exclude_seqs is False) to only keep sequences that do align to the '
'reference.')
consensus_sequence_parameter_descriptions = {
'min_depth': 'The minimum depth to call a consensus base.'
}
consensus_sequence_parameters = {
'min_depth': Int
}
consensus_sequence_output_descriptions = {
'table': 'Table describing which consensus sequences are '
'observed in each sample.',
'consensus_sequences': 'Mapping of consensus sequence identifiers '
'to consensus sequences.'
}
consensus_sequence_outputs = [
('table', FeatureTable[PresenceAbsence]),
('consensus_sequences', FeatureData[Sequence])]
make_pileup_parameters = {
'min_mapq': Int % Range(0, None),
'max_depth': Int % Range(1, None)
}
make_pileup_parameter_descriptions = {
'min_mapq': 'The minimum mapQ to consider an alignment.',
'max_depth': 'The max per-file depth.'
}
plugin.pipelines.register_function(
function=q2_phylogenomics._pipelines.filter_clean_consensus,
inputs={
'demultiplexed_sequences': SampleData[PairedEndSequencesWithQuality],
'alignment_ref': Bowtie2Index,
'filter_ref': Bowtie2Index},
parameters={
'enable_cutadapt': Bool,
'enable_prinseq': Bool,
'cutadapt_cores': Int % Range(1, None),
'cutadapt_adapter_f': List[Str],
'cutadapt_front_f': List[Str],
'cutadapt_anywhere_f': List[Str],
'cutadapt_adapter_r': List[Str],
'cutadapt_front_r': List[Str],
'cutadapt_anywhere_r': List[Str],
'cutadapt_error_rate': Float % Range(0, 1, inclusive_start=True,
inclusive_end=True),
'cutadapt_indels': Bool,
'cutadapt_times': Int % Range(1, None),
'cutadapt_overlap': Int % Range(1, None),
'cutadapt_match_read_wildcards': Bool,
'cutadapt_match_adapter_wildcards': Bool,
'cutadapt_minimum_length': Int % Range(1, None),
'cutadapt_discard_untrimmed': Bool,
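        # prefix-merge the per-tool parameter dicts so pipeline options stay
        # namespaced by the tool that consumes them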
**{'bowtie2_' + k: v for k, v in filter_parameters.items()},
**{'bowtie2_' + k: v for k, v in map_paired_reads_parameters.items()
if k != 'n_threads'},
**{'prinseq_' + k: v for k, v in prinseq_parameters.items()},
**{'samtools_' + k: v for k, v in make_pileup_parameters.items()},
**{'consensus_' + k: v for k, v in
consensus_sequence_parameters.items()}},
outputs=consensus_sequence_outputs + [
('filtered_sequences', SampleData[PairedEndSequencesWithQuality]),
('clean_sequences', SampleData[PairedEndSequencesWithQuality])],
input_descriptions={
'demultiplexed_sequences': 'Demultiplexed sequences.',
'alignment_ref': 'Reference genome(s) to use for alignment.',
'filter_ref': 'Reference sequences to use for filtering demultiplexed '
'sequences with bowtie2. Will remove sequences that hit '
'reference if exclude_seqs is True, otherwise remove '
'sequences that do not hit the reference. If none is '
'provided, this filter is not performed. This step is '
'typically performed to remove host contaminant reads.'},
parameter_descriptions={
'enable_cutadapt': 'Enable/disable adapter trimming with cutadapt.',
'enable_prinseq': 'Enable/disable quality trimming with prinseq.',
'cutadapt_cores': 'Number of CPU cores to use.',
'cutadapt_adapter_f': (
'Sequence of an adapter ligated to the 3\' end. The '
'adapter and any subsequent bases are trimmed. If a `$` '
'is appended, the adapter is only found if it is at the '
'end of the read. Search in forward read. If your '
'sequence of interest is "framed" by a 5\' and a 3\' '
'adapter, use this parameter to define a "linked" primer '
'- see https://cutadapt.readthedocs.io for complete '
'details.'),
'cutadapt_front_f': (
'Sequence of an adapter ligated to the 5\' end. The '
'adapter and any preceding bases are trimmed. Partial '
'matches at the 5\' end are allowed. If a `^` character '
'is prepended, the adapter is only found if it is at the '
'beginning of the read. Search in forward read.'),
'cutadapt_anywhere_f': (
'Sequence of an adapter that may be ligated to the 5\' '
'or 3\' end. Both types of matches as described under '
'`adapter` and `front` are allowed. If the first base '
'of the read is part of the match, the behavior is as '
'with `front`, otherwise as with `adapter`. This option '
'is mostly for rescuing failed library preparations - '
'do not use if you know which end your adapter was '
'ligated to. Search in forward read.'),
'cutadapt_adapter_r': (
'Sequence of an adapter ligated to the 3\' end. The '
'adapter and any subsequent bases are trimmed. If a `$` '
'is appended, the adapter is only found if it is at the '
'end of the read. Search in reverse read. If your '
'sequence of interest is "framed" by a 5\' and a 3\' '
'adapter, use this parameter to define a "linked" primer '
'- see https://cutadapt.readthedocs.io for complete details.'),
'cutadapt_front_r': (
'Sequence of an adapter ligated to the 5\' end. The '
'adapter and any preceding bases are trimmed. Partial '
'matches at the 5\' end are allowed. If a `^` character '
'is prepended, the adapter is only found if it is at the '
'beginning of the read. Search in reverse read.'),
'cutadapt_anywhere_r': (
'Sequence of an adapter that may be ligated to the 5\' '
'or 3\' end. Both types of matches as described under '
'`adapter` and `front` are allowed. If the first base '
'of the read is part of the match, the behavior is as '
'with `front`, otherwise as with `adapter`. This '
'option is mostly for rescuing failed library '
'preparations - do not use if you know which end your '
'adapter was ligated to. Search in reverse read.'),
'cutadapt_error_rate': 'Maximum allowed error rate.',
'cutadapt_indels': 'Allow insertions or deletions of bases when '
'matching adapters.',
'cutadapt_times': 'Remove multiple occurrences of an adapter if it is '
'repeated, up to `times` times.',
'cutadapt_overlap': 'Require at least `overlap` bases of overlap '
'between read and adapter for an adapter to be '
'found.',
'cutadapt_match_read_wildcards': 'Interpret IUPAC wildcards (e.g., N) '
'in reads.',
'cutadapt_match_adapter_wildcards': 'Interpret IUPAC wildcards (e.g., '
'N) in adapters.',
'cutadapt_minimum_length': (
'Discard reads shorter than specified value. Note, the cutadapt '
'default of 0 has been overridden, because that value produces '
'empty sequence records.'),
'cutadapt_discard_untrimmed': 'Discard reads in which no adapter was '
'found.',
**{'bowtie2_' + k: v for k, v in
filter_parameter_descriptions.items()},
**{'bowtie2_' + k: v for k, v in
map_paired_reads_parameter_descriptions.items()
if k != 'n_threads'},
**{'prinseq_' + k: v for k, v in
prinseq_parameter_descriptions.items()},
**{'samtools_' + k: v for k, v in
make_pileup_parameter_descriptions.items()},
**{'consensus_' + k: v for k, v in
consensus_sequence_parameter_descriptions.items()}},
output_descriptions={
**consensus_sequence_output_descriptions,
'filtered_sequences': 'Sequences after filtering with bowtie2.',
'clean_sequences': 'Squeaky clean sequences after all QC steps.'},
name='Filter, trim, clean, map to reference, and generate consensus.',
description=(
'This pipeline performs a sequence of quality control steps, followed '
'by alignment to a reference genome and generation of a consensus '
'alignment. Input sequences are:\n'
'1. (optionally) filtered by alignment against a reference database '
'with bowtie2.\n'
'2. (optionally) trimmed with cutadapt to remove adapter sequences.\n'
'3. (optionally) Trimmed with prinseq to remove low-quality '
'nucleotides.\n'
'4. Aligned to a reference genome with bowtie2, sorted, and '
'deduplicated.\n'
'5. A consensus alignment is generated using ivar.'),
citations=filter_citations + [
citations['schmieder_prinseq'], citations['langmead2012fast'],
citations['heng2009samtools'], citations['Grubaugh2019ivar']]
)
plugin.methods.register_function(
function=q2_phylogenomics._prinseq.prinseq_single,
inputs={'demultiplexed_sequences': SampleData[SequencesWithQuality]},
parameters=prinseq_parameters,
outputs=[('trimmed_sequences', SampleData[SequencesWithQuality])],
input_descriptions=prinseq_input,
parameter_descriptions=prinseq_parameter_descriptions,
output_descriptions=prinseq_output,
name='Filter and trim demultiplexed single-end sequences with PRINSEQ.',
description='Filter and trim demultiplexed single-end FASTQ sequences '
'based on quality scores using PRINSEQ-lite.',
citations=[citations['schmieder_prinseq']]
)
plugin.methods.register_function(
function=q2_phylogenomics._prinseq.prinseq_paired,
inputs={
'demultiplexed_sequences': SampleData[PairedEndSequencesWithQuality]},
parameters=prinseq_parameters,
outputs=[('trimmed_sequences', SampleData[PairedEndSequencesWithQuality])],
input_descriptions=prinseq_input,
parameter_descriptions=prinseq_parameter_descriptions,
output_descriptions=prinseq_output,
name='Filter and trim demultiplexed paired-end sequences with PRINSEQ.',
description='Filter and trim demultiplexed paired-end FASTQ sequences '
'based on quality scores using PRINSEQ-lite.',
citations=[citations['schmieder_prinseq']]
)
plugin.methods.register_function(
function=q2_phylogenomics._filter.filter_single,
inputs={'demultiplexed_sequences': SampleData[SequencesWithQuality],
'database': Bowtie2Index},
parameters=filter_parameters,
outputs=[('filtered_sequences', SampleData[SequencesWithQuality])],
input_descriptions=filter_input,
parameter_descriptions=filter_parameter_descriptions,
output_descriptions=filter_output,
name='Filter single-end sequences by alignment to reference database.',
description=filter_description,
citations=filter_citations
)
plugin.methods.register_function(
function=q2_phylogenomics._filter.filter_paired,
inputs={
'demultiplexed_sequences': SampleData[PairedEndSequencesWithQuality],
'database': Bowtie2Index},
parameters=filter_parameters,
outputs=[
('filtered_sequences', SampleData[PairedEndSequencesWithQuality])],
input_descriptions=filter_input,
parameter_descriptions=filter_parameter_descriptions,
output_descriptions=filter_output,
name='Filter paired-end sequences by alignment to reference database.',
description=filter_description,
citations=filter_citations
)
plugin.methods.register_function(
function=q2_phylogenomics._filter.bowtie2_build,
inputs={'sequences': FeatureData[Sequence]},
parameters={'n_threads': Int % Range(1, None)},
outputs=[('database', Bowtie2Index)],
input_descriptions={
'sequences': 'Reference sequences used to build bowtie2 index.'},
parameter_descriptions={'n_threads': 'Number of threads to launch'},
output_descriptions={'database': 'Bowtie2 index.'},
name='Build bowtie2 index from reference sequences.',
description='Build bowtie2 index from reference sequences.',
citations=[citations['langmead2012fast']]
)
plugin.methods.register_function(
function=q2_phylogenomics._assemble.map_paired_reads,
inputs={'demux': SampleData[PairedEndSequencesWithQuality],
'database': Bowtie2Index},
parameters=map_paired_reads_parameters,
outputs=[('alignment_maps', SampleData[AlignmentMap])],
input_descriptions=map_paired_reads_input_descriptions,
parameter_descriptions=map_paired_reads_parameter_descriptions,
output_descriptions=map_paired_reads_output_descriptions,
name='Map paired end reads.',
description='Map paired end reads to a database.',
citations=[citations['langmead2012fast']]
)
sort_alignment_maps_input_descriptions = {
'unsorted': 'The unsorted alignment maps.'
}
sort_alignment_maps_output_descriptions = {
'sorted': 'The sorted alignment maps.'
}
plugin.methods.register_function(
function=q2_phylogenomics._assemble.sort_alignment_maps,
inputs={'unsorted': SampleData[AlignmentMap]},
parameters={},
outputs=[('sorted', SampleData[AlignmentMap])],
input_descriptions=sort_alignment_maps_input_descriptions,
parameter_descriptions={},
output_descriptions=sort_alignment_maps_output_descriptions,
name='Sort alignment maps.',
description='Sort alignment maps by reference start position.',
citations=[citations['heng2009samtools']]
)
remove_duplicates_input_descriptions = {
'sorted': 'The sorted alignment maps.'
}
remove_duplicates_output_descriptions = {
'duplicate_filtered': 'The sorted and filtered alignment maps.'
}
plugin.methods.register_function(
function=q2_phylogenomics._assemble.remove_duplicates,
inputs={'sorted': SampleData[AlignmentMap]},
parameters={},
outputs=[('duplicate_filtered', SampleData[AlignmentMap])],
input_descriptions=remove_duplicates_input_descriptions,
parameter_descriptions={},
output_descriptions=remove_duplicates_output_descriptions,
name='Remove duplicates.',
description='Remove duplicate reads from alignment maps.',
citations=[citations['heng2009samtools']]
)
make_pileup_input_descriptions = {
'sorted': 'Sorted alignment maps.',
'reference': 'The reference sequence'
}
make_pileup_output_descriptions = {
'pileups': 'The resulting PileUp data.'
}
plugin.methods.register_function(
function=q2_phylogenomics._assemble.make_pileups,
inputs={'sorted': SampleData[AlignmentMap], # need a sorted property?
# the following should become type ReferenceSequence
# or somehow be integrated with the Bowtie Index
'reference': Bowtie2Index},
parameters=make_pileup_parameters,
outputs=[('pileups', SampleData[PileUp])],
input_descriptions=make_pileup_input_descriptions,
parameter_descriptions=make_pileup_parameter_descriptions,
output_descriptions=make_pileup_output_descriptions,
name='Create PileUp files',
description='Create PileUp Files from sorted alignment maps.',
citations=[citations['heng2009samtools']]
)
consensus_sequence_input_descriptions = {
'pileups': 'The PileUp data.'
}
plugin.methods.register_function(
function=q2_phylogenomics._assemble.consensus_sequence,
inputs={'pileups': SampleData[PileUp]},
parameters=consensus_sequence_parameters,
outputs=consensus_sequence_outputs,
input_descriptions=consensus_sequence_input_descriptions,
parameter_descriptions=consensus_sequence_parameter_descriptions,
output_descriptions=consensus_sequence_output_descriptions,
name='',
description='',
citations=[citations['Grubaugh2019ivar']]
)
| [
"qiime2.plugin.Range",
"importlib.import_module",
"qiime2.plugin.Choices",
"qiime2.plugin.Plugin",
"qiime2.plugin.Citations.load"
] | [((1504, 1563), 'qiime2.plugin.Citations.load', 'Citations.load', (['"""citations.bib"""'], {'package': '"""q2_phylogenomics"""'}), "('citations.bib', package='q2_phylogenomics')\n", (1518, 1563), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((1574, 1868), 'qiime2.plugin.Plugin', 'Plugin', ([], {'name': '"""phylogenomics"""', 'version': 'q2_phylogenomics.__version__', 'website': '"""https://github.com/qiime2/q2-phylogenomics"""', 'package': '"""q2_phylogenomics"""', 'description': '"""A QIIME 2 plugin for phylogenomics analyses."""', 'short_description': '"""A QIIME 2 plugin for phylogenomics analyses."""'}), "(name='phylogenomics', version=q2_phylogenomics.__version__, website=\n 'https://github.com/qiime2/q2-phylogenomics', package=\n 'q2_phylogenomics', description=\n 'A QIIME 2 plugin for phylogenomics analyses.', short_description=\n 'A QIIME 2 plugin for phylogenomics analyses.')\n", (1580, 1868), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3045, 3102), 'importlib.import_module', 'importlib.import_module', (['"""q2_phylogenomics._transformers"""'], {}), "('q2_phylogenomics._transformers')\n", (3068, 3102), False, 'import importlib\n'), ((3308, 3322), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (3313, 3322), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3352, 3390), 'qiime2.plugin.Choices', 'Choices', (["['min', 'mean', 'max', 'sum']"], {}), "(['min', 'mean', 'max', 'sum'])\n", (3359, 3390), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3422, 3436), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (3427, 3436), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3465, 3479), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (3470, 3479), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3502, 3516), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (3507, 3516), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3541, 3569), 'qiime2.plugin.Choices', 'Choices', (["['dust', 'entropy']"], {}), "(['dust', 'entropy'])\n", (3548, 3569), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((3597, 3610), 'qiime2.plugin.Range', 'Range', (['(0)', '(100)'], {}), '(0, 100)\n', (3602, 3610), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((5352, 5383), 'qiime2.plugin.Range', 'Range', (['(0)', '(1)'], {'inclusive_end': '(True)'}), '(0, 1, inclusive_end=True)\n', (5357, 5383), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((5439, 5453), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (5444, 5453), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((6408, 6422), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (6413, 6422), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((6442, 6470), 'qiime2.plugin.Choices', 'Choices', (["['local', 'global']"], {}), "(['local', 'global'])\n", (6449, 6470), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((6497, 6558), 'qiime2.plugin.Choices', 'Choices', (["['very-fast', 'fast', 'sensitive', 'very-sensitive']"], {}), "(['very-fast', 'fast', 'sensitive', 'very-sensitive'])\n", (6504, 6558), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((6603, 6617), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (6608, 6617), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((6652, 6666), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (6657, 6666), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((8445, 8459), 'qiime2.plugin.Range', 'Range', (['(0)', 'None'], {}), '(0, None)\n', (8450, 8459), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((8484, 8498), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (8489, 8498), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((9032, 9046), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (9037, 9046), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((9331, 9384), 'qiime2.plugin.Range', 'Range', (['(0)', '(1)'], {'inclusive_start': '(True)', 'inclusive_end': '(True)'}), '(0, 1, inclusive_start=True, inclusive_end=True)\n', (9336, 9384), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((9496, 9510), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (9501, 9510), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((9546, 9560), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (9551, 9560), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((9700, 9714), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (9705, 9714), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n'), ((19701, 19715), 'qiime2.plugin.Range', 'Range', (['(1)', 'None'], {}), '(1, None)\n', (19706, 19715), False, 'from qiime2.plugin import Choices, Plugin, Citations, Range, Int, Str, List, Bool, Float\n')]
import numpy as np
from Utils import Utils
class DataLoader:
@staticmethod
def load_train_test_jobs(train_path, test_path, iter_id):
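        # each .npz bundle packs all repetitions at once: 'x' is
        # (units, covariates, iterations) while 't', 'e' and 'yf' are
        # (units, iterations), so a single repetition is sliced via iter_id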
train_arr = np.load(train_path)
test_arr = np.load(test_path)
np_train_X = train_arr['x'][:, :, iter_id]
np_train_T = Utils.convert_to_col_vector(train_arr['t'][:, iter_id])
np_train_e = Utils.convert_to_col_vector(train_arr['e'][:, iter_id])
np_train_yf = Utils.convert_to_col_vector(train_arr['yf'][:, iter_id])
# train_X = np.concatenate((np_train_X, np_train_e, np_train_yf), axis=1)
np_test_X = test_arr['x'][:, :, iter_id]
np_test_T = Utils.convert_to_col_vector(test_arr['t'][:, iter_id])
np_test_e = Utils.convert_to_col_vector(test_arr['e'][:, iter_id])
np_test_yf = Utils.convert_to_col_vector(test_arr['yf'][:, iter_id])
# test_X = np.concatenate((np_test_X, np_test_e, np_test_yf), axis=1)
print("Numpy Train Statistics:")
print(np_train_X.shape)
print(np_train_T.shape)
# print("Numpy Val Statistics:")
# print(val_X.shape)
# print(val_T.shape)
print(" Numpy Test Statistics:")
print(np_test_X.shape)
print(np_test_T.shape)
# X -> x1.. x17, e, yf -> (19, 1)
return np_train_X, np_train_T, np_train_e, np_train_yf, \
np_test_X, np_test_T, np_test_e, np_test_yf
| [
"numpy.load",
"Utils.Utils.convert_to_col_vector"
] | [((164, 183), 'numpy.load', 'np.load', (['train_path'], {}), '(train_path)\n', (171, 183), True, 'import numpy as np\n'), ((203, 221), 'numpy.load', 'np.load', (['test_path'], {}), '(test_path)\n', (210, 221), True, 'import numpy as np\n'), ((294, 349), 'Utils.Utils.convert_to_col_vector', 'Utils.convert_to_col_vector', (["train_arr['t'][:, iter_id]"], {}), "(train_arr['t'][:, iter_id])\n", (321, 349), False, 'from Utils import Utils\n'), ((371, 426), 'Utils.Utils.convert_to_col_vector', 'Utils.convert_to_col_vector', (["train_arr['e'][:, iter_id]"], {}), "(train_arr['e'][:, iter_id])\n", (398, 426), False, 'from Utils import Utils\n'), ((449, 505), 'Utils.Utils.convert_to_col_vector', 'Utils.convert_to_col_vector', (["train_arr['yf'][:, iter_id]"], {}), "(train_arr['yf'][:, iter_id])\n", (476, 505), False, 'from Utils import Utils\n'), ((659, 713), 'Utils.Utils.convert_to_col_vector', 'Utils.convert_to_col_vector', (["test_arr['t'][:, iter_id]"], {}), "(test_arr['t'][:, iter_id])\n", (686, 713), False, 'from Utils import Utils\n'), ((734, 788), 'Utils.Utils.convert_to_col_vector', 'Utils.convert_to_col_vector', (["test_arr['e'][:, iter_id]"], {}), "(test_arr['e'][:, iter_id])\n", (761, 788), False, 'from Utils import Utils\n'), ((810, 865), 'Utils.Utils.convert_to_col_vector', 'Utils.convert_to_col_vector', (["test_arr['yf'][:, iter_id]"], {}), "(test_arr['yf'][:, iter_id])\n", (837, 865), False, 'from Utils import Utils\n')] |
# define some constants to be used in unittests
from datetime import datetime
import unittest
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from jose import jwt
from jose.constants import ALGORITHMS
from jose.backends import RSAKey
from pyramid import testing
import requests_mock
# A testing request enhanced with some WebOb properties
class DummyRequest(testing.DummyRequest):
@property
def authorization(self):
authorization = self.headers.get('Authorization')
try:
from webob.descriptors import parse_auth
return parse_auth(authorization)
except Exception:
pass
return None
# create an encoded jwt with keys from this module
def gen_jwt(claims):
return jwt.encode(claims, PRIVKEY, ALGORITHMS.RS256)
# helper to generate a fresh RSA keypair, exported as JWK-style dicts sharing the key id 'keyid'
def gen_new_keys():
key = rsa.generate_private_key(65537, 2048, default_backend())
pem = key.private_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption())
privkey = RSAKey(pem, ALGORITHMS.RS256).to_dict()
privkey['id'] = 'keyid'
for k in privkey:
if isinstance(privkey[k], bytes):
privkey[k] = privkey[k].decode('utf-8')
pkey = key.public_key()
pem = pkey.public_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.PKCS1)
pubkey = RSAKey(pem, ALGORITHMS.RS256).to_dict()
pubkey['id'] = 'keyid'
for k in pubkey:
if isinstance(pubkey[k], bytes):
pubkey[k] = pubkey[k].decode('utf-8')
return privkey, pubkey
PRIVKEY, PUBKEY = gen_new_keys()
JWKS = {
"keys": [PUBKEY]
}
ISSUER = 'https://example.com'
PROVIDER = 'https://provider.example.org'
WELL_KNOWN_OIDC_CONFIG = {
ISSUER: {
'issuer': ISSUER,
'authorization_endpoint': '{}/auth'.format(ISSUER),
'token_endpoint': '{}/token'.format(ISSUER),
'token_introspection_endpoint': '{}/introspect'.format(ISSUER),
'userinfo_endpoint': '{}/userinfo'.format(ISSUER),
'jwks_uri': '{}/jwks'.format(ISSUER),
},
PROVIDER: {
'issuer': PROVIDER,
'authorization_endpoint': '{}/auth'.format(PROVIDER),
'token_endpoint': '{}/token'.format(PROVIDER),
'token_introspection_endpoint': '{}/introspect'.format(PROVIDER),
'userinfo_endpoint': '{}/userinfo'.format(PROVIDER),
'jwks_uri': '{}/jwks'.format(PROVIDER),
}
}
USER_ID = 'example_user_id'
class FunctionalTestCase(unittest.TestCase):
def setUp(self):
from tokenstore import main
from sqlalchemy.pool import StaticPool
with requests_mock.mock() as m:
m.get('{}/.well-known/openid-configuration'.format(ISSUER),
json=WELL_KNOWN_OIDC_CONFIG[ISSUER])
m.get('{}/jwks'.format(ISSUER), json=JWKS)
m.get('{}/.well-known/openid-configuration'.format(PROVIDER),
json=WELL_KNOWN_OIDC_CONFIG[PROVIDER])
m.get('{}/jwks'.format(PROVIDER), json=JWKS)
app = main(
{},
**{
'sqlalchemy.url': 'sqlite://',
'sqlalchemy.connect_args': {'check_same_thread': False},
'sqlalchemy.poolclass': StaticPool,
'oidc.issuer': ISSUER,
'oidc.client_id': 'example_client_id',
'oidc.providers': 'provider',
'oidc.provider.issuer': PROVIDER,
'oidc.provider.client_id': 'provider_client_id',
'oidc.provider.client_secret': 'provider_secret',
'oidc.provider.metadata.name': 'Provider',
'session.factory': 'pyramid_oidc.session.SessionFactory',
'session.secret': 'session_secret',
'session.cookie_opts.secure': 'False',
'session.cookie_opts.httponly': 'False',
'session.dogpile_opts.backend': 'dogpile.cache.memory',
'session.dogpile_opts.expiration_timeout': '1200',
'openapi.spec': 'tokenstore:openapi.yaml',
'tokenstore.cryptokey': '47fc5aea29aea2f9b49ab8fdc3c87ad2da6245da2d16a32c8cac1c2ec2ffb5ac',
}
)
from ..models.meta import Base
Base.metadata.create_all(app.registry['dbsession_factory']().bind)
from webtest import TestApp
self.testapp = TestApp(app, extra_environ={'wsgi.url_scheme': 'https'})
def _user_token(self, exp=300, aud='example_client_id'):
return gen_jwt({
'iss': 'https://example.com',
'exp': int(datetime.utcnow().timestamp()) + exp,
'aud': aud,
'sub': USER_ID,
'resource_access': {
'token': {
'roles': ['user']
}
}
})
| [
"jose.backends.RSAKey",
"datetime.datetime.utcnow",
"requests_mock.mock",
"webtest.TestApp",
"cryptography.hazmat.primitives.serialization.NoEncryption",
"jose.jwt.encode",
"tokenstore.main",
"webob.descriptors.parse_auth",
"cryptography.hazmat.backends.default_backend"
] | [((869, 914), 'jose.jwt.encode', 'jwt.encode', (['claims', 'PRIVKEY', 'ALGORITHMS.RS256'], {}), '(claims, PRIVKEY, ALGORITHMS.RS256)\n', (879, 914), False, 'from jose import jwt\n'), ((1016, 1033), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (1031, 1033), False, 'from cryptography.hazmat.backends import default_backend\n'), ((4726, 4782), 'webtest.TestApp', 'TestApp', (['app'], {'extra_environ': "{'wsgi.url_scheme': 'https'}"}), "(app, extra_environ={'wsgi.url_scheme': 'https'})\n", (4733, 4782), False, 'from webtest import TestApp\n'), ((695, 720), 'webob.descriptors.parse_auth', 'parse_auth', (['authorization'], {}), '(authorization)\n', (705, 720), False, 'from webob.descriptors import parse_auth\n'), ((1219, 1247), 'cryptography.hazmat.primitives.serialization.NoEncryption', 'serialization.NoEncryption', ([], {}), '()\n', (1245, 1247), False, 'from cryptography.hazmat.primitives import serialization\n'), ((1263, 1292), 'jose.backends.RSAKey', 'RSAKey', (['pem', 'ALGORITHMS.RS256'], {}), '(pem, ALGORITHMS.RS256)\n', (1269, 1292), False, 'from jose.backends import RSAKey\n'), ((1622, 1651), 'jose.backends.RSAKey', 'RSAKey', (['pem', 'ALGORITHMS.RS256'], {}), '(pem, ALGORITHMS.RS256)\n', (1628, 1651), False, 'from jose.backends import RSAKey\n'), ((2883, 2903), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (2901, 2903), False, 'import requests_mock\n'), ((3300, 4173), 'tokenstore.main', 'main', (['{}'], {}), "({}, **{'sqlalchemy.url': 'sqlite://', 'sqlalchemy.connect_args': {\n 'check_same_thread': False}, 'sqlalchemy.poolclass': StaticPool,\n 'oidc.issuer': ISSUER, 'oidc.client_id': 'example_client_id',\n 'oidc.providers': 'provider', 'oidc.provider.issuer': PROVIDER,\n 'oidc.provider.client_id': 'provider_client_id',\n 'oidc.provider.client_secret': 'provider_secret',\n 'oidc.provider.metadata.name': 'Provider', 'session.factory':\n 'pyramid_oidc.session.SessionFactory', 'session.secret':\n 'session_secret', 'session.cookie_opts.secure': 'False',\n 'session.cookie_opts.httponly': 'False', 'session.dogpile_opts.backend':\n 'dogpile.cache.memory', 'session.dogpile_opts.expiration_timeout':\n '1200', 'openapi.spec': 'tokenstore:openapi.yaml',\n 'tokenstore.cryptokey':\n '47fc5aea29aea2f9b49ab8fdc3c87ad2da6245da2d16a32c8cac1c2ec2ffb5ac'})\n", (3304, 4173), False, 'from tokenstore import main\n'), ((4935, 4952), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4950, 4952), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 18 18:10:32 2018
@author: Tasos
"""
import yaml
from alchemist.laboratory import Laboratory
def test_randomness():
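    # four labs built from the same fixture should not all end up with an
    # identical upper shelf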
with open("alchemist/tests/fixture.yml", 'r') as shelvesIn:
fixt_loaded = yaml.load(shelvesIn)
fixt_lab1 = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
fixt_lab2 = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
fixt_lab3 = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
fixt_lab4 = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
randomness = True
if fixt_lab1.upper == fixt_lab2.upper and fixt_lab1.upper == fixt_lab3.upper:
        randomness = (fixt_lab1.upper != fixt_lab4.upper)
assert randomness
def test_can_react():
with open("alchemist/tests/fixture.yml", 'r') as shelvesIn:
fixt_loaded = yaml.load(shelvesIn)
fixt_lab = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
creact = fixt_lab.can_react('alcea', 'antialcea')
assert creact
def test_update_shelves():
with open("alchemist/tests/fixture.yml", 'r') as shelvesIn:
fixt_loaded = yaml.load(shelvesIn)
fixt_lab = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
new_upper = fixt_lab.upper[1:]
new_lower = fixt_lab.lower[1:]
fixt_lab.update_shelves('antialcea', 0)
update = (new_upper == fixt_lab.upper and new_lower == fixt_lab.lower)
assert update
def test_do_a_reaction():
with open("alchemist/tests/fixture.yml", 'r') as shelvesIn:
fixt_loaded = yaml.load(shelvesIn)
fixt_lab = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
new_upper1 = fixt_lab.upper[1:]
new_upper2 = fixt_lab.upper[:3]
new_lower = fixt_lab.lower[1:]
fixt_lab.do_a_reaction()
d_reaction = False
if new_upper1 == fixt_lab.upper or new_upper2 == fixt_lab.upper:
d_reaction = (new_lower == fixt_lab.lower)
assert d_reaction
def test_run_full_experiment():
with open("alchemist/tests/fixture.yml", 'r') as shelvesIn:
fixt_loaded = yaml.load(shelvesIn)
fixt_lab = Laboratory(fixt_loaded["lower"], fixt_loaded["upper"])
full = False
count = fixt_lab.run_full_experiment()
full = (count == 1)
assert full
def test_antianti_upper():
print("\n'test_antianti_upper' is a negative test")
upper = ["antiantialcea", "firma"]
lower = ["antifirma", 'psittaccina']
check = Laboratory(lower, upper)
def test_antianti_lower():
print("\n'test_antianti_lower' is a negative test")
upper = ["antifirma", 'psittaccina']
lower = ["antiantialcea", "firma"]
check = Laboratory(lower, upper)
| [
"alchemist.laboratory.Laboratory",
"yaml.load"
] | [((308, 362), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (318, 362), False, 'from alchemist.laboratory import Laboratory\n'), ((380, 434), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (390, 434), False, 'from alchemist.laboratory import Laboratory\n'), ((452, 506), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (462, 506), False, 'from alchemist.laboratory import Laboratory\n'), ((524, 578), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (534, 578), False, 'from alchemist.laboratory import Laboratory\n'), ((2541, 2565), 'alchemist.laboratory.Laboratory', 'Laboratory', (['lower', 'upper'], {}), '(lower, upper)\n', (2551, 2565), False, 'from alchemist.laboratory import Laboratory\n'), ((2750, 2774), 'alchemist.laboratory.Laboratory', 'Laboratory', (['lower', 'upper'], {}), '(lower, upper)\n', (2760, 2774), False, 'from alchemist.laboratory import Laboratory\n'), ((266, 286), 'yaml.load', 'yaml.load', (['shelvesIn'], {}), '(shelvesIn)\n', (275, 286), False, 'import yaml\n'), ((888, 908), 'yaml.load', 'yaml.load', (['shelvesIn'], {}), '(shelvesIn)\n', (897, 908), False, 'import yaml\n'), ((929, 983), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (939, 983), False, 'from alchemist.laboratory import Laboratory\n'), ((1180, 1200), 'yaml.load', 'yaml.load', (['shelvesIn'], {}), '(shelvesIn)\n', (1189, 1200), False, 'import yaml\n'), ((1221, 1275), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (1231, 1275), False, 'from alchemist.laboratory import Laboratory\n'), ((1615, 1635), 'yaml.load', 'yaml.load', (['shelvesIn'], {}), '(shelvesIn)\n', (1624, 1635), False, 'import yaml\n'), ((1656, 1710), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (1666, 1710), False, 'from alchemist.laboratory import Laboratory\n'), ((2153, 2173), 'yaml.load', 'yaml.load', (['shelvesIn'], {}), '(shelvesIn)\n', (2162, 2173), False, 'import yaml\n'), ((2194, 2248), 'alchemist.laboratory.Laboratory', 'Laboratory', (["fixt_loaded['lower']", "fixt_loaded['upper']"], {}), "(fixt_loaded['lower'], fixt_loaded['upper'])\n", (2204, 2248), False, 'from alchemist.laboratory import Laboratory\n')] |
import numpy as np
from pymoo.algorithms.moo.nsga2 import RankAndCrowdingSurvival
from pymoo.core.population import Population
from pymoo.core.problem import Problem
from pymoo.util.clearing import EpsilonClearing
from pymoo.util.misc import vectorized_cdist, norm_eucl_dist_by_bounds
from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting
class Selection:
def __init__(self, **kwargs) -> None:
super().__init__()
for k, v in kwargs.items():
self.__dict__[k] = v
class MinSelection(Selection):
def do(self, rem):
return rem[self.F[rem].argmin()]
class RandomSelection(Selection):
def do(self, rem):
return rem[np.random.randint(len(rem))]
class MinMaxSelection(Selection):
def __init__(self, min_eps=0.01, **kwargs) -> None:
super().__init__(min_eps=min_eps, **kwargs)
def do(self, rem):
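        # keep the best (minimum) point, epsilon-clear its neighbourhood, and,
        # if anything survives the clearing, also keep the worst (maximum)
        # remaining point as a second representative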
F, D = self.F[rem], self.D[rem][:, rem]
_min = F.argmin()
clearing = EpsilonClearing(D, self.min_eps)
clearing.select(_min)
_rem = clearing.remaining()
if len(_rem) > 0:
_max = _rem[F[_rem].argmax()]
return [rem[_min], rem[_max]]
else:
return rem[_min]
class FrontwiseSelection(Selection):
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
F = kwargs.get("F")
G = kwargs.get("G")
class MyProblem(Problem):
def __init__(self, **kwargs):
super().__init__(n_constr=0 if G is None else 1, **kwargs)
pop = Population.new(index=np.arange(len(F)), F=F)
if G is not None:
pop.set("G", G)
pop = RankAndCrowdingSurvival(nds=NonDominatedSorting()).do(MyProblem(), pop, n_survive=len(pop))
self.rank = pop.get("rank")[pop.get("index")]
self.crowding = pop.get("crowding")[pop.get("index")]
def do(self, rem):
_rank = self.rank[rem]
_crowding = self.crowding[rem]
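        # np.lexsort treats the last key as primary: lowest rank first,
        # ties broken by the largest crowding distance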
        I = np.lexsort([-_crowding, _rank])
return rem[I[0]]
def aggregate_by_eps_clearing(X,
eps,
selection=MinSelection,
return_cluster=False,
func_dist=vectorized_cdist,
func_dist_by_bounds=norm_eucl_dist_by_bounds,
calc_distance=True,
problem=None,
xl=None,
xu=None,
**kwargs):
if calc_distance:
if problem is None:
D = func_dist(X, X)
else:
if problem is not None:
xl, xu = problem.bounds()
D = func_dist_by_bounds(X, X, xl, xu)
else:
D = X
clearing = EpsilonClearing(D, eps)
sel = selection(problem=problem, X=X, D=D, **kwargs)
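    # greedily pick representatives until every point has been cleared,
    # remembering which indices each representative absorbed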
D = {}
while clearing.has_remaining():
rem = clearing.remaining()
S = sel.do(rem)
if isinstance(S, list):
for s in S:
cleared = clearing.select(s)
D[s] = cleared
else:
cleared = clearing.select(S)
D[S] = cleared
I = np.array(list(D.keys()))
if return_cluster:
return I, D
else:
return I
| [
"pymoo.util.clearing.EpsilonClearing",
"numpy.lexsort",
"pymoo.util.nds.non_dominated_sorting.NonDominatedSorting"
] | [((2851, 2874), 'pymoo.util.clearing.EpsilonClearing', 'EpsilonClearing', (['D', 'eps'], {}), '(D, eps)\n', (2866, 2874), False, 'from pymoo.util.clearing import EpsilonClearing\n'), ((982, 1014), 'pymoo.util.clearing.EpsilonClearing', 'EpsilonClearing', (['D', 'self.min_eps'], {}), '(D, self.min_eps)\n', (997, 1014), False, 'from pymoo.util.clearing import EpsilonClearing\n'), ((2006, 2037), 'numpy.lexsort', 'np.lexsort', (['[-_crowding, _rank]'], {}), '([-_crowding, _rank])\n', (2016, 2037), True, 'import numpy as np\n'), ((1719, 1740), 'pymoo.util.nds.non_dominated_sorting.NonDominatedSorting', 'NonDominatedSorting', ([], {}), '()\n', (1738, 1740), False, 'from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting\n')] |
"""
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2018 <NAME>.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
from agora import Agora, Planner
from agora.collector.execution import parse_rdf
from agora.collector.http import http_get, RDF_MIMES
from rdflib import Graph, ConjunctiveGraph
from agora_graphql.gql.sparql import sparql_from_graphql
__author__ = '<NAME>'
def roots_gen(gen):
for c, s, p, o in gen:
yield s.toPython()
class DataGraph(object):
def __init__(self, gql_query, gateway, **kwargs):
pass
@property
def roots(self):
gen = self.__data_gw.fragment(self.__sparql_query,
scholar=self.__scholar,
follow_cycles=self.__follow_cycles,
**self.__kg_params)['generator']
return roots_gen(gen)
@property
def loader(self):
def wrapper(uri, format):
result = self.__data_gw.loader(uri, format)
if result is None and self.__data_gw.loader != http_get:
for fmt in sorted(RDF_MIMES.keys(), key=lambda x: x != format):
result = http_get(uri, format=fmt)
if result is not None and not isinstance(result, bool):
content, headers = result
if not isinstance(content, Graph):
g = ConjunctiveGraph()
parse_rdf(g, content, fmt, headers)
result = g, headers
break
return result
return wrapper
def __new__(cls, *args, **kwargs):
dg = super(DataGraph, cls).__new__(cls)
dg.__gql_query = args[0]
dg.__gateway = args[1]
dg.__agora = Agora(auto=False)
dg.__agora.planner = Planner(dg.__gateway.agora.fountain)
dg.__sparql_query = sparql_from_graphql(dg.__agora.fountain, dg.__gql_query, root_mode=True)
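        # reuse a previously built data gateway for this query when a shared
        # cache is supplied; otherwise build a serverless one from scratch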
data_gw_cache = kwargs.get('data_gw_cache', None)
if data_gw_cache is None or dg.__gql_query not in data_gw_cache:
data_gw = dg.__gateway.data(dg.__sparql_query, serverless=True, static_fountain=True,
host=kwargs.get('host', None), port=kwargs.get('port', None),
base=kwargs.get('base', 'store'))
else:
data_gw = data_gw_cache[dg.__gql_query]
if 'server_name' in kwargs:
del kwargs['server_name']
if 'port' in kwargs:
del kwargs['port']
dg.__data_gw = data_gw
if data_gw_cache is not None:
data_gw_cache[dg.__gql_query] = data_gw
if 'data_gw_cache' in kwargs:
del kwargs['data_gw_cache']
if 'scholar' in kwargs:
dg.__scholar = bool(kwargs['scholar'])
del kwargs['scholar']
else:
dg.__scholar = False
if 'follow_cycles' in kwargs:
dg.__follow_cycles = kwargs['follow_cycles']
del kwargs['follow_cycles']
else:
dg.__follow_cycles = True
dg.__kg_params = kwargs
return dg
def data_graph(gql_query, gateway, **kwargs):
return DataGraph(gql_query, gateway, **kwargs)
| [
"agora.Agora",
"agora.collector.execution.parse_rdf",
"agora.collector.http.RDF_MIMES.keys",
"agora.collector.http.http_get",
"rdflib.ConjunctiveGraph",
"agora_graphql.gql.sparql.sparql_from_graphql",
"agora.Planner"
] | [((2509, 2526), 'agora.Agora', 'Agora', ([], {'auto': '(False)'}), '(auto=False)\n', (2514, 2526), False, 'from agora import Agora, Planner\n'), ((2556, 2592), 'agora.Planner', 'Planner', (['dg.__gateway.agora.fountain'], {}), '(dg.__gateway.agora.fountain)\n', (2563, 2592), False, 'from agora import Agora, Planner\n'), ((2621, 2693), 'agora_graphql.gql.sparql.sparql_from_graphql', 'sparql_from_graphql', (['dg.__agora.fountain', 'dg.__gql_query'], {'root_mode': '(True)'}), '(dg.__agora.fountain, dg.__gql_query, root_mode=True)\n', (2640, 2693), False, 'from agora_graphql.gql.sparql import sparql_from_graphql\n'), ((1807, 1823), 'agora.collector.http.RDF_MIMES.keys', 'RDF_MIMES.keys', ([], {}), '()\n', (1821, 1823), False, 'from agora.collector.http import http_get, RDF_MIMES\n'), ((1882, 1907), 'agora.collector.http.http_get', 'http_get', (['uri'], {'format': 'fmt'}), '(uri, format=fmt)\n', (1890, 1907), False, 'from agora.collector.http import http_get, RDF_MIMES\n'), ((2125, 2143), 'rdflib.ConjunctiveGraph', 'ConjunctiveGraph', ([], {}), '()\n', (2141, 2143), False, 'from rdflib import Graph, ConjunctiveGraph\n'), ((2172, 2207), 'agora.collector.execution.parse_rdf', 'parse_rdf', (['g', 'content', 'fmt', 'headers'], {}), '(g, content, fmt, headers)\n', (2181, 2207), False, 'from agora.collector.execution import parse_rdf\n')] |
from pathlib import Path
import pytest
from poetry_hooks.utils import create__version__str
from poetry_hooks.utils import get_main_pkg
from poetry_hooks.utils import get_pyproject_toml
from poetry_hooks.utils import get_version
from poetry_hooks.utils import parse__version__str
@pytest.fixture
def fake_project(tmpdir, fixtures):
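    # build a throwaway project directory seeded with the fixture pyproject.toml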
myproject = tmpdir.join("myproject").mkdir()
myproject.join("test_pkg").mkdir()
fixtures = Path(fixtures)
with myproject.as_cwd():
txt = fixtures.joinpath("fake_pyproject.toml").read_text()
myproject.join("pyproject.toml").write(txt)
return myproject
def test_get_pyproject_toml(fake_project):
with fake_project.as_cwd():
pyproject = get_pyproject_toml()
print(pyproject)
def test_get_main_pkg(fake_project):
with fake_project.as_cwd():
print(get_pyproject_toml())
pkg = get_main_pkg()
print(pkg)
def test_get_version(fake_project):
with fake_project.as_cwd():
print(get_version())
def test_create_version_str(fake_project):
with fake_project.as_cwd():
print(create__version__str())
def test_parse_version_str(fake_project):
with fake_project.as_cwd():
print(create__version__str())
data = parse__version__str(create__version__str())
print(data)
| [
"pathlib.Path",
"poetry_hooks.utils.get_pyproject_toml",
"poetry_hooks.utils.get_main_pkg",
"poetry_hooks.utils.get_version",
"poetry_hooks.utils.create__version__str"
] | [((437, 451), 'pathlib.Path', 'Path', (['fixtures'], {}), '(fixtures)\n', (441, 451), False, 'from pathlib import Path\n'), ((718, 738), 'poetry_hooks.utils.get_pyproject_toml', 'get_pyproject_toml', ([], {}), '()\n', (736, 738), False, 'from poetry_hooks.utils import get_pyproject_toml\n'), ((885, 899), 'poetry_hooks.utils.get_main_pkg', 'get_main_pkg', ([], {}), '()\n', (897, 899), False, 'from poetry_hooks.utils import get_main_pkg\n'), ((849, 869), 'poetry_hooks.utils.get_pyproject_toml', 'get_pyproject_toml', ([], {}), '()\n', (867, 869), False, 'from poetry_hooks.utils import get_pyproject_toml\n'), ((1003, 1016), 'poetry_hooks.utils.get_version', 'get_version', ([], {}), '()\n', (1014, 1016), False, 'from poetry_hooks.utils import get_version\n'), ((1109, 1131), 'poetry_hooks.utils.create__version__str', 'create__version__str', ([], {}), '()\n', (1129, 1131), False, 'from poetry_hooks.utils import create__version__str\n'), ((1223, 1245), 'poetry_hooks.utils.create__version__str', 'create__version__str', ([], {}), '()\n', (1243, 1245), False, 'from poetry_hooks.utils import create__version__str\n'), ((1282, 1304), 'poetry_hooks.utils.create__version__str', 'create__version__str', ([], {}), '()\n', (1302, 1304), False, 'from poetry_hooks.utils import create__version__str\n')] |
from RPA.Browser.Selenium import Selenium
from timeit import default_timer as timer
from time import sleep
performance_results = {
"Launch page": [],
"Go to URL": [],
"Handle disclaimer": [],
"Trigger search": [],
"Scrape results": [],
"Close browser": [],
}
def create_page():
browser = Selenium()
browser.open_chrome_browser(url="about:blank")
return browser
def go_to_bing(browser):
url = "https://www.bing.com"
browser.go_to(url)
def handle_disclaimer(browser):
accept_button_selector = "css:button[class*='accept']"
sleep(1)
browser.wait_until_element_is_visible(locator=accept_button_selector)
browser.click_element(locator=accept_button_selector)
def trigger_search(browser):
input_selector = "css:input[id*='form'][type*='search']"
search_term = "<PASSWORD>"
browser.click_element_when_visible(locator=input_selector)
browser.input_text(locator=input_selector, text=search_term, clear=True)
browser.press_keys(None, "RETURN")
def scrape_results(browser):
result_selector = "css:li > h2 > a"
browser.wait_until_element_is_visible(locator=result_selector)
elements = browser.get_webelements(locator=result_selector)
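    # collect the link target and visible text of each individual result element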
results = [
{
"href": browser.get_element_attribute(locator=element, attribute="href"),
"text": browser.get_text(locator=result_selector)
}
for element in elements
]
return results
def go_to_next_page(browser):
next_page_selector = "css:a[href*='/search'][title*='Nächste']"
browser.wait_until_element_is_visible(locator=next_page_selector)
modal_selector = "css:div[id*='notification'] span[class*='cta2']"
modal_visible = browser.is_element_visible(locator=modal_selector)
if modal_visible:
browser.click_element(locator=modal_selector)
browser.click_element(locator=next_page_selector)
def shutdown(browser):
browser.close_browser()
def scrape_bing_results():
step1 = timer()
browser = create_page()
step2 = timer()
performance_results["Launch page"].append(step2-step1)
go_to_bing(browser)
step3 = timer()
performance_results["Go to URL"].append(step3-step2)
handle_disclaimer(browser)
step4 = timer()
performance_results["Handle disclaimer"].append(step4-step3)
trigger_search(browser)
step5 = timer()
performance_results["Trigger search"].append(step5-step4)
search_results = []
for i in range(0, 3):
results = scrape_results(browser)
for result in results:
search_results.append(result)
go_to_next_page(browser)
print(search_results, len(search_results))
step6 = timer()
performance_results["Scrape results"].append(step6-step5)
shutdown(browser)
step7 = timer()
performance_results["Close browser"].append(step7-step6)
if __name__ == "__main__":
for i in range(0, 30):
scrape_bing_results()
print("\n\n")
lp_values = performance_results["Launch page"]
gtu_values = performance_results["Go to URL"]
hd_values = performance_results["Handle disclaimer"]
ts_values = performance_results["Trigger search"]
sr_values = performance_results["Scrape results"]
cb_values = performance_results["Close browser"]
avg_performance_results = {
"Launch page": sum(lp_values) / len(lp_values),
"Go to URL": sum(gtu_values) / len(gtu_values),
"Handle disclaimer": sum(hd_values) / len(hd_values),
"Trigger search": sum(ts_values) / len(ts_values),
"Scrape result": sum(sr_values) / len(sr_values),
"Close browser": sum(cb_values) / len(cb_values)
}
print("avg performance_results: ", avg_performance_results)
| [
"timeit.default_timer",
"time.sleep",
"RPA.Browser.Selenium.Selenium"
] | [((319, 329), 'RPA.Browser.Selenium.Selenium', 'Selenium', ([], {}), '()\n', (327, 329), False, 'from RPA.Browser.Selenium import Selenium\n'), ((581, 589), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (586, 589), False, 'from time import sleep\n'), ((2012, 2019), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2017, 2019), True, 'from timeit import default_timer as timer\n'), ((2062, 2069), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2067, 2069), True, 'from timeit import default_timer as timer\n'), ((2167, 2174), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2172, 2174), True, 'from timeit import default_timer as timer\n'), ((2277, 2284), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2282, 2284), True, 'from timeit import default_timer as timer\n'), ((2392, 2399), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2397, 2399), True, 'from timeit import default_timer as timer\n'), ((2725, 2732), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2730, 2732), True, 'from timeit import default_timer as timer\n'), ((2831, 2838), 'timeit.default_timer', 'timer', ([], {}), '()\n', (2836, 2838), True, 'from timeit import default_timer as timer\n')] |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.9.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"..."  # placeholder: the original hex-escaped binary
# payload is elided here. It held two embedded PNG images (one 202x201 px,
# one 512x512 px, per their IHDR headers); the blob is machine-generated by
# the Qt Resource Compiler and was truncated mid-stream in this dump.
\x47\x06\x0f\x30\x99\xe0\x66\xdf\xdc\x99\x4f\xc5\x1f\xd4\xd4\xe3\
\x10\x0e\x05\x82\x87\xa9\x9a\x35\x57\xf7\xfa\xaa\x7d\x00\xb0\x42\
\xc0\x9b\x4c\x28\xb8\xd9\x95\x29\xd9\x54\xfd\x41\xad\xa9\x37\xc8\
\xbc\x80\x97\xbd\xe9\x86\xda\xea\x96\x00\x30\x51\xaa\x65\x52\xc1\
\xcd\x3a\x83\x21\x2a\xbf\xd5\x0a\x3a\x7b\x98\x13\xf0\x2a\x55\xab\
\x26\x12\x00\xe2\x1b\x02\x16\x32\xb1\xe0\x66\x65\xbe\x5e\x2a\xbf\
\xd5\x32\x9a\xbb\x99\x13\xf0\xaa\x85\x6e\xa9\xab\xae\x09\x00\x56\
\x08\xd8\xc2\xe4\x82\x5b\x1d\x6d\xf5\x51\xf9\xad\xf6\x41\x6d\x3b\
\x73\x02\x5e\xb4\xc5\x4d\x35\xd5\x6d\x01\x60\x92\xd4\xc4\x24\x83\
\x1b\xa5\xd6\x77\x50\xf9\xad\xf6\x9f\x15\xcd\xcc\x09\x78\x8d\xaa\
\x4d\x93\x08\x00\xf6\x86\x80\x05\x4c\x34\xb8\x91\xda\xff\x9e\xf6\
\x71\xfb\xf7\x42\x0e\x03\x82\xe7\x2c\x70\x5b\x3d\x75\x5d\x00\xb0\
\x42\xc0\xeb\x4c\x36\xb8\xcd\x2f\xf2\xea\xa8\xfc\x56\x7b\xf4\x78\
\x25\x73\x02\x5e\xf2\xba\x1b\x6b\xa9\x5b\x03\xc0\xe7\xa4\x7c\x26\
\x1d\xdc\xe4\x0e\x36\x03\xfa\xd3\x1e\x00\xd7\x6e\xcf\x65\x4e\xc0\
\x2b\x54\x2d\xfa\x1c\x01\xc0\xd9\x10\x30\x4b\xea\x63\xf2\xc1\x4d\
\xaa\xfd\x7d\xc6\x07\x80\x43\x2d\x7c\x01\x00\xcf\x50\x35\x68\x96\
\x5b\xeb\xa8\x6b\x03\x80\x15\x02\x9e\x66\x02\xc2\x4d\xde\xad\xe0\
\x3d\x80\x7f\xce\xaf\x63\x2e\xc0\x2b\x9e\x76\x73\x0d\x75\x7b\x00\
\x48\x92\x76\x30\x09\xe1\x16\x7f\x7b\xa4\xdc\xf8\x00\x70\xe7\xde\
\x22\xe6\x02\xbc\x40\xd5\x9e\x24\x02\x40\x62\x43\xc0\x64\xa9\x95\
\xc9\x08\x37\xb8\x3a\x35\x47\x04\xfa\xcd\x3d\x16\xb8\xa6\xa7\x8f\
\x79\x00\x2f\x50\x35\x67\xb2\xdb\xeb\xa7\xeb\x03\x80\x15\x02\x16\
\x31\x21\xe1\x16\x7b\x9a\xba\x8c\x0d\x00\xea\x11\x08\x73\x00\x1e\
\xb0\xc8\x0b\xb5\xd3\x13\x01\xc0\x0a\x01\x6f\x33\x29\xe1\x06\x0f\
\x1c\x2e\x33\xb2\xf8\x07\x07\x06\xc4\xad\xbb\x0b\x98\x03\x70\xbb\
\xb7\xbd\x52\x37\xbd\x14\x00\xc6\x4b\xb9\x4c\x4e\xb8\xc1\xbe\x66\
\xf3\xee\x02\xb0\xfb\x1f\x3c\x40\xd5\x98\xf1\x04\x00\x3d\x43\xc0\
\x34\xa9\x83\x49\x0a\xdd\x7d\x77\x5f\x91\x30\xe9\x55\x80\xae\x60\
\xbf\xf8\x7a\x7a\x1e\x63\x0f\x37\x53\xb5\x65\x9a\x97\x6a\xa6\xa7\
\x02\x80\x15\x02\xee\x92\x06\x98\xac\xd0\xdd\xa6\xea\x56\x63\x02\
\xc0\xbf\x16\xd4\x33\xe6\x70\x33\x55\x53\xee\xf2\x5a\xbd\xf4\x5c\
\x00\xb0\x42\xc0\x6a\x26\x2c\x74\x37\x67\x67\xbe\xf0\x87\xfa\x3d\
\x5f\xfc\xeb\x7b\xfa\xc4\x55\xa9\x39\x8c\x39\xdc\x6c\xb5\x17\x6b\
\xa5\x57\x03\x80\xda\x1f\x20\x85\x49\x0b\xdd\xbd\x5a\xd4\xe0\xf9\
\x00\xf0\xf7\x27\xaa\x18\x6b\xb8\x59\x8a\xdb\xbf\xf7\x37\x2a\x00\
\x58\x21\xe0\x42\xa9\x9c\xc9\x0b\x9d\x5d\x9e\x9c\x2d\x52\x3c\x7c\
\x4c\xf0\x9a\x92\x46\xc6\x19\x6e\xa6\x6a\xc8\x85\x5e\xad\x93\x9e\
\x0d\x00\x83\xce\x0b\xe8\x61\x12\x43\x67\x57\xa6\x64\x8b\x03\x2d\
\xdd\x9e\x2b\xfe\x1b\xab\x5a\x19\x5f\xb8\x99\xaa\x1d\xb3\xbc\x5c\
\x23\x3d\x1d\x00\xac\x10\xb0\x8c\x89\x0c\xdd\x5d\x93\x96\x23\x72\
\x3b\x7a\x3c\x53\xfc\xd3\x1b\x3a\xc3\x77\x37\x18\x5b\xb8\xd8\x32\
\xaf\xd7\x47\xcf\x07\x00\x2b\x04\xbc\xc6\x64\x86\xee\xbe\x91\x9e\
\x27\x2a\x7d\xbd\xae\x2f\xfe\x47\x5b\x7d\xbc\xf4\x07\xb7\x7b\xcd\
\x84\xda\x68\x4a\x00\x18\x2b\xed\x67\x52\x43\x77\xb7\xec\x2e\x10\
\xb5\x3d\xee\x3d\x32\xf8\x64\xbb\x5f\x7c\x6d\x7b\x2e\x63\x09\x37\
\x53\xb5\x62\x2c\x01\xc0\x5b\x21\xe0\x62\xa9\x8c\xc9\x0d\xdd\xa9\
\x02\x9a\xe6\xc2\x17\x03\xd7\x97\x37\x8b\x2b\x52\xb8\xed\x0f\x57\
\x53\x35\xe2\x62\x53\xea\xa2\x31\x01\xc0\x0a\x01\xd3\xa5\x36\x26\
\x39\xdc\xe0\x67\x39\x35\xae\x38\x39\xb0\x33\x18\x12\x0f\x1f\xab\
\x60\xcc\xe0\x76\xaa\x36\x4c\x37\xa9\x26\x1a\x15\x00\xac\x10\x30\
\x4f\xea\x63\xb2\xc3\x0d\xee\xdc\x5b\x24\x0a\xbb\x02\xda\x16\xff\
\xac\x0e\xbf\x98\xcf\x01\x3f\x70\x3f\x55\x13\xe6\x99\x56\x0f\x8d\
\x0b\x00\x56\x08\x58\xca\x84\x87\x5b\xa8\x17\xea\x7e\x5f\xde\x2c\
\x7a\x35\xba\x1b\xa0\xee\x4c\xac\x2d\x6d\xe2\x96\x3f\xbc\x62\xa9\
\x89\xb5\xd0\xc8\x00\x60\x85\x80\x15\x4c\x7a\xb8\x6d\xeb\xe0\xb7\
\xcb\x9a\x13\xba\x7d\xb0\x2a\xfc\x2a\x8c\xdc\xb8\x83\x83\x7d\xe0\
\x19\x2b\x4c\xad\x83\xc6\x06\x00\x2b\x04\xac\x67\xf2\xc3\x6d\xd4\
\xa9\x7a\xaf\x97\x34\x8a\xee\x60\x3f\x85\x1f\x18\x9d\xf5\x26\xd7\
\x40\xd3\x03\x80\xfa\x3c\x70\x27\x17\x01\xdc\xfa\xb5\xc0\xea\xbc\
\x3a\xb1\xb7\xa9\x4b\xf4\xd8\x70\x57\x20\x38\x30\x20\x0e\xb5\x74\
\x87\x4f\xf2\xa3\xf0\xc3\x83\x76\x9a\xf2\xb9\x1f\x01\xe0\xcc\x21\
\x60\xa2\x94\xcb\xc5\x00\xb7\x6f\x27\xbc\xf4\x50\x99\x78\xa3\xa4\
\x31\xfc\x62\x5e\xac\xaf\x0b\xa8\x8d\x88\xde\xad\x68\x11\x0f\x1d\
\xad\x10\x33\xd3\xd8\xcc\x07\x9e\xa5\xd6\xfc\x89\xa6\xd7\x3f\xe3\
\x03\x80\x15\x02\xa6\x48\x0d\x5c\x14\xf0\x8a\x59\x69\xb9\xe2\x8e\
\xbd\x85\x62\xc9\xc1\x52\xf1\xd8\xf1\x4a\xb1\x32\xa7\x56\xfc\xba\
\xa8\x41\xbc\x57\xd9\x22\xb6\x56\xb7\x89\xb7\x4a\x9b\xc4\x4b\xf9\
\x75\xe2\xa9\x13\x55\x62\x99\x0c\x0e\x77\xee\x2b\x0a\x3f\x5a\xa0\
\xef\x60\x00\xb5\xd6\x4f\xa1\xf6\x11\x00\x06\x87\x80\x6b\xa5\x0e\
\x2e\x0e\x00\xf0\x2c\xb5\xc6\x5f\x4b\xcd\x23\x00\x0c\x15\x02\xe6\
\x48\x3e\x2e\x12\x00\xf0\x1c\xb5\xb6\xcf\xa1\xd6\x11\x00\x86\x0b\
\x01\xb7\x49\x01\x2e\x16\x00\xf0\x0c\xb5\xa6\xdf\x46\x8d\x23\x00\
\x44\x12\x02\xbe\x27\x05\xb9\x68\x00\xc0\xf5\xd4\x5a\xfe\x3d\x6a\
\x1b\x01\x20\x9a\x10\xf0\x03\xa9\x9f\x8b\x07\x00\x5c\x4b\xad\xe1\
\x3f\xa0\xa6\x11\x00\x62\x09\x01\xcb\xa5\x01\x2e\x22\x00\x70\x1d\
\xb5\x76\x2f\xa7\x96\x11\x00\x46\x13\x02\x1e\xe7\x42\x02\x00\xd7\
\x79\x9c\x1a\x46\x00\x88\x47\x08\x78\x8e\x8b\x09\x00\x5c\xe3\x39\
\x6a\x17\x01\x20\x9e\x21\xe0\x97\x5c\x54\x00\xa0\xbd\x5f\x52\xb3\
\x08\x00\x84\x00\x00\xa0\xf8\x83\x00\xc0\xe3\x00\x00\xe0\xb6\x3f\
\x08\x00\xa3\x7f\x31\x90\xaf\x03\x00\x40\x8f\xb7\xfd\x79\xe1\x8f\
\x00\xe0\xf8\x27\x82\xec\x13\x00\x00\x89\xfd\xce\x9f\x4f\xfd\x08\
\x00\x09\xdb\x2c\x88\x1d\x03\x01\x20\x31\x3b\xfc\xb1\xc9\x0f\x01\
\x20\xe1\xdb\x06\x73\x76\x00\x00\x38\x27\xc0\xf6\xbe\x04\x00\x9d\
\x0e\x10\xe2\x14\x41\x00\xb0\x9f\x5a\x6b\x39\xd8\x87\x00\xa0\xdd\
\x51\xc2\x1d\x5c\x9c\x00\x60\x1b\xb5\xc6\x72\xa4\x2f\x01\x40\xcb\
\x10\x70\xad\xd4\xc0\x45\x0a\x00\x71\xa7\xd6\xd6\x6b\xa9\x35\x04\
\x00\x9d\x43\xc0\x14\x29\x97\x8b\x15\x00\xe2\x46\xad\xa9\x53\xa8\
\x31\x04\x00\x37\x84\x80\x89\xd2\x4e\x2e\x5a\x00\x18\x35\xb5\x96\
\x4e\xa4\xb6\x10\x00\xdc\x14\x02\xc6\x4a\xeb\xb9\x78\x01\x20\x66\
\x6a\x0d\x1d\x4b\x4d\x21\x00\xb8\x35\x08\xac\xe0\x22\x06\x80\xa8\
\xad\xa0\x86\x10\x00\xbc\x10\x02\x96\x4a\x7d\x5c\xd0\x00\x30\x22\
\xb5\x56\x2e\xa5\x76\x10\x00\xbc\x14\x02\xe6\x49\x6d\x5c\xdc\x00\
\x70\x46\x6a\x8d\x9c\x47\xcd\x20\x00\x78\x31\x04\x4c\x97\xca\xb8\
\xc8\x01\xe0\x34\x6a\x6d\x9c\x4e\xad\x20\x00\x78\x39\x04\x5c\x2c\
\xed\xe7\x62\x07\x80\x3f\x51\x6b\xe2\xc5\xd4\x08\x02\x80\x29\x5f\
\x08\xfc\x07\x17\x3d\x00\x84\xd7\x42\xde\xf4\x27\x00\x18\xf9\x72\
\xa0\x9f\x05\x00\x80\x81\xfc\xbc\xec\x47\x00\x30\x3d\x04\xcc\x94\
\x4a\x59\x0c\x00\x18\x44\xad\x79\x33\xa9\x01\x04\x00\x42\xc0\x87\
\x59\x9f\x97\xfe\x8f\x45\x01\x80\x01\xd4\x5a\xf7\x79\xd6\x7e\x02\
\x00\x3e\x0d\x01\x49\xd2\x4a\x69\x80\x05\x02\x80\x07\x0d\x58\x6b\
\x5c\x12\x6b\x3e\x01\x00\x43\x07\x81\x3b\xd9\x2f\x00\x80\x07\xbf\
\xef\xbf\x93\x35\x9e\x00\x80\x91\x43\xc0\xa5\xd2\x09\x16\x0d\x00\
\x1e\xa0\xd6\xb2\x4b\x59\xdb\x09\x00\x88\x3c\x04\x9c\x2b\xad\x61\
\xf1\x00\xe0\x62\x6a\x0d\x3b\x97\x35\x9d\x00\x80\xd8\x82\xc0\x02\
\xa9\x89\x85\x04\x80\x8b\xa8\x35\x6b\x01\x6b\x38\x01\x00\xa3\x0f\
\x01\x7f\x2e\xa5\xb2\xa8\x00\x70\x01\xb5\x56\xfd\x39\x6b\x37\x01\
\x00\xf1\x0b\x01\x67\x49\x4f\x4a\xbd\x2c\x30\x00\x34\xd4\x6b\xad\
\x51\x67\xb1\x66\x13\x00\x60\x4f\x10\x98\x21\xe5\xb2\xd8\x00\xd0\
\x88\x5a\x93\x66\xb0\x46\x13\x00\xe0\xcc\x0b\x82\xaf\xb1\xe8\x00\
\xd0\xc0\x6b\xbc\xe8\x47\x00\x80\xf3\x41\xe0\xbb\x52\x23\x0b\x10\
\x80\x04\x50\x6b\xcf\x77\x59\x8b\x09\x00\x48\x5c\x08\x98\x24\x6d\
\x61\x31\x02\xe0\x20\xb5\xe6\x4c\x62\x0d\x26\x00\x40\x8f\x20\xb0\
\x50\xaa\x65\x61\x02\x60\x23\xb5\xc6\x2c\x64\xcd\x25\x00\x40\xbf\
\x10\x30\x51\x7a\x93\xf3\x04\x00\xd8\xb0\x8f\xbf\x5a\x5b\x26\xb2\
\xd6\x12\x00\xa0\x77\x10\x98\x2b\x15\xb2\x68\x01\x88\x03\xb5\x96\
\xcc\x65\x6d\x25\x00\xc0\x3d\x21\x60\x9c\xf4\x92\x14\x64\x01\x03\
\x10\x83\xa0\xb5\x86\x8c\x63\x4d\x25\x00\xc0\x9d\x41\x60\xa6\x74\
\x84\xc5\x0c\x40\x14\xd4\x9a\x31\x93\x35\x94\x00\x00\xf7\x87\x80\
\xb3\xa5\xa7\x24\x1f\x0b\x1b\x80\x61\xf8\xac\xb5\xe2\x6c\xd6\x4e\
\x02\x00\xbc\x15\x04\x2e\x91\x36\xb0\xc8\x01\x18\x82\x5a\x1b\x2e\
\x61\xad\x24\x00\xc0\xdb\x41\x60\xb6\x74\x98\x05\x0f\x80\xb5\x16\
\xcc\x66\x6d\x24\x00\xc0\xac\xc3\x85\x96\x49\x75\x2c\x80\x80\x91\
\xea\xac\x35\x80\xc3\x7b\x08\x00\x30\x34\x08\x8c\x97\x56\x4b\x01\
\x16\x44\xc0\x08\x01\xeb\x9a\x1f\xcf\x1a\x48\x00\x00\x54\x10\x98\
\x22\x6d\x66\x71\x04\x3c\x4d\x5d\xe3\x53\x58\xf3\x40\x00\xc0\x99\
\x36\x11\xca\x64\xa1\x04\x3c\x25\x93\xcd\x7c\x40\x00\x40\x24\x21\
\x20\x49\x5a\x22\x15\xb3\x70\x02\xae\x56\x6c\x5d\xcb\x49\xac\x6d\
\x20\x00\x20\x9a\x20\x30\x46\x5a\x2e\x55\xb0\x90\x02\xae\x52\x61\
\x5d\xbb\x63\x58\xcb\x40\x00\xc0\x68\x82\xc0\x58\xe9\xc7\x52\x0d\
\x0b\x2b\xa0\xb5\x1a\xeb\x5a\x1d\xcb\xda\x05\x02\x00\xe2\x7d\xbe\
\xc0\x13\x52\x03\x0b\x2d\xa0\x95\x06\xeb\xda\x64\xdf\x7e\x10\x00\
\x60\x6b\x10\x38\x4f\x7a\x46\x6a\x61\xe1\x05\x12\xaa\xc5\xba\x16\
\xcf\x63\x6d\x02\x01\x00\x4e\x06\x81\x09\xd2\xf3\x52\x2b\x0b\x31\
\xe0\xa8\x56\xeb\xda\x9b\xc0\x5a\x04\x02\x00\x12\x7d\x47\xe0\x11\
\xa9\x84\x85\x19\xb0\x55\xa9\x75\xad\xf1\x8b\x1f\x04\x00\x68\x77\
\xea\xe0\x3d\xd2\x01\x16\x6a\x20\xae\x0e\x58\xd7\x16\xa7\xf4\x81\
\x00\x00\xed\xc3\xc0\x1c\x69\xab\xd4\xcf\xe2\x0d\xc4\xa4\xdf\xba\
\x86\xe6\xb0\xa6\x80\x00\x00\x37\x06\x81\xa9\xd2\x1a\xc9\xcf\x82\
\x0e\x44\xc4\x6f\x5d\x33\x53\x59\x43\x40\x00\x80\x17\x82\xc0\x45\
\xd6\x4b\x4b\x7c\x42\x08\x9c\xf9\x53\x3e\x75\x8d\x5c\xc4\x9a\x01\
\x02\x00\xbc\x18\x04\xce\xb1\x9e\x65\xa6\x4a\x03\x2c\xfa\x30\xdc\
\x80\x75\x2d\xa8\x6b\xe2\x1c\xd6\x08\x10\x00\x60\xd2\x09\x84\xab\
\xd8\x61\x10\x86\xee\xd8\xb7\x8a\x93\xf9\x40\x00\x00\x5f\x0f\x7c\
\x98\xb5\x40\xda\x26\x85\x28\x0e\xf0\xa8\x90\x35\xc7\x17\xf0\x36\
\x3f\x08\x00\xc0\xe9\x61\x60\xb2\xf5\x1c\x94\x03\x88\xe0\xa5\x83\
\x79\xd4\x9c\x9e\xcc\x35\x0e\x02\x00\x30\x72\x10\x50\x47\x12\xdf\
\x2e\xbd\x27\x75\x51\x44\xe0\x32\x5d\xd6\xdc\xbd\x9d\xa3\x78\x41\
\x00\x00\x62\x0f\x03\xe7\x4a\x77\x4b\x1b\x25\x1f\xc5\x05\x9a\xf2\
\x59\x73\x54\xcd\xd5\x73\xb9\x76\x41\x00\x00\xe2\xbf\xed\xf0\x62\
\x69\x8b\xd4\x43\xd1\x41\x82\xf5\x58\x73\x71\x31\xdb\xf3\x82\x00\
\x00\x38\x17\x06\xce\x97\xee\x95\xde\x97\x7a\x29\x46\x70\x48\xaf\
\x35\xe7\xd4\xdc\x3b\x9f\x6b\x11\x04\x00\x20\xb1\x61\xe0\x02\xe9\
\x3e\x69\xb3\xd4\x41\x91\x42\x9c\x75\x58\x73\x4b\xcd\xb1\x0b\xb8\
\xe6\x40\x00\x00\xf4\x0c\x03\x63\xa4\x9b\xa4\xd5\xd2\x31\x36\x1c\
\x42\x8c\x1b\xf4\x1c\xb3\xe6\x90\x9a\x4b\x63\xb8\xb6\x40\x00\x00\
\xdc\x17\x08\x26\x59\xbf\xdc\x36\x48\x2d\x14\x37\x9c\x41\x8b\x35\
\x47\xd4\x5c\x99\xc4\xb5\x03\x02\x00\xe0\xbd\xcf\x0b\x6f\x90\x56\
\x4a\x07\x39\xad\xd0\xf8\xd3\xf6\x0e\x5a\x73\xe1\x06\x3e\xd7\x03\
\x01\x00\x30\x2b\x10\x4c\x90\xfe\xc2\x2a\x02\xdb\xd9\x73\xc0\xf3\
\xdf\xe6\x6f\xb7\xc6\x5a\x8d\xf9\x04\xae\x01\x10\x00\x00\x0c\xde\
\x96\xf8\x1a\xe9\x61\xeb\x76\x70\x25\x85\xd3\xb5\x2a\xad\x31\x7c\
\xd8\x1a\x53\xb6\xdf\x05\x08\x00\x40\x54\xa1\xe0\x8b\xd6\x77\xde\
\xaf\x4a\x87\xd9\x7f\x40\xdb\xef\xf1\x0f\x5b\x63\xa4\xc6\xea\x8b\
\xcc\x5d\x80\x00\x00\xd8\x71\x97\xe0\x72\xab\xd0\xbc\x68\x1d\xf0\
\xc2\xd9\x05\xce\xfe\xb2\xff\x5f\xab\xef\xd5\x18\x4c\xe7\xd7\x3d\
\x40\x00\x00\x12\x19\x0c\x26\x5a\x9f\x8c\xa9\x5b\xce\x6b\xad\x17\
\xcc\xd8\xba\x38\x76\x7e\xe9\x90\xf4\x96\xf4\x88\x74\xb3\xf4\x79\
\xe6\x1a\x40\x00\x00\xdc\xf2\xd5\xc1\x97\xac\xe2\xb5\x4c\x5a\x21\
\xad\x93\x76\x5b\xbf\x64\xfb\x0d\x7f\x0b\xbf\x4a\xda\x23\xad\xb7\
\x5e\xce\xbb\x5f\x9a\x2b\x7d\x99\xb7\xf2\x01\x02\x00\xe0\xe5\x80\
\x70\x8e\xf4\x15\xe9\x16\xe9\x01\x69\x95\xf4\x8e\xf5\xb6\xfa\x71\
\x2b\x24\xf8\x5c\xfa\xeb\x5d\x15\xf7\x4c\x29\x5d\x7a\xd7\xba\x65\
\xbf\x5c\xba\x55\xba\x54\x1a\xcb\x1c\x00\x08\x00\x00\x86\x0f\x0a\
\xe3\xa4\x2f\x48\x57\x4b\xf3\xa5\x45\xd2\x83\xd2\xb3\xd2\x2b\xd6\
\x5d\x85\x4d\xd6\xfb\x08\x29\xd2\x2e\x29\x43\x3a\x2a\x65\x4b\x45\
\xd6\x7b\x0a\xf5\x52\x9b\x15\x2a\x42\x16\x9f\xf5\x7f\xab\xb7\xfe\
\x37\x45\xd6\x3f\x73\xd4\xfa\x77\xec\xb2\xfe\x9d\xdb\xac\xff\xc6\
\x3a\xeb\xbf\xf9\xac\xf5\x67\x58\x64\xfd\x99\x66\xa8\x33\xef\x39\
\x09\x0f\x70\x87\xff\x0f\x46\x3b\x00\x65\x2e\x90\xb6\x57\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x11\xcf\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x02\x00\x00\x00\x02\x00\x08\x03\x00\x00\x00\xc3\xa6\x24\xc8\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x19\x06\x00\x00\x19\x06\x01\
\x8b\xd7\x13\xdd\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x02\xb2\x50\x4c\x54\
\x45\xff\xff\xff\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\
\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\
\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\x23\x1f\x20\
\x23\x1f\x20\x01\x2a\x90\xb7\x00\x00\x00\xe5\x74\x52\x4e\x53\x00\
\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\
\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1f\x20\x21\x22\x23\x24\
\x25\x26\x27\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32\x33\x34\x36\
\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43\x44\x45\x46\
\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x54\x55\x58\x59\
\x5b\x5c\x5d\x5e\x5f\x61\x62\x63\x64\x65\x68\x69\x6a\x6b\x6c\x6d\
\x6f\x70\x71\x72\x73\x74\x76\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f\
\x80\x81\x82\x83\x84\x85\x86\x87\x89\x8a\x8b\x8d\x8e\x8f\x91\x92\
\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\xa0\xa1\xa2\xa3\
\xa4\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\
\xb5\xb6\xb7\xb8\xb9\xba\xbd\xbe\xbf\xc0\xc1\xc2\xc4\xc5\xc6\xc7\
\xc8\xc9\xca\xcb\xcc\xcd\xce\xd0\xd1\xd2\xd4\xd5\xd6\xd7\xd8\xd9\
\xda\xdb\xdc\xdd\xde\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\
\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\
\xfb\xfc\xfd\xfe\xec\xcd\x14\x0a\x00\x00\x0d\x9e\x49\x44\x41\x54\
\x78\xda\xed\xdd\xfb\x63\xd5\x75\x1d\xc7\xf1\xcf\x2e\x8c\x31\x60\
\x43\x2e\x03\x81\x08\x24\x68\x0a\x53\x34\x45\x88\xcd\x64\x03\x9d\
\xa1\x22\x91\xca\x90\x5b\x80\x91\x29\x92\xb4\x80\x24\x50\x6a\x06\
\x82\x97\xb8\x0a\x15\x10\xa2\x60\xa4\x24\x4b\x8a\x45\x66\xe0\x92\
\x8b\xcb\x81\x03\x36\xc6\x36\xd8\x76\x3e\xff\x47\xe3\x2a\x17\x95\
\x73\xce\xe7\xfb\x3d\xdf\xef\xe7\xf3\x7e\xbe\x7e\x67\xdf\xc3\xf7\
\xf9\x18\x9c\xeb\xf7\x28\xc5\xfc\x5b\xbb\xfb\x96\xed\x3a\xd4\xa4\
\xc3\xb5\xa6\x43\x65\xcb\x1f\xc8\x20\x8e\xff\x6b\x3f\xa7\x56\x87\
\x75\x75\x25\x99\x04\xf2\x79\xf7\x54\xea\x30\xef\x68\x11\x89\x7c\
\xdd\xf4\x66\x1d\xee\xb5\x3c\x45\x24\x3f\xfb\xeb\xf0\x0f\x01\xfe\
\x6d\x64\xb3\x05\x00\x5a\xf8\x5f\xc0\xaf\x65\x54\x6a\x1b\x76\x94\
\x7b\x82\x3e\xed\x19\x6d\xc7\x4a\x48\xe5\xcb\xd2\x4e\x58\x02\xa0\
\x8e\xe7\x03\x7c\x59\x81\xb6\x65\xdf\x27\x96\x1f\x2b\xb5\x06\xc0\
\x2b\xc4\xf2\x63\xef\x5a\x03\xa0\x8c\x58\x7e\xec\x63\x6b\x00\x1c\
\x26\x96\x1f\x3b\x69\x0d\x80\x26\x62\xf9\xb1\x46\x6b\x00\x68\x62\
\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\
\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\
\x01\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\xe0\xe2\x92\x3a\xdc\
\xd8\x2f\x77\xf8\xc8\x02\x8b\x76\xcf\xb7\x3b\x03\xc0\x78\x69\x03\
\x1e\x7c\xfa\xb5\xb2\x4f\x6b\x23\xda\xc6\x55\x2e\x2d\x4c\x02\x40\
\xbc\x4b\xbd\xfd\xa7\x9b\xf6\x35\x6b\xcb\xf7\xf7\x3c\x00\xc4\xb1\
\xe4\xdc\x27\xb7\xd4\x6a\x37\x56\xda\x06\x00\x31\xfe\xea\xdf\xbf\
\xee\x98\x76\x68\x3b\xb2\x00\x10\xc3\x06\x2d\xae\xd2\x8e\xed\xcd\
\x64\x00\x44\xb9\xee\x33\xcb\xb5\x83\x5b\x00\x80\xa8\xd6\x77\xf9\
\x19\xed\xe4\x9a\xbf\x05\x80\xeb\x2f\x67\x55\xb3\x76\x75\x1b\x01\
\x70\xbd\x0d\xd9\x18\xd1\x0e\xaf\x17\x00\xbe\x76\xfd\xdf\xd2\x6e\
\x6f\x1a\x00\xbe\x66\x19\xf3\x4e\x3b\xde\x5f\xbf\x09\x80\xaf\x5e\
\xd1\x27\xda\xf9\x7d\xe4\x21\x80\x7a\xb7\x00\xf4\xde\xac\x05\xec\
\xa4\x87\x00\x0e\x39\x05\x60\x6a\xbd\x16\xb1\x34\xef\x00\x94\x39\
\x04\x20\x6b\xbd\x16\xb2\x74\xef\x00\xac\x76\x07\xc0\xad\x15\x1a\
\x00\x31\xef\x87\xce\x00\x98\x76\x5a\x03\x20\xf6\x75\x69\x76\x03\
\x40\xe6\x06\xad\x01\x10\xcf\x5e\x77\x02\x40\x8f\x3d\x1a\x00\xf1\
\xad\x57\xa3\x03\x00\xfa\x56\x68\x00\xc4\xbb\xd9\xf6\x03\x18\x5c\
\xa5\x01\x10\xff\x5b\x64\xdf\xb0\x1d\x40\x5e\xad\x06\x80\xc1\x32\
\xb6\xd8\x0d\xe0\xa1\x46\x0d\x00\xa3\x25\xff\xd2\x66\x00\x45\xcd\
\x1a\x00\xa6\xbb\x73\xa7\xb5\x00\xee\x6e\xd0\x00\xf0\x60\xf9\xbf\
\x3e\x68\x25\x80\x9c\xe3\x1a\x00\x1e\xad\xd3\xc0\xfc\x58\x3f\xb0\
\xb4\x38\x68\x00\xbd\x2b\x35\x00\x82\x5b\x41\x63\xc0\x00\xba\xed\
\xd3\x00\x90\xd2\xff\x4b\x00\xb4\xdb\xad\x01\x20\xa6\xff\x97\x00\
\x78\x59\x03\x40\x4e\xff\x6b\x01\x8c\xd3\x00\x10\xd4\xff\x1a\x00\
\xfd\x4f\x02\x40\x52\xff\xab\x01\xa4\x97\x6b\x00\x48\xea\x7f\x35\
\x80\x65\x1a\x00\xa2\xfa\x5f\x05\x60\xac\x06\x80\xac\xfe\x57\x02\
\xb8\xe1\x08\x00\x84\xf5\xbf\x12\x40\xa9\x06\x80\xb0\xfe\x57\x00\
\xb8\xad\x05\x00\xd2\xfa\x5f\x0e\x20\xf9\x03\x0d\x00\x69\xfd\x2f\
\x07\x30\x49\x03\x40\x5c\xff\xcb\x00\x64\x1f\x03\x80\xbc\xfe\x9a\
\x7b\x80\xc1\x03\x08\xb2\xff\x17\x00\x7a\x36\x02\x40\x60\xff\x2f\
\x00\x2c\xd2\x00\x10\xd8\xff\x12\x80\x6e\xa7\x00\x20\xb1\xff\x25\
\x00\xf3\x35\x00\x24\xf6\xbf\x08\xa0\x53\x0d\x00\xd2\x25\xf6\xbf\
\x08\x60\x0e\xfd\x83\x00\x10\x7c\xff\x0b\x00\x52\xab\xe8\x1f\x00\
\x80\x10\xf4\xbf\x00\x60\x34\xf9\xb5\x4e\x95\xd8\xff\x02\x80\x35\
\xe4\xd7\x9f\x8b\xec\x7f\x1e\x40\x66\x03\xfd\x75\xb9\xc8\xfe\xe7\
\x01\x4c\x20\xbf\xd6\xab\x44\xf6\x3f\x0f\x60\x07\xf9\xb5\x1e\x2b\
\xb2\xff\x39\x00\xbd\x22\xe4\xd7\x0d\x59\x22\xfb\x9f\x03\x30\x8b\
\xfc\x5a\x2f\x94\xd9\xff\x1c\x80\xb7\xc9\xaf\x8f\x77\x4e\x60\xff\
\xc2\x10\xbd\xf2\xda\x7a\x73\xd2\x78\x1d\x48\x47\x1e\x16\xda\xff\
\x2c\x80\xa1\xf4\xd7\xcf\x49\xed\xaf\x79\x1d\xe0\xec\xef\xff\xb3\
\x49\x52\xfb\x6b\x1e\x04\x6a\x7d\xf4\x01\xb1\xbf\xff\x67\x01\xb4\
\x13\xfe\x5e\xb0\x53\xf3\x33\x05\xf7\x6f\x05\x90\x2f\xb7\x7d\xf3\
\xe1\xbf\xbd\x32\xa6\xbd\x92\xdc\x5f\x7b\xf7\x2c\x40\xa4\x6c\x76\
\x61\x4e\x22\x1f\x4c\x59\xb8\xf0\xf5\x6f\x05\xb0\xc2\x93\x9f\x53\
\x37\xb7\x07\x7d\x2d\xec\xdf\x0a\xc0\x93\xcb\x59\x2e\x27\xbf\x9d\
\xfd\x5b\x01\x54\x9b\xff\x90\xc6\xf1\xd4\xb5\xb4\xbf\x56\x5d\xcd\
\x7f\x46\xf5\x1d\xd4\xb5\xb5\xbf\xf6\xe0\x79\xc0\xa6\x3c\xea\x5a\
\xdb\x5f\xab\x62\xe3\x1f\x31\x89\xba\xf6\xf6\xd7\x6a\xa1\xe9\x4f\
\xd8\x46\xdd\x00\xfb\x37\x98\x5e\x59\x5a\xbd\x6a\xfa\xf0\x3f\x97\
\xbc\x01\xf6\x1f\x59\x60\x0a\xc0\xf4\x9b\xe1\xd6\x90\x37\xc8\xfe\
\xca\x18\xc0\x36\xc3\x1f\x50\x40\xdf\x20\xfb\x9b\x03\x78\xdf\xec\
\xcf\xd7\xa4\x11\x38\xc8\xfe\xe6\x00\xf6\x9a\xfd\xf9\xb5\x04\x0e\
\xb4\xbf\x39\x80\x03\x66\x7f\xbe\x84\xc2\x81\xf6\x37\x07\x60\x78\
\x69\xa8\xc9\x24\x0e\xb4\xbf\x39\x80\x26\xb3\x3f\x3f\x86\xc6\x81\
\xf6\x37\x07\xc0\x83\x00\xab\xfb\x03\x40\x78\x7f\x00\x08\xef\x0f\
\x00\xe1\xfd\x01\x20\xbc\x3f\x00\x84\xf7\x07\x80\xf0\xfe\x00\x10\
\xde\x1f\x00\xa1\xdc\xa8\x84\xf5\x07\x80\xf0\xfe\x00\x10\xde\x1f\
\x00\xc2\xfb\x03\x40\x78\x7f\x00\x08\xef\x0f\x00\xe1\xfd\x01\x20\
\xbc\x3f\x00\x84\xf7\x07\x80\xf0\xfe\x00\x10\xde\x1f\x00\xc2\xfb\
\x03\x40\x78\x7f\x00\x08\xef\x0f\x00\xe7\xfb\xdf\xab\x00\x40\x7f\
\x00\xd0\x1f\x00\xf4\x07\x00\xfd\x01\x40\x7f\x00\xd0\x1f\x00\xf4\
\x07\x00\xfd\x01\x40\x7f\x00\x48\xef\x0f\x00\xe1\xfd\x01\x20\xbc\
\x3f\x00\x84\xf7\x07\x80\xf0\xfe\x00\x10\xde\x1f\x00\x01\x6d\x74\
\x48\xfa\x03\x40\x78\x7f\x00\x08\xef\x6f\xfa\xbd\x9f\x4d\x00\xb0\
\xbb\xbf\x1a\x68\x76\xc4\x43\x00\xb0\xbb\xbf\xea\x64\x76\xc8\x5d\
\x00\x88\xbd\xff\xe9\x10\xf5\x57\x86\x97\x7b\x5f\x0a\x00\xcb\xfb\
\xab\x25\x46\x07\x1d\x0d\x00\xcb\xfb\xab\xef\x9a\x1c\xf4\x44\x3a\
\x00\x2c\xef\xaf\xd4\xbb\x06\x47\x7d\x46\x01\xc0\xf6\xfe\xea\x3b\
\xf1\x1f\xb5\x32\x03\x00\xd6\xf7\x57\x6a\x41\xdc\x4f\x02\xe4\x2b\
\x00\xd8\xdf\x5f\x25\x6f\x8c\xf3\xb0\x53\x14\x00\x1c\xe8\xaf\x54\
\xfa\xef\xe2\xfa\xfd\x7f\x42\x01\xc0\x89\xfe\xad\xfb\xf1\xa9\x98\
\x8f\x7a\x60\x84\x02\x80\x2b\xfd\x95\xea\xb9\x2c\xb6\x9b\x77\xf4\
\xc9\xb6\x0a\x00\xee\xf4\x6f\x5d\xd6\xb8\x55\x7f\xad\x38\x7e\xe2\
\xfa\xfb\xfc\xc3\x3f\x2f\xc9\x4b\xbd\xf4\xe7\x00\x10\x70\xff\xef\
\x05\xfc\x37\x03\x80\xec\xfe\x00\x10\xde\x1f\x00\xc2\xfb\x03\x40\
\x78\x7f\x00\x08\xef\x0f\x00\xe1\xfd\x01\x20\xbc\x3f\x00\x84\xf7\
\x07\x80\xf0\xfe\x00\x10\xde\x1f\x00\xc2\xfb\x03\x40\x78\x7f\x00\
\x08\xef\x0f\x80\xaf\xda\x7d\x32\xfa\x03\x40\x78\x7f\x00\x08\xef\
\x0f\x00\xe1\xfd\x01\x20\xbc\x3f\x00\x84\xf7\x07\x80\xf0\xfe\x00\
\x10\xde\x1f\x00\xc2\xfb\x03\x40\x78\x7f\x00\x08\xef\x0f\x00\xe1\
\xfd\x01\x20\xbc\x3f\x00\x12\xd1\xbf\x3e\xbc\xfd\x01\x20\xbc\x3f\
\x00\x84\xf7\x07\x80\xf0\xfe\x00\xf0\xbd\xff\x3d\x0a\x00\xf4\x07\
\x00\xfd\x01\x40\x7f\x00\xd0\x1f\x00\xf4\x07\x00\xfd\x01\x40\x7f\
\x00\xd0\x1f\x00\xf4\x07\x00\xfd\x01\x40\x7f\x00\xd0\x1f\x00\x89\
\xdf\xfd\xf4\x17\x0d\x80\xfe\xb2\x01\xd0\x5f\x36\x00\xfa\xcb\x06\
\x40\x7f\xd9\x00\xe8\x2f\x1b\x00\xfd\x65\x03\xa0\xbf\x6c\x00\xf4\
\x97\x0d\x80\xfe\xb2\x01\xd0\x5f\x36\x00\xfa\xcb\x06\x40\x7f\xd9\
\x00\xe8\x2f\x1b\x00\xfd\x65\x03\xa0\xbf\x6c\x00\xf4\x97\x0d\x80\
\xfe\xb2\x01\xd0\x5f\x36\x00\xff\xfa\xe7\x2b\x00\xd0\x1f\x00\xf4\
\x07\x00\xfd\x01\x40\x7f\x00\xd0\x1f\x00\xf4\x07\x00\xfd\x01\x40\
\x7f\x00\xd0\x1f\x00\xf4\x07\x40\x70\x2b\xa2\xbf\x68\x00\xf4\x97\
\x0d\x80\xfe\xb2\x01\xd0\x5f\x36\x80\x21\x8d\xf4\x97\x0c\xa0\xfb\
\x61\xfa\x8b\x06\xb0\x91\xfe\xa2\x01\xdc\x45\x7f\xd9\x00\xb6\xd3\
\x5f\x34\x80\x6e\x2d\x2e\xf5\x4f\x1a\xf2\xf8\xec\x25\xbf\xbd\xfe\
\x4a\x4b\x9e\xb8\x37\x1d\x00\x67\xf7\xa8\x43\xfd\xfb\x94\x56\xc6\
\x70\x1b\xeb\xfe\x30\x02\x00\x4a\x2d\x71\xa6\x7f\xe6\x0b\x31\x3f\
\x9d\xb1\xe5\x26\x00\xac\x77\xa5\xff\x37\x3f\x8c\xe3\x96\xd6\x8c\
\x16\x0f\xe0\x2f\x8e\xf4\xcf\xa9\x8e\xeb\xb6\xb6\x14\x4b\x07\xb0\
\xc9\x8d\xfe\x5d\xfe\x1b\xe7\xad\x6d\x1a\x2e\x1c\xc0\x52\x37\xee\
\xff\x6d\x8d\xfb\xf6\x1e\xed\x2a\x1b\xc0\x0c\x27\xfa\x17\x1a\xdc\
\xe2\x17\x64\x03\xb8\xc9\x89\xc7\xff\xff\x34\xb8\xc9\x67\x7a\xcb\
\x7e\x26\xf0\x3f\x0e\xf4\xbf\xc5\xe8\x46\xff\x44\x36\x80\xc7\x1c\
\x78\xfe\xf7\x59\xa3\x5b\xbd\x53\x36\x80\x94\x7f\xd9\xff\xfc\xbf\
\xd9\xeb\x19\x2d\x6d\x45\x03\x50\x77\x7a\xf5\x7e\xa0\xfa\x3c\x4b\
\xff\x1b\xfb\x86\x6c\x00\xaa\xd8\xf6\xfe\xea\xa4\xd9\x2d\xbf\x4b\
\x38\x00\x35\xdf\xf2\xfe\x09\x4a\xe3\xf0\xbb\x82\x17\xd8\xdd\x1f\
\x00\xc1\x0b\x08\xb4\x3f\x00\x02\xff\x5f\x20\xd8\xfe\x00\x08\x5a\
\x40\xc0\xfd\x01\x10\xb0\x80\xa0\xfb\x03\xc0\x93\xcd\xb3\xb6\x3f\
\x00\x02\x15\x10\x7c\x7f\x00\x04\x29\x20\x04\xfd\x01\x10\xa0\x80\
\x30\xf4\x07\x80\x67\xfb\x85\x95\xfd\x01\x10\x98\x80\x70\xf4\x07\
\x40\x50\x02\x42\xd2\x1f\x00\x5e\xae\xc4\xbe\xfe\x00\x08\x46\x40\
\x68\xfa\x03\x20\x10\x01\xa7\x42\xd3\x1f\x00\x1e\xef\xe7\x96\xf5\
\x07\x40\x00\x02\xc2\xd4\x1f\x00\x89\x17\x10\xaa\xfe\x00\xf0\x7e\
\x73\x6d\xea\x0f\x80\x44\x0b\x08\x59\x7f\x00\x24\x58\x40\xd8\xfa\
\x03\x20\xb1\x02\x42\xd7\x1f\x00\xfe\xec\x39\x5b\xfa\x03\x20\x91\
\x02\x42\xd8\x1f\x00\x7e\xed\x67\xd7\xfe\x35\xea\x42\xd8\x1f\x00\
\xbe\xed\xb1\xab\x3f\x37\x78\x70\x90\x02\x80\x20\x00\x6a\xe8\xbf\
\xaf\xf8\x4b\x6c\xee\xa1\x00\x20\x0a\x80\x4a\x99\x78\xe9\xa3\xb7\
\x2d\xef\x0c\x0b\xe9\x8d\x04\x80\xaf\xeb\xf7\xa3\xdf\xfc\x71\xc7\
\xda\x45\x3f\xe8\x12\xda\x5b\x08\x00\xe1\x03\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\
\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\
\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\
\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\x60\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xc0\x3a\x00\x23\xa3\x3b\x4a\x83\xd9\x51\xc6\xd2\xc9\xaf\x75\
\x36\x04\x30\x3c\xba\xc3\x1c\x31\x3b\xca\x0c\x42\xf9\xb5\x1c\x43\
\x00\xb9\xd1\x1d\xa6\xc2\xec\x28\xcf\x13\xca\xaf\x15\x1a\x02\xe8\
\x17\xdd\x61\xca\xcd\x8e\xb2\x9d\x50\x7e\x6d\xb6\x21\x80\x1b\xa3\
\x3b\xcc\x7b\x66\x47\x69\xba\x81\x52\x3e\xad\xcc\x10\x40\x87\xe8\
\x0e\xb3\xd5\xf0\x30\xe3\x29\xe5\xcf\x7a\x44\xcc\xc2\x44\x92\xa2\
\x3b\xce\x3a\x43\x00\xbb\x48\xe5\xcf\xe6\x1a\x86\xa9\x8d\xf2\x38\
\x2b\x0c\x8f\xa3\x1f\xa6\x95\x2f\xff\x00\xd4\x19\x76\xf9\x34\x41\
\xd0\xf4\xfe\x74\x6a\xf9\xb0\xe5\xa6\x5d\xca\xa2\x3c\xd0\x23\xa6\
\x07\xd2\x2b\xa9\xe5\xfd\xc6\x1b\x67\x79\x2d\xca\x23\xe5\x1a\x1f\
\x49\x3f\x45\x2f\xaf\x77\x47\xa3\x71\x95\xa7\xa3\x3c\x54\xfb\x88\
\xf1\xa1\x22\x08\xf0\x78\x79\xd5\xe6\xbf\x96\x0f\x46\x7b\xb0\x83\
\xe6\xc7\xd2\x2b\xb9\x1f\xe0\xe5\x26\x35\x79\xd0\x64\x40\xb4\x47\
\xdb\xe6\xc1\xc1\xf4\x7e\x1e\x0b\x78\xb6\x5c\x4f\x8a\x34\xa7\x45\
\x7b\xbc\x17\xb5\x27\xdb\x35\x9e\xe7\x04\x3d\x58\x5a\xc1\x9a\x88\
\x27\x3d\xf6\x45\x7d\xc8\xc9\xda\xa3\x35\x6d\x7f\x7e\xc6\xd8\x02\
\x16\xf7\xc6\x4c\x2e\x59\x5b\xe3\x55\x8d\x4d\x51\x03\x18\xa4\x99\
\x83\x8b\xfe\x8e\x79\x52\x35\x67\xcb\xc1\xdd\x1e\xfd\x7f\x3b\x1b\
\x38\x5b\xee\xad\x26\x25\x7a\x00\x53\x39\x5d\xee\xed\x4f\x31\xdc\
\xf1\x1c\xc0\xe9\x72\x6f\x33\x63\x79\xe8\xf1\x19\xe7\xcb\xb9\x0d\
\x8a\x05\xc0\x6a\xce\x97\x6b\xab\x4e\x8a\x05\xc0\xa3\x9c\x30\xd7\
\xb6\x3a\xa6\x67\x9f\x3a\xd6\x73\xc6\x1c\xdb\xa8\xd8\x9e\x7f\x5c\
\xc5\x19\x73\x6b\x9f\xa5\xc6\x06\xa0\x90\x53\xe6\xd6\x7e\x15\xe3\
\x2b\x10\x29\x3c\x0e\x70\x6b\xb7\xc4\xfa\x1a\xd4\x22\xce\x99\x4b\
\xdb\x13\xf3\x8b\x90\x83\x39\x69\x2e\x6d\x66\xec\x2f\x43\x97\x73\
\xd6\xdc\xd9\x99\xee\xb1\x03\x18\xc7\x69\x73\x67\xcb\xe3\x78\x23\
\x4a\xca\xc7\x9c\x37\x57\xd6\xdc\x37\x9e\xb7\x22\x4d\xe0\xc4\xb9\
\xb2\x55\x71\xbd\x17\xad\xcd\x01\xce\x9c\x1b\x8b\xe4\xc4\xf7\x6e\
\xc4\x29\x9c\x3a\x37\xb6\x31\xce\xb7\xa3\xa6\xff\x8f\x73\xe7\xc4\
\x86\xc4\xfb\x86\xe4\xe9\x9c\x3b\x17\xf6\x56\xdc\xef\x48\x4f\xdd\
\xcb\xd9\xb3\x7f\xa7\xfb\xc7\xff\x99\x84\x61\x9c\x3e\xfb\x37\xcf\
\xe4\x53\x29\xaf\x73\xfe\x6c\xdf\x27\x19\x26\x00\xb2\x8f\x73\x06\
\x2d\x5f\x91\xd9\x07\xd3\x78\x28\x68\xf9\x36\x1b\x7e\x32\x31\x79\
\x37\xe7\xd0\xe6\xd5\xf7\x36\xfd\x6c\x6a\x0e\xef\x0e\xb4\x79\x53\
\xcd\x3f\x9d\x5c\xcc\x59\xb4\x77\xeb\xbd\xf8\x7c\xfa\x4a\xce\xa3\
\xad\xab\xc8\xf2\x02\x40\x87\x8f\x38\x93\x96\x3e\x05\x74\xab\x37\
\x97\xa8\xb8\xb9\x81\x73\x69\xe5\xa6\x79\x75\x91\x92\x89\x9c\x4b\
\x1b\xb7\xc1\xbb\xcb\xd4\xbc\xc4\xd9\xb4\x6f\x7b\x32\xbd\x03\x90\
\xfc\x06\xe7\xd3\xba\x3b\x80\x3d\xbc\xbc\x52\x55\xdb\x77\x38\xa3\
\x76\xad\xaa\xaf\xf2\x74\x1d\xff\xc1\x39\xb5\x69\xb5\x83\xbd\xbe\
\x5a\x5d\xf6\x7e\xce\xaa\x3d\x6b\x1c\xe1\xfd\xf5\x0a\xfb\x54\x72\
\x5e\x6d\x59\xf3\x43\x7e\x5c\xb1\xb2\x0f\xff\x06\x58\xb2\x86\x22\
\xe5\xcb\xb2\xb9\x1f\x60\xc5\x8e\xdf\xed\xd7\x55\x6b\x3b\xf2\x58\
\xc0\x82\x55\xe6\x28\xdf\xd6\x96\xe7\x03\x42\xbf\x7d\xbd\x95\x8f\
\x4b\x7e\x89\x33\x1c\xee\xed\xee\xe6\xf3\xc5\xcb\x27\xf2\xca\x50\
\x98\xf7\x72\x3b\xdf\x2f\x5f\x7f\x33\xaf\x0e\x87\x76\x27\xc7\x25\
\xe2\x0b\x0c\x3a\xf0\x0e\x91\x90\xae\xbc\xbf\x4a\xcc\x8a\x79\x9f\
\x60\x18\xb7\x2c\x71\x5f\xd3\x94\xc3\x7b\x85\x43\xb7\x23\x63\x13\
\xf9\x3d\x36\xc9\x53\xf8\xc4\x48\xa8\xd6\x52\x9a\xe8\xef\x67\xca\
\xe6\x53\x63\x21\xda\x07\xb7\x05\xf0\x6d\x56\xc3\xf8\xec\x70\x48\
\x76\x6c\x52\x72\x20\xdf\x67\x96\x3a\x9d\x2b\x48\x84\x60\x8d\xa5\
\xd9\x81\x7d\xa5\x5d\xfa\x14\xae\x23\x14\xf0\x4e\x2d\xea\x19\xe8\
\xb7\x1a\xb6\x99\xc0\xd5\xe4\x02\x5c\xcd\xfc\x6e\x2a\xe8\xa5\x8c\
\xe3\x9a\xa2\x01\xad\x6a\x4e\x27\x15\x8a\x0d\x5e\xcc\xb5\xc5\x13\
\xbe\x86\x35\xa3\x53\x55\x68\x96\x3a\x6a\x0d\x2f\x12\x25\x70\x91\
\x1d\x13\x32\x55\xc8\x96\xf5\xf8\xda\x2a\xca\x24\xe4\x7e\xdf\xdb\
\xb3\x7a\xa9\x70\x6e\xe0\xb4\xdf\xf3\xad\xb3\xfe\x3e\xe6\xdb\x31\
\x67\x68\x9a\x0a\xf3\x92\x07\x4f\x7e\x71\xdb\xc1\x08\xa9\x3c\x5f\
\xf5\xce\x15\xb3\xf2\xdb\x29\x3b\xd6\x3e\xf7\x91\xb9\x2b\xd6\x6d\
\x7d\xaf\xbc\xe2\x08\xf7\x0d\x0c\xd6\x74\xec\xc0\xde\xf7\xb7\x6d\
\x78\x75\x61\xf1\xd0\xae\x09\x8b\xf7\x7f\x40\x1b\xb3\xe9\xbe\xf3\
\x50\xac\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\x4d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x7d\x00\x00\x00\x3b\x08\x06\x00\x00\x00\x56\x81\x85\x54\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0f\x61\x00\x00\x0f\x61\x01\xa8\x3f\
\xa7\x69\x00\x00\x05\xe2\x49\x44\x41\x54\x78\x5e\xed\x9d\x4b\x88\
\x1c\x55\x14\x86\x67\xe9\xd2\xa5\xbb\xb8\xc8\x4c\x44\x70\xe7\x42\
\xdc\xa8\x3b\x31\x3d\x8c\x20\x48\x16\x22\x42\x36\xea\x66\xc8\x4c\
\xc0\x20\xc2\x64\x33\x88\xa2\x20\x0a\x01\xc9\x42\x45\x48\x04\x13\
\x54\xf0\x81\xa8\x04\x66\xa1\x28\xc8\x80\x82\xba\x30\x28\xa8\x8b\
\x20\x1a\x14\xf1\x81\x8b\xb6\xbf\x4a\xd5\x50\x73\xe7\x3f\xf7\x51\
\xd5\x6d\x32\xd5\xe7\x87\x9f\x84\x9e\xfb\x3c\xff\xb9\xe7\xdc\x7b\
\xab\xa9\x5e\x70\x38\x1c\x0e\x87\xc3\xe1\x70\x0c\x08\xab\x87\x56\
\x6e\x5c\x5f\x3c\x7c\x64\x7d\x71\xf9\xc9\xb5\xa5\xe5\x0b\x13\xfe\
\xb5\xbe\xb4\x3c\x76\x5e\xdb\x9c\xe8\xb4\x3d\xe1\xd9\xb5\xa5\xd1\
\x89\x63\x07\x97\x6f\xab\xe5\x4c\x63\xfd\xe0\xe8\x61\x17\x79\x20\
\x5c\x1c\xbd\x77\xfc\xe6\x7b\x6e\xa8\xa5\xdd\x8b\xd5\x03\x2b\xd7\
\x53\x48\x56\x76\xee\x63\x8e\x2e\x13\xb5\x6b\x99\x77\xc3\x05\x1f\
\x2e\x89\xdc\x93\x90\x7f\x53\x2d\xf5\x15\xac\x2d\x2e\x3f\xa4\x0a\
\x3b\x87\x43\xf2\xfd\xc6\x81\x3b\xae\xab\x04\x27\xac\x7b\x0e\x9f\
\x0f\xb2\xc1\xab\x44\x9f\xfc\xe7\x5e\x55\xc0\x39\x3c\x4e\x16\xf7\
\x85\x4a\xf4\xc9\xb1\xec\xa4\x2a\xe0\x1c\x22\x47\x97\x1b\xd1\xdf\
\xd0\x05\x9c\x43\x64\xb5\xa1\x9b\xec\xda\xbf\x53\x7f\x74\x0e\x94\
\x87\x0e\xdf\xe9\xa2\xcf\x1b\xff\x2f\xd1\x4f\x3d\xf0\x78\xc5\x67\
\x57\x56\xe5\xdf\x67\xc1\x93\xb7\x3f\xb8\xd3\xef\x53\x77\x3f\x22\
\xcb\xcc\x82\xcf\xdf\x7f\xbc\xea\x73\xf3\xae\xa3\xf2\xef\x29\x32\
\xd6\x66\xdc\x8f\xdd\x72\x9f\x2c\xd3\x8b\xb3\x10\x1d\x61\xdf\x79\
\xe6\x95\xf1\xf7\xdb\xdf\x8c\x2d\xfc\xfe\xf3\xaf\xe3\xcf\xce\x7f\
\x38\x7e\xf5\xd8\xd3\xb2\x8d\x2e\xc4\xc8\xef\xbf\x70\x66\xfc\xd3\
\x57\x17\xeb\x5e\xf6\x82\x31\x51\x06\x87\x50\x6d\x84\xa4\xdc\xb7\
\x9f\x7e\xb1\x87\xed\xfa\xf4\xfb\xd1\x8b\xaf\x57\x73\x0a\xc1\x67\
\x1f\x9f\x79\x37\xea\x00\xb4\x85\xbd\x7e\xf9\xf1\x52\x5d\x6b\x37\
\xf8\x7c\xeb\xe5\xb7\x3a\x3b\xd1\x1e\x4e\x53\x74\x3c\xf4\xeb\xad\
\xcf\xeb\xa1\xe6\x83\x49\xbd\x76\xe2\x39\xd9\x66\x0e\x31\x1a\x86\
\x2d\x05\x75\x52\x2b\x09\x43\x2b\x34\x02\x9c\xdb\x38\x35\xfe\xf7\
\xef\x7f\xea\x4f\x6d\xfc\xf9\xdb\x1f\x72\x8e\x6f\x6e\x9e\xce\xaa\
\x0f\x28\x87\x73\x84\x6d\x14\x73\x5a\xa2\xbf\xf4\xe8\x66\xf6\xe0\
\x2d\xb0\x0a\x73\x57\x60\x43\x42\x29\x06\xed\x0a\xa2\x42\x6c\x05\
\xc5\x44\x67\x75\x97\x02\x91\x9b\xb6\xbf\xfc\xe0\x93\xfa\xd3\x32\
\xb0\xea\xdb\x63\x2c\xe6\x34\x44\xc7\x83\xa7\x05\xc2\x61\x6e\xfe\
\x9d\x86\xa3\x01\x84\xb7\x56\xbc\x25\x3a\x2b\xbc\x2b\xc8\xd5\x08\
\xd7\x07\x7d\x22\x63\x6f\xd1\x31\x4a\xca\xf0\xac\x44\xf2\x20\xe1\
\x34\x96\xe7\x1b\x5c\xba\xf8\xc3\xf8\x89\x5b\x8f\xc8\xfe\x1a\xe2\
\x18\xa9\x7e\xe9\x8b\x3e\x49\x39\xb1\x3c\x0f\xd8\x5f\xa8\x7e\x2c\
\xd1\x55\xdf\xcc\x11\x31\xf9\xd7\xca\xcf\xc0\x1a\x37\x0e\xcf\x58\
\x19\x33\x6d\xc4\x22\x18\xed\xab\xf1\x66\xb1\xaf\xe8\x31\x11\x09\
\x5f\x18\x4d\xd5\x3b\x7d\x74\xa3\x12\xd7\xc2\xf6\xdb\x5b\xb2\x1e\
\x64\x55\x5a\x75\x31\x68\x3b\x84\xb6\xc9\x0a\x8b\x8d\x57\x9d\x2c\
\x2c\xd1\x1b\xd0\x9f\xb5\x19\x25\xff\xa6\x1c\x13\x30\x17\x15\xdd\
\x70\x7c\x9c\xd1\x02\xf3\x09\xeb\x64\xb1\x8f\xe8\x4c\x4a\x21\x66\
\xf8\x36\x53\x93\xb2\x8e\x77\xb4\xad\x80\xf7\x93\xe3\x55\x9d\x86\
\x38\x8c\xb5\xea\x59\x61\x61\xf9\x98\xe8\xcc\x33\xd5\x1f\x27\x85\
\x18\x18\x4b\x2a\xaa\xb1\xea\x15\xac\xe8\x94\x64\x57\xd1\x19\xa8\
\xe5\xc5\x39\x82\xb7\x69\x89\x60\x4d\xca\x2a\x4f\x8e\x57\xe5\x43\
\xc6\x42\x76\x98\xdb\x63\xa2\xe7\x6c\xa8\x68\x4f\x1d\xe5\x1a\xe4\
\x8c\x19\xe7\x57\x20\x6a\xa9\xf2\x49\x76\x15\x9d\xc1\x2a\x30\x41\
\x6b\x53\x64\xd1\x9a\x94\x12\x81\x90\xa6\x80\x23\xb4\xcb\xa5\xd8\
\xec\x9c\xa9\x87\x78\x84\x68\x75\x72\x88\x89\x9e\xbb\xe1\xb4\xa2\
\x59\xc9\x98\xd5\x1e\x01\x5b\xab\xb2\x49\x76\x15\xdd\x9a\x48\xd7\
\xcb\x16\xeb\xf8\x12\xe6\x2d\x2b\xb4\x97\xf6\x8b\xc0\x39\xce\x69\
\x89\x4e\x1e\x56\xe5\x15\xad\x10\x5f\x72\xf4\xb2\x42\x7c\xe9\x02\
\xab\xd8\x55\x74\x6b\x23\x95\xca\x4f\x16\xad\x23\x10\x06\x6b\x97\
\xb3\x9c\x23\x77\xd5\x95\xd2\x12\x1d\x11\x54\x79\x45\xeb\x48\x5b\
\x92\x06\xad\x45\xc6\xf8\x54\xf9\x28\xbb\x8a\xae\xf2\x39\x47\x0c\
\x55\x36\x87\xec\xe6\x15\xc2\x5d\xbc\xe5\x6c\x9d\x3c\x3e\x83\x96\
\xe8\x25\x9b\x28\x4b\xf4\xdc\x3d\x08\xbc\xea\xa2\x63\x60\x85\xd2\
\xbc\xda\x26\x2b\x55\x21\x5c\x51\x6a\x53\xd4\xc7\xd9\x52\xb4\x44\
\xe7\x36\x4e\x95\x57\xb4\x44\x2f\x39\x72\x5d\x75\xd1\x2d\x43\x70\
\xb1\xa0\xca\xe7\x30\xd7\x91\x14\xfa\x38\x5b\x8a\xd6\x5c\xc3\xb4\
\x13\xe3\x20\x44\xb7\x56\x65\xec\x42\x25\x87\x0a\xe1\xcd\x93\x42\
\x49\x7e\x2d\xa5\x8b\x5e\xd3\x32\x44\x9f\x15\x67\xb5\x19\x0a\xaa\
\x50\xb2\x93\x2e\xa5\x8b\xde\xa2\x42\x9f\xdc\x8a\x01\x14\xc2\x94\
\x31\xd5\xf3\x6a\x06\x5d\xf4\x16\xad\x87\x01\xd3\x3e\xb2\x85\x57\
\xa3\x44\x13\x85\xae\xfd\xa6\xe8\xa2\xb7\x68\x7d\x59\x82\xa3\x97\
\x2a\x9f\x22\xe2\x2a\x60\xb0\x9c\x72\x25\x06\x84\x38\x19\x11\x82\
\xf6\x62\x75\x5d\xf4\x16\x99\xb4\x42\x97\xbc\xce\x35\xac\x75\x8f\
\x1f\x4e\xca\x32\x60\xc9\x11\x0a\x86\x37\x5c\x44\x2e\x0c\x1b\x5e\
\xc5\xba\xe8\x2d\x5a\xf7\xe5\xa0\xe4\xd2\x01\x5a\xb7\x6c\xca\x81\
\x10\x45\x39\x08\xab\x36\x37\xc4\x5b\x42\xaa\x67\xd4\x2e\x7a\x40\
\x2b\xbf\x5a\xcf\x87\x15\xb9\x33\xb7\x60\x39\x8f\xe5\x24\x39\x47\
\x46\xee\x03\xac\x5b\x3d\x75\x2d\xea\xa2\x07\xb4\x9e\xb4\x01\x56\
\x23\x79\x53\xd5\x83\x18\xdf\xca\xcf\x80\x55\x67\x5d\xad\xc6\xa2\
\x0c\x8f\x1b\xad\xef\xd9\xf1\x79\x18\xd6\x1b\x30\x5e\x55\xcf\x45\
\x17\x8c\x09\x07\x88\x06\x3c\x4d\x62\xe2\x4c\x92\xd5\xc4\x04\xd4\
\xd1\xab\x01\x02\xf4\xf9\x72\x42\x93\x9f\xe9\x8b\x3e\x89\x26\x8c\
\xc1\x3a\x71\x00\xeb\x89\x97\x8b\x2e\xc8\x6a\x8c\x7d\x05\xa9\x0b\
\x72\x9f\x3e\x59\x27\x88\x52\xb0\xfa\xad\xa8\xe2\xa2\x1b\x64\x03\
\x65\xe5\xd9\x12\xa4\x52\x42\x48\x84\xea\xdb\x2f\x91\x28\xb6\x01\
\x74\xd1\x13\xe4\xd8\xa4\x76\xd6\x39\x20\xdc\x97\x18\xa1\xcd\xae\
\xfd\xe2\x30\x29\xa3\xb9\xe8\x19\x64\x33\x44\x7e\x8c\xe5\xec\x36\
\x48\x0d\x18\x45\xb5\x55\x42\x26\x8f\x61\x62\x79\xbb\x01\x62\xb3\
\x19\x54\xed\x84\x74\xd1\x0b\xc9\x80\x08\xd7\x18\x28\x24\x3b\xff\
\x59\x5d\x9d\xb2\x09\x0c\xfb\xc3\xf0\x18\x79\x56\x7d\xee\x2b\xce\
\x52\x74\xe7\x35\x4a\x17\x7d\x0e\xe9\xa2\xcf\x21\x5d\xf4\x39\xe4\
\x15\xd1\xfd\x45\x43\xf3\xc4\xfa\x45\x43\xfe\x4a\xb1\xf9\x61\xfd\
\x4a\x31\x7f\x79\xe0\xfc\x70\xe7\xe5\x81\xfe\x9a\xd0\xf9\xe1\xce\
\x6b\x42\x81\xbf\x10\x78\xf8\xdc\xf5\x42\xe0\x06\x93\x5d\xbc\xbf\
\xfa\x7b\xa0\x24\x92\x57\x1b\xb8\x10\xfe\x92\xff\xa1\x32\xf2\x92\
\xff\x06\xfe\x73\x1e\x03\x62\xea\xe7\x3c\xda\x10\x3f\xdc\xe3\xdc\
\x3f\x4c\xfc\x70\xcf\xc2\xc2\x7f\x30\x8e\x88\x40\xbb\xc2\x0b\x1b\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x7a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x65\x00\x00\x00\x65\x08\x06\x00\x00\x00\x54\x7c\x2d\xcf\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0f\x61\x00\x00\x0f\x61\x01\xa8\x3f\
\xa7\x69\x00\x00\x05\x0f\x49\x44\x41\x54\x78\x5e\xed\x9d\x4d\xa8\
\xdc\x54\x14\xc7\xe7\x23\xc9\x2b\x5d\x75\xd9\x65\x97\x2e\xbb\x2c\
\x7d\x89\xf3\xdc\x88\x28\xc8\x43\x10\x8a\x20\x28\x14\x6c\x11\x4c\
\xf2\x7c\x94\xfa\x81\xad\xf8\x51\xa1\xa5\x42\x55\xb4\x58\x2c\x94\
\xc2\x2b\x58\x68\x17\x85\xd2\x2e\x5a\xba\x7a\xb8\x1a\x41\xf0\x21\
\x08\x75\x57\x11\xa4\xee\x5c\xc6\x7b\xee\x64\xe6\xcd\xdc\x9c\x7c\
\x4e\x26\x39\xbe\xf9\x5f\xf8\x6d\x26\xe7\x9e\x24\xf7\x77\xbf\x92\
\x59\xa4\x53\x47\x59\x59\xf5\xd7\x80\xbf\x66\x1f\xf5\x0f\xc7\x4d\
\xd2\x5c\x59\x19\xf8\x87\x2c\x2f\x38\x61\x7b\xc1\x5d\xdb\x0d\x9e\
\x38\x5e\x18\x01\x1e\xdb\x0d\x1f\xdb\x5e\x78\xcb\xf6\xfc\x37\x3a\
\x03\xff\x40\xdc\x84\xf5\x15\x6b\x35\x3c\xa2\x4e\xf4\xd0\x3c\x31\
\x28\x43\xb0\x45\x9d\x3a\x6e\xd2\xea\x65\xff\x60\xf3\x20\xd9\xe6\
\x4f\x02\x4a\xe3\x06\xff\xda\xee\xc6\x97\x95\x47\x0e\xcd\x91\x8e\
\x1b\x3e\x65\x93\x83\xb9\x50\x53\xff\x8e\x33\xd8\x78\x26\x6e\xea\
\x62\x85\xe6\x41\xb2\xca\x25\x04\x35\xa1\x3a\xfc\x8a\x1b\xbe\x10\
\x37\x79\x76\x71\x5c\x7f\x9d\x4d\x02\xea\x87\xa6\xb3\xbc\x1d\x1b\
\x05\x60\x84\x34\x0b\xed\xd2\xd2\xd7\x98\xc1\x99\x7d\x14\xc0\x55\
\x04\x8b\x85\x36\x53\xb1\x85\xd9\xe2\x78\x7e\xc0\x55\x00\xcd\x40\
\x1b\xab\x58\x45\x5c\xf4\x28\xc1\xc3\x60\xcb\x0c\x63\x1b\xa3\xa2\
\x77\x5b\x7c\x20\x68\x90\x99\xd1\x42\xaf\x4d\xb8\x20\xd0\x30\x6e\
\xf8\xed\xc8\x88\x5a\xf9\xb1\xe3\x92\x01\x2d\x21\xda\x09\x3d\xc0\
\x70\x01\xa0\x25\xe8\x49\x1f\xeb\x89\x2c\xf4\xba\xe2\x78\xc1\x69\
\xee\x20\x68\x07\xf5\xac\x78\xac\x43\x6f\x2d\xb9\x83\xa0\x1d\xf4\
\xff\x2f\x6a\xc5\xbf\xca\x1d\x04\xed\x00\x29\x02\x81\x14\x81\x40\
\x8a\x40\x20\x45\x20\x90\x22\x10\x48\x11\x08\xa4\x08\x04\x52\x04\
\x02\x29\x02\x81\x14\x81\x40\x8a\x40\x20\x45\x20\x90\x22\x10\x48\
\x11\x08\xa4\x08\x04\x52\x04\x02\x29\x02\x81\x14\x81\x40\x8a\x40\
\x20\x45\x20\x90\x22\x10\x48\x11\x08\xa4\x08\x64\x61\x52\xec\x97\
\x3f\x8d\xfa\x1f\xdd\x8b\xba\xdf\xed\x44\x9d\xdb\xff\x4c\xe8\xfe\
\xf0\x7b\xd4\xfb\xec\x51\x64\xbf\xf2\x05\x5b\x2f\x0f\xfb\xc5\x33\
\xa9\x79\xfb\xef\xde\x8c\xec\x97\x3e\x66\xeb\x69\xd6\x36\xa3\xee\
\xb5\x3f\x26\x75\xfa\xa7\x6e\x8f\x7e\x7f\x76\x23\xb2\xde\xbe\x3e\
\x9b\xf3\xe6\xdf\xa3\xeb\x5c\xff\x3c\x91\x67\x7c\x0d\x9d\x1b\x7f\
\xce\xc4\x77\xbf\xfa\x39\xb2\xde\xbc\x9c\x88\x2f\x4b\xed\x52\xe8\
\x82\x7b\x97\x86\xbb\x17\x9b\x41\xf7\xfb\xdf\xd8\x9b\x66\x51\x0d\
\xa7\x1b\x82\xc9\x63\xd2\xfb\xe4\x41\xe4\x3c\x77\x2a\x99\x83\x91\
\x42\x12\x67\x1a\x97\x61\x5a\x5e\xff\xbd\x3b\x6c\xcc\x34\xdd\xcb\
\xbf\xf2\xe7\x2f\x48\xad\x52\x8a\xdc\x60\x02\xd5\xc3\xec\x57\xcf\
\xb3\xf9\xc6\x90\xe8\xe9\xc6\x2c\x42\xef\xfc\x76\x32\x97\x29\xe5\
\xfd\x3b\x85\xf3\x5a\x6f\x5d\x8d\xfa\x1f\xde\x65\x8f\x71\xb0\xe7\
\x2f\x48\x7d\x52\x8c\x1b\x2e\xc5\x8f\x7f\x45\xce\xf3\x1f\xf0\x79\
\x55\xef\xec\x5d\xf8\x89\xaf\x97\xc3\xa4\x87\x8f\x99\xe7\x1a\xcb\
\x52\xa0\xb3\xa5\x51\x9b\x14\xae\x17\xd1\xf4\x64\xce\xb1\xb4\xd6\
\x70\xd3\x5b\x5a\xcf\xb2\x4e\x5e\x4b\xc4\xea\x69\x6f\x7a\x4d\xa2\
\x35\xe1\xf5\xaf\x93\xa3\xd4\x94\x9d\x22\x85\xcb\x97\x35\x4d\x15\
\x8d\x4f\x74\x8a\x82\xd4\x22\x85\x9b\xb6\x74\x23\xa7\xcd\xab\x74\
\x13\xa6\x44\xae\x67\x31\x8d\x98\x95\x97\xbb\x0e\xeb\xf8\x95\xdd\
\x18\x26\x5f\xd6\xfc\xcf\x76\xb4\x12\xf1\x55\xd7\x96\x5a\xa4\x24\
\x7a\x73\xd6\x74\x34\x86\x69\x20\xb3\x67\xd9\xc7\x2e\xce\x1c\x2f\
\x92\x97\xae\x65\xb2\x13\x33\x77\x78\xcc\x39\x67\xa4\x19\x24\x24\
\xe7\x4c\x49\x66\x7c\xab\x52\xcc\x1e\x52\x74\x91\xcb\xab\x67\xca\
\x2e\x9a\x37\x15\x53\x4a\x9e\xe4\x45\xc7\xa7\x30\xbf\x14\x66\x21\
\xce\xea\x7d\xd3\x98\x23\xc1\xec\x59\xa6\xb4\xaa\x73\xf4\x04\xa3\
\xd1\x72\x7b\xf2\x9c\xf1\x7b\x4f\xca\x1c\x79\x53\x81\x14\x26\xd6\
\xa0\xac\x14\x8c\x94\xa2\xcc\xd1\x78\x98\xbe\x78\x44\x2f\xf4\x55\
\xf2\x92\x68\xbd\xfb\x3a\x7b\x3f\x77\xf7\xb5\xa7\xa5\x48\xda\x12\
\x67\x8e\xae\x65\x92\xc2\x3d\xb4\x65\xde\x00\x33\xe5\xd5\xf2\xf0\
\x68\x4a\x34\x73\x2e\x93\x14\xc2\xec\xa1\xc4\xe4\x35\xcb\xd4\x8d\
\x58\xaf\x5d\x5a\xc8\x6b\x96\xfe\x3b\x5b\xc9\x38\xb3\x11\x97\x4d\
\x4a\xe2\x82\xca\x90\x75\xf1\x55\xf3\x16\x18\x79\x7b\x5f\x8a\x82\
\x9b\xc6\x72\xc9\x79\x75\x41\x54\x79\x75\xcf\xee\xd4\x96\x51\x0a\
\xd1\xfa\x9f\x5c\x4a\x32\xfd\xf7\xc1\xe6\x58\x56\x29\x63\x1a\xff\
\x3b\xf8\x9b\x5f\xf4\x4b\xc8\x45\x36\xf2\xff\x5e\x0a\xa8\x0e\xa4\
\x08\x04\x52\x04\x02\x29\x02\x81\x14\x81\x40\x8a\x40\x20\x45\x20\
\x90\x22\x10\x48\x11\x08\xa4\x08\x04\x52\x04\x02\x29\x02\x81\x14\
\x81\x40\x8a\x40\x20\x45\x20\x90\x22\x10\x48\x11\x08\xa4\x08\x04\
\x52\x04\x02\x29\x02\x81\x14\x81\x40\x8a\x40\xb4\x14\xdb\x0d\xcf\
\x71\x07\x41\x3b\x8c\x46\x0a\xbe\x19\x2c\x0b\xd7\x5f\xc7\x27\x05\
\x85\xa1\x3f\x29\x68\xad\x86\x47\xb8\x83\xa0\x1d\xf6\x0f\x36\x0f\
\xea\x2f\x6c\xab\xc5\xfe\x29\x17\x00\x9a\xc5\x76\x83\x1d\xfd\x99\
\x5a\x2a\x8e\x17\x6c\x71\x41\xa0\x59\x68\xd3\x15\x2b\x51\x52\xd4\
\xe2\xc2\x05\x81\x66\xb1\x8f\xfa\x87\x63\x25\xa3\xa2\x7e\x1c\x9a\
\x41\xa0\x39\x6c\x2f\xbc\x15\xab\xd8\x2d\x18\x2d\xed\x92\x18\x25\
\xe3\xa2\xd6\x96\x6d\xae\x02\x58\x2c\x33\x6b\x89\x59\x68\x3b\xa6\
\x02\x1e\x73\x15\xc1\xa2\x08\xb6\x69\x07\x1c\x2b\xe0\x0b\x7d\xa4\
\x1e\x5b\xe4\x66\xa0\x01\x30\x7a\x2e\x29\x50\x68\x7e\x53\x7b\xe6\
\x27\x5c\x22\x50\x1b\xc3\x95\x81\x7f\x28\x6e\xf2\x62\x85\x2a\x50\
\x45\x23\x11\xa8\x01\xbd\xd3\x1a\xf8\x07\xe2\xa6\x2e\x59\xe8\x69\
\xdf\x0b\x4e\x63\x3a\xab\x07\x9a\xae\xf4\x5b\xe0\x5a\x8a\xb2\xaa\
\xec\x9e\xc5\x26\xa0\x32\x43\xcb\x0b\x4e\xe4\x2e\xe8\x55\x8b\xde\
\x08\xd0\xe8\x19\xbd\x9a\x79\x08\x52\xd0\x7f\x1e\xfa\x41\xe9\x75\
\xa3\xd3\xe9\xfc\x07\xe3\x1b\x9f\x1a\xd2\xbd\x5e\x5b\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x78\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x65\x00\x00\x00\x65\x08\x06\x00\x00\x00\x54\x7c\x2d\xcf\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0f\x61\x00\x00\x0f\x61\x01\xa8\x3f\
\xa7\x69\x00\x00\x05\x0d\x49\x44\x41\x54\x78\x5e\xed\x9d\x4d\x68\
\x1d\x55\x14\xc7\xdf\xd2\x65\x97\xdd\xa5\x8b\xcc\x4b\x49\x37\x75\
\xa1\x15\x44\x5a\x04\x41\xf3\x66\x48\xba\xd0\xc6\x2f\x0c\x82\x12\
\x2c\x36\x4d\xe6\xf9\xd5\xda\x26\xd8\x48\xa4\x44\x53\x2a\x35\x4a\
\x49\x8b\xd0\x92\x1a\x5a\x23\x45\x28\x95\xb6\x29\x16\x79\x0b\x17\
\x8a\x08\x59\x76\xe9\xb2\x4b\x97\xe3\x3d\x93\x3b\xe9\xbc\xfb\xce\
\x9b\xef\x79\x73\xcc\xfb\x1f\xf8\x41\xc8\xdc\x39\x33\x73\x7e\x77\
\xee\xbd\x33\x6f\x31\xb5\x2c\x31\x53\xb7\xf7\xba\x56\x63\xdc\xb5\
\x9c\x39\xcd\xfa\x4c\xdd\xd9\x00\xce\x46\x50\x93\xe9\xba\x73\xdc\
\x1d\x6a\x1c\xd2\x25\x2b\x27\xa6\x86\x46\xf7\xb8\x96\xbd\xa4\x0e\
\xb8\xe9\xd6\x1d\x0f\x24\xc5\x7e\xa4\xb8\xac\x24\xbd\xa8\x4b\x99\
\x3f\x9a\xc3\x23\xbb\x29\x29\x7f\x40\x90\x0e\xbb\x95\xfb\xee\x71\
\x07\xed\x49\x75\x4b\xfe\xcb\x1f\x00\x64\x45\xd5\x74\x75\x76\xe0\
\xe0\x13\xba\xcc\xc9\x43\xed\xb8\xcc\x25\x04\x45\x61\xb7\xa6\x06\
\x46\x77\xe9\x72\xc7\x07\x4d\xdc\x7c\x22\x50\x28\x96\xfd\x90\xa6\
\x07\x5d\xf6\xee\xa1\x84\x2c\xb0\x09\x40\x49\xd8\xad\xc8\xa1\xcc\
\x5f\xe6\xb2\x3b\x82\x72\xb1\x2f\x6b\x05\xed\x41\xb6\x66\x2c\xe7\
\x1f\x7e\x27\x50\x36\x4d\x6b\x64\xbf\x56\xf1\x38\xd4\x03\xe1\xc7\
\x5c\x63\xd0\x2b\xec\x96\x56\xb1\x15\xb4\x0a\xc0\xd2\xb7\x7a\xda\
\x1e\x30\xd5\xb0\x35\xc1\x35\x02\xbd\x26\x34\xb7\x60\x09\x2c\x03\
\x9a\xd3\x7d\x21\xfe\x04\x8f\xa1\x4b\x0e\xf4\x1a\xc6\x7f\xd1\xc8\
\x6d\x04\x95\x40\x53\x49\x8d\xcc\x70\x1b\x41\x45\x58\xce\x1c\x2d\
\x85\xc7\xd8\x8d\xa0\x1a\x2c\x7b\x09\x2b\x2f\x71\xa8\x15\x18\xa4\
\x48\x03\x52\x04\x02\x29\x02\x81\x14\x81\x40\x8a\x40\x20\x45\x20\
\x90\x22\x10\x48\x11\x08\xa4\x08\x04\x52\x04\x02\x29\x02\x81\x14\
\x81\x40\x8a\x40\x20\x45\x20\x90\x22\x10\x48\x11\x08\xa4\x08\x04\
\x52\x04\x02\x29\x02\x81\x14\x81\x14\x2c\xe5\xb3\x67\xdf\xf2\xae\
\xbb\xe7\xbc\x07\x5f\xaf\x79\x9b\x3f\xfe\xba\x4d\xeb\xdb\x75\xef\
\xe6\xa7\xcb\xde\xe7\x07\xdf\x66\xf7\x8b\xe3\xf4\x53\xaf\x75\xcd\
\xfb\xfd\x3b\x67\xbc\xd9\xa7\x5f\x67\xf7\x23\x3e\x18\x3e\xec\xfd\
\xbe\xf2\xf3\xf6\x3e\x57\xde\x5b\xf0\xff\xdf\x1c\x1a\xf5\x2e\xbe\
\x79\xaa\x2d\xe7\xdf\x6b\x1b\xfe\x79\xce\x3f\x37\xd1\x91\x27\x38\
\x87\x3f\xaf\xfe\xd2\xd6\xfe\xee\xe2\x55\xef\xfc\xd8\x74\x47\xfb\
\xec\x14\x24\x85\x4e\xf8\xde\xe2\x95\xed\x93\x8d\xe2\xb7\x6f\x6e\
\xb0\x17\xcd\x41\x85\xa3\x42\x70\x79\x4c\x7e\x3a\x71\xc1\xfb\x70\
\xdf\xe1\x8e\x1c\x9c\x94\xd9\x03\x6f\xb4\x15\x97\x23\x2c\x6f\xf5\
\xfd\xb3\x6c\x9b\x30\x0f\xce\xaf\xb1\xc7\x4f\x4f\x01\x52\x92\x5c\
\xa0\x09\xf5\xb0\x2f\x9e\x7f\x97\xcd\x17\x40\xa2\xc3\xc5\x4c\xc2\
\xad\x33\x2b\x1d\x79\x4c\x29\xd7\xa6\x16\x13\xe7\x5d\x3e\xf2\x91\
\xf7\xc3\xf4\x97\xec\x36\x0e\xee\xf8\xe9\xc9\x29\xc5\xbc\xe0\x34\
\xfc\x75\xed\x8e\x77\xf2\xc9\x57\xd8\xbc\xd4\x3b\x6f\xcf\x5f\x62\
\xf7\x8b\x23\xe8\xe1\x01\x79\xce\x31\x2d\x49\x3a\x5b\x3c\x39\xa5\
\x70\xbd\x88\x86\x27\x73\x8c\xa5\xb9\x86\x1b\xde\xba\xf5\xac\xef\
\x5e\xfd\xa4\xa3\x2d\xe5\x0d\xcf\x49\x24\xee\xab\xc6\xb1\x8e\xbb\
\xd4\x94\xdd\x4d\x0a\x97\x2f\x6a\x98\x4a\xda\xde\xec\x14\xe9\xc9\
\x21\x85\x1b\xb6\xa8\xc8\xdd\xc6\x55\xba\x08\x53\x22\xd7\xb3\xb8\
\x22\x46\xe5\xe5\xce\xe3\xc2\xcb\xcd\xed\xed\x5c\xbe\xa8\xf1\x9f\
\xeb\x68\x69\xda\xe7\x9f\x5b\x72\x48\x31\x7b\x73\xd4\x70\x14\xc0\
\x15\xc8\xec\x59\x67\x5f\x98\x6c\xdb\x9e\x24\x2f\x9d\x4b\xb0\x12\
\x33\x57\x78\xdc\x31\xc3\xd2\x4c\x4c\xc9\x71\x43\x92\xd9\xbe\x52\
\x29\x66\x0f\x49\x3a\xc9\xc5\xed\x67\xca\x4e\x9a\xb7\x1b\xa6\x94\
\x38\xc9\x65\xb7\x8f\x27\xa3\x14\x6e\x22\x8e\xea\x7d\x61\xcc\x3b\
\xc1\xec\x59\xa6\xb4\xbc\x63\xb4\x59\xb4\xb8\x9e\x9c\xb7\xfd\x8e\
\x93\x92\x27\x6f\x37\x20\x85\x69\x6b\x92\x56\x0a\xee\x94\x84\xe4\
\x29\x1e\x86\xaf\x38\x04\x4e\xf4\x59\xf2\x92\x68\x5a\x7d\x5d\x6f\
\x9e\x8b\x5d\x7d\xed\x68\x29\x92\x96\xc4\x51\x77\x57\x5f\x49\xe1\
\x1e\xda\xa2\x2e\x80\x1b\xf2\x8a\x78\x78\x34\x25\x9a\x39\xfb\x4a\
\x0a\x61\xf6\x50\x22\x78\xcd\x12\xbe\x90\xc5\x97\x8e\x96\xf2\x9a\
\x65\x65\xe2\x74\x47\x3b\xb3\x88\x7d\x27\xc5\x3c\xa1\x34\x44\x9d\
\x7c\xd6\xbc\x49\xee\xbc\x1d\x2f\x85\xe0\x86\xb1\x38\xe2\x5e\x5d\
\x10\x59\x5e\xdd\x73\x2b\xb5\xbe\x94\x42\x54\xfd\x23\x17\x49\xa6\
\xdf\x3e\xb8\x1c\x7d\x2b\x25\xa0\xd7\x3f\x07\xdf\x5f\x5a\xf5\x5f\
\x42\x96\x59\xe4\xff\xbd\x14\x50\x04\x90\x22\x10\x48\x11\x08\xa4\
\x08\x04\x52\x04\x02\x29\x02\x81\x14\x81\x40\x8a\x40\x20\x45\x20\
\x90\x22\x10\x48\x11\x08\xa4\x08\x04\x52\x04\x02\x29\x02\x81\x14\
\x81\x40\x8a\x40\x20\x45\x20\x90\x22\x10\x48\x11\x08\xa4\x08\x04\
\x52\x04\x02\x29\x02\x81\x14\x81\x40\x8a\x40\x20\x45\x20\x90\x22\
\x10\x25\x85\xbe\xc2\xc9\x6f\x04\x95\x60\x39\x0b\xb5\xe9\x41\xe7\
\x19\x76\x23\xa8\x04\xfa\x86\x33\xbe\xf3\x28\x0c\x9a\x4e\xfc\xaf\
\xa2\xaa\x3f\xf0\xc9\x73\x21\xa8\x3b\x65\xaf\x2f\x85\x26\x17\xae\
\x01\xe8\x31\x96\xb3\xe9\x0b\xa1\xc0\x64\x2f\x04\x9a\xe4\x83\xa0\
\x8f\x3a\xbb\x96\xfd\x90\x6d\x08\x7a\x46\xd3\x1a\xd9\xaf\x95\x6c\
\x05\x3e\x57\x5b\x31\xf4\x79\x5a\x2e\x66\xea\xce\x1f\xec\x0e\xa0\
\x54\xe8\xd3\xf3\xcd\xe1\x91\xdd\x5a\x43\x7b\xd0\xcc\xaf\x26\xfd\
\x47\xdc\x8e\xa0\x44\x06\xed\x49\xad\x80\x0f\x0c\x63\x3d\xa6\xdb\
\xb0\x65\x86\x5a\x8d\x1d\x67\x13\x80\x62\xb1\x9c\x75\x5d\xf2\x64\
\xb1\x75\xc7\x60\x28\x2b\x8d\xf0\xf2\x37\x4d\xe8\x39\xa6\xc5\x26\
\x05\x99\xf0\xdf\x9e\x58\x8d\x71\x5d\xe2\xec\xe1\xdf\x35\x78\x8e\
\xc9\x05\xad\xb0\xd4\xdd\x31\x47\xcf\x84\xba\xac\xc5\x04\xc9\x51\
\xc9\x97\xf1\xae\x2c\x05\x6a\xde\xa0\x97\x8c\x53\x03\xa3\xbb\x74\
\x19\xcb\x0b\x5a\x57\xbb\x43\x8d\x43\x80\xa7\xe3\xe9\x3c\x71\xd4\
\x6a\xff\x01\xfa\xe8\x5d\x11\xb2\x88\x6e\x66\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0f\x5c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x02\xd7\x00\x00\x00\x83\x04\x03\x00\x00\x00\xb4\x20\x2a\xa6\
\x00\x00\x00\x1b\x50\x4c\x54\x45\xff\xff\xff\x7f\x7f\x7f\x5f\x5f\
\x5f\xdf\xdf\xdf\xbf\xbf\xbf\x1f\x1f\x1f\x3f\x3f\x3f\x9f\x9f\x9f\
\x00\x00\x00\xc8\x53\xa5\x5e\x00\x00\x00\x01\x74\x52\x4e\x53\x00\
\x40\xe6\xd8\x66\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\
\x00\x00\x0e\xc4\x01\x95\x2b\x0e\x1b\x00\x00\x0e\xda\x49\x44\x41\
\x54\x78\x9c\xed\x5d\xcb\x7b\xdb\x36\x12\xe7\x9b\x3a\x76\xbb\x9b\
\x44\x47\xb5\x49\x5b\x1d\x9d\x38\xca\xea\xc8\xb6\x76\xc2\x63\xdc\
\x5d\x39\x3c\x6a\x93\xf8\x8b\x8e\x69\x1b\x67\x79\x94\xdb\xc8\xc6\
\x9f\xbd\xc4\x93\xa0\x30\xa4\x00\x01\x7a\xac\xcb\xdf\xf7\x45\x8e\
[binary resource data elided: hex-escaped blob of size-prefixed grayscale PNG images (31x51, 720x148, 34x50, 61x88, 727x131, 66x71, 28x51, 42x50 px), apparently auto-generated Qt/pyrcc resource data]
\x00\x00\x0f\x5c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x02\xd7\x00\x00\x00\x83\x04\x03\x00\x00\x00\xb4\x20\x2a\xa6\
\x00\x00\x00\x1b\x50\x4c\x54\x45\xff\xff\xff\x7f\x7f\x7f\x5f\x5f\
\x5f\xdf\xdf\xdf\xbf\xbf\xbf\x1f\x1f\x1f\x3f\x3f\x3f\x9f\x9f\x9f\
\x00\x00\x00\xc8\x53\xa5\x5e\x00\x00\x00\x01\x74\x52\x4e\x53\x00\
\x40\xe6\xd8\x66\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\
\x00\x00\x0e\xc4\x01\x95\x2b\x0e\x1b\x00\x00\x0e\xda\x49\x44\x41\
\x54\x78\x9c\xed\x5d\xcb\x7b\xdb\x36\x12\xe7\x9b\x3a\x76\xbb\x9b\
\x44\x47\xb5\x49\x5b\x1d\x9d\x38\xca\xea\xc8\xb6\x76\xc2\x63\xdc\
\x5d\x39\x3c\x6a\x93\xf8\x8b\x8e\x69\x1b\x67\x79\x94\xdb\xc8\xc6\
\x9f\xbd\xc4\x93\xa0\x30\xa4\x00\x01\x7a\xac\xcb\xdf\xf7\x45\x8e\
\x28\x08\x8f\x1f\x87\x33\x83\xc1\x00\xf2\xbc\x1e\x3d\x7a\xf4\xe8\
\xd1\xa3\x47\x8f\x1e\x3d\x7a\xf4\xe8\xd1\xa3\x47\x8f\x1e\x3d\x7a\
\xf4\xe8\xd1\xa3\x47\x8f\x1e\x3d\xee\x0b\x9e\x5e\xcd\x7e\x38\x74\
\x1f\x74\x10\x5e\xec\xbb\xc5\xc9\x57\xae\x6b\x8c\x2e\x4e\xa2\xc5\
\xbf\x5d\xd7\xba\x03\x5c\xff\xb1\xe7\x06\xd3\x62\xe4\xba\xca\xf3\
\x13\xcf\x0b\xd0\x47\xd7\xd5\xba\xc6\xf3\x07\x68\xbf\x64\x87\x3f\
\x16\x68\xe4\xba\xce\x2f\xf8\xb5\xf8\xd3\x71\xb5\xae\x91\xaf\x3e\
\xec\x97\xec\x08\x5d\x96\xce\xc9\xf6\xe7\xf8\x75\xb8\xef\x47\xd4\
\x14\xa1\xe7\xed\x59\xb2\x4f\xbc\x81\x73\xb2\xe3\x15\xd6\x20\x53\
\xe4\xb8\xda\x1d\x60\xcf\x64\x7b\x3b\x20\x7b\x80\x5e\x93\xd7\x13\
\xc7\xf5\xba\xc7\x3d\x20\x3b\xee\xc9\x6e\x83\x7b\xb2\xd3\x59\xaf\
\x46\x5a\xe0\x9e\x6c\x8a\xf2\x76\x27\xd5\x3a\xc5\xbd\x21\x7b\x71\
\xb7\x93\x6a\x9d\xe2\xbe\x90\x1d\xa1\xf9\x2e\xaa\x75\x8b\xfb\x42\
\x76\xb2\x3a\x7e\xfb\x78\x6f\xc8\x1e\x7e\xd9\x45\xad\x8e\x71\x4f\
\xc8\x8e\x56\xd9\x0e\x6a\x75\x8d\x7b\x42\xf6\xe4\x5f\x3b\xa8\xd4\
\x39\xee\x07\xd9\xe1\xec\xff\x40\x63\xdf\x17\xb2\x27\x73\xf7\x75\
\xee\x00\x8e\xc9\x1e\xa0\x0a\x1b\x4a\x8c\x9c\xb6\x48\xf0\xb6\xfa\
\x97\x56\xc2\xed\xe3\xe6\x8f\x77\xde\x7e\x2f\xc8\x4e\x70\x95\xb1\
\xa7\x49\xf6\xb5\x75\x7b\x8f\xb6\xfc\x9e\x6b\xb2\xef\xce\xce\xce\
\xba\x4b\x74\x93\x9d\xbe\xd6\x6e\xcb\x17\x6b\x33\x58\xb0\xbd\x49\
\xf5\x2f\xac\x5a\x3f\x83\xc9\xfe\x3c\x7b\xcf\x5a\xb0\x77\x12\xf3\
\x2d\x57\x85\x5c\x93\xbd\x71\xbd\x64\x03\xd9\x65\xa6\xdd\x56\xc4\
\x27\xe8\x01\x59\xd5\x7c\xc8\xaf\x83\x64\xbf\xb8\x0b\x17\x94\xa2\
\xd2\x7e\xfd\x2c\xdd\x32\x34\x70\x64\x64\x1b\x0d\x83\xd3\x36\x9e\
\x55\xa8\x07\x02\x92\xbd\x18\x45\x05\x79\x68\xc2\x1b\x83\x16\xda\
\x30\xcc\xb6\xfa\xda\x91\x91\x3d\x35\x59\x7a\xf7\xa9\x42\xa0\x8a\
\x1a\x89\xdb\x04\x91\x9d\xa2\x8f\x31\x5a\xe2\xff\xc5\xfa\x7a\xaa\
\x1d\xc9\x76\x6e\xfd\x91\x91\x3d\x33\x6a\xed\xea\x84\xd6\x48\x20\
\x54\x31\x44\x76\x8c\x4e\x06\x34\x54\x35\xce\xe8\x95\x6b\x9b\x69\
\x50\xb8\x5d\x3c\xf7\xb8\xc8\x0e\x18\x63\xa9\xde\x24\x65\x3a\x87\
\xae\x42\x64\x4f\x57\x9e\x7f\x8b\xaf\x87\x7c\xbc\xc8\xca\x4e\x0e\
\xb7\x4a\x7e\x39\x2e\xb2\xa7\x23\x5e\x28\xd3\x69\xcd\x07\xf5\x2f\
\x44\xf6\x90\x8b\x62\xc2\x3a\x18\x51\xa5\xb2\x2d\x62\xf3\xc4\x89\
\xf0\xf4\x27\x84\x7e\x3f\x75\x38\x09\xd8\x44\xf6\xe9\xf3\x31\xba\
\x3b\x3d\x6d\xfb\x78\xc1\xba\x92\x6b\xae\x71\x15\x50\xd7\x21\xb2\
\x17\x5c\xa6\xf2\x11\xfd\xeb\xdb\xc5\xbf\x7d\x73\x19\x65\xda\x6e\
\x69\xd3\xec\x5a\x8d\x1b\xc8\xa6\x0d\xb6\x51\x19\xf1\x21\x0c\x35\
\x75\xe2\x70\x04\xb5\x01\x90\x5d\x70\xf3\xc9\x6f\x67\x82\xec\xb2\
\xe0\xcc\xa7\xa9\x21\xfd\xb3\x47\xc9\x3e\x91\xdb\x55\xc0\x9f\x71\
\x6f\xa1\xe9\x9f\x81\xcd\x01\x3c\x84\x5c\x43\x0b\xcb\x36\xd5\x53\
\x54\xad\x18\x8f\xac\xbe\xee\x04\x9b\x75\x76\x17\xb8\xc1\x0b\x75\
\xad\x57\x00\x3d\xcd\x00\xd9\x11\x62\xfd\xf2\xb9\x84\x97\x96\x8b\
\xf1\xf9\xd2\xee\xfb\x2e\x60\x47\xf6\x90\xcd\x52\x52\x5d\xcd\x16\
\x41\x94\x01\x64\xfb\xbc\x42\x61\xd8\xc6\x46\xce\x5b\x94\xad\x5f\
\x89\x8f\x60\x7d\xd9\x8e\x6c\x6e\xef\x82\x16\xeb\xa5\x46\x5d\x0a\
\x60\xee\x0d\x90\x2d\x2a\xcc\xf9\x94\xa6\x80\x14\x55\xf8\xb0\x20\
\x26\x45\x59\xf6\xa1\x8b\x6e\xe1\xdb\xba\x66\xf0\x99\x72\x83\xcf\
\xc5\xea\x89\x57\xeb\xda\xaa\xcd\x10\x56\xf4\x56\x64\x47\x2b\xf6\
\x9f\x18\xb6\x5e\x3e\xd5\xb3\x2f\x1e\xd7\x97\x20\xd5\x09\x90\x1d\
\x73\x7f\x93\xfa\xc7\x0b\x6a\xa6\xe7\x6b\xa5\xc2\x31\xb3\xdf\x4a\
\xfa\x71\x41\xc8\x4e\xa4\xeb\xe9\xce\x72\x82\xce\x6f\x3f\xfd\x84\
\xbb\x3b\x60\xbe\x0b\xf9\xf3\xf5\xf7\xb3\xa5\xf7\x1b\xbd\x0b\x02\
\x56\x64\x53\xb7\x39\xa1\xe3\x05\x1e\xf3\x98\x92\x2d\x67\x88\x94\
\x4b\xb5\x98\x42\x36\xe5\x96\xc8\x34\x0b\x46\xc1\x9e\x5f\x89\xd0\
\xac\x12\xeb\xd9\xec\xed\xda\x07\xcc\x29\x97\x33\xdc\x42\x4b\x03\
\xdb\x8a\x14\x0b\xda\xa4\x1a\x7d\xf4\x6a\x41\xee\xfa\x35\x5a\xfd\
\x90\x61\xc2\xcf\x2f\xce\x3e\xa3\x77\x52\x51\x2b\xb2\x13\xce\x62\
\x8b\xe7\xc7\xbc\x6f\x24\xb5\x31\x05\x2c\x29\x20\xd9\x39\xbf\x86\
\x3a\x3c\xbf\x14\xfd\x91\x79\x2f\x20\xdb\xcc\x6e\x4d\xc3\xfb\xc7\
\x0a\x2c\x41\x4d\xcc\xc1\x6e\x9b\x21\xbf\x21\x5d\xc1\x42\x11\x10\
\x89\x0b\x10\x7e\x92\x9f\xa1\xbb\x37\x1e\x96\x87\xac\x2e\x6a\x45\
\xf6\x60\x83\xe7\x47\xef\x41\x63\xee\x07\xd9\x29\x80\xec\x21\x53\
\x50\x21\x57\x54\x90\xe7\x57\x92\x11\x2e\x80\x1b\xcd\x6e\x4d\x43\
\x06\x16\xa3\x9d\x90\x1d\xb2\x27\x90\xbe\xe2\x5e\x26\x37\xf4\x3a\
\xe9\x71\x2a\xcb\x82\x15\xd9\xd3\xd7\xbc\x41\xd8\xf3\xa3\xf7\xa0\
\xa1\x01\x12\xc0\x4e\x01\x64\x73\x06\x53\x4e\x17\xe0\xf9\x85\x34\
\x6e\x98\x03\x5f\x67\xd1\x83\x86\x0c\x8c\xe7\x4a\x31\x17\x08\xd8\
\x7d\x25\x7d\x99\xe0\xa1\x96\x23\xf2\x01\xe3\x64\x2c\x25\x23\x59\
\x91\x5d\xce\xe9\xdf\x36\xcf\x0f\x31\x33\x25\x69\x80\x00\x90\x43\
\x80\x2d\xee\x7b\x88\x59\x36\xe0\xf9\x25\xd4\x88\x4e\x81\xcd\x39\
\xf4\x06\x84\xb2\xfe\xf2\x86\x2e\x42\xb5\x2a\xd8\x23\x97\x93\x8e\
\x46\xd5\xfd\xe7\x5e\x03\xe3\x64\x2a\x85\x96\xac\xc8\xe6\x93\xef\
\x16\xcf\x8f\xcd\x4c\xa6\x32\x99\xfe\x4a\x2d\xa7\x92\x1d\xf2\x60\
\x77\xc0\x65\x13\xf0\xfc\x4a\xc4\x02\xb6\xaa\x36\xa7\xfa\x23\x45\
\x32\xbf\x90\x69\x76\x80\x21\xed\xc5\x94\x8e\x6b\x8c\x4e\x26\x8c\
\x51\x46\x76\x20\xdd\x71\x2b\xb2\xc7\x6c\x98\xad\x9e\xdf\x1c\xff\
\x29\x65\x7e\x21\x0f\x4c\x25\x5b\x4c\x20\x93\x1b\x7e\x41\x55\x54\
\x8c\xff\x29\xd0\x38\xd5\x1f\x41\x23\x5e\x99\xef\x66\xc3\xd4\x82\
\x0e\x68\x40\x07\x99\xa0\xc7\x7c\x26\xc1\xc8\x4e\x51\x2d\x26\x56\
\x64\x2f\xd8\x30\x73\xd8\xad\x62\xfa\x63\x2c\xab\xe9\x08\xd0\x19\
\xea\xa5\x7a\x02\x79\xb7\x76\x41\xae\xe9\x0b\x6b\x5c\x55\x23\xf4\
\xa3\xb8\xf1\xc9\x74\x37\x64\x17\xdc\xd8\x92\x77\x21\x5a\x89\xf8\
\xfb\x92\x5d\xa9\x15\xa0\x15\xd9\xfc\x26\x0e\x85\xb8\xce\x08\xd8\
\xd3\xcb\xcc\x54\x21\x3b\x20\xa1\x16\xd9\x42\x2f\xc5\x4c\xa0\x13\
\x55\x51\x05\x4c\x49\x94\xea\x9d\x86\xf4\xd7\xae\xc8\x46\xe8\x8c\
\xe0\x67\x8f\xf5\x86\x37\x23\xc8\xae\x1f\x6c\x42\x76\x4c\x39\x32\
\x6e\xa8\xc8\xe8\xdf\xda\xfb\x6a\x84\x80\x21\x33\xa5\x47\xb6\x98\
\x40\x0e\x18\xd9\x80\x62\xe6\xfc\x8f\x55\xc5\x04\xe9\x2f\x51\x95\
\x63\xac\x85\x9f\x03\xb4\x26\xd9\x72\x01\x42\xf6\xa0\x33\x68\xdd\
\xd1\x50\xc6\xfe\x0a\xb5\xd4\x20\x1b\x32\x53\x1e\xa0\x71\x54\xb2\
\x85\x87\xc1\x19\x02\x14\x15\xbf\x21\x85\xea\x4c\x32\x3f\xa5\xa1\
\xbf\x76\x45\x76\xd1\xec\xfd\x44\x84\x0e\x04\xd9\x6b\x92\x6d\x49\
\x36\x64\xbd\x30\xe8\x6a\x4b\xd3\x4c\xe9\x91\x2d\x7c\x67\xce\xd0\
\x50\xed\x1c\x13\x76\xa8\xf1\x01\x1d\x70\xa5\xbf\xc2\x79\x7d\xf1\
\x0b\xee\x4b\x13\xb8\x67\xc8\x02\x4b\x6c\x20\x1b\xbd\xbf\xca\x51\
\xd3\x1b\xe9\xd2\xd9\x26\xed\x70\xe2\x00\xeb\x45\xeb\x22\xca\x1a\
\x9b\xa9\x17\xd2\xc5\x4c\x2d\xa7\x90\x2d\xb8\xe5\x64\x17\xb8\xc7\
\x49\xa3\x1c\x73\x81\x20\xb7\xb3\xd6\x5f\x52\xbe\x16\x1e\xe9\x0e\
\x66\x90\xc3\xc6\x80\xfc\x3f\x7d\xd4\xf4\xb3\x53\x69\xd1\xd8\xca\
\x40\xb2\x76\x62\xd2\xe9\x77\xeb\x9f\x4a\x66\x4a\xca\xd6\xd3\xd2\
\xd9\x62\x0a\xc3\xc8\x0e\x89\xa2\x7a\xd0\x28\xc3\x7c\x9d\x1c\xd8\
\x2e\x42\xef\x15\xd6\x5f\x24\xa9\x90\x55\xb5\x1b\x03\x99\x33\x6b\
\x12\xd0\x77\x5f\x79\x7c\xaf\xbf\xf0\xb3\xeb\x27\xcf\x05\xd9\xc4\
\xd3\xf5\x95\x09\x1a\x13\x78\x6c\xa6\xa4\xa8\x9c\x16\xd9\x42\x0f\
\x33\x6f\x84\x46\x18\xde\xb0\x7a\xc9\x36\xca\xaa\xfa\x11\x29\x2a\
\x69\x91\x07\xac\x04\xf5\x7d\xb1\x0c\x0c\x32\xf1\xe1\x8e\xbc\x11\
\x6e\xa7\x89\x3c\x85\x97\x98\x7d\xea\x7d\x31\xb2\x63\xd4\x7c\xb8\
\x4c\xf0\x6c\x76\x59\x3b\xaf\xcc\x1b\x21\xce\x57\xac\x78\xbb\x09\
\x6d\x0e\x6b\x80\x9a\x10\x3d\x6f\x44\x64\x4b\x31\x3f\xdb\xc7\x0f\
\x09\x3d\x5f\x00\x8f\x86\x56\x57\xe0\xea\x03\xe9\x29\x8e\x10\x7b\
\x43\xf3\x62\xaf\x2a\x19\x90\x1e\xa9\xe9\xb2\x75\x50\x35\x86\x66\
\x3a\xdb\xab\x6d\xc6\x7f\xc8\x90\xff\x24\xf9\x5e\x64\x3c\xec\xba\
\xac\x66\x0c\xc9\xf6\xdf\x78\xcf\xea\x65\x11\xe6\x67\x93\x19\xeb\
\x43\xa5\xec\x80\xa8\x91\xa4\x9a\xe8\x49\x9a\x33\xd4\x99\x41\xd6\
\x46\x8f\xcd\x20\x49\xf0\xe5\x9c\x15\x1b\x32\x2d\x48\x62\x9b\x63\
\x29\x4f\xca\xa7\x16\x0f\x93\x5e\x3d\x6a\xc1\xbb\xea\x55\x3a\x09\
\x47\x6b\x11\x72\x8b\x13\x67\x86\x44\x8f\xa5\xa4\x1f\x64\x69\xa4\
\xa0\xb2\x8e\x58\xc8\x53\x8a\x33\x18\x92\xfd\xa0\xaa\xb8\x14\x72\
\xca\x66\x90\x38\x22\x17\xa9\xc9\x61\x39\x69\xe7\xfb\xe9\x8d\x17\
\xd7\x4e\x32\xb4\x08\xa9\x90\x5d\x07\xb6\x58\x6c\x04\xdb\xba\x90\
\x13\x91\x33\xfb\x1e\x15\x8f\xc3\x6b\x39\x64\x12\x31\x8b\xe7\xe3\
\x3c\xc2\xf4\x17\x6f\xf8\x3a\x90\x74\x5b\xae\xe8\x39\x15\xd1\x72\
\x43\x01\x00\x01\x59\x1f\xc8\xb3\xea\xe5\x05\xd1\xdf\x25\x15\x06\
\x74\x3b\xf2\xb0\x2b\x38\xaa\x8b\x9a\x91\x4d\x8e\x65\x49\x84\xe3\
\xc8\x62\x23\xd8\x04\x4e\x32\xa5\xf0\x10\xbb\x9c\xd7\x23\x7f\xe5\
\x7d\x57\x5f\x8c\x74\x02\x51\xb5\x87\xc1\xa3\x7e\xe5\x4d\xf8\x80\
\xb7\xe0\xaf\x98\xa5\x7c\x7a\xb5\xfa\xd0\xf8\xea\xf0\x82\x10\x9a\
\xa0\x4f\x8b\xd5\x6d\xe6\xf9\xc5\xad\xf4\x31\x0f\x51\x76\xc1\x1f\
\x6d\x2e\xa3\x60\xb8\xfa\xe8\x3d\xad\xd4\x5d\xf4\x1b\x49\x16\x8d\
\x16\x08\x7d\xc8\xf0\x92\xc9\x65\x56\x89\x8d\x3c\x7d\x36\x23\x3b\
\x98\x7b\x7c\x55\x82\x34\x43\xfb\x96\xa2\xaf\x9e\x02\xcf\xdf\xe2\
\xee\x25\x2a\xde\x57\x4f\x83\xbc\x34\x94\xea\x84\x58\xeb\xc0\x16\
\x2f\x1e\x5d\xad\xfe\x2e\x75\xba\xb5\x83\x4b\xf2\x71\x55\x21\xc9\
\xe1\x6a\xa4\xbb\x80\xf9\x41\xeb\x0d\x03\xcb\xd1\x1b\x51\xb1\x7b\
\x55\xdd\x5a\xbe\x89\x63\xc0\x1c\x63\xb4\x7c\xb6\xba\x42\x17\xf2\
\xd8\xcc\xc8\x26\xc7\xb2\x84\x42\xee\xb8\xb0\xbc\x2c\x2e\x54\xb3\
\x87\x57\xc3\xe8\x2a\xb3\xfc\x19\x94\x06\xa6\x90\x5d\x67\x0e\x42\
\x0f\x02\xdd\xb3\x00\x22\x24\x92\x9d\x83\x5f\x5a\x68\x10\x39\xdd\
\x5c\x04\x6a\xf5\xbf\x8f\xa0\xe1\x2f\xbd\x7f\xfe\xed\x53\xe3\x92\
\x19\xd9\xf4\x58\x16\xe1\xcc\x74\xaa\xc1\x08\x41\x9f\x26\x3a\xcb\
\x62\x12\x5b\x60\xd6\xd8\xcb\xd6\x36\xe7\xf8\x15\x3e\x10\x89\x07\
\x72\xba\xa0\x5a\xf9\xed\x01\xcc\xf4\x0c\x25\x1b\x13\x18\x0a\x16\
\x3b\xbf\xec\x83\xd9\xaf\x31\x30\xb3\x57\x08\x2d\x6b\xb6\x16\x19\
\x50\xc9\xd7\xad\x6d\x92\x26\xc1\xf3\x1e\x20\x37\x48\xc1\x07\x8d\
\x32\xba\xb0\x26\x9b\x1c\xcb\x52\x9f\xa7\x90\x74\xc5\x76\x12\xf0\
\x2c\x39\xc8\xdb\x6d\x92\xfd\x6b\xe6\x8d\xeb\x7a\xc1\xbc\xea\x37\
\x6d\x6d\x52\x9d\x8b\xa0\x31\x45\x1a\x19\x55\xa1\xcb\x79\x8f\x35\
\xd9\x04\xbe\x60\x11\x4e\xb7\xe6\x35\x83\x1a\x20\x9f\x03\xdd\x6a\
\x28\xf5\xea\xb9\x29\xea\x42\x50\xf9\xa8\x55\x7b\x9d\x93\x8f\x41\
\xfd\x15\x68\xa4\x9f\xa5\xa4\xb1\xf0\xe1\xec\xed\x36\x86\x72\x0d\
\x6e\xc8\x8e\x85\x46\xed\x94\x16\xd8\x4c\x8d\x81\x61\x34\xc8\x8e\
\xd1\x32\x92\x26\x5d\x50\x12\x7b\x02\x2e\xc2\x61\x90\x69\x5c\x8b\
\xfe\xd2\x18\x28\xad\x78\xfc\xe4\xf9\x83\x95\x3d\xdb\x6e\xc8\x2e\
\x6b\x19\x01\x73\xdb\x19\x5a\xcc\x14\x64\xb7\x1b\xee\x11\x1a\xc9\
\x69\x79\x3e\x20\x90\xad\x9b\x50\x53\xe6\x66\x83\xfa\x6b\xde\xf6\
\x2d\xa9\x0c\xee\x48\x82\xbd\x58\xdd\x14\xe8\x0e\xb8\x21\xbb\xa8\
\x05\x6b\xd8\x21\x00\xb0\x99\x82\x9e\x85\x06\xd9\xc1\x1b\xaf\x94\
\xd4\x00\xf0\x85\xb0\xd5\x52\x4c\x33\xfc\x0a\xeb\xaf\xae\xae\x72\
\x90\xbb\x48\x36\x4c\xb5\x2c\x61\x1b\x20\x04\x2c\x87\x39\xd9\xb2\
\xdc\x75\x89\x0b\x68\xa6\x20\x41\x6d\x92\x1d\x5e\x7d\x2b\xcf\xfb\
\xc4\xce\x03\xa9\x03\xad\xb4\xd1\x93\x8b\x63\xf0\x91\xd2\x59\xf4\
\xfb\xa6\xfa\x17\x11\x99\x8e\xc0\xde\x1b\xe0\xec\x33\xba\xfd\x94\
\xad\x5d\x34\x27\x5b\x9e\x89\x25\x1d\xdf\x06\xcf\xcb\x05\x37\x35\
\x36\x25\x31\x7a\x94\xc9\x6f\x73\x73\x11\x83\x04\x3b\xd5\xd1\x0b\
\xf8\x91\x09\xe8\x90\xc0\x0c\x65\x03\x88\x05\x16\x19\xc6\x64\xa7\
\x8d\xc4\x84\x8e\x2e\x81\x5b\x43\x4a\x88\xba\xce\xed\x2e\x5d\xf7\
\xd3\x00\x3a\x5b\x57\x89\x97\x13\x23\x12\x79\x80\x32\x08\xad\x61\
\x4c\x76\x53\xd2\xd4\x87\xbc\x1b\xe0\xc3\xdc\x49\x76\xe4\x26\x89\
\x5d\x67\x03\x3c\xd9\x52\x3f\xa0\xcb\x5a\xc0\x9a\xbd\x3d\x4c\xc9\
\x26\x67\xa5\xd7\xe4\xf0\x8d\x73\x9a\x80\x77\xba\x77\x6f\xe4\xe2\
\x3b\x7c\xed\x70\xa9\x51\x26\xc6\x2d\xc5\xeb\x64\x87\xa7\x14\xbc\
\x93\x7c\xb7\xc3\xdc\xbc\x13\xa6\x64\x93\x63\xe2\xeb\x76\x02\xb3\
\xfc\x80\x78\x0e\x5d\xed\x26\xdb\xc9\xde\xf5\x40\x67\x98\x74\x4a\
\x54\x50\x35\xc2\x67\x09\x2f\xf9\xaa\x0c\x4f\x62\x1e\x6f\xbf\x58\
\x6c\x4a\x36\x9e\x36\x44\xa3\xfa\xbd\x59\x66\xcf\x18\xa4\xb5\x9b\
\x6c\x27\xa7\x32\x68\x1d\xa3\xf1\xad\xf4\x7f\xbe\x0a\xea\x8b\x25\
\x30\x76\xe1\x7c\xf5\x8f\x57\xc5\xed\x69\xfb\x3e\xdc\x0e\x18\x92\
\x4d\x56\xab\x7d\x49\x6d\x1b\x9d\xca\xd0\x72\x5e\xc6\x86\xfd\xa0\
\x0e\xce\x1b\xd1\xfb\xa1\x08\x29\x26\x1f\xf1\xa0\xff\xf0\xfd\x49\
\x65\x37\xa3\xa5\xa8\x09\x4f\x4f\x27\x5b\x6e\x19\x01\xc8\x7e\x7a\
\x75\xd9\xfa\x53\x1b\xe4\xa4\x10\x79\xc7\x97\xd1\x79\x23\x2d\x3f\
\x55\xb1\x81\x6c\x07\x27\xe9\x4c\x46\x1a\x85\xa2\x65\xfd\xff\x84\
\xc5\x57\xf0\xf0\x82\x91\x58\x6b\xae\x6a\xc2\x02\x0e\x07\x04\x36\
\x43\x25\x3b\xf8\xe3\xe7\x1f\x8b\x96\xb8\x1a\x7b\xa8\x32\xe9\x92\
\xc1\x49\x3a\xe1\x2f\xf0\xf5\xdd\x9f\x11\xf5\xed\xe6\x22\xcd\x35\
\x31\x9e\xee\x36\xfd\x48\x42\x89\x75\xfa\x09\xd9\xce\x01\x87\xba\
\x36\x43\x25\xfb\x2a\x6b\xdf\x28\x3e\x10\x2b\xf8\x02\x06\x67\x44\
\x55\x22\x02\xe2\x48\x4e\x3f\x93\xd7\xc4\x78\xe6\xed\xaf\xd5\xbf\
\xfc\xc4\x1b\xf0\x8f\xe8\x92\x60\xb8\xe5\xe9\x01\x0a\xd9\xd4\xbf\
\xb0\x9d\x40\x19\xe1\x48\xc8\x96\xd6\xc4\x02\x59\xd8\xbe\xa7\x02\
\x4e\x10\x13\x41\xdb\xf6\x38\x0e\x85\x6c\x3a\x69\x81\x76\x05\xed\
\x0c\x87\x26\x3b\xa2\xed\x4b\x6b\x62\xa5\x2c\x6b\x17\x92\xae\x2c\
\xa1\xc4\x5c\x6d\x28\x64\x53\xef\xcc\x3e\xea\x65\x80\x43\x93\xcd\
\xe4\xb6\x5e\x13\x8b\xe4\xd1\x63\xfb\x3c\xe4\x3d\x1c\xab\xfb\xdf\
\x0c\xa0\x90\x5d\x90\x84\x27\x20\xd3\x7f\x77\x38\x30\xd9\x21\x0d\
\x99\x49\x6b\x62\xb9\xec\x62\xe1\xe0\x8c\x58\x09\x55\xb7\x69\x9b\
\x40\x21\x9b\x6e\x63\x0e\x5c\x1e\x20\xb3\x09\x07\x26\x3b\xa0\x9b\
\xf6\xe8\x9a\x18\x46\xd8\x38\xcd\x00\x87\x91\xc5\xca\x3c\xcd\x8b\
\xdb\xf6\x87\x48\x54\xc9\xfe\xcb\x91\xfd\x98\x6e\xf1\xae\x17\xdb\
\x26\xf8\xbd\x08\x5a\xe2\x25\x67\xc1\x3e\x0d\x73\x97\x5b\x06\xc7\
\x14\xb2\xcf\x99\x1a\x11\x26\xc0\x22\xf0\xa2\x89\x43\xeb\xec\x98\
\x28\xe2\xa9\x88\x34\x11\x39\x16\x89\x40\xf8\x9d\xd0\xe1\xf4\x07\
\xd1\xb6\xfd\x59\xb4\x96\xe9\xba\xd8\x41\x64\x15\x78\xd1\xc4\xa1\
\xc9\xf6\x89\x8b\xf1\x99\xbf\xa5\xa9\x2d\x3c\x11\x88\x6c\xdc\x14\
\x83\x27\x49\x16\xe9\xb6\x3f\xf8\xd7\x42\x76\xbd\xb9\xd0\x26\xf0\
\xa2\x89\x43\x93\x4d\xf7\x66\x7e\xc3\xdf\x52\x6b\xc8\x13\x81\x62\
\x7c\x27\x84\x28\x93\x34\xe5\xe9\xb6\x93\x90\x16\xb2\x45\x84\xd1\
\x2a\xf0\xa2\x89\x43\x93\xed\x11\xc9\xe2\x44\xd0\xcc\x23\x11\xba\
\x22\x9e\x89\x58\x6c\x0f\x56\xeb\xf6\xd3\x04\x30\xd9\x75\x36\xba\
\x55\xe0\x45\x13\x07\x27\x1b\x9f\x8e\x21\xc2\x0e\x4c\x6d\x72\x02\
\x16\x98\xa0\x71\x9d\x25\xfd\x2e\x2c\xdf\x6f\xdb\x0e\x4c\x76\xbd\
\x11\xd9\x2a\xf0\xa2\x89\x83\x93\x8d\x73\x27\x02\x66\x03\x79\x00\
\x5b\x6c\xd4\x1d\x55\x2f\xb9\x58\x91\x8c\xbe\xbb\x7c\xa2\x7c\x5f\
\x17\x30\xd9\x63\xfe\xd8\xd8\x05\x5e\x34\x71\x70\xb2\xb1\xef\x47\
\xd6\xc4\x3c\x11\x6b\x13\x23\x7e\x85\x5f\x22\x07\xe9\x68\x5e\x0b\
\xd9\xa9\x70\x75\xec\x02\x2f\x9a\x38\x38\xd9\xd8\xf7\x3b\xdf\x43\
\x3b\x20\xd9\xb9\x30\xb7\x76\x81\x17\x4d\x1c\x9c\x6c\xec\xfb\x69\
\x85\xbc\x2d\x01\x91\x1d\xd5\x7e\xa4\x5d\xe0\x45\x13\x07\x27\x1b\
\xfb\x7e\xfb\xf8\x65\x72\x88\xec\x89\x94\x39\x69\x15\x78\xd1\xc4\
\xc1\xc9\xae\x7c\xbf\x68\xb9\x87\x66\x00\xb2\xe9\x74\x95\x12\x60\
\x17\x78\xd1\xc4\xe1\xc9\x1e\x8f\xfc\xd1\x1e\x9a\x01\xc8\xa6\x71\
\x18\xaa\xa5\xed\x02\x2f\x9a\x38\x3c\xd9\xe5\xeb\xad\xf6\x89\x99\
\x02\x20\x9b\x08\x36\xdd\x0a\x64\x19\x78\xd1\xc4\xe1\xc9\x9e\x7e\
\xd9\x6e\x9f\x98\x21\x54\xb2\xe9\x74\xd5\x1f\x91\x37\x76\x81\x17\
\x4d\x1c\x9e\xec\xf8\xc6\xe5\x3e\xb1\x56\xa8\x64\xd3\x38\x0c\x4b\
\xf1\xb0\x0b\xbc\x68\xe2\xf0\x64\xfb\xb7\x2e\xf7\x89\xb5\x42\x5d\
\x5d\x47\x72\x6e\x88\x5d\xe0\x45\x13\x87\x27\xdb\x3a\xf7\x5d\x0f\
\x0a\xd9\xfc\x10\x08\x46\x80\x55\xe0\x45\x13\x87\x27\xdb\xdb\xcf\
\x92\x2b\xb0\x06\xd9\x48\xc4\xb1\x0a\xbc\x68\xe2\x08\xc8\x86\x36\
\xb1\xb9\xc7\x8e\x0e\xf3\x31\xc2\x11\x90\x5d\xee\xa5\x0b\x3d\xd9\
\x04\xcf\xf7\xd2\x4a\x4f\xf6\x1e\xd1\x93\xbd\x47\x0c\xd6\xb3\x52\
\xf7\x8b\x23\xff\x0d\x5f\xc7\xe8\xc9\xde\x23\xa2\xb3\x4d\xbf\xe1\
\xbb\x53\x90\xdf\xf0\x3d\x60\xfb\x3d\x7a\xf4\xe8\xd1\xa3\x47\x8f\
\x1e\x3d\x7a\xfc\x15\xf0\x3f\x5c\x4c\x6c\x29\xb8\x18\x13\x99\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x09\
\x0b\x8a\x5f\x53\
\x00\x65\
\x00\x71\x00\x75\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x73\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x0d\
\x06\xac\xe9\xa7\
\x00\x61\
\x00\x70\x00\x70\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x0d\xab\xd5\x27\
\x00\x68\
\x00\x65\x00\x6c\x00\x70\x00\x5f\x00\x62\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x05\x1e\x3a\xc7\
\x00\x6c\
\x00\x69\x00\x6e\x00\x6b\x00\x5f\x00\x74\x00\x6f\x00\x5f\x00\x70\x00\x61\x00\x67\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x05\x59\x67\x47\
\x00\x6f\
\x00\x63\x00\x6d\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x5f\x00\x77\x00\x74\x00\x68\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x03\x30\x74\x47\
\x00\x6f\
\x00\x63\x00\x6d\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x5f\x00\x62\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x09\x15\x3c\xa7\
\x00\x6f\
\x00\x63\x00\x6d\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x08\x55\xe8\x87\
\x00\x6c\
\x00\x6f\x00\x67\x00\x6e\x00\x6f\x00\x72\x00\x6d\x00\x61\x00\x6c\x00\x5f\x00\x36\x00\x38\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x04\x34\x8c\x47\
\x00\x67\
\x00\x61\x00\x6d\x00\x6d\x00\x61\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x0c\xd4\x28\x67\
\x00\x77\
\x00\x65\x00\x69\x00\x62\x00\x75\x00\x6c\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x0d\x56\x91\xa7\
\x00\x73\
\x00\x69\x00\x67\x00\x6d\x00\x61\x00\x5f\x00\x33\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x02\x69\xbe\x27\
\x00\x6d\
\x00\x75\x00\x5f\x00\x33\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x08\xa7\xe8\x87\
\x00\x6c\
\x00\x6f\x00\x67\x00\x6e\x00\x6f\x00\x72\x00\x6d\x00\x61\x00\x6c\x00\x5f\x00\x33\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x02\x69\xbc\x47\
\x00\x72\
\x00\x32\x00\x5f\x00\x33\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x0c\xa4\xa2\x87\
\x00\x74\
\x00\x68\x00\x65\x00\x74\x00\x61\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x02\x69\x58\x47\
\x00\x72\
\x00\x5f\x00\x33\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x08\xb5\xe8\x87\
\x00\x6c\
\x00\x6f\x00\x67\x00\x6e\x00\x6f\x00\x72\x00\x6d\x00\x61\x00\x6c\x00\x5f\x00\x34\x00\x38\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x18\x00\x02\x00\x00\x00\x01\x00\x00\x00\x0e\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0a\x00\x00\x00\x04\
\x00\x00\x01\xe0\x00\x00\x00\x00\x00\x01\x00\x00\xa1\xda\
\x00\x00\x01\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x9e\x63\
\x00\x00\x01\x72\x00\x00\x00\x00\x00\x01\x00\x00\x8d\x0a\
\x00\x00\x01\x20\x00\x00\x00\x00\x00\x01\x00\x00\x78\x32\
\x00\x00\x00\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x68\xd2\
\x00\x00\x01\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x8f\x03\
\x00\x00\x01\xf6\x00\x00\x00\x00\x00\x01\x00\x00\xa3\x65\
\x00\x00\x01\xc8\x00\x00\x00\x00\x00\x01\x00\x00\xa0\x77\
\x00\x00\x01\x38\x00\x00\x00\x00\x00\x01\x00\x00\x79\x74\
\x00\x00\x01\x54\x00\x00\x00\x00\x00\x01\x00\x00\x8b\xd6\
\x00\x00\x00\x18\x00\x02\x00\x00\x00\x06\x00\x00\x00\x0f\
\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00\x5d\xd8\
\x00\x00\x00\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x45\xb4\
\x00\x00\x00\x92\x00\x00\x00\x00\x00\x01\x00\x00\x57\x87\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x63\x56\
\x00\x00\x00\x48\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x05\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x18\x00\x02\x00\x00\x00\x01\x00\x00\x00\x0e\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0a\x00\x00\x00\x04\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x01\xe0\x00\x00\x00\x00\x00\x01\x00\x00\xa1\xda\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x9e\x63\
\x00\x00\x01\x7e\x50\x30\x91\x49\
\x00\x00\x01\x72\x00\x00\x00\x00\x00\x01\x00\x00\x8d\x0a\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\x20\x00\x00\x00\x00\x00\x01\x00\x00\x78\x32\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x00\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x68\xd2\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x8f\x03\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\xf6\x00\x00\x00\x00\x00\x01\x00\x00\xa3\x65\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\xc8\x00\x00\x00\x00\x00\x01\x00\x00\xa0\x77\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\x38\x00\x00\x00\x00\x00\x01\x00\x00\x79\x74\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x01\x54\x00\x00\x00\x00\x00\x01\x00\x00\x8b\xd6\
\x00\x00\x01\x7c\x39\x6b\x98\x96\
\x00\x00\x00\x18\x00\x02\x00\x00\x00\x06\x00\x00\x00\x0f\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00\x5d\xd8\
\x00\x00\x01\x7c\x7b\x84\x7b\x0a\
\x00\x00\x00\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x45\xb4\
\x00\x00\x01\x7c\x39\x6b\x98\xa5\
\x00\x00\x00\x92\x00\x00\x00\x00\x00\x01\x00\x00\x57\x87\
\x00\x00\x01\x7c\x7b\x51\x37\xb0\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x7c\x71\x90\x26\xc5\
\x00\x00\x00\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x63\x56\
\x00\x00\x01\x7c\x7b\x51\x6e\x4b\
\x00\x00\x00\x48\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x05\
\x00\x00\x01\x7c\x39\x6b\x98\xa5\
"
# Compare version components numerically; comparing strings would sort '15' before '8'.
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
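    # Register the embedded resource data, names, and structure with Qt's resource system.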
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
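    # Unregister the embedded resources when they are no longer needed.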
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| [
"PyQt5.QtCore.qVersion",
"PyQt5.QtCore.qUnregisterResourceData",
"PyQt5.QtCore.qRegisterResourceData"
] | [((194848, 194949), 'PyQt5.QtCore.qRegisterResourceData', 'QtCore.qRegisterResourceData', (['rcc_version', 'qt_resource_struct', 'qt_resource_name', 'qt_resource_data'], {}), '(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n', (194876, 194949), False, 'from PyQt5 import QtCore\n'), ((194976, 195079), 'PyQt5.QtCore.qUnregisterResourceData', 'QtCore.qUnregisterResourceData', (['rcc_version', 'qt_resource_struct', 'qt_resource_name', 'qt_resource_data'], {}), '(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n', (195006, 195079), False, 'from PyQt5 import QtCore\n'), ((194619, 194636), 'PyQt5.QtCore.qVersion', 'QtCore.qVersion', ([], {}), '()\n', (194634, 194636), False, 'from PyQt5 import QtCore\n')] |
from poemsai.tokenization import add_special_token
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer
def test_add_special_token():
token = '[<bla>]'
tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
model = GPT2LMHeadModel.from_pretrained('gpt2')
copy_from = tokenizer.all_special_tokens[0]
orig_token_id = tokenizer(copy_from)['input_ids'][0]
emb = model.get_input_embeddings()
orig_emb_rows = emb.weight.shape[0]
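    # add_special_token should grow the embedding matrix by exactly one row,
    # initialized as a copy of copy_from's embedding (verified by the asserts below).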
add_special_token(token, tokenizer, model, copy_from=copy_from)
new_emb = model.get_input_embeddings()
assert token in tokenizer.additional_special_tokens
assert torch.allclose(new_emb.weight[-1], new_emb.weight[orig_token_id])
assert new_emb.weight.shape[0] == orig_emb_rows + 1, f'{new_emb.weight.shape[0]}, {orig_emb_rows + 1}'
| [
"torch.allclose",
"transformers.GPT2Tokenizer.from_pretrained",
"transformers.GPT2LMHeadModel.from_pretrained",
"poemsai.tokenization.add_special_token"
] | [((190, 227), 'transformers.GPT2Tokenizer.from_pretrained', 'GPT2Tokenizer.from_pretrained', (['"""gpt2"""'], {}), "('gpt2')\n", (219, 227), False, 'from transformers import GPT2LMHeadModel, GPT2Tokenizer\n'), ((240, 279), 'transformers.GPT2LMHeadModel.from_pretrained', 'GPT2LMHeadModel.from_pretrained', (['"""gpt2"""'], {}), "('gpt2')\n", (271, 279), False, 'from transformers import GPT2LMHeadModel, GPT2Tokenizer\n'), ((473, 536), 'poemsai.tokenization.add_special_token', 'add_special_token', (['token', 'tokenizer', 'model'], {'copy_from': 'copy_from'}), '(token, tokenizer, model, copy_from=copy_from)\n', (490, 536), False, 'from poemsai.tokenization import add_special_token\n'), ((652, 717), 'torch.allclose', 'torch.allclose', (['new_emb.weight[-1]', 'new_emb.weight[orig_token_id]'], {}), '(new_emb.weight[-1], new_emb.weight[orig_token_id])\n', (666, 717), False, 'import torch\n')] |
from nose.tools import eq_
from ...features import Feature
from ..scorer_model import MLScorerModel, ScorerModel
def test_scorer_model():
sm = ScorerModel([Feature("foo")], version="0.0.1")
eq_(sm.version, "0.0.1")
del sm.version
eq_(sm.version, None)
def test_from_config():
config = {
'scorer_models': {
'test': {
'module': "nose.tools.eq_"
}
}
}
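    # from_config resolves the 'module' dotted path, so the loaded "model" is
    # nose.tools.eq_ itself (checked by the eq_(sm, eq_) assertion below).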
sm = ScorerModel.from_config(config, 'test')
eq_(sm, eq_)
def test_ml_scorer_model():
sm = MLScorerModel([Feature("foo")])
del sm.trained
eq_(sm.trained, None)
| [
"nose.tools.eq_"
] | [((202, 226), 'nose.tools.eq_', 'eq_', (['sm.version', '"""0.0.1"""'], {}), "(sm.version, '0.0.1')\n", (205, 226), False, 'from nose.tools import eq_\n'), ((252, 273), 'nose.tools.eq_', 'eq_', (['sm.version', 'None'], {}), '(sm.version, None)\n', (255, 273), False, 'from nose.tools import eq_\n'), ((490, 502), 'nose.tools.eq_', 'eq_', (['sm', 'eq_'], {}), '(sm, eq_)\n', (493, 502), False, 'from nose.tools import eq_\n'), ((599, 620), 'nose.tools.eq_', 'eq_', (['sm.trained', 'None'], {}), '(sm.trained, None)\n', (602, 620), False, 'from nose.tools import eq_\n')] |