ext | sha | content
---|---|---|
py | 1a350d72953f2812f11a81b290079283b1a9a92f | """
weasyprint.layout.percentages
-----------------------------
Resolve percentages into fixed values.
:copyright: Copyright 2011-2018 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from ..formatting_structure import boxes
def _percentage(value, refer_to):
"""Get the value corresponding to the value/percentage and the reference
``refer_to`` is the length for 100%. If ``refer_to`` is not a number, it
just replaces percentages.
"""
if value == 'auto':
result = value
elif value.unit == 'px':
result = value.value
else:
assert value.unit == '%'
result = value.value * refer_to / 100.
return result
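# For illustration (not part of the module): a 50% length against a 400px
# reference resolves to 200px, while 'auto' and pixel values pass through:
#     _percentage(Dimension(50, '%'), 400)  -> 200.0
#     _percentage(Dimension(30, 'px'), 400) -> 30
#     _percentage('auto', 400)              -> 'auto'
# ``Dimension`` here stands in for the computed-value objects with ``.unit``
# and ``.value`` attributes that ``box.style`` provides.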
def resolve_one_percentage(box, property_name, refer_to,
main_flex_direction=None):
"""Set a used length value from a computed length value.
``refer_to`` is the length for 100%. If ``refer_to`` is not a number, it
just replaces percentages.
"""
# box.style has computed values
value = box.style[property_name]
# box attributes are used values
percentage = _percentage(value, refer_to)
setattr(box, property_name, percentage)
if property_name in ('min_width', 'min_height') and percentage == 'auto':
if (main_flex_direction is None or
property_name != ('min_%s' % main_flex_direction)):
setattr(box, property_name, 0)
def resolve_position_percentages(box, containing_block):
cb_width, cb_height = containing_block
resolve_one_percentage(box, 'left', cb_width)
resolve_one_percentage(box, 'right', cb_width)
resolve_one_percentage(box, 'top', cb_height)
resolve_one_percentage(box, 'bottom', cb_height)
def resolve_percentages(box, containing_block, main_flex_direction=None):
"""Set used values as attributes of the box object."""
if isinstance(containing_block, boxes.Box):
# cb is short for containing block
cb_width = containing_block.width
cb_height = containing_block.height
else:
cb_width, cb_height = containing_block
if isinstance(box, boxes.PageBox):
maybe_height = cb_height
else:
maybe_height = cb_width
resolve_one_percentage(box, 'margin_left', cb_width)
resolve_one_percentage(box, 'margin_right', cb_width)
resolve_one_percentage(box, 'margin_top', maybe_height)
resolve_one_percentage(box, 'margin_bottom', maybe_height)
resolve_one_percentage(box, 'padding_left', cb_width)
resolve_one_percentage(box, 'padding_right', cb_width)
resolve_one_percentage(box, 'padding_top', maybe_height)
resolve_one_percentage(box, 'padding_bottom', maybe_height)
resolve_one_percentage(box, 'width', cb_width)
resolve_one_percentage(box, 'min_width', cb_width, main_flex_direction)
resolve_one_percentage(box, 'max_width', cb_width, main_flex_direction)
# XXX later: top, bottom, left and right on positioned elements
if cb_height == 'auto':
# Special handling when the height of the containing block
# depends on its content.
height = box.style['height']
if height == 'auto' or height.unit == '%':
box.height = 'auto'
else:
assert height.unit == 'px'
box.height = height.value
resolve_one_percentage(box, 'min_height', 0)
resolve_one_percentage(box, 'max_height', float('inf'))
else:
resolve_one_percentage(box, 'height', cb_height)
resolve_one_percentage(box, 'min_height', cb_height)
resolve_one_percentage(box, 'max_height', cb_height)
# Used value == computed value
for side in ['top', 'right', 'bottom', 'left']:
prop = 'border_{0}_width'.format(side)
setattr(box, prop, box.style[prop])
if box.style['box_sizing'] == 'border-box':
if box.width != 'auto':
box.width -= (box.padding_left + box.padding_right +
box.border_left_width + box.border_right_width)
if box.height != 'auto':
box.height -= (box.padding_top + box.padding_bottom +
box.border_top_width + box.border_bottom_width)
elif box.style['box_sizing'] == 'padding-box':
if box.width != 'auto':
box.width -= box.padding_left + box.padding_right
if box.height != 'auto':
box.height -= box.padding_top + box.padding_bottom
else:
assert box.style['box_sizing'] == 'content-box'
def resolve_radii_percentages(box):
corners = ('top_left', 'top_right', 'bottom_right', 'bottom_left')
for corner in corners:
property_name = 'border_%s_radius' % corner
rx, ry = box.style[property_name]
rx = _percentage(rx, box.border_width())
ry = _percentage(ry, box.border_height())
setattr(box, property_name, (rx, ry))
|
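The `border-box` branch above converts a used width that includes padding and borders into a content width. A minimal standalone sketch of that arithmetic (names are illustrative, not WeasyPrint's API):

```python
# Sketch of the 'border-box' width adjustment performed above: the used
# width includes padding and borders, so the content width is what's left.
def content_width(used_width, padding_left, padding_right,
                  border_left, border_right):
    return used_width - (padding_left + padding_right +
                         border_left + border_right)

# A 50%-wide box in a 400px containing block with 10px padding and 2px
# borders on each side keeps a 176px content area under border-box sizing.
assert content_width(400 * 50 / 100, 10, 10, 2, 2) == 176
```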
py | 1a350da74ee2c8796ca2258222ebed253c409832 | # -*- coding: utf-8 -*-
#
# django-otp-yubikey documentation build configuration file, created by
# sphinx-quickstart on Sun Jul 22 16:13:25 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# autodoc and viewcode need valid settings in order to process Django modules.
import django
import django.conf
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../ext'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'otpdocs',
]
django.conf.settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=[
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_otp',
'otp_yubikey',
],
SECRET_KEY='properly-configured',
)
django.setup()
intersphinx_mapping = {
'python': ('http://docs.python.org/3/', None),
'django': ('https://docs.djangoproject.com/en/1.11/',
'https://docs.djangoproject.com/en/1.11/_objects/'),
'django-otp': ('http://django-otp-official.readthedocs.io/en/latest/', None),
'yubiotp': ('http://yubiotp.readthedocs.io/en/latest/', None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'django-otp-yubikey'
copyright = '2012, Peter Sagerson'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-otp-yubikeydoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-otp-yubikey.tex', 'django-otp-yubikey Documentation',
'Peter Sagerson', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-otp-yubikey', 'django-otp-yubikey Documentation',
['Peter Sagerson'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-otp-yubikey', 'django-otp-yubikey Documentation',
'Peter Sagerson', 'django-otp-yubikey', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
|
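One caveat with the pattern above: `settings.configure()` may only run once per process and raises `RuntimeError` on a second call, which can bite when conf.py is imported more than once. A hedged variant with a guard (app list trimmed for brevity):

```python
# Defensive variant of the configuration above: guard against conf.py
# being re-imported, since settings.configure() is one-shot per process.
import django
from django.conf import settings

if not settings.configured:
    settings.configure(
        DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3'}},
        INSTALLED_APPS=['django_otp', 'otp_yubikey'],
        SECRET_KEY='docs-only',
    )
    django.setup()
```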
py | 1a350e9fe35cf80f913a7560a38598155bc8a6a3 | from jsonrpc import ServiceProxy
import sys
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:9898")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9898")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Libercoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Libercoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
            print access.getwork(data)
        except:
            print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
        gen = raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
|
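Because `ServiceProxy` turns attribute access into JSON-RPC method calls, the long if/elif chain above can be collapsed into a generic dispatcher. A minimal sketch (same jsonrpc library, Python 2 era like the original; interactive prompts are replaced by command-line arguments):

```python
# Generic dispatch sketch: ServiceProxy resolves any attribute into a
# JSON-RPC method call, so the command name can be forwarded directly.
from jsonrpc import ServiceProxy
import sys

access = ServiceProxy("http://127.0.0.1:9898")

def call(name, *args):
    try:
        return getattr(access, name)(*args)
    except Exception as e:
        return "---An error occurred: %s---" % e

print(call(sys.argv[1].lower(), *sys.argv[2:]))
```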
py | 1a350fa0ccf274acfafd350757db2287eb535e50 | from django.contrib import admin
from .site import admin_site
from .. import models
class ConfigurationAdmin(admin.ModelAdmin):
list_display = ['datetime', 'esmug_percentage_discount',
'gucem_percentage_discount', 'grouped_command_day',]
readonly_fields = ('datetime',)
def get_readonly_fields(self, request, instance=None):
if instance is None:
return []
return [f.name for f in self.model._meta.fields]
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
# Remove delete action
actions = super().get_actions(request)
        actions.pop('delete_selected', None)
return actions
admin_site.register(models.configuration.Configuration, ConfigurationAdmin)
|
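The `get_readonly_fields` override above implements a write-once pattern: every field is editable on the add form (`instance is None`), then locked once the row exists. The same rule factored into a reusable mixin (a hypothetical helper, not part of the original module):

```python
# Write-once admin pattern: editable on creation, read-only and
# undeletable afterwards.
class WriteOnceAdminMixin:
    def get_readonly_fields(self, request, instance=None):
        if instance is None:
            return []
        return [f.name for f in self.model._meta.fields]

    def has_delete_permission(self, request, obj=None):
        return False
```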
bzl | 1a351033185ff1b339ca9c93cac17b6eaf883fdc | """Rules for writing tests with JUnit"""
load("@bazel_skylib//lib:dicts.bzl", _dicts = "dicts")
load(
"@io_bazel_rules_scala//scala/private:common_attributes.bzl",
"common_attrs",
"implicit_deps",
"launcher_template",
)
load("@io_bazel_rules_scala//scala/private:common_outputs.bzl", "common_outputs")
load(
"@io_bazel_rules_scala//scala/private:phases/phases.bzl",
"extras_phases",
"phase_collect_jars_junit_test",
"phase_compile_junit_test",
"phase_coverage_common",
"phase_declare_executable",
"phase_default_info",
"phase_java_wrapper_common",
"phase_jvm_flags",
"phase_merge_jars",
"phase_runfiles_common",
"phase_scalac_provider",
"phase_unused_deps_checker",
"phase_write_executable_junit_test",
"phase_write_manifest",
"run_phases",
)
def _scala_junit_test_impl(ctx):
if (not (ctx.attr.prefixes) and not (ctx.attr.suffixes)):
fail(
"Setting at least one of the attributes ('prefixes','suffixes') is required",
)
return run_phases(
ctx,
# customizable phases
[
("scalac_provider", phase_scalac_provider),
("write_manifest", phase_write_manifest),
("unused_deps_checker", phase_unused_deps_checker),
("collect_jars", phase_collect_jars_junit_test),
("java_wrapper", phase_java_wrapper_common),
("declare_executable", phase_declare_executable),
# no need to build an ijar for an executable
("compile", phase_compile_junit_test),
("coverage", phase_coverage_common),
("merge_jars", phase_merge_jars),
("runfiles", phase_runfiles_common),
("jvm_flags", phase_jvm_flags),
("write_executable", phase_write_executable_junit_test),
("default_info", phase_default_info),
],
)
_scala_junit_test_attrs = {
"prefixes": attr.string_list(default = []),
"suffixes": attr.string_list(default = []),
"suite_label": attr.label(
default = Label(
"//src/java/io/bazel/rulesscala/test_discovery:test_discovery",
),
),
"suite_class": attr.string(
default = "io.bazel.rulesscala.test_discovery.DiscoveredTestSuite",
),
"print_discovered_classes": attr.bool(
default = False,
mandatory = False,
),
"jvm_flags": attr.string_list(),
"_junit": attr.label(
default = Label(
"//external:io_bazel_rules_scala/dependency/junit/junit",
),
),
"_hamcrest": attr.label(
default = Label(
"//external:io_bazel_rules_scala/dependency/hamcrest/hamcrest_core",
),
),
"_bazel_test_runner": attr.label(
default = Label(
"@io_bazel_rules_scala//scala:bazel_test_runner_deploy",
),
allow_files = True,
),
}
_junit_resolve_deps = {
"_scala_toolchain": attr.label_list(
default = [
Label(
"//external:io_bazel_rules_scala/dependency/scala/scala_library",
),
Label("//external:io_bazel_rules_scala/dependency/junit/junit"),
Label(
"//external:io_bazel_rules_scala/dependency/hamcrest/hamcrest_core",
),
],
allow_files = False,
),
}
_scala_junit_test_attrs.update(launcher_template)
_scala_junit_test_attrs.update(implicit_deps)
_scala_junit_test_attrs.update(common_attrs)
_scala_junit_test_attrs.update(_junit_resolve_deps)
_scala_junit_test_attrs.update({
"tests_from": attr.label_list(providers = [[JavaInfo]]),
})
def make_scala_junit_test(*extras):
return rule(
attrs = _dicts.add(
_scala_junit_test_attrs,
extras_phases(extras),
*[extra["attrs"] for extra in extras if "attrs" in extra]
),
fragments = ["java"],
outputs = _dicts.add(
common_outputs,
*[extra["outputs"] for extra in extras if "outputs" in extra]
),
test = True,
toolchains = ["@io_bazel_rules_scala//scala:toolchain_type"],
implementation = _scala_junit_test_impl,
)
scala_junit_test = make_scala_junit_test()
|
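`make_scala_junit_test(*extras)` is an extension point: each extras dict can contribute rule attributes and outputs, which are merged via `dicts.add`, while phase wiring goes through `extras_phases`. A hypothetical extension sketch (the key names mirror what the factory reads; phase registration is elided):

```python
# Hypothetical extras dict: "attrs" and "outputs" are merged into the rule
# definition by make_scala_junit_test.
my_extra = {
    "attrs": {
        "print_timings": attr.bool(default = False),
    },
    "outputs": {},
}

my_scala_junit_test = make_scala_junit_test(my_extra)
```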
py | 1a3511e824cfc16e341322234be53d08cac50040 | # -*- coding: utf-8 -*-
#
# Vingd API for Python documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 21 11:27:59 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
from datetime import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
import vingd
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.autosummary']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Vingd API for Python'
copyright = unicode(datetime.utcnow().year) + u', Vingd Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'VingdAPIforPythonDocs'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'VingdAPIforPython.tex', u'Vingd API for Python Documentation',
u'Radomir Stevanovic, Vingd Inc.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'vingdapiforpython', u'Vingd API for Python Documentation',
[u'Radomir Stevanovic, Vingd Inc.'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'VingdAPIforPython', u'Vingd API for Python Documentation',
u'Radomir Stevanovic, Vingd Inc.', 'VingdAPIforPython', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
py | 1a35121c5cad773d651bed49f0d768e89c2a91ba | """
Descriptor data structure.
Descriptors are a basic data structure used throughout PSD files. A
descriptor is one kind of serialization protocol for data objects; enum
classes in :py:mod:`psd_tools.terminology` or plain bytes indicate what
kind of descriptor it is.

The class ID can be a pre-defined enum if the tag is 4 bytes long, or
plain bytes if the length is arbitrary. They depend on the internal
version of Adobe Photoshop, but the details are unknown.

Pretty printing is the best approach to check the descriptor content::

    from IPython.lib.pretty import pprint
    pprint(descriptor)
"""
from __future__ import absolute_import, unicode_literals
import attr
import logging
from psd_tools.psd.base import (
BaseElement,
BooleanElement,
DictElement,
IntegerElement,
ListElement,
NumericElement,
StringElement,
)
from psd_tools.constants import OSType
from psd_tools.terminology import Klass, Enum, Event, Form, Key, Type, Unit
from psd_tools.validators import in_
from psd_tools.utils import (
read_fmt,
write_fmt,
read_unicode_string,
write_unicode_string,
write_bytes,
read_length_block,
write_length_block,
write_padding,
new_registry,
trimmed_repr,
)
logger = logging.getLogger(__name__)
TYPES, register = new_registry(attribute='ostype')
_TERMS = set(
item.value for kls in (Klass, Enum, Event, Form, Key, Type, Unit)
for item in kls
)
def read_length_and_key(fp):
"""
Helper to read descriptor key.
"""
length = read_fmt('I', fp)[0]
key = fp.read(length or 4)
if length == 0 and key not in _TERMS:
logger.debug('Unknown term: %r' % (key))
_TERMS.add(key)
return key
def write_length_and_key(fp, value):
"""
Helper to write descriptor key.
"""
written = write_fmt(fp, 'I', 0 if value in _TERMS else len(value))
written += write_bytes(fp, value)
return written
class _DescriptorMixin(DictElement):
@classmethod
def _read_body(cls, fp):
name = read_unicode_string(fp, padding=1)
classID = read_length_and_key(fp)
items = []
count = read_fmt('I', fp)[0]
for _ in range(count):
key = read_length_and_key(fp)
ostype = OSType(fp.read(4))
kls = TYPES.get(ostype)
value = kls.read(fp)
items.append((key, value))
return dict(name=name, classID=classID, items=items)
def _write_body(self, fp):
written = write_unicode_string(fp, self.name, padding=1)
written += write_length_and_key(fp, self.classID)
written += write_fmt(fp, 'I', len(self))
for key in self:
written += write_length_and_key(fp, key)
written += write_bytes(fp, self[key].ostype.value)
written += self[key].write(fp)
return written
@classmethod
def _key_converter(cls, key):
if hasattr(key, 'encode'):
return key.encode('ascii')
return getattr(key, 'value', key)
def _repr_pretty_(self, p, cycle):
if cycle:
return "{name}{{...}".format(name=self.__class__.__name__)
prefix = '{cls}({name}){{'.format(
cls=self.__class__.__name__,
name=getattr(self.classID, 'name', self.classID),
)
with p.group(2, prefix, '}'):
p.breakable('')
for idx, key in enumerate(self):
if idx:
p.text(',')
p.breakable()
value = self[key]
p.pretty(key.decode('ascii'))
p.text(': ')
if isinstance(value, bytes):
p.text(trimmed_repr(value))
else:
p.pretty(value)
p.breakable('')
@register(OSType.DESCRIPTOR)
@attr.s(repr=False)
class Descriptor(_DescriptorMixin):
"""
Dict-like descriptor structure.
Key values can be 4-character `bytes` in
:py:class:`~psd_tools.terminology.Key` or arbitrary length `bytes`.
Supports direct access by :py:class:`~psd_tools.terminology.Key`.
Example::
from psd_tools.terminology import Key
descriptor[Key.Enabled]
for key in descriptor:
print(descriptor[key])
.. py:attribute:: name
`str`
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
"""
name = attr.ib(default='', type=str)
classID = attr.ib(default=Klass.Null.value)
@classmethod
def read(cls, fp):
return cls(**cls._read_body(fp))
def write(self, fp):
return self._write_body(fp)
@register(OSType.OBJECT_ARRAY)
@attr.s(repr=False)
class ObjectArray(_DescriptorMixin):
"""
Object array structure almost equivalent to
:py:class:`~psd_tools.psd.descriptor.Descriptor`.
.. py:attribute:: items_count
`int` value
.. py:attribute:: name
`str` value
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
"""
items_count = attr.ib(default=0, type=int)
name = attr.ib(default='', type=str)
classID = attr.ib(default=Klass.Null.value)
@classmethod
def read(cls, fp):
items_count = read_fmt('I', fp)[0]
return cls(items_count=items_count, **cls._read_body(fp))
def write(self, fp):
written = write_fmt(fp, 'I', self.items_count)
written += self._write_body(fp)
return written
@register(OSType.LIST)
@attr.s(repr=False)
class List(ListElement):
"""
List structure.
Example::
for item in list_value:
print(item)
"""
@classmethod
def read(cls, fp):
items = []
count = read_fmt('I', fp)[0]
for _ in range(count):
key = OSType(fp.read(4))
kls = TYPES.get(key)
value = kls.read(fp)
items.append(value)
return cls(items)
def write(self, fp):
written = write_fmt(fp, 'I', len(self))
for item in self:
written += write_bytes(fp, item.ostype.value)
written += item.write(fp)
return written
@register(OSType.PROPERTY)
@attr.s(repr=False)
class Property(BaseElement):
"""
Property structure.
.. py:attribute:: name
`str` value
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
.. py:attribute:: keyID
bytes in :py:class:`~psd_tools.terminology.Key`
"""
name = attr.ib(default='', type=str)
classID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
keyID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
@classmethod
def read(cls, fp):
name = read_unicode_string(fp)
classID = read_length_and_key(fp)
keyID = read_length_and_key(fp)
return cls(name, classID, keyID)
def write(self, fp):
written = write_unicode_string(fp, self.name)
written += write_length_and_key(fp, self.classID)
written += write_length_and_key(fp, self.keyID)
return written
@register(OSType.UNIT_FLOAT)
@attr.s(slots=True, repr=False, eq=False, order=False)
class UnitFloat(NumericElement):
"""
Unit float structure.
.. py:attribute:: unit
unit of the value in :py:class:`Unit`
.. py:attribute:: value
`float` value
"""
value = attr.ib(default=0.0, type=float)
unit = attr.ib(default=Unit._None, converter=Unit, validator=in_(Unit))
@classmethod
def read(cls, fp):
unit, value = read_fmt('4sd', fp)
return cls(unit=Unit(unit), value=value)
def write(self, fp):
return write_fmt(fp, '4sd', self.unit.value, self.value)
def _repr_pretty_(self, p, cycle):
if cycle:
return self.__repr__()
p.pretty(self.value)
p.text(' ')
p.text(self.unit.name)
@register(OSType.UNIT_FLOATS)
@attr.s(repr=False)
class UnitFloats(BaseElement):
"""
Unit floats structure.
.. py:attribute:: unit
unit of the value in :py:class:`Unit`
.. py:attribute:: values
List of `float` values
"""
unit = attr.ib(default=Unit._None, converter=Unit, validator=in_(Unit))
values = attr.ib(factory=list)
@classmethod
def read(cls, fp):
unit, count = read_fmt('4sI', fp)
values = list(read_fmt('%dd' % count, fp))
return cls(unit, values)
def write(self, fp):
return write_fmt(
fp, '4sI%dd' % len(self.values), self.unit.value, len(self.values),
*self.values
)
def __iter__(self):
for value in self.values:
yield value
def __getitem__(self, index):
return self.values[index]
def __len__(self):
return len(self.values)
@register(OSType.DOUBLE)
class Double(NumericElement):
"""
Double structure.
.. py:attribute:: value
`float` value
"""
@classmethod
def read(cls, fp):
return cls(*read_fmt('d', fp))
def write(self, fp):
return write_fmt(fp, 'd', self.value)
@attr.s(repr=False)
class Class(BaseElement):
"""
Class structure.
.. py:attribute:: name
`str` value
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
"""
name = attr.ib(default='', type=str)
classID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
@classmethod
def read(cls, fp):
name = read_unicode_string(fp)
classID = read_length_and_key(fp)
return cls(name, classID)
def write(self, fp):
written = write_unicode_string(fp, self.name)
written += write_length_and_key(fp, self.classID)
return written
@register(OSType.STRING)
class String(StringElement):
"""
String structure.
.. py:attribute:: value
`str` value
"""
pass
@register(OSType.ENUMERATED_REFERENCE)
@attr.s(repr=False)
class EnumeratedReference(BaseElement):
"""
Enumerated reference structure.
.. py:attribute:: name
`str` value
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
.. py:attribute:: typeID
bytes in :py:class:`~psd_tools.terminology.Type`
.. py:attribute:: enum
bytes in :py:class:`~psd_tools.terminology.Enum`
"""
name = attr.ib(default='', type=str)
classID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
typeID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
enum = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
@classmethod
def read(cls, fp):
name = read_unicode_string(fp)
classID = read_length_and_key(fp)
typeID = read_length_and_key(fp)
enum = read_length_and_key(fp)
return cls(name, classID, typeID, enum)
def write(self, fp):
written = write_unicode_string(fp, self.name)
written += write_length_and_key(fp, self.classID)
written += write_length_and_key(fp, self.typeID)
written += write_length_and_key(fp, self.enum)
return written
@register(OSType.OFFSET)
@attr.s(repr=False)
class Offset(BaseElement):
"""
Offset structure.
.. py:attribute:: name
`str` value
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
.. py:attribute:: value
`int` value
"""
name = attr.ib(default='', type=str)
classID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
value = attr.ib(default=0)
@classmethod
def read(cls, fp):
name = read_unicode_string(fp)
classID = read_length_and_key(fp)
offset = read_fmt('I', fp)[0]
return cls(name, classID, offset)
def write(self, fp):
written = write_unicode_string(fp, self.name)
written += write_length_and_key(fp, self.classID)
written += write_fmt(fp, 'I', self.value)
return written
@register(OSType.BOOLEAN)
class Bool(BooleanElement):
"""
Bool structure.
.. py:attribute:: value
`bool` value
"""
@classmethod
def read(cls, fp):
return cls(read_fmt('?', fp)[0])
def write(self, fp):
return write_fmt(fp, '?', self.value)
@register(OSType.LARGE_INTEGER)
class LargeInteger(IntegerElement):
"""
LargeInteger structure.
.. py:attribute:: value
`int` value
"""
@classmethod
def read(cls, fp):
return cls(read_fmt('q', fp)[0])
def write(self, fp):
return write_fmt(fp, 'q', self.value)
@register(OSType.INTEGER)
class Integer(IntegerElement):
"""
Integer structure.
.. py:attribute:: value
`int` value
"""
@classmethod
def read(cls, fp):
return cls(read_fmt('i', fp)[0])
def write(self, fp):
return write_fmt(fp, 'i', self.value)
@register(OSType.ENUMERATED)
@attr.s(repr=False)
class Enumerated(BaseElement):
"""
Enum structure.
.. py:attribute:: typeID
bytes in :py:class:`~psd_tools.terminology.Type`
.. py:attribute:: enum
bytes in :py:class:`~psd_tools.terminology.Enum`
"""
typeID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
enum = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
@classmethod
def read(cls, fp):
typeID = read_length_and_key(fp)
enum = read_length_and_key(fp)
return cls(typeID, enum)
def write(self, fp):
written = write_length_and_key(fp, self.typeID)
written += write_length_and_key(fp, self.enum)
return written
def _repr_pretty_(self, p, cycle):
if cycle:
return self.__repr__()
p.text('(')
p.pretty(getattr(self.typeID, 'name', self.typeID))
p.text(', ')
p.pretty(getattr(self.enum, 'name', self.enum))
p.text(')')
def get_name(self):
"""Get enum name."""
if len(self.enum) == 4:
try:
return Enum(self.enum).name
except ValueError:
pass
return str(self.enum)
@register(OSType.RAW_DATA)
@attr.s(repr=False)
class RawData(BaseElement):
"""
RawData structure.
.. py:attribute:: value
`bytes` value
"""
value = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
@classmethod
def read(cls, fp):
return cls(read_length_block(fp))
def write(self, fp):
def writer(f):
if hasattr(self.value, 'write'):
return self.value.write(f)
return write_bytes(f, self.value)
return write_length_block(fp, writer)
@register(OSType.CLASS1)
class Class1(Class):
"""
Class structure equivalent to
:py:class:`~psd_tools.psd.descriptor.Class`.
"""
pass
@register(OSType.CLASS2)
class Class2(Class):
"""
Class structure equivalent to
:py:class:`~psd_tools.psd.descriptor.Class`.
"""
pass
@register(OSType.CLASS3)
class Class3(Class):
"""
Class structure equivalent to
:py:class:`~psd_tools.psd.descriptor.Class`.
"""
pass
@register(OSType.REFERENCE)
class Reference(List):
"""
Reference structure equivalent to
:py:class:`~psd_tools.psd.descriptor.List`.
"""
pass
@register(OSType.ALIAS)
class Alias(RawData):
"""
Alias structure equivalent to
:py:class:`~psd_tools.psd.descriptor.RawData`.
"""
pass
@register(OSType.GLOBAL_OBJECT)
class GlobalObject(Descriptor):
"""
Global object structure equivalent to
:py:class:`~psd_tools.psd.descriptor.Descriptor`.
"""
pass
@register(OSType.PATH)
class Path(RawData):
"""
Undocumented path structure equivalent to
:py:class:`~psd_tools.psd.descriptor.RawData`.
"""
pass
@register(OSType.IDENTIFIER)
class Identifier(Integer):
"""
Identifier equivalent to
:py:class:`~psd_tools.psd.descriptor.Integer`.
"""
pass
@register(OSType.INDEX)
class Index(Integer):
"""
Index equivalent to :py:class:`~psd_tools.psd.descriptor.Integer`.
"""
pass
@register(OSType.NAME)
@attr.s(repr=False)
class Name(BaseElement):
"""
Name structure (Undocumented).
.. py:attribute:: name
str
.. py:attribute:: classID
bytes in :py:class:`~psd_tools.terminology.Klass`
.. py:attribute:: value
str
"""
name = attr.ib(default='', type=str)
classID = attr.ib(default=b'\x00\x00\x00\x00', type=bytes)
value = attr.ib(default='', type=str)
@classmethod
def read(cls, fp):
name = read_unicode_string(fp)
classID = read_length_and_key(fp)
value = read_unicode_string(fp)
return cls(name, classID, value)
def write(self, fp):
written = write_unicode_string(fp, self.name)
written += write_length_and_key(fp, self.classID)
written += write_unicode_string(fp, self.value)
return written
@attr.s(repr=False)
class DescriptorBlock(Descriptor):
"""
Dict-like Descriptor-based structure that has `version` field. See
:py:class:`~psd_tools.psd.descriptor.Descriptor`.
.. py:attribute:: version
"""
version = attr.ib(default=16, type=int, validator=in_((16, )))
@classmethod
def read(cls, fp, **kwargs):
version = read_fmt('I', fp)[0]
return cls(version=version, **cls._read_body(fp))
def write(self, fp, padding=4, **kwargs):
written = write_fmt(fp, 'I', self.version)
written += self._write_body(fp)
written += write_padding(fp, written, padding)
return written
@attr.s(repr=False)
class DescriptorBlock2(Descriptor):
"""
Dict-like Descriptor-based structure that has `version` and
`data_version` fields. See
:py:class:`~psd_tools.psd.descriptor.Descriptor`.
.. py:attribute:: version
.. py:attribute:: data_version
"""
version = attr.ib(default=1, type=int)
data_version = attr.ib(default=16, type=int, validator=in_((16, )))
@classmethod
def read(cls, fp, **kwargs):
version, data_version = read_fmt('2I', fp)
return cls(
version=version, data_version=data_version, **cls._read_body(fp)
)
def write(self, fp, padding=4, **kwargs):
written = write_fmt(fp, '2I', self.version, self.data_version)
written += self._write_body(fp)
written += write_padding(fp, written, padding)
return written
|
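Descriptors read and write symmetrically, so a quick way to sanity-check the serialization code above is a round trip through a buffer. A minimal sketch, assuming `DictElement` supports item assignment and using `b'null'` (a 4-byte terminology code) as an arbitrary key:

```python
# Round-trip sketch: write a Descriptor to a buffer and read it back.
from io import BytesIO
from psd_tools.psd.descriptor import Descriptor, Integer

desc = Descriptor()
desc[b'null'] = Integer(42)

buf = BytesIO()
desc.write(buf)
buf.seek(0)
assert Descriptor.read(buf) == desc
```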
py | 1a3512a2e0a2e221262835f6f7d642ce37d1d062 | from tdw.controller import Controller
from tdw.tdw_utils import TDWUtils
from tdw.add_ons.object_manager import ObjectManager
from magnebot import Magnebot, ActionStatus
class CollisionDetection(Controller):
"""
Show the difference between arrived_offset values and collision detection settings.
"""
def __init__(self, port: int = 1071, check_version: bool = True, launch_build: bool = True):
super().__init__(port=port, check_version=check_version, launch_build=launch_build)
self.magnebot = Magnebot()
self.object_manager = ObjectManager()
self.add_ons.extend([self.object_manager, self.magnebot])
self.object_id: int = -1
def init_scene(self):
self.object_id = self.get_unique_id()
self.magnebot.reset()
commands = [{"$type": "load_scene",
"scene_name": "ProcGenScene"},
TDWUtils.create_empty_room(12, 12)]
commands.extend(self.get_add_physics_object(model_name="rh10",
position={"x": 0.04, "y": 0, "z": 1.081},
object_id=self.object_id))
self.communicate(commands)
def run(self, arrived_offset: float, objects: bool) -> None:
self.init_scene()
self.object_manager.initialized = False
self.magnebot.collision_detection.objects = objects
self.magnebot.move_to(self.object_id, arrived_at=0.3, aligned_at=1, arrived_offset=arrived_offset)
while self.magnebot.action.status == ActionStatus.ongoing:
self.communicate([])
self.communicate([])
print(self.magnebot.action.status)
print(self.object_manager.transforms[self.object_id].position)
if __name__ == "__main__":
c = CollisionDetection()
c.run(arrived_offset=0, objects=True)
c.run(arrived_offset=0.3, objects=True)
c.run(arrived_offset=0, objects=False)
c.communicate({"$type": "terminate"})
|
py | 1a3513dca89bae6a05aadd2e9d695d9eda900175 | import tensorflow as tf
import tensorflow_hub as hub
import numpy as np
import cv2
|
py | 1a351575358bbe73db1271089e89968177d9cacf | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class MicrosoftgraphlocaleInfo(Model):
"""localeInfo.
:param locale:
:type locale: str
:param display_name:
:type display_name: str
"""
_attribute_map = {
'locale': {'key': 'locale', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
}
def __init__(self, locale=None, display_name=None):
super(MicrosoftgraphlocaleInfo, self).__init__()
self.locale = locale
self.display_name = display_name
|
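The `_attribute_map` drives msrest's (de)serialization, mapping Python attribute names to wire names (`display_name` -> `displayName`). A usage sketch, assuming the standard msrest `Serializer`:

```python
# Serialization sketch: _attribute_map maps display_name -> displayName.
from msrest import Serializer

info = MicrosoftgraphlocaleInfo(locale='en-US', display_name='English (US)')
serializer = Serializer({'MicrosoftgraphlocaleInfo': MicrosoftgraphlocaleInfo})
body = serializer.body(info, 'MicrosoftgraphlocaleInfo')
# body == {'locale': 'en-US', 'displayName': 'English (US)'}
```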
py | 1a3516b1856818c8928e286ba78ec5262a853dd2 | import os
import sys
from io import BytesIO
from Tea.stream import BaseStream
from alibabacloud_tea_fileform.models import FileField
def _length(o):
if hasattr(o, 'len'):
return o.len
elif isinstance(o, BytesIO):
return o.getbuffer().nbytes
elif hasattr(o, 'fileno'):
return os.path.getsize(o.name)
return len(o)
class FileFormInputStream(BaseStream):
def __init__(self, form, boundary, size=1024):
super().__init__(size)
self.form = form
self.boundary = boundary
self.file_size_left = 0
self.forms = {}
self.files = {}
self.files_keys = []
self._to_map()
self.form_str = b''
self._build_str_forms()
self.str_length = len(self.form_str)
def _to_map(self):
for k, v in self.form.items():
if isinstance(v, FileField):
self.files[k] = v
self.files_keys.append(k)
else:
self.forms[k] = v
def _build_str_forms(self):
form_str = ''
str_fmt = '--%s\r\nContent-Disposition: form-data; name="%s"\r\n\r\n%s\r\n'
forms_list = sorted(list(self.forms))
for key in forms_list:
value = self.forms[key]
form_str += str_fmt % (self.boundary, key, value)
self.form_str = form_str.encode('utf-8')
def _get_stream_length(self):
file_length = 0
for k, ff in self.files.items():
field_length = len(ff.filename.encode('utf-8')) + len(ff.content_type) +\
len(k.encode('utf-8')) + len(self.boundary) + 78
file_length += _length(ff.content) + field_length
stream_length = self.str_length + file_length + len(self.boundary) + 6
return stream_length
def __len__(self):
return self._get_stream_length()
def __iter__(self):
return self
def __next__(self):
return self.read(self.size, loop=True)
def file_str(self, size):
# handle file object
form_str = b''
start_fmt = '--%s\r\nContent-Disposition: form-data; name="%s";'
content_fmt = b' filename="%s"\r\nContent-Type: %s\r\n\r\n%s'
if self.file_size_left:
for key in self.files_keys[:]:
if size <= 0:
break
file_field = self.files[key]
file_content = file_field.content.read(size)
if isinstance(file_content, str):
file_content = file_content.encode('utf-8')
if self.file_size_left <= size:
form_str += b'%s\r\n' % file_content
self.file_size_left = 0
size -= len(file_content)
self.files_keys.remove(key)
else:
form_str += file_content
self.file_size_left -= size
size -= len(file_content)
else:
for key in self.files_keys[:]:
if size <= 0:
break
file_field = self.files[key]
file_size = _length(file_field.content)
self.file_size_left = file_size
file_content = file_field.content.read(size)
if isinstance(file_content, str):
file_content = file_content.encode('utf-8')
# build form_str
start = start_fmt % (self.boundary, key)
content = content_fmt % (
file_field.filename.encode('utf-8'),
file_field.content_type.encode('utf-8'),
file_content
)
if self.file_size_left < size:
form_str += b'%s%s\r\n' % (start.encode('utf-8'), content)
self.file_size_left = 0
size -= len(file_content)
self.files_keys.remove(key)
else:
form_str += b'%s%s' % (start.encode('utf-8'), content)
self.file_size_left -= size
size -= len(file_content)
return form_str
def read(self, size=None, loop=False):
if not self.files_keys and not self.form_str:
self.refresh()
if loop:
raise StopIteration
else:
return b''
if size is None:
size = sys.maxsize
if self.form_str:
form_str = self.form_str[:size]
self.form_str = self.form_str[size:]
if len(form_str) < size:
form_str += self.file_str(size)
else:
form_str = self.file_str(size)
if not self.form_str and not self.files_keys:
form_str += b'--%s--\r\n' % self.boundary.encode('utf-8')
return form_str
def refresh_cursor(self):
for ff in self.files.values():
if hasattr(ff.content, 'seek'):
ff.content.seek(0, 0)
def refresh(self):
self.file_size_left = 0
self._to_map()
self._build_str_forms()
self.refresh_cursor()
|
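A usage sketch for the stream above: build a multipart/form-data body from one plain field and one file field (the `FileField` constructor keywords are assumed from the models module; the boundary string is arbitrary):

```python
# Usage sketch: stream a multipart/form-data body.
from io import BytesIO
from alibabacloud_tea_fileform.models import FileField

form = {
    'name': 'demo',
    'file': FileField(filename='a.txt', content_type='text/plain',
                      content=BytesIO(b'hello')),
}
stream = FileFormInputStream(form, boundary='x-tea-boundary')
print(len(stream))    # total multipart body length
print(stream.read())  # the encoded body, terminated by --boundary--
```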
py | 1a3516f50d32a439f08208730424ce1c6f2570e1 | """
In this example a Bell state is made.
"""
from qiskit import QuantumCircuit, ClassicalRegister, QuantumRegister
from qiskit import execute
from qiskit_qcgpu_provider import QCGPUProvider
Provider = QCGPUProvider()
# Create a Quantum Register with 2 qubits.
q = QuantumRegister(2)
# Create a Quantum Circuit with 2 Qubits
qc = QuantumCircuit(q)
# Add a H gate on qubit 0, putting this qubit in superposition.
qc.h(q[0])
# Add a CX (CNOT) gate on control qubit 0 and target qubit 1, putting
# the qubits in a Bell state.
qc.cx(q[0], q[1])
# See a list of available local simulators
print("QCGPU backends: ", Provider.backends())
backend_sim = Provider.get_backend('statevector_simulator')
# Compile and run the Quantum circuit on a simulator backend
job_sim = execute(qc, backend_sim)
result_sim = job_sim.result()
# Show the results
print("Simulation Results: ", result_sim)
print(result_sim.get_statevector(qc))
|
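For reference, the printed statevector should match the ideal Bell state; a quick numpy check (numpy assumed available alongside Qiskit):

```python
# Ideal Bell statevector for comparison: (|00> + |11>) / sqrt(2).
import numpy as np
bell = np.array([1, 0, 0, 1]) / np.sqrt(2)
# Expected amplitudes: [0.70710678, 0, 0, 0.70710678]
```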
py | 1a35180ff44e2faffdca45912e09293e38d16bba | # This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Philippe Biondi <[email protected]>
# Modified by Maxence Tury <[email protected]>
# This program is published under a GPLv2 license
"""
ASN.1 (Abstract Syntax Notation One)
"""
from __future__ import absolute_import
from __future__ import print_function
import random
from datetime import datetime
from scapy.config import conf
from scapy.error import Scapy_Exception, warning
from scapy.volatile import RandField, RandIP, GeneralizedTime
from scapy.utils import Enum_metaclass, EnumElement, binrepr
from scapy.compat import plain_str, chb, orb
import scapy.modules.six as six
from scapy.modules.six.moves import range
class RandASN1Object(RandField):
def __init__(self, objlist=None):
self.objlist = [
x._asn1_obj
for x in six.itervalues(ASN1_Class_UNIVERSAL.__rdict__)
if hasattr(x, "_asn1_obj")
] if objlist is None else objlist
self.chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" # noqa: E501
def _fix(self, n=0):
o = random.choice(self.objlist)
if issubclass(o, ASN1_INTEGER):
return o(int(random.gauss(0, 1000)))
elif issubclass(o, ASN1_IPADDRESS):
z = RandIP()._fix()
return o(z)
elif issubclass(o, ASN1_GENERALIZED_TIME) or issubclass(o, ASN1_UTC_TIME): # noqa: E501
z = GeneralizedTime()._fix()
return o(z)
elif issubclass(o, ASN1_STRING):
z = int(random.expovariate(0.05) + 1)
return o("".join(random.choice(self.chars) for _ in range(z)))
elif issubclass(o, ASN1_SEQUENCE) and (n < 10):
z = int(random.expovariate(0.08) + 1)
return o([self.__class__(objlist=self.objlist)._fix(n + 1)
for _ in range(z)])
return ASN1_INTEGER(int(random.gauss(0, 1000)))
##############
# ASN1 #
##############
class ASN1_Error(Scapy_Exception):
pass
class ASN1_Encoding_Error(ASN1_Error):
pass
class ASN1_Decoding_Error(ASN1_Error):
pass
class ASN1_BadTag_Decoding_Error(ASN1_Decoding_Error):
pass
class ASN1Codec(EnumElement):
def register_stem(cls, stem):
cls._stem = stem
def dec(cls, s, context=None):
return cls._stem.dec(s, context=context)
def safedec(cls, s, context=None):
return cls._stem.safedec(s, context=context)
def get_stem(cls):
        return cls._stem
class ASN1_Codecs_metaclass(Enum_metaclass):
element_class = ASN1Codec
class ASN1_Codecs(six.with_metaclass(ASN1_Codecs_metaclass)):
BER = 1
DER = 2
PER = 3
CER = 4
LWER = 5
BACnet = 6
OER = 7
SER = 8
XER = 9
class ASN1Tag(EnumElement):
def __init__(self, key, value, context=None, codec=None):
EnumElement.__init__(self, key, value)
self._context = context
if codec is None:
codec = {}
self._codec = codec
    def clone(self):  # not a real deep copy. self._codec is shared
return self.__class__(self._key, self._value, self._context, self._codec) # noqa: E501
def register_asn1_object(self, asn1obj):
self._asn1_obj = asn1obj
def asn1_object(self, val):
if hasattr(self, "_asn1_obj"):
return self._asn1_obj(val)
raise ASN1_Error("%r does not have any assigned ASN1 object" % self)
def register(self, codecnum, codec):
self._codec[codecnum] = codec
def get_codec(self, codec):
try:
c = self._codec[codec]
except KeyError:
raise ASN1_Error("Codec %r not found for tag %r" % (codec, self))
return c
class ASN1_Class_metaclass(Enum_metaclass):
element_class = ASN1Tag
def __new__(cls, name, bases, dct): # XXX factorise a bit with Enum_metaclass.__new__() # noqa: E501
for b in bases:
for k, v in six.iteritems(b.__dict__):
if k not in dct and isinstance(v, ASN1Tag):
dct[k] = v.clone()
rdict = {}
for k, v in six.iteritems(dct):
if isinstance(v, int):
v = ASN1Tag(k, v)
dct[k] = v
rdict[v] = v
elif isinstance(v, ASN1Tag):
rdict[v] = v
dct["__rdict__"] = rdict
cls = type.__new__(cls, name, bases, dct)
for v in six.itervalues(cls.__dict__):
if isinstance(v, ASN1Tag):
v.context = cls # overwrite ASN1Tag contexts, even cloned ones
return cls
class ASN1_Class(six.with_metaclass(ASN1_Class_metaclass)):
pass
class ASN1_Class_UNIVERSAL(ASN1_Class):
name = "UNIVERSAL"
ERROR = -3
RAW = -2
NONE = -1
ANY = 0
BOOLEAN = 1
INTEGER = 2
BIT_STRING = 3
STRING = 4
NULL = 5
OID = 6
OBJECT_DESCRIPTOR = 7
EXTERNAL = 8
REAL = 9
ENUMERATED = 10
EMBEDDED_PDF = 11
UTF8_STRING = 12
RELATIVE_OID = 13
SEQUENCE = 16 | 0x20 # constructed encoding
SET = 17 | 0x20 # constructed encoding
NUMERIC_STRING = 18
PRINTABLE_STRING = 19
T61_STRING = 20 # aka TELETEX_STRING
VIDEOTEX_STRING = 21
IA5_STRING = 22
UTC_TIME = 23
GENERALIZED_TIME = 24
GRAPHIC_STRING = 25
ISO646_STRING = 26 # aka VISIBLE_STRING
GENERAL_STRING = 27
UNIVERSAL_STRING = 28
CHAR_STRING = 29
BMP_STRING = 30
IPADDRESS = 0 | 0x40 # application-specific encoding
COUNTER32 = 1 | 0x40 # application-specific encoding
GAUGE32 = 2 | 0x40 # application-specific encoding
TIME_TICKS = 3 | 0x40 # application-specific encoding
class ASN1_Object_metaclass(type):
def __new__(cls, name, bases, dct):
c = super(ASN1_Object_metaclass, cls).__new__(cls, name, bases, dct)
try:
c.tag.register_asn1_object(c)
except Exception:
warning("Error registering %r for %r" % (c.tag, c.codec))
return c
class ASN1_Object(six.with_metaclass(ASN1_Object_metaclass)):
tag = ASN1_Class_UNIVERSAL.ANY
def __init__(self, val):
self.val = val
def enc(self, codec):
return self.tag.get_codec(codec).enc(self.val)
def __repr__(self):
return "<%s[%r]>" % (self.__dict__.get("name", self.__class__.__name__), self.val) # noqa: E501
def __str__(self):
return self.enc(conf.ASN1_default_codec)
def __bytes__(self):
return self.enc(conf.ASN1_default_codec)
def strshow(self, lvl=0):
return (" " * lvl) + repr(self) + "\n"
def show(self, lvl=0):
print(self.strshow(lvl))
def __eq__(self, other):
return self.val == other
def __lt__(self, other):
return self.val < other
def __le__(self, other):
return self.val <= other
def __gt__(self, other):
return self.val > other
def __ge__(self, other):
return self.val >= other
def __ne__(self, other):
return self.val != other
#######################
# ASN1 objects #
#######################
# on the whole, we order the classes by ASN1_Class_UNIVERSAL tag value
class ASN1_DECODING_ERROR(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.ERROR
def __init__(self, val, exc=None):
ASN1_Object.__init__(self, val)
self.exc = exc
def __repr__(self):
return "<%s[%r]{{%r}}>" % (self.__dict__.get("name", self.__class__.__name__), # noqa: E501
self.val, self.exc.args[0])
def enc(self, codec):
if isinstance(self.val, ASN1_Object):
return self.val.enc(codec)
return self.val
class ASN1_force(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.RAW
def enc(self, codec):
if isinstance(self.val, ASN1_Object):
return self.val.enc(codec)
return self.val
class ASN1_BADTAG(ASN1_force):
pass
class ASN1_INTEGER(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.INTEGER
def __repr__(self):
h = hex(int(self.val))
if h[-1] == "L":
h = h[:-1]
# cut at 22 because with leading '0x', x509 serials should be < 23
if len(h) > 22:
h = h[:12] + "..." + h[-10:]
r = repr(self.val)
if len(r) > 20:
r = r[:10] + "..." + r[-10:]
return h + " <%s[%s]>" % (self.__dict__.get("name", self.__class__.__name__), r) # noqa: E501
class ASN1_BOOLEAN(ASN1_INTEGER):
tag = ASN1_Class_UNIVERSAL.BOOLEAN
# BER: 0 means False, anything else means True
def __repr__(self):
return '%s %s' % (not (self.val == 0), ASN1_Object.__repr__(self))
class ASN1_BIT_STRING(ASN1_Object):
"""
ASN1_BIT_STRING values are bit strings like "011101".
A zero-bit padded readable string is provided nonetheless,
which is stored in val_readable
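    An illustrative round-trip (the byte value follows the bit packing
    implemented in __setattr__ below):
        ASN1_BIT_STRING("011101").val_readable  ->  b't'  (0b01110100, 2 unused bits)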
"""
tag = ASN1_Class_UNIVERSAL.BIT_STRING
def __init__(self, val, readable=False):
if not readable:
self.val = val
else:
self.val_readable = val
def __setattr__(self, name, value):
if name == "val_readable":
if isinstance(value, (str, bytes)):
val = "".join(binrepr(orb(x)).zfill(8) for x in value)
else:
warning("Invalid val: should be bytes")
val = "<invalid val_readable>"
object.__setattr__(self, "val", val)
object.__setattr__(self, name, value)
object.__setattr__(self, "unused_bits", 0)
elif name == "val":
value = plain_str(value)
if isinstance(value, str):
if any(c for c in value if c not in ["0", "1"]):
warning("Invalid operation: 'val' is not a valid bit string.") # noqa: E501
return
else:
if len(value) % 8 == 0:
unused_bits = 0
else:
unused_bits = 8 - (len(value) % 8)
padded_value = value + ("0" * unused_bits)
bytes_arr = zip(*[iter(padded_value)] * 8)
val_readable = b"".join(chb(int("".join(x), 2)) for x in bytes_arr) # noqa: E501
else:
warning("Invalid val: should be str")
val_readable = b"<invalid val>"
unused_bits = 0
object.__setattr__(self, "val_readable", val_readable)
object.__setattr__(self, name, value)
object.__setattr__(self, "unused_bits", unused_bits)
elif name == "unused_bits":
warning("Invalid operation: unused_bits rewriting "
"is not supported.")
else:
object.__setattr__(self, name, value)
def __repr__(self):
s = self.val_readable
if len(s) > 16:
s = s[:10] + b"..." + s[-10:]
v = self.val
if len(v) > 20:
v = v[:10] + "..." + v[-10:]
return "<%s[%s]=%s (%d unused bit%s)>" % (
self.__dict__.get("name", self.__class__.__name__),
v,
s,
self.unused_bits,
"s" if self.unused_bits > 1 else ""
)
class ASN1_STRING(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.STRING
class ASN1_NULL(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.NULL
def __repr__(self):
return ASN1_Object.__repr__(self)
class ASN1_OID(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.OID
def __init__(self, val):
val = plain_str(val)
val = conf.mib._oid(val)
ASN1_Object.__init__(self, val)
self.oidname = conf.mib._oidname(val)
def __repr__(self):
return "<%s[%r]>" % (self.__dict__.get("name", self.__class__.__name__), self.oidname) # noqa: E501
class ASN1_ENUMERATED(ASN1_INTEGER):
tag = ASN1_Class_UNIVERSAL.ENUMERATED
class ASN1_UTF8_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.UTF8_STRING
class ASN1_NUMERIC_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.NUMERIC_STRING
class ASN1_PRINTABLE_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.PRINTABLE_STRING
class ASN1_T61_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.T61_STRING
class ASN1_VIDEOTEX_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.VIDEOTEX_STRING
class ASN1_IA5_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.IA5_STRING
class ASN1_UTC_TIME(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.UTC_TIME
def __init__(self, val):
ASN1_STRING.__init__(self, val)
def __setattr__(self, name, value):
if isinstance(value, bytes):
value = plain_str(value)
if name == "val":
pretty_time = None
if isinstance(self, ASN1_GENERALIZED_TIME):
_len = 15
self._format = "%Y%m%d%H%M%S"
else:
_len = 13
self._format = "%y%m%d%H%M%S"
_nam = self.tag._asn1_obj.__name__[4:].lower()
if (isinstance(value, str) and
len(value) == _len and value[-1] == "Z"):
dt = datetime.strptime(value[:-1], self._format)
pretty_time = dt.strftime("%b %d %H:%M:%S %Y GMT")
else:
pretty_time = "%s [invalid %s]" % (value, _nam)
ASN1_STRING.__setattr__(self, "pretty_time", pretty_time)
ASN1_STRING.__setattr__(self, name, value)
elif name == "pretty_time":
print("Invalid operation: pretty_time rewriting is not supported.")
else:
ASN1_STRING.__setattr__(self, name, value)
def __repr__(self):
return "%s %s" % (self.pretty_time, ASN1_STRING.__repr__(self))
class ASN1_GENERALIZED_TIME(ASN1_UTC_TIME):
tag = ASN1_Class_UNIVERSAL.GENERALIZED_TIME
class ASN1_ISO646_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.ISO646_STRING
class ASN1_UNIVERSAL_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.UNIVERSAL_STRING
class ASN1_BMP_STRING(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.BMP_STRING
class ASN1_SEQUENCE(ASN1_Object):
tag = ASN1_Class_UNIVERSAL.SEQUENCE
def strshow(self, lvl=0):
s = (" " * lvl) + ("# %s:" % self.__class__.__name__) + "\n"
for o in self.val:
s += o.strshow(lvl=lvl + 1)
return s
class ASN1_SET(ASN1_SEQUENCE):
tag = ASN1_Class_UNIVERSAL.SET
class ASN1_IPADDRESS(ASN1_STRING):
tag = ASN1_Class_UNIVERSAL.IPADDRESS
class ASN1_COUNTER32(ASN1_INTEGER):
tag = ASN1_Class_UNIVERSAL.COUNTER32
class ASN1_GAUGE32(ASN1_INTEGER):
tag = ASN1_Class_UNIVERSAL.GAUGE32
class ASN1_TIME_TICKS(ASN1_INTEGER):
tag = ASN1_Class_UNIVERSAL.TIME_TICKS
conf.ASN1_default_codec = ASN1_Codecs.BER
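# Illustrative BER round-trip (assumes the BER codec has been registered,
# which Scapy does when scapy.asn1.ber is imported):
# ASN1_INTEGER(42).enc(ASN1_Codecs.BER)  ->  b'\x02\x01*'  (tag 0x02, len 1, value 42)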
|
py | 1a351848ba24e33660f5eed6cedbd477cfdaef7e | # Simple wrapper script needed to run epydoc
import sys
try:
from epydoc.cli import cli
except ImportError:
    print >>sys.stderr, "No epydoc installed (see http://epydoc.sourceforge.net)"
sys.exit(2)
# Epydoc 3.0.1 has some trouble running with recent Docutils (>= 0.6),
# so we work around this bug, following the lines of the fix in
# https://bugs.gentoo.org/attachment.cgi?id=210118
# (see http://bugs.gentoo.org/287546)
try:
from docutils.nodes import Text
if not hasattr(Text, 'data'):
setattr(Text, 'data', property(lambda self: self.astext()))
except ImportError:
    print >>sys.stderr, "docutils is needed for running epydoc " \
"(see http://docutils.sourceforge.net)"
sys.exit(2)
# Epydoc doesn't allow much control over the generated graphs. This is
# bad especially for the class graph for Component which has a lot of
# subclasses, so we need to force Left-to-Right mode.
# from epydoc.docwriter.html import HTMLWriter
# HTMLWriter_render_graph = HTMLWriter.render_graph
# def render_graph_LR(self, graph):
# if graph:
# graph.body += 'rankdir=LR\n'
# return HTMLWriter_render_graph(self, graph)
# HTMLWriter.render_graph = render_graph_LR
# Well, LR mode doesn't really look better...
# the ASCII-art version seems better in most cases.
# Workaround "visiting unknown node type" error due to `.. note ::`
# This was due to the lack of Admonitions transforms. Add it.
from epydoc.markup.restructuredtext import _DocumentPseudoWriter
from docutils.transforms import writer_aux
orig_get_transforms = _DocumentPseudoWriter.get_transforms
def pseudo_get_transforms(self):
return orig_get_transforms(self) + [writer_aux.Admonitions]
_DocumentPseudoWriter.get_transforms = pseudo_get_transforms
# Run epydoc
cli()
|
py | 1a3518f678ef3ad033d331ed7a88e90cc2ee7637 | #!/usr/bin/env python2.7
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' main.py '''
from __future__ import print_function
import argparse
import os
import signal
import sys
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.options import define
from tornado.httpclient import AsyncHTTPClient
import heron.tools.common.src.python.utils.config as common_config
import heron.common.src.python.utils.log as log
from heron.tools.tracker.src.python import constants
from heron.tools.tracker.src.python import handlers
from heron.tools.tracker.src.python import utils
from heron.tools.tracker.src.python.config import Config, STATEMGRS_KEY
from heron.tools.tracker.src.python.tracker import Tracker
Log = log.Log
class Application(tornado.web.Application):
""" Tornado server application """
def __init__(self, config):
AsyncHTTPClient.configure(None, defaults=dict(request_timeout=120.0))
self.tracker = Tracker(config)
self.tracker.synch_topologies()
tornadoHandlers = [
(r"/", handlers.MainHandler),
(r"/clusters", handlers.ClustersHandler, {"tracker":self.tracker}),
(r"/topologies", handlers.TopologiesHandler, {"tracker":self.tracker}),
(r"/topologies/states", handlers.StatesHandler, {"tracker":self.tracker}),
(r"/topologies/info", handlers.TopologyHandler, {"tracker":self.tracker}),
(r"/topologies/logicalplan", handlers.LogicalPlanHandler, {"tracker":self.tracker}),
(r"/topologies/config", handlers.TopologyConfigHandler, {"tracker":self.tracker}),
(r"/topologies/containerfiledata", handlers.ContainerFileDataHandler,
{"tracker":self.tracker}),
(r"/topologies/containerfiledownload", handlers.ContainerFileDownloadHandler,
{"tracker":self.tracker}),
(r"/topologies/containerfilestats",
handlers.ContainerFileStatsHandler, {"tracker":self.tracker}),
(r"/topologies/physicalplan", handlers.PhysicalPlanHandler, {"tracker":self.tracker}),
# Deprecated. See https://github.com/apache/incubator-heron/issues/1754
(r"/topologies/executionstate", handlers.ExecutionStateHandler, {"tracker":self.tracker}),
(r"/topologies/schedulerlocation", handlers.SchedulerLocationHandler,
{"tracker":self.tracker}),
(r"/topologies/metadata", handlers.MetaDataHandler, {"tracker":self.tracker}),
(r"/topologies/runtimestate", handlers.RuntimeStateHandler, {"tracker":self.tracker}),
(r"/topologies/metrics", handlers.MetricsHandler, {"tracker":self.tracker}),
(r"/topologies/metricstimeline", handlers.MetricsTimelineHandler, {"tracker":self.tracker}),
(r"/topologies/metricsquery", handlers.MetricsQueryHandler, {"tracker":self.tracker}),
(r"/topologies/exceptions", handlers.ExceptionHandler, {"tracker":self.tracker}),
(r"/topologies/exceptionsummary", handlers.ExceptionSummaryHandler,
{"tracker":self.tracker}),
(r"/machines", handlers.MachinesHandler, {"tracker":self.tracker}),
(r"/topologies/pid", handlers.PidHandler, {"tracker":self.tracker}),
(r"/topologies/jstack", handlers.JstackHandler, {"tracker":self.tracker}),
(r"/topologies/jmap", handlers.JmapHandler, {"tracker":self.tracker}),
(r"/topologies/histo", handlers.MemoryHistogramHandler, {"tracker":self.tracker}),
(r"(.*)", handlers.DefaultHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
static_path=os.path.dirname(__file__)
)
tornado.web.Application.__init__(self, tornadoHandlers, **settings)
Log.info("Tracker has started")
def stop(self):
self.tracker.stop_sync()
# pylint: disable=protected-access
class _HelpAction(argparse._HelpAction):
""" HelpAction """
def __call__(self, parser, namespace, values, option_string=None):
parser.print_help()
# retrieve subparsers from parser
subparsers_actions = [
action for action in parser._actions
if isinstance(action, argparse._SubParsersAction)]
# there will probably only be one subparser_action,
        # but better safe than sorry
for subparsers_action in subparsers_actions:
# get all subparsers and print help
for choice, subparser in subparsers_action.choices.items():
print("Subparser '{}'".format(choice))
print(subparser.format_help())
parser.exit()
# pylint: disable=bad-super-call
class SubcommandHelpFormatter(argparse.RawDescriptionHelpFormatter):
""" Subcommand help formatter """
def _format_action(self, action):
parts = super(argparse.RawDescriptionHelpFormatter, self)._format_action(action)
if action.nargs == argparse.PARSER:
parts = "\n".join(parts.split("\n")[1:])
return parts
def add_titles(parser):
""" add titles """
parser._positionals.title = "Required arguments"
parser._optionals.title = "Optional arguments"
return parser
def add_arguments(parser):
""" add arguments """
default_config_file = os.path.join(
utils.get_heron_tracker_conf_dir(), constants.DEFAULT_CONFIG_FILE)
parser.add_argument(
'--config-file',
metavar='(a string; path to config file; default: "' + default_config_file + '")',
default=default_config_file)
parser.add_argument(
'--type',
        metavar='(a string; type of state manager (zookeeper or file, etc.); example: ' \
+ str(constants.DEFAULT_STATE_MANAGER_TYPE) + ')',
choices=["file", "zookeeper"])
parser.add_argument(
'--name',
        metavar='(a string; name to be used for the state manager; example: ' \
+ str(constants.DEFAULT_STATE_MANAGER_NAME) + ')')
parser.add_argument(
'--rootpath',
        metavar='(a string; where all the states are stored; example: ' \
+ str(constants.DEFAULT_STATE_MANAGER_ROOTPATH) + ')')
parser.add_argument(
'--tunnelhost',
        metavar='(a string; if ssh tunneling needs to be established to connect to it; example: ' \
+ str(constants.DEFAULT_STATE_MANAGER_TUNNELHOST) + ')')
parser.add_argument(
'--hostport',
        metavar='(a string; only used to connect to zk, must be of the form \'host:port\';'\
' example: ' + str(constants.DEFAULT_STATE_MANAGER_HOSTPORT) + ')')
parser.add_argument(
'--port',
metavar='(an integer; port to listen; default: ' + str(constants.DEFAULT_PORT) + ')',
type=int,
default=constants.DEFAULT_PORT)
parser.add_argument(
'--verbose',
action='store_true')
return parser
def create_parsers():
""" create argument parser """
parser = argparse.ArgumentParser(
epilog='For detailed documentation, go to http://github.com/apache/incubator-heron',
usage="%(prog)s [options] [help]",
add_help=False)
parser = add_titles(parser)
parser = add_arguments(parser)
ya_parser = argparse.ArgumentParser(
parents=[parser],
formatter_class=SubcommandHelpFormatter,
add_help=False)
subparsers = ya_parser.add_subparsers(
title="Available commands")
help_parser = subparsers.add_parser(
'help',
help='Prints help',
add_help=False)
help_parser.set_defaults(help=True)
subparsers.add_parser(
'version',
help='Prints version',
add_help=True)
return parser, ya_parser
def define_options(port, config_file):
""" define Tornado global variables """
define("port", default=port)
define("config_file", default=config_file)
def create_tracker_config(namespace):
# try to parse the config file if we find one
config_file = namespace["config_file"]
config = utils.parse_config_file(config_file)
if config is None:
Log.debug("Config file does not exists: %s" % config_file)
config = {STATEMGRS_KEY:[{}]}
# update the config if we have any flags
config_flags = ["type", "name", "rootpath", "tunnelhost", "hostport"]
config_to_update = config[STATEMGRS_KEY][0]
for flag in config_flags:
value = namespace.get(flag, None)
if value is not None:
config_to_update[flag] = value
return config
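# Illustrative override (flag values are made up): running with
#   --type zookeeper --hostport localhost:2181
# replaces the 'type' and 'hostport' keys of the first statemgr entry
# loaded from the config file.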
def main():
""" main """
# create the parser and parse the arguments
(parser, _) = create_parsers()
(args, remaining) = parser.parse_known_args()
if remaining == ['help']:
parser.print_help()
parser.exit()
elif remaining == ['version']:
common_config.print_build_info()
parser.exit()
elif remaining != []:
Log.error('Invalid subcommand')
sys.exit(1)
namespace = vars(args)
log.set_logging_level(namespace)
# set Tornado global option
define_options(namespace['port'], namespace['config_file'])
config = Config(create_tracker_config(namespace))
# create Tornado application
application = Application(config)
# pylint: disable=unused-argument
# SIGINT handler:
# 1. stop all the running zkstatemanager and filestatemanagers
# 2. stop the Tornado IO loop
def signal_handler(signum, frame):
# start a new line after ^C character because this looks nice
print('\n', end='')
application.stop()
tornado.ioloop.IOLoop.instance().stop()
# associate SIGINT and SIGTERM with a handler
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
Log.info("Running on port: %d", namespace['port'])
if namespace["config_file"]:
Log.info("Using config file: %s", namespace['config_file'])
Log.info("Using state manager:\n" + str(config))
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(namespace['port'])
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
py | 1a351a877491037a894ff8695de146afc8698e78 | from polymorphic.managers import PolymorphicManager
from greenbudget.app.budget.managers import (
ModelTemplateManager, ModelDuplicateManager)
class BudgetAccountGroupManager(
ModelDuplicateManager(ModelTemplateManager(PolymorphicManager))):
template_cls = 'group.TemplateAccountGroup'
class BudgetSubAccountGroupManager(
ModelDuplicateManager(ModelTemplateManager(PolymorphicManager))):
template_cls = 'group.TemplateSubAccountGroup'
class TemplateAccountGroupManager(ModelDuplicateManager(PolymorphicManager)):
pass
class TemplateSubAccountGroupManager(ModelDuplicateManager(PolymorphicManager)):
pass
|
py | 1a351ba3627f3159d3295ac7b509a7f55faef0e6 | import unittest
from module.submodule.functions import subtract
class TestModuleFunctions(unittest.TestCase):
def test_add(self):
self.assertEqual(subtract(1, 2), -1)
if __name__ == "__main__":
unittest.main()
|
py | 1a351cfcbed2fc7e33fcb04a241f7a56ca838b38 | # SPDX-License-Identifier: Apache-2.0
"""
tf2onnx.tf2onnx - rewrite tensorflow graph to onnx graph
"""
import collections
import sys
import traceback
import numpy as np
from onnx import onnx_pb
import tf2onnx
import tf2onnx.onnx_opset # pylint: disable=unused-import
import tf2onnx.tflite_handlers # pylint: disable=unused-import
import tf2onnx.custom_opsets # pylint: disable=unused-import
from tf2onnx.graph import Graph
from tf2onnx.rewriter import * # pylint: disable=wildcard-import
from tf2onnx.tflite_rewriters import * # pylint: disable=wildcard-import
from tf2onnx.late_rewriters import rewrite_channels_last
from tf2onnx.shape_inference import infer_shape
from tf2onnx.tf_loader import is_function, resolve_functions, set_function, clear_functions
from tf2onnx.tf_utils import tensorflow_to_onnx, get_tf_version, compute_const_folding_using_tf
from tf2onnx.tflite_utils import graphs_from_tflite
from tf2onnx.tfjs_utils import graphs_from_tfjs
from . import constants, logging, schemas, utils, handler
logger = logging.getLogger(__name__)
# pylint: disable=useless-return,broad-except,logging-not-lazy,unused-argument,missing-docstring
# pylint: disable=unused-variable
def fold_constants_using_tf(g, outputs_to_values):
ops = list(g.get_nodes())
# pylint: disable=too-many-nested-blocks
keep_looking = True
while keep_looking:
keep_looking = False
for idx, op in enumerate(ops):
if op.output and op.output[0] in outputs_to_values:
logger.info("folding node using tf type=%s, name=%s" % (op.type, op.name))
val = outputs_to_values[op.output[0]]
new_node_name = utils.make_name(op.name)
new_output_name = new_node_name
old_output_name = op.output[0]
old_node_name = op.name
logger.debug("create const node [%s] replacing [%s]", new_node_name, old_node_name)
ops[idx] = g.make_const(new_node_name, val)
logger.debug("replace old output [%s] with new output [%s]", old_output_name, new_output_name)
# need to re-write the consumers input name to use the const name
consumers = g.find_output_consumers(old_output_name)
if consumers:
for consumer in consumers:
g.replace_input(consumer, old_output_name, new_output_name)
# keep looking until there is nothing we can fold.
keep_looking = True
g.reset_nodes(ops)
def rewrite_constant_fold(g, ops):
"""
    We call tensorflow transform with constant folding but in some cases tensorflow does
    not fold all constants. Since there are a bunch of ops in onnx that use attributes where
    tensorflow has dynamic inputs, we badly want constant folding to work. For cases where
    tensorflow missed something, make another pass over the graph and fix what we care about.
"""
func_map = {
"Add": np.add,
"GreaterEqual": np.greater_equal,
"Cast": np.cast,
"ConcatV2": np.concatenate,
"Less": np.less,
"ListDiff": np.setdiff1d,
"Mul": np.multiply,
"Pack": np.stack,
"Range": np.arange,
"Sqrt": np.sqrt,
"Sub": np.subtract,
}
ops = list(ops)
# pylint: disable=too-many-nested-blocks
keep_looking = True
while keep_looking:
keep_looking = False
for idx, op in enumerate(ops):
func = func_map.get(op.type)
if func is None: continue
if set(op.output) & set(g.outputs): continue
try:
inputs = []
for node in op.inputs:
if not node.is_const():
break
inputs.append(node.get_tensor_value(as_list=False))
logger.debug("op name %s, %s, %s", op.name, len(op.input), len(inputs))
if inputs and len(op.input) == len(inputs):
logger.info("folding node type=%s, name=%s" % (op.type, op.name))
if op.type == "Cast":
dst = op.get_attr_int("to")
np_type = tf2onnx.utils.map_onnx_to_numpy_type(dst)
val = np.cast[np_type](*inputs)
elif op.type == "ConcatV2":
axis = inputs[-1]
values = inputs[:-1]
val = func(tuple(values), axis)
elif op.type == "ListDiff":
out_type = op.get_attr_int("out_idx")
np_type = tf2onnx.utils.map_onnx_to_numpy_type(out_type)
val = func(*inputs)
val = val.astype(np_type)
elif op.type in ["Pack"]:
# handle ops that need input array and axis
axis = op.get_attr_int("axis")
val = func(inputs, axis=axis)
elif op.type == "Range":
dtype = op.get_attr_int("Tidx")
np_type = tf2onnx.utils.map_onnx_to_numpy_type(dtype)
val = func(*inputs, dtype=np_type)
else:
val = func(*inputs)
new_node_name = utils.make_name(op.name)
new_output_name = new_node_name
old_output_name = op.output[0]
old_node_name = op.name
logger.debug("create const node [%s] replacing [%s]", new_node_name, old_node_name)
ops[idx] = g.make_const(new_node_name, val)
logger.debug("replace old output [%s] with new output [%s]", old_output_name, new_output_name)
# need to re-write the consumers input name to use the const name
consumers = g.find_output_consumers(old_output_name)
if consumers:
for consumer in consumers:
g.replace_input(consumer, old_output_name, new_output_name)
# keep looking until there is nothing we can fold.
# We keep the graph in topological order so if we folded,
# the result might help a following op.
keep_looking = True
except Exception as ex:
tb = traceback.format_exc() # pylint: disable=bare-except
logger.info("exception: %s, details: %s", ex, tb)
# ignore errors
# pylint: enable=too-many-nested-blocks
return ops
def rewrite_incomplete_type_support(g, ops, impacted_ops):
"""
    For ops that have incomplete type support, insert casts.
This is needed for some tensor ops in opset7 and for some ops in winml-rs5.
It is not helping performance but better than the model not working at all.
"""
ignored_input_index = {
"Tile": [1], # Tile's second input can only be int64
"Where": [0], # Where's first input is bool
}
new_ops = []
org_ops = list(ops)
for op in org_ops:
if op.type in impacted_ops:
cast_inserted = []
output_dtype = None
ignored_inputs = ignored_input_index.get(op.type)
# insert casts on inputs if the runtime only supports float
for i, input_node in enumerate(op.inputs):
if ignored_inputs and i in ignored_inputs:
continue
input_name = op.input[i]
dtype = g.get_dtype(input_name)
if dtype is None:
logger.warning("adding Cast for op %s (type is %s)' input: %s, dtype should not be None",
op.name, op.type, input_name)
if dtype != onnx_pb.TensorProto.FLOAT:
output_dtype = dtype
logger.debug("insert cast for node %s on input %s", op.name, input_name)
if input_node and input_node.type == "Cast" \
and len(g.find_output_consumers(input_node.output[0])) == 1:
input_node.set_attr("to", onnx_pb.TensorProto.FLOAT)
g.set_dtype(input_name, onnx_pb.TensorProto.FLOAT)
else:
cast_node = g.insert_new_node_on_input(op, "Cast", input_name,
to=onnx_pb.TensorProto.FLOAT)
g.set_dtype(cast_node.output[0], onnx_pb.TensorProto.FLOAT)
g.copy_shape(input_name, cast_node.output[0])
cast_inserted.append(cast_node)
if output_dtype:
# insert reverse cast if needed
for output_name in op.output:
name = utils.make_name(op.name)
logger.debug("insert cast back for node %s on output %s [dtype=%s]", op.name, output_name,
output_dtype)
output_cast = g.insert_new_node_on_output("Cast", output_name, name=name,
to=output_dtype)
g.set_dtype(output_cast.output[0], output_dtype)
g.copy_shape(output_name, output_cast.output[0])
cast_inserted.append(output_cast)
if cast_inserted:
new_ops.extend(cast_inserted)
new_ops.append(op)
return new_ops
def rewrite_incomplete_type_support_rs5(g, ops):
return rewrite_incomplete_type_support(g, ops, ["Unsqueeze", "Mul", "Concat", "Slice", "Transpose"])
def rewrite_incomplete_type_support_rs6(g, ops):
impacted_ops = [
"Div",
"IsNaN",
"Max",
"Min",
"ReduceSum",
"Slice",
"Split",
"Tile",
"Transpose",
"Where"
]
    # TODO: the cast-insertion logic has a bug: not all inputs of a node need
    # the cast; for example, Slice's input "starts" doesn't need it.
if g.opset == 10:
impacted_ops.remove("Slice")
return rewrite_incomplete_type_support(g, ops, impacted_ops)
def tensorflow_onnx_mapping(g, ops_mapping, initialized_tables=None, is_tflite=False, dequantize=False):
logger.verbose("Mapping TF node to ONNX node(s)")
mapped_op = collections.Counter()
unmapped_op = collections.Counter()
exceptions = []
if initialized_tables is None:
initialized_tables = {}
ops = list(g.get_nodes())
for node in ops:
logger.debug("Process node: %s\n%s", node.name, node.summary)
if node.need_skip():
logger.debug("explicitly skip node " + node.name)
continue
op = node.type
map_info = ops_mapping.get(op)
if map_info is None:
unmapped_op[op] += 1
if not is_tflite:
logger.error("Tensorflow op [%s: %s] is not supported", node.name, op)
continue
mapped_op[op] += 1
func, kwargs = map_info
if kwargs:
# if there is a tf_op/onnx_op key we'll map the old type to a new type
converted_op = kwargs.get("tf_op" if is_tflite else "onnx_op")
if converted_op:
# sometimes the handler wants to know what the old op name was
kwargs["tfl_op" if is_tflite else "tf_op"] = op
node.type = converted_op
body_graphs = node.get_body_graphs()
if body_graphs:
for attr, b_g in body_graphs.items():
logger.debug("start handling subgraph of %s's attribute %s", node.name, attr)
b_g.topological_sort(b_g.get_nodes())
# we assume only ONNX nodes have subgraph defined in pre-rewriters.
# that means, if we create node having subgraphs in this step, the
# created subgraphs' nodes won't be mapped.
m_ops, unm_ops, body_exceptions = tensorflow_onnx_mapping(b_g, ops_mapping)
mapped_op += m_ops
unmapped_op += unm_ops
# topological_sort on the body in case processing has changed the order
b_g.topological_sort(b_g.get_nodes())
exceptions.extend(body_exceptions)
logger.debug("finish handling subgraph of %s's attribute %s", node.name, attr)
try:
func(g, node, **kwargs, initialized_tables=initialized_tables, dequantize=dequantize)
if not is_tflite:
# tensorflow nodes must be converted in the next pass
node.skip_conversion = True
except Exception as ex:
try:
# If the graph is corrupt from the exception this can fail
summary = node.summary
except Exception:
summary = ""
logger.error("Failed to convert node %r (fct=%r)\n%r",
node.name, func, summary, exc_info=1)
exceptions.append(ex)
return mapped_op, unmapped_op, exceptions
def transpose_inputs(ctx, inputs_as_nchw):
"""Insert a transpose from NHWC to NCHW on model input on users request."""
ops = []
for node in ctx.get_nodes():
for idx, output_name in enumerate(node.output):
if output_name in inputs_as_nchw:
shape = ctx.get_shape(output_name)
if len(shape) != len(constants.NCHW_TO_NHWC):
logger.warning("transpose_input for %s: shape must be rank 4, ignored" % output_name)
ops.append(node)
continue
# insert transpose
op_name = utils.make_name(node.name)
transpose = ctx.insert_new_node_on_output("Transpose", output_name, name=op_name)
transpose.set_attr("perm", constants.NCHW_TO_NHWC)
ctx.copy_shape(output_name, transpose.output[0])
ctx.set_shape(output_name, np.array(shape)[constants.NHWC_TO_NCHW])
ops.append(transpose)
ops.append(node)
continue
ops.append(node)
ctx.reset_nodes(ops)
def topological_sort(g, continue_on_error):
ops = g.get_nodes()
if not continue_on_error:
g.topological_sort(ops)
else:
try:
g.topological_sort(ops)
except: # pylint: disable=bare-except
# if we continue on error, ignore graph cycles so we can report all missing ops
pass
def run_rewriters(g, funcs, continue_on_error):
"""Rewrite the original graph and body graphs of nodes"""
# NOTE(wayuanho):
# 1. we don't sort graph here, rewriter is expected to do it on its own.
# 2. the graph here may have circles, current topological_sort cannot handle it.
for func in funcs:
try:
ops = func(g, g.get_nodes())
g.reset_nodes(ops)
except Exception as ex:
type_, value_, traceback_ = sys.exc_info()
logger.error("rewriter %s: exception %s", func, ex)
ex_ext = traceback.format_exception(type_, value_, traceback_)
if continue_on_error:
logger.info(ex_ext)
else:
raise ex
if utils.is_debug_mode():
broken_outputs = g.check_integrity()
if broken_outputs:
logging.error(
"After rewriter %s, graph breaks at outputs %s",
func.__name__, broken_outputs
)
if g.contained_graphs:
for dict_val in g.contained_graphs.values():
for attr_name, b_g in dict_val.items():
run_rewriters(b_g, funcs, attr_name)
def process_tf_graph(tf_graph, continue_on_error=False, verbose=False, target=None,
opset=None, custom_op_handlers=None, custom_rewriter=None,
extra_opset=None, shape_override=None, inputs_as_nchw=None,
input_names=None, output_names=None, ignore_default=None, use_default=None,
is_subgraph=False, const_node_values=None, tensors_to_rename=None,
initialized_tables=None, tflite_path=None, dequantize=False, tfjs_path=None):
"""Convert tensorflow graph to onnx graph.
Args:
tf_graph: tensorflow graph
continue_on_error: if an op can't be processed (aka there is no mapping), continue
verbose: print summary stats (deprecated)
target: list of workarounds applied to help certain platforms
opset: the opset to be used (int, default is latest)
custom_op_handlers: dictionary of custom ops handlers
custom_rewriter: list of custom graph rewriters
        extra_opset: list of extra opsets, for example the opsets used by custom ops
shape_override: dict with inputs that override the shapes given by tensorflow
inputs_as_nchw: transpose inputs in list from nchw to nhwc
input_names: list of input node names in graph, input name format as node_name:port_id. Optional.
output_names: list of output node names in graph, format is node_name:port_id. Optional for tflite.
ignore_default: list of node names of PlaceholderWithDefault ops to change into Placeholder ops
use_default: list of node names of PlaceholderWithDefault ops to change into Identity ops using the default
const_node_values: a dict returned by compress_graph_def mapping node names to tensor values
tensors_to_rename: an optional dict (string->string) mapping tensor names to new names
initialized_tables: mapping from table shared_names to tuple of keys and values of table
        tflite_path: Path to a tflite file to convert. If used, pass None to tf_graph
        dequantize: for tflite models, remove quantization ops where possible
        tfjs_path: Path to a tfjs file to convert. If used, pass None to tf_graph
Return:
onnx graph
"""
# NOTE: process_parsed_graph and Graph are always given tensors post-rename.
# process_tf_graph (this function) gets tensors pre-rename.
if verbose:
logger.warning("Argument verbose for process_tf_graph is deprecated. Please use --verbose option instead.")
del verbose
opset = utils.find_opset(opset)
logger.info("Using tensorflow=%s, onnx=%s, tf2onnx=%s/%s",
get_tf_version(), utils.get_onnx_version(), tf2onnx.__version__, tf2onnx.version.git_version[:6])
logger.info("Using opset <onnx, %s>", opset)
if opset > schemas.get_max_supported_opset_version():
logger.warning("Currently installed onnx package %s is too low to support opset %s, "
"please upgrade onnx package to avoid potential conversion issue.",
utils.get_onnx_version(), opset)
clear_functions()
if inputs_as_nchw is None:
inputs_as_nchw = []
is_tflite = False
if tflite_path is not None:
main_g, subgraphs = graphs_from_tflite(tflite_path, input_names, output_names)
is_tflite = True
elif tfjs_path is not None:
main_g, subgraphs = graphs_from_tfjs(tfjs_path, input_names, output_names, shape_override,
ignore_default, use_default)
else:
main_g, subgraphs = graphs_from_tf(tf_graph, input_names, output_names, shape_override, const_node_values,
ignore_default, use_default)
for g in [main_g] + subgraphs:
g.set_config(target, opset, extra_opset)
g = process_graphs(main_g, subgraphs, custom_op_handlers, inputs_as_nchw, continue_on_error, custom_rewriter,
initialized_tables, tensors_to_rename, is_tflite, dequantize)
return g
def graphs_from_tf(tf_graph, input_names, output_names, shape_override=None, const_node_values=None,
ignore_default=None, use_default=None):
"""make tf2onnx internal subgraphs from the tensorflow subgraphs"""
if shape_override is None:
shape_override = {}
ordered_func = resolve_functions(tf_graph)
subgraphs = []
for func in ordered_func:
f_inputs_names = [t.name for t in func.inputs]
f_output_names = [t.name for t in func.outputs]
outputs_to_values, _ = compute_const_folding_using_tf(func, const_node_values, output_names)
onnx_nodes, _, _, output_shapes, dtypes, _ = \
tensorflow_to_onnx(func, shape_override, const_node_values, ignore_default, use_default)
fg = Graph(onnx_nodes, output_shapes, dtypes, input_names=f_inputs_names, output_names=f_output_names,
is_subgraph=True, graph_name=func.name)
fold_constants_using_tf(fg, outputs_to_values)
subgraphs.append(fg)
is_func = is_function(tf_graph)
if not is_func:
tf_graph = infer_shape(tf_graph, shape_override)
outputs_to_values, _ = compute_const_folding_using_tf(tf_graph, const_node_values, output_names)
onnx_nodes, _, _, output_shapes, dtypes, _ = \
tensorflow_to_onnx(tf_graph, shape_override, const_node_values, ignore_default, use_default)
utils.check_io(input_names, output_names, output_shapes.keys())
main_g = Graph(onnx_nodes, output_shapes, dtypes, input_names=input_names, output_names=output_names)
fold_constants_using_tf(main_g, outputs_to_values)
return main_g, subgraphs
def process_graphs(main_g, subgraphs, custom_op_handlers, inputs_as_nchw, continue_on_error, custom_rewriter,
initialized_tables, tensors_to_rename, is_tflite=False, dequantize=False):
if tensors_to_rename is not None:
main_g.rename_tensors(tensors_to_rename)
inputs_as_nchw = [tensors_to_rename.get(t, t) for t in inputs_as_nchw]
for g in subgraphs:
fg = process_parsed_graph(g, custom_op_handlers, inputs_as_nchw, continue_on_error, custom_rewriter,
initialized_tables, is_tflite, dequantize)
set_function(fg.graph_name, fg)
g = process_parsed_graph(main_g, custom_op_handlers, inputs_as_nchw, continue_on_error, custom_rewriter,
initialized_tables, is_tflite,
dequantize)
return g
def process_parsed_graph(g, custom_op_handlers, inputs_as_nchw, continue_on_error, custom_rewriter,
initialized_tables, is_tflite=False, dequantize=False):
op_cnt, attr_cnt = g.dump_node_statistics(include_attrs=True, include_subgraphs=False)
if is_tflite:
tfl_rewriters = []
if dequantize:
tfl_rewriters.append(rewrite_tfl_qdq)
tfl_rewriters.append(rewrite_tfl_scan_outputs)
tfl_rewriters.append(rewrite_tfl_select_zero)
tfl_rewriters.append(rewrite_tfl_rfft)
run_rewriters(g, tfl_rewriters, continue_on_error)
tfl_ops_mapping = handler.tfl_op.create_tfl_to_tf_mapping()
_, _, exceptions = tensorflow_onnx_mapping(g, tfl_ops_mapping, is_tflite=True, dequantize=False)
if exceptions and not continue_on_error:
raise exceptions[0]
# create ops mapping for the desired opsets
ops_mapping = handler.tf_op.create_mapping(g.opset, g.extra_opset)
# apply custom ops on top of the assembled opset. We can either complement the opset
# or override existing ops with a custom op.
if custom_op_handlers is not None:
# below is a bit tricky since there are a few api's:
# 1. the future way we want custom ops to be registered with the @tf_op decorator. Those handlers will be
# registered via the decorator on load of the module ... nothing is required here.
        # 2. the old custom op api: a dictionary of {name: (func, args[])}
        #    We deal with this by using a compat_handler that wraps the old handler with a new style handler.
        #    This is temporary to give people time to move to the new api; after tf2onnx-1.5 we want to remove this
custom_opset = {}
for k, v in custom_op_handlers.items():
# FIXME: remove this after tf2onnx-1.5
def compat_handler(ctx, node, **kwargs):
# wrap old handler
name = node.name
args = kwargs["args"]
func = kwargs["func"]
return func(ctx, node, name, args)
args = v[1]
kwargs = {"func": v[0]}
if args:
onnx_op = args[0]
kwargs["onnx_op"] = onnx_op
args = args[1:]
kwargs["args"] = args
new_handler = handler.tf_op(k,
domain=constants.TENSORFLOW_OPSET.domain,
kwargs=kwargs)
new_handler.register_compat_handler(compat_handler, 1)
custom_opset[k] = (compat_handler, kwargs)
ops_mapping.update(custom_opset)
if inputs_as_nchw:
transpose_inputs(g, inputs_as_nchw)
# pre-processing graph rewrites
# bi-directional re-writer should be placed after single directional re-writer
rewriters = [
# single directional
rewrite_constant_fold,
rewrite_quantize_and_dequantize,
rewrite_fused_ops,
rewrite_transpose,
rewrite_flatten,
rewrite_random_uniform,
rewrite_random_uniform_fold_const,
rewrite_random_normal,
rewrite_dropout,
rewrite_conv_dilations,
rewrite_eye,
rewrite_leakyrelu,
rewrite_thresholded_relu,
rewrite_conv2d_with_pad,
rewriter_lstm_tf2,
rewrite_gru_tf2,
rewrite_single_direction_lstm,
# bi-directional
rewrite_bi_direction_lstm,
rewrite_single_direction_gru,
rewrite_bi_direction_gru,
rewrite_custom_rnn_cell,
rewrite_generic_loop, rewrite_cond,
rewrite_biasadd_with_conv2d,
rewrite_layer_normalization,
rewrite_gemm,
rewrite_ragged_variant_shape,
]
if custom_rewriter is not None:
rewriters.extend(custom_rewriter)
run_rewriters(g, rewriters, continue_on_error)
# some nodes may already copied into inner Graph, so remove them from main Graph.
g.delete_unused_nodes(g.outputs)
topological_sort(g, continue_on_error)
mapped_op, unmapped_op, exceptions = \
tensorflow_onnx_mapping(g, ops_mapping, initialized_tables, dequantize=dequantize)
if unmapped_op:
logger.error("Unsupported ops: %s", unmapped_op)
if exceptions and not continue_on_error:
raise exceptions[0]
# post-processing rewriters
late_rewriters = []
if g.is_target(constants.TARGET_RS5):
late_rewriters.append(rewrite_incomplete_type_support_rs5)
if g.is_target(constants.TARGET_RS6):
late_rewriters.append(rewrite_incomplete_type_support_rs6)
if g.is_target(constants.TARGET_CHANNELS_LAST):
late_rewriters.append(rewrite_channels_last)
if late_rewriters:
run_rewriters(g, late_rewriters, continue_on_error)
# onnx requires topological sorting
topological_sort(g, continue_on_error)
g.update_proto()
logger.verbose(
"Summay Stats:\n"
"\ttensorflow ops: {}\n"
"\ttensorflow attr: {}\n"
"\tonnx mapped: {}\n"
"\tonnx unmapped: {}".format(op_cnt, attr_cnt, mapped_op, unmapped_op))
return g
def tf_optimize(input_names, output_names, graph_def):
"""optimize tensorflow graph. This is in tf_loader but some apps call this
so we proxy into tf_loader to keep them working."""
return tf2onnx.tf_loader.tf_optimize(input_names, output_names, graph_def)
|
py | 1a351d4d95533c4539fb25abf432ebe4581c4766 | import socket
import json
VALUE_TYPE_CONVERTER = {
'int': lambda v: int(v),
'float': lambda v: float(v),
'str': lambda v: str(v).strip(),
'boolean': lambda v: v.strip().lower() == 'true',
'json': lambda v: json.loads(v)
}
class Ok(object):
"""server ok response"""
def __str__(self):
return "Ok"
OK = Ok()
class SimpleSocketClient(object):
def __init__(self, host, port, buffer_size=2048, socket_lib=socket):
self.__buffer_size = buffer_size
self.__soc = socket_lib.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__soc.connect((host, port))
def send(self, msg):
self.__soc.sendall(msg.encode())
data = self.__soc.recv(self.__buffer_size)
return data.decode()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.__soc.close()
class RemoteError(Exception):
def __init__(self, origin, message):
self.message = message
self.origin = origin
super().__init__()
def __str__(self):
return "{}({} - {})".format(
self.__class__.__name__,
self.origin,
self.message.strip()
)
class UnknownResponse(Exception):
def __init__(self, response):
self.response = response
super().__init__()
def __str__(self):
return "{}('{}')".format(
self.__class__.__name__,
self.response
)
class ConnectionClosed(Exception):
""" connection close by the server """
class Client(SimpleSocketClient):
BACKGROUND_COMMAND = 'bg'
def __init__(self, host, port, buffer_size=2048, socket_lib=socket,
run_in_background=False):
super().__init__(host, port, buffer_size=buffer_size, socket_lib=socket_lib)
self.run_in_background = run_in_background
def send(self, msg):
command = msg
if self.run_in_background:
command = "{} {}".format(self.BACKGROUND_COMMAND, command)
response = super().send(command).strip()
if not response:
raise ConnectionClosed()
if response.startswith('error'):
_, origin, message = response.split(' ', 2)
raise RemoteError(origin, message)
elif response == 'ok':
return OK
elif response.startswith('value'):
_, val_type, value = response.split(' ', 2)
converter = VALUE_TYPE_CONVERTER.get(val_type)
if converter:
return converter(value)
raise UnknownResponse(response)
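# Illustrative usage (host/port are assumptions; requires a server speaking the
# line protocol handled above: "ok", "error <origin> <msg>", "value <type> <val>"):
# with Client('127.0.0.1', 9000) as c:
#     c.send('get temperature')  # -> 23.5 if the server replies "value float 23.5"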
|
py | 1a351d9dd04fa3ed954345ca9af473402a4124d5 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v8.errors',
marshal='google.ads.googleads.v8',
manifest={
'KeywordPlanAdGroupErrorEnum',
},
)
class KeywordPlanAdGroupErrorEnum(proto.Message):
r"""Container for enum describing possible errors from applying a
keyword plan ad group.
"""
class KeywordPlanAdGroupError(proto.Enum):
r"""Enum describing possible errors from applying a keyword plan
ad group.
"""
UNSPECIFIED = 0
UNKNOWN = 1
INVALID_NAME = 2
DUPLICATE_NAME = 3
__all__ = tuple(sorted(__protobuf__.manifest))
|
py | 1a351f69dab553675bb2e9d34ac94258ab0f313b | import os
ALLOWED_HOSTS = []
AUTH_PASSWORD_VALIDATORS = []
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LANGUAGE_CODE = 'en-us'
ROOT_URLCONF = 'demo_proj.urls'
SECRET_KEY = '!%^#zpaq92v$s#fb^8$i(u+_(ba$^t2$3u*uwhv*tgf1z19zzj'
SITE_ID = 1
STATIC_URL = '/static/'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
WSGI_APPLICATION = 'demo_proj.wsgi.application'
DATABASES = {'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}}
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.sites',
'bootstrap3',
'travel',
'demo_proj',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
TEMPLATES = [{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'travel.context_processors.search',
],
},
}]
|
py | 1a351f7ae0c8f45895402a943730abe678f15f3f | """A naive example illustrating techniques to help
embed PostGIS functionality.
This example was originally developed in the hopes that it would be
extrapolated into a comprehensive PostGIS integration layer. We are pleased
to announce that this has come to fruition as `GeoAlchemy
<http://www.geoalchemy.org/>`_.
The example illustrates:
* a DDL extension which allows CREATE/DROP to work in
conjunction with AddGeometryColumn/DropGeometryColumn
* a Geometry type, as well as a few subtypes, which
convert result row values to a GIS-aware object,
and also integrates with the DDL extension.
* a GIS-aware object which stores a raw geometry value
and provides a factory for functions such as AsText().
* an ORM comparator which can override standard column
methods on mapped objects to produce GIS operators.
* an attribute event listener that intercepts strings
and converts to GeomFromText().
* a standalone operator example.
The implementation is limited to only public, well known
and simple to use extension points.
E.g.::
print session.query(Road).filter(Road.road_geom.intersects(r1.road_geom)).all()
"""
|
py | 1a351fa5c4ad0647f5e47f61d98718d18c8f41de | #!/usr/bin/env python3
import argparse
import pono
import smt_switch as ss
from smt_switch.primops import And, BVAdd, BVSub, Equal, Ite
from smt_switch.sortkinds import BOOL, BV
def build_simple_alu_fts(s:ss.SmtSolver)->pono.Property:
'''
Creates a simple alu transition system
@param s - an SmtSolver from smt_switch
@return a property
'''
# Instantiate a functional transition system
fts = pono.FunctionalTransitionSystem(s)
# Create a bit-vector sorts
bvsort1 = s.make_sort(BV, 1)
bvsort8 = s.make_sort(BV, 8)
# Create the states
cfg = fts.make_statevar('cfg', bvsort1)
spec_res = fts.make_statevar('spec_res', bvsort8)
imp_res = fts.make_statevar('imp_res', bvsort8)
# Create the inputs
a = fts.make_inputvar('a', bvsort8)
b = fts.make_inputvar('b', bvsort8)
# Add logic for cfg
## Start at 0
fts.constrain_init(s.make_term(Equal, cfg, s.make_term(0, bvsort1)))
## Keeps the same value
fts.assign_next(cfg, cfg)
# Set logic for results
## they start equal
fts.constrain_init(s.make_term(Equal, spec_res, imp_res))
## spec_res is the sum: spec_res' = a + b
fts.assign_next(spec_res, s.make_term(BVAdd, a, b))
## imp_res depends on the configuration: imp_res' == (cfg == 0) ? a + b : a - b
fts.assign_next(imp_res, s.make_term(Ite,
s.make_term(Equal, cfg, s.make_term(0, bvsort1)),
s.make_term(BVAdd, a, b),
s.make_term(BVSub, a, b)))
# Create a property: spec_res == imp_res
prop = pono.Property(fts, s.make_term(Equal,
spec_res,
imp_res))
return prop
def k_induction_attempt():
# Create an smt_switch.SmtSolver with Boolector as the backend
# and no logging
s = ss.create_btor_solver(False)
s.set_opt('produce-models', 'true')
s.set_opt('incremental', 'true')
prop = build_simple_alu_fts(s)
fts = prop.transition_system
print('\n============== Running k-induction ==============')
print('INIT\n\t{}'.format(fts.init))
print('TRANS\n\t{}'.format(fts.trans))
print('PROP\n\t{}'.format(prop.prop))
# Create KInduction engine -- using same solver (in future can change the solver)
kind = pono.KInduction(prop, s)
res = kind.check_until(20)
print(res)
assert res is None, "Expecting k-induction not to prove property in 20 steps"
print("KInduction returned unknown")
def interpolant_attempt():
# Create solver and interpolator using MathSAT
# and no logging for the solver
s = ss.create_msat_solver(False)
itp = ss.create_msat_interpolator()
s.set_opt('produce-models', 'true')
s.set_opt('incremental', 'true')
prop = build_simple_alu_fts(s)
fts = prop.transition_system
print('\n============== Running Interpolant-based Model Checking ==============')
print('INIT\n\t{}'.format(fts.init))
print('TRANS\n\t{}'.format(fts.trans))
print('PROP\n\t{}'.format(prop.prop))
# Create InterpolantMC engine
itpmc = pono.InterpolantMC(prop, s, itp)
res = itpmc.check_until(20)
print(res)
assert res is True, "Expecting InterpolantMC to prove the property"
print("InterpolantMC returned true")
def k_induction_attempt_inductive():
# Create an smt_switch.SmtSolver with Boolector as the backend
# and no logging
s = ss.create_btor_solver(False)
s.set_opt('produce-models', 'true')
s.set_opt('incremental', 'true')
prop = build_simple_alu_fts(s)
fts = prop.transition_system
# store sets of states in a dictionary for accessing below
states = {str(sv):sv for sv in fts.statevars}
# make the property inductive manually
prop = pono.Property(fts,
s.make_term(And,
s.make_term(Equal,
states['cfg'],
s.make_term(0, s.make_sort(BV, 1))),
prop.prop))
print('\n============== Running k-induction on inductively strengthened property ==============')
print('INIT\n\t{}'.format(fts.init))
print('TRANS\n\t{}'.format(fts.trans))
print('PROP\n\t{}'.format(prop.prop))
# Create KInduction engine -- using same solver (in future can change the solver)
kind = pono.KInduction(prop, s)
res = kind.check_until(20)
print(res)
assert res is True, "Expecting k-induction to prove the inductively strengthened property"
print("KInduction returned true")
approaches = {
'kind': k_induction_attempt,
'interp': interpolant_attempt,
'kind-manual': k_induction_attempt_inductive
}
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Pono SimpleALU example')
parser.add_argument('approach', choices=['kind', 'interp', 'kind-manual'],
help='Select the approach: k-induction, interpolant-based,'
' or k-induction with a manually strengthened property')
parser.add_argument('-v', '--verbosity', type=int, default=0)
args = parser.parse_args()
pono.set_global_logger_verbosity(args.verbosity)
approaches[args.approach]()
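# Example invocation (the script filename is an assumption):
#   python simple_alu.py kind      # k-induction, expected to return unknown here
#   python simple_alu.py interp    # interpolant-based MC, proves the property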
|
py | 1a351fbff254e9a143246d40f365a5d1391a6801 | import pygame
from time import sleep
import emoji
print("{:=^70}".format("Bem-Vindo Ao Mini Jukebox"))
print("""
Escolha os Artistas ou bandas abaixo para tocar uma música:
(1) The Beatles
(2) Pink Floyd
(3) Tiny Tim
(4) Nirvana
(5) The Who
(6) Paul McCartney
""")
opUser = int(input("Digite um número da lista: "))
if opUser < 1 or opUser > 6:
print("Numeração inválida!")
else:
print(emoji.emojize("Processando...:hourglass_flowing_sand:", use_aliases=True))
sleep(4)
print("Divita-se com esse som!")
if opUser == 1:
pygame.init()
pygame.mixer.music.load("sound/The End.ogg")
pygame.mixer.music.play()
print(emoji.emojize("Reproduzindo The Beatles :guitar: :guitar: :violin: :drum:", use_aliases=True))
sleep(200)
pygame.event.wait()
elif opUser == 2:
pygame.init()
pygame.mixer.music.load("sound/Eclips.ogg")
pygame.mixer.music.play()
print(emoji.emojize("Reporduzindo Pink Floyd :pig2: :factory: :hammer:", use_aliases=True))
sleep(200)
pygame.event.wait()
elif opUser == 3:
pygame.init()
pygame.mixer.music.load("sound/Livi.ogg")
pygame.mixer.music.play()
print(emoji.emojize("Reproduzindo Tiny Tim :violin: :tophat: :microphone:", use_aliases=True))
sleep(200)
pygame.event.wait()
elif opUser == 4:
pygame.init()
pygame.mixer.music.load("sound/About A Gir.ogg")
pygame.mixer.music.play()
print(emoji.emojize("Reporduzindo Nirvana :guitar: :guitar: :drum: :microphone:", use_aliases=True))
sleep(200)
pygame.event.wait()
elif opUser == 5:
pygame.init()
pygame.mixer.music.load("sound/Boris The Spide.ogg")
pygame.mixer.music.play()
print(emoji.emojize("Reproduzindo The Who :spider: :guitar: :microphone:", use_aliases=True))
sleep(200)
pygame.event.wait()
else:
pygame.init()
pygame.mixer.music.load("sound/Smile Away.ogg")
pygame.mixer.music.play()
print(emoji.emojize("Reproduzindo Paul McCartiney :musical_score: :violin: :guitar: :drum: :musical_keyboard:", use_aliases=True))
sleep(200)
pygame.event.wait()
|
py | 1a35214aa28bd8f87483738662bfefd754840ad4 | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'moviemons.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
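# Typical invocations of this standard Django entry point:
#   python manage.py migrate
#   python manage.py runserver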
|
py | 1a35218094e7d2d30798a41bedf50e8969007cf6 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) Philipp Wagner. All rights reserved.
# Licensed under the BSD license. See LICENSE file in the project root for full license information.
try:
    import cPickle  # Python 2
except ImportError:
    import pickle as cPickle  # Python 3: cPickle was merged into pickle
def save_model(filename, model):
output = open(filename, 'wb')
cPickle.dump(model, output)
output.close()
def load_model(filename):
pkl_file = open(filename, 'rb')
res = cPickle.load(pkl_file)
pkl_file.close()
return res
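# Minimal round-trip sketch (assumes `model` is any picklable object):
#   save_model('model.pkl', model)
#   restored = load_model('model.pkl')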
|
py | 1a3521b67ae8fecf9df0de38141b6e9fd9b00abe | from typing import List
def convert_decimal_to_hex(dec_list: List[int]):
    hex_list: List[str] = list()  # hex() returns str; `hex` itself is a function, not a type
for dec in dec_list:
hex_list.append(hex(dec))
response_data = (
{
'isResult' : True,
'code' : 'SUCCESS',
'data' : {
'output': hex_list
}
}, 200
)
return response_data
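# Usage sketch for the converter above (the 200 mimics an HTTP status code):
#   body, status = convert_decimal_to_hex([10, 255])
#   body['data']['output']  ->  ['0xa', '0xff']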
def convert_hex_to_decimal(hex_list: List[str]):
dec_list: List[int] = list()
for hex_element in hex_list:
dec_list.append(int(hex_element, 16))
response_data = (
{
'isResult' : True,
'code' : 'SUCCESS',
'data' : {
'output': dec_list
}
}, 200
)
    return response_data
|
py | 1a3521e817f615ba065a76e172684ed006385610 | import websocket
import socket
try:
import thread
except ImportError:
import _thread as thread
import time
import json
import serial
import serial.tools.list_ports
import _thread
import logging
import datetime
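# Serial protocol as inferred from the class below (the device side is an
# assumption): b"x" stops acquisition, b"c" selects the 1-sample mode,
# b"C<n>"/b"W<n>" select the slower/faster modes, b"m" queries and b"t"
# toggles the rheodyne switch, and data rows arrive as
# "zyklus,zeitInMin,uv,counts\n".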
class MicroControllerConnection:
"""
Offers a connection to a microcontroller that can be used for data acquisition (and maybe at a later point changing parameters)
"""
def __init__(self, chromatogramid, samplerate, rheodyneswitch, portname,ws, baudrate=9600, timeout=2):
"""
:param portname:Name of the Port to be used. In Windows this could be COMX, on linux this could be a /dev/XXXX
The user running the software needs to have access to said device
:type portname:str
:param baudrate:the baud rate to be used to configure the serial port. defaults to 9600
:type baudrate:int
"""
self.chromatogram = chromatogramid
self.samplerate = samplerate
self.rheodyneswitch = rheodyneswitch
self.ws = ws
if self.isvalidportname(portname):
self.portname = portname
else:
raise ValueError("there is no serial port named " + portname)
self.baudrate = baudrate
self.timeout = timeout
self.stopacquisitionflag = False
self.prefix = self.portname.split('/')[-1]
self.prefixChannelName = True
self.serialInterface = serial.Serial(portname, baudrate, timeout=self.timeout)
def startacquisition(self):
self.stopacquisitionflag = False
self.thread = _thread.start_new_thread(self.acquisitionmethod,())
def stopacquisition(self):
self.stopacquisitionflag = True
@staticmethod
def isvalidportname(portname):
for tmp in serial.tools.list_ports.comports():
if tmp.device == portname:
return 1
return 0
def acquisitionmethod(self):
self.runNumber = 0
# Byte "x" senden, um moegliche Aktivitaeten zu stoppen
self.serialInterface.write(b"x")
# moegliche Sendereste abwarten
self.serialInterface.flushInput()
time.sleep(2)
#rest lesen und verwerfen
self.__throwRemainingBytesAway()
self.setRheodyneSwitch()
time.sleep(2)
self.__throwRemainingBytesAway()
self.__sendAquisitionMode()
time.sleep(2) # auf die ersten datensätze warten
self.__main_loop()
self.serialInterface.write(b"x")
def __sendAquisitionMode(self):
if self.samplerate == 1:
self.serialInterface.write(b"c")
        elif self.samplerate >= 1000:  # self.chromatogram holds an id, so use the stored sample rate
self.serialInterface.write(b"W" + bytes(str(self.samplerate), 'ascii'))
else:
self.serialInterface.write(b"C" + bytes(str(self.samplerate), 'ascii'))
def __throwRemainingBytesAway(self):
if self.serialInterface.inWaiting() > 0:
            # read and discard the rest
self.serialInterface.read(self.serialInterface.inWaiting())
def setRheodyneSwitch(self):
if self.rheodyneswitch:
self.serialInterface.write(b"m")
self.serialInterface.flushInput()
time.sleep(1.5)
res = self.serialInterface.read(1).decode("utf-8")
if res == "m":
self.serialInterface.write(b"t")
self.serialInterface.flushInput()
else:
self.serialInterface.write(b"m")
self.serialInterface.flushInput()
time.sleep(1.5)
res = self.serialInterface.read(1).decode("utf-8")
if res == "s":
self.serialInterface.write(b"t")
self.serialInterface.flushInput()
def setChromatogram(self,c):
self.chromatogram=c
def __main_loop(self):
buffer = ''
currentdatetime = 0
zyklusAlt = 1
while self.stopacquisitionflag == False:
inbuff = self.serialInterface.inWaiting()
            # python3 fix: without this pause data is read too quickly and
            # inWaiting() keeps returning 0
            if inbuff == 0:
                time.sleep(0.33)
            # end of python3 fix
            while '\n' not in buffer:
                buffer = buffer + self.serialInterface.read(1).decode("utf-8")
            if '\n' in buffer:  # exactly then one complete measurement row has been transmitted
zyklus, zeitInMin,uv, counts = buffer.split(',', 3)
if (int(zyklus) < int(zyklusAlt)):
zyklusAlt = 1
print("nextChromatogram")
self.runNumber = self.runNumber + 1
currentdatetime = 0
                self.ws.send(json.dumps(
{'type':'nextChromatogram',
'chromatogram':self.chromatogram,
'runnumber':self.runNumber,
'portname':self.portname}))
zyklusAlt = int(zyklus)
# do db save here
data1 = {'type': 'data'}
data1['chromatogram'] = self.chromatogram
data1['value'] = counts.strip()
data1['datetime'] = currentdatetime
data1['channelName'] = "Counter"
if self.prefixChannelName:
data1['channelName'] = self.prefix+data1['channelName']
            self.ws.send(json.dumps(data1))
data2 = {'type': 'data'}
data2['chromatogram'] = self.chromatogram
data2['value'] = uv
data2['datetime'] = currentdatetime
data2['channelName'] = "UV"
if self.prefixChannelName:
data2['channelName'] = self.prefix + data2['channelName']
            self.ws.send(json.dumps(data2))
buffer = ''
currentdatetime += 1
connections = {}
def on_message(ws, message):
print(message)
text_data_json = json.loads(message)
if text_data_json['type'] == 'registrationRequest':
ports = []
for i in serial.tools.list_ports.comports():
ports.append(i.device+" - "+i.description)
response = json.dumps(
{'type': 'registration',
'ports': ports,
'fqdn': socket.getfqdn()
}
)
print("sending response:")
print(response)
ws.send(response)
if text_data_json['type'] == 'registrationResult':
print("Registration Result:")
print(text_data_json['message'])
if text_data_json['type'] == 'hplc.stopMeasurement':
print("hplc.stopMeasurement")
if text_data_json['port'] in connections:
connections[text_data_json['port']].stopacquisition()
connections.pop(text_data_json['port'])
if text_data_json['type'] == 'hplc.startMeasurement':
print("hplc.startMeasurement")
portname = text_data_json['port']
baudrate = text_data_json['baudrate']
con = MicroControllerConnection(text_data_json['id'],
text_data_json['samplerate'],
text_data_json['rheodyneswitch'],
portname,
ws,
baudrate,
2)
connections[portname] = con
con.startacquisition()
if text_data_json['type'] == 'nextChromatogram':
print("nextChromatogram ID is")
print(text_data_json['message']['id'])
print(connections[text_data_json['message']['portname']].chromatogram)
connections[text_data_json['message']['portname']].setChromatogram(int(text_data_json['message']['id']))
print(connections[text_data_json['message']['portname']].chromatogram)
def on_error(ws, error):
print(error)
def on_close(ws):
print("### closed ###")
def on_open(ws):
def run(*args):
while True:
time.sleep(1)
#ws.close()
print("thread terminating...")
thread.start_new_thread(run, ())
if __name__ == "__main__":
while True:
time.sleep(1)
try:
websocket.enableTrace(False)
ws = websocket.WebSocketApp("ws://hplc.inc-forschung.kfa-juelich.de/ws/JuHPLC/ThinClient/",
on_message=on_message,
on_error=on_error,
on_close=on_close)
ws.on_open = on_open
ws.run_forever()
        except Exception as ex:
            print("exception, restarting connection to server")
            print(ex)
|
py | 1a35239ecf44c4d32b64e22fa58a75106238f2c6 | """
please visit https://github.com/niutool/xuebao
for more detail
"""
import logging
class Brain(object):
def __init__(self, config):
"""
Instantiates a new Brain object, which cross-references user
input with a list of modules. Note that the order of brain.modules
matters, as the Brain will return the first module
that accepts a given input.
"""
self._plugins = []
self._logger = logging.getLogger(__name__)
self._config = config
def add_plugin(self, plugin):
self._plugins.append(plugin)
self._plugins = sorted(
self._plugins, key=lambda p: p.get_priority(), reverse=True)
def get_plugins(self):
return self._plugins
def get_standard_phrases(self):
return []
def get_plugin_phrases(self):
"""
Gets phrases from all plugins.
Returns:
A list of phrases from all plugins.
"""
phrases = []
for plugin in self._plugins:
phrases.extend(plugin.get_phrases())
return sorted(list(set(phrases)))
def get_all_phrases(self):
"""
Gets a combined list consisting of standard phrases and plugin phrases.
Returns:
A list of phrases.
"""
return self.get_standard_phrases() + self.get_plugin_phrases()
def query(self, texts):
"""
Passes user input to the appropriate module, testing it against
each candidate module's isValid function.
Arguments:
        texts -- candidate phrases of user input, typically speech
        transcriptions, to be parsed by a module
        Returns:
        A tuple (plugin, text) of the module that can handle the input
        and the accepted phrase
"""
for plugin in self._plugins:
for text in texts:
if plugin.is_valid(text):
self._logger.debug("'%s' is a valid phrase for module " +
"'%s'", text, plugin.info.name)
return (plugin, text)
self._logger.debug("No module was able to handle any of these " +
"phrases: %r", texts)
return (None, None)
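# Usage sketch (EchoPlugin is hypothetical; any object exposing
# get_priority(), get_phrases() and is_valid() works):
#   brain = Brain(config={})
#   brain.add_plugin(EchoPlugin())
#   plugin, text = brain.query(["hello", "echo this"])
#   if plugin is not None:
#       print("handled:", text)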
|
py | 1a3523aff8ce81bb55377b980d7ecbaebde032e5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from utilities.models import *
from honeycomb import *
# Explicit imports for the names used below (pd, dcc, html, dt); depending on
# the project layout these may also be re-exported by the star imports above.
import pandas as pd
import dash_core_components as dcc
import dash_html_components as html
import dash_table_experiments as dt
@db_session
def load_statistics(statistic):
"""This function takes a string as parameter and returns a certain value, which is then displayed in the statistics
bar on the url-settings page.
"""
if statistic == 'total':
return select(p for p in Url).count()
elif statistic == 'scanned':
total = select(p for p in Url).count()
if total != 0:
scanned = select(p for p in Url if p.date_scanned is not None).count()
percentage = int(scanned/total*100)
else:
percentage = 0
return '{}%'.format(percentage)
elif statistic == 'scraped':
total = select(p for p in Url).count()
if total != 0:
scraped = select(p for p in Url if p.date_scraped is not None).count()
percentage = int(scraped/total*100)
else:
percentage = 0
return '{}%'.format(percentage)
# Creating a dataframe and filling it with one row: No data loaded.
df = pd.DataFrame(columns=['URL',
'Date Added',
'Date Scan',
'Date Scrape',
'Priority Scrape',
'Priority Scan'])
df = df.append({'URL': 'No data loaded'}, ignore_index=True)
# Defining the lay-out of this page.
layout = html.Div([
html.H3('URL Settings',
style={'text-align': 'center'}),
html.P('''On this page, you are able to add URLs to the database which will automatically receive
a priority flag. The statistics are refreshed every 30 seconds.''',
style={'width': 380,
'marginLeft': 'auto',
'marginRight': 'auto',
'textAlign': 'center',
'marginBottom': 10}),
html.Div([
html.Div([
html.Div(children=load_statistics('total'),
id='UrlStatisticsBox1',
className='statisticsBox'),
html.Div(children='Total',
className='title'),
html.Div(children='Amount of URLs in the database',
className='description')
], className='statisticsWrapper'),
html.Div([
html.Div(children=load_statistics('scanned'),
className='statisticsBox',
id='UrlStatisticsBox2'),
html.Div(children='Scanned',
className='title'),
html.Div(children='Percentage of scanned URLs',
className='description')
], className='statisticsWrapper'),
html.Div([
html.Div(children=load_statistics('scraped'),
className='statisticsBox',
id='UrlStatisticsBox3'),
html.Div(children='Scraped',
className='title'),
html.Div(children='Percentage of scraped URLs',
className='description')
], className='statisticsWrapper'),
html.Button('Refresh statistics',
id='refresh-url-statistics',
className='refresh_button')
], className='statisticsRow'),
html.Button('Load table',
id='reload-button',
style={'marginLeft': 20,
'float': 'right'}),
html.Div([
dcc.Input(id='input-box',
type='text',
style={'width': 480},
placeholder='URL which need to be added to the database.'),
html.Button('Submit',
id='urlsubmit',
style={'marginLeft': 20}),
html.Br(),
html.Br(),
html.Div(id='output-container-button')
]),
html.Br(),
html.Div(
dt.DataTable(
rows=df.to_dict('records'),
sortable=True,
id='url-table')
),
])
|
py | 1a3524115e2d602d260979a5db518109bbcb4e0a | # (C) 2015 by Mareike Picklum ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import argparse
import sys
from dnutils import logs
from prac.core.base import PRAC
from prac.core.inference import PRACInference
from prac.gui import PRACQueryGUI, DEFAULT_CONFIG
from prac.pracutils.utils import prac_heading
from pracmln.mln.util import headline
from pracmln.utils.project import PRACMLNConfig
logger = logs.getlogger(__name__)
try:
from pymongo import MongoClient
except ImportError:
logger.warning('MongoDB modules cannot be used.')
def are_requirements_set_to_load_module(module_name):
if module_name == 'role_look_up' or module_name == 'complex_achieved_by':
if 'pymongo' in sys.modules:
client = MongoClient()
try:
database_name_list = client.database_names()
if 'prac' in database_name_list:
database = client.prac
collections = database.collection_names()
if module_name == 'role_look_up':
if 'howtos' in collections:
return True
else:
print('"Role look up" module needs a "Frames" collection.')
return False
elif module_name == 'complex_achieved_by':
if 'howtos' in collections:
return True
else:
print('"Complex achieved by module" needs a "Instructions" collection.')
return False
else:
print('No PRAC database is stored at local MongoDB server instance.')
return False
        except Exception:
print('No local MongoDB server instance is running.')
return False
#IsCollection available
else:
return False
return True
def main():
logger.level = logs.DEBUG
usage = 'PRAC Query Tool'
parser = argparse.ArgumentParser(description=usage)
parser.add_argument("instruction", help="The instruction.")
parser.add_argument("-i", "--interactive", dest="interactive", default=False, action='store_true', help="Starts PRAC inference with an interactive GUI tool.")
parser.add_argument("-v", "--verbose", dest="verbose", default=1, type=int, action="store", help="Set verbosity level {0..3}. Default is 1.")
args = parser.parse_args()
opts_ = vars(args)
sentences = args.instruction
prac = PRAC()
prac.verbose = args.verbose
conf = PRACMLNConfig(DEFAULT_CONFIG)
if args.interactive: # use the GUI
from tkinter import Tk
root = Tk()
# in case we have natural-language parameters, parse them
infer = PRACInference(prac, sentences)
if len(sentences) > 0:
# module = prac.module('nl_parsing')
# prac.run(infer, module)
n = infer.runstep()
# print parsing result
for odb in n.outdbs:
odb.write()
# print input sentence
print(n.nlinstr())
#Started control structure handling
'''
cs_recognition = prac.module('cs_recognition')
prac.run(inference, cs_recognition)
dbs = inference.inference_steps[-1].output_dbs
dbs_ = []
for db in dbs:
dbs_.extend(parser.extract_multiple_action_cores(db))
inference.inference_steps[-1].output_dbs = dbs_
'''
        # argparse returns a Namespace, which is not indexable; no directory
        # argument is parsed here, so pass None explicitly
        app = PRACQueryGUI(root, infer.prac, n, conf, directory=None)
root.mainloop()
exit(0)
# regular PRAC pipeline
infer = PRACInference(prac, sentences)
infer.run()
print(headline('inference results'))
print('instructions:')
for i in infer.root:
print(i)
frames = []
for step in infer.steps():
print(step.frame)
print(prac_heading('cram plans', color='blue'))
for step in infer.steps():
if hasattr(step, 'plan'):
print(step.plan)
# infer.write()
exit(0)
if __name__ == '__main__':
main()
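# Example invocations (script name and instruction are illustrative):
#   python pracquery.py "Flip the pancake." -v 2
#   python pracquery.py "Flip the pancake." --interactive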
|
py | 1a352483ba2a3d9855649921987fbaf3c4fbef28 | class Pomodorer:
"""Clase que representa la interfaz de comunicacion de un sistema pomodoro"""
def __init__(self, event_system):
self._report = [[]]
self.event_system = event_system
def run_pomodoro(self):
pass
def get_report(self):
agg = []
for series in self._report:
            agg += series  # the original bare expression `agg+series` discarded its result
return agg
def reset_series(self):
        self._report = self._report + [[]]  # start a new, empty series (concatenating [] was a no-op)
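# Bookkeeping sketch (assumption: callers append records to the current
# series via p._report[-1].append(...)):
#   p = Pomodorer(event_system=None)
#   p._report[-1].append('pomo-1')
#   p.reset_series()
#   p._report[-1].append('pomo-2')
#   p.get_report()  ->  ['pomo-1', 'pomo-2']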
def main():
    print('Tests not implemented')
if __name__ == '__main__':
main()
|
py | 1a35265715780db2de9d3b610b475fee45f565dd | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 23 10:56:33 2018
@author: barnabasnomo
"""
import numpy as np
import random
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.autograd as autograd
from torch.autograd import Variable
# Neural Network architecture
class Network(nn.Module):
def __init__(self, input_size, nb_action):
super(Network, self).__init__()
self.input_size = input_size
self.nb_action = nb_action
self.fc1 = nn.Linear(input_size, 30)
self.fc2 = nn.Linear(30, nb_action)
def forward(self, state):
x = F.relu(self.fc1(state))
q_values = self.fc2(x)
return q_values
# Experience Replay
class ReplayMemory(object):
def __init__(self, capacity):
self.capacity = capacity
self.memory = []
# Append events to memory's capacity
def push(self, event):
self.memory.append(event)
if len(self.memory) > self.capacity:
del self.memory[0]
def sample(self, batch_size):
samples = zip(* random.sample(self.memory, batch_size))
return map(lambda x: Variable(torch.cat(x, 0)), samples)
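# Sampling sketch: push() stores transition tuples and sample() re-batches
# them column-wise; e.g. for (state, next_state, action, reward) events:
#   batch_state, batch_next, batch_action, batch_reward = memory.sample(32)
# (names are illustrative; each element is a concatenated torch Variable)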
# Create Deep Q Learning Network
class Dqn():
def __init__(self, input_size, nb_action, gamma):
self.gamma = gamma
self.reward_window = []
self.model = Network(input_size, nb_action)
self.memory = ReplayMemory(100000)
self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
self.last_state = torch.Tensor(input_size).unsqueeze(0)
self.last_action = 0
self.last_reward = 0
|
py | 1a3526e548f9fac1bbc1f30d54ac2190ea5492df | import numpy as np
import openpnm as op
import openpnm.models.physics as pm
class MeniscusTest:
def setup_class(self):
np.random.seed(1)
self.net = op.network.Cubic(shape=[5, 1, 5], spacing=5e-5)
self.geo = op.geometry.SpheresAndCylinders(network=self.net,
pores=self.net.pores(),
throats=self.net.throats())
self.phase = op.phases.Water(network=self.net)
self.phys = op.physics.Standard(network=self.net,
phase=self.phase,
geometry=self.geo)
def test_toroidal_touch(self):
phys = self.phys
r_tor = 1e-6
self.geo["throat.touch_length"] = 2e-6
phys.add_model(propname="throat.tor_max",
model=pm.meniscus.purcell,
mode="max",
r_toroid=r_tor,)
phys.add_model(propname="throat.tor_touch",
model=pm.meniscus.purcell,
mode="touch",
r_toroid=r_tor,)
assert np.any(phys["throat.tor_touch"] < phys["throat.tor_max"])
def test_sinusoidal_touch(self):
phys = self.phys
self.geo["throat.amplitude"] = 5e-6
self.geo["throat.touch_length"] = 1e-6
phys.add_model(propname="throat.sin_pressure_max",
model=pm.meniscus.sinusoidal, mode="max")
phys.add_model(propname="throat.sin_pressure_touch",
model=pm.meniscus.sinusoidal,
mode="touch")
h = phys.project.check_data_health(phys)
for check in h.values():
if len(check) > 0:
assert 1 == 2
assert np.any(
(phys["throat.sin_pressure_touch"] < phys["throat.sin_pressure_max"])
)
def test_sinusoidal(self):
phys = self.phys
self.geo["throat.amplitude"] = 5e-6
phys.add_model(propname="throat.sin_pressure",
model=pm.meniscus.sinusoidal,
mode="max")
phys.add_model(propname="throat.sin_meniscus",
model=pm.meniscus.sinusoidal,
mode="men",
target_Pc=5000)
h = phys.project.check_data_health(phys)
for check in h.values():
if len(check) > 0:
assert 1 == 2
def test_toroidal(self):
phys = self.phys
r_tor = 1e-6
phys.add_model(propname="throat.purcell_pressure",
model=pm.capillary_pressure.purcell,
r_toroid=r_tor)
phys.add_model(propname="throat.tor_pressure",
model=pm.meniscus.purcell,
mode="max",
r_toroid=r_tor,
num_points=1000)
phys.add_model(propname="throat.tor_meniscus",
model=pm.meniscus.purcell,
mode="men",
r_toroid=r_tor,
target_Pc=5000)
a = np.around(phys["throat.purcell_pressure"], 10)
b = np.around(phys["throat.tor_pressure"], 10)
assert np.allclose(a, b)
h = phys.project.check_data_health(phys)
for check in h.values():
if len(check) > 0:
assert 1 == 2
def test_general_toroidal(self):
phys = self.phys
r_tor = 1e-6
phys.add_model(propname="throat.purcell_pressure",
model=pm.capillary_pressure.purcell,
r_toroid=r_tor)
phys["throat.scale_a"] = r_tor
phys["throat.scale_b"] = r_tor
phys.add_model(propname="throat.general_pressure",
model=pm.meniscus.general_toroidal,
mode="max",
num_points=1000)
a = np.around(phys["throat.purcell_pressure"], 10)
b = np.around(phys["throat.general_pressure"], 10)
assert np.allclose(a, b)
h = phys.project.check_data_health(phys)
for check in h.values():
if len(check) > 0:
assert 1 == 2
def test_exceptions(self):
phys = self.phys
r_tor = 1e-6
phys["throat.scale_a"] = r_tor
phys["throat.scale_b"] = r_tor
phys.add_model(propname="throat.elliptical_pressure",
model=pm.meniscus.general_toroidal,
mode="max",
profile_equation="elliptical",
num_points=1000)
phys.add_model(propname="throat.exception_pressure",
model=pm.meniscus.general_toroidal,
mode="max",
profile_equation="scooby-doo",
num_points=1000)
a = np.around(phys["throat.elliptical_pressure"], 10)
b = np.around(phys["throat.exception_pressure"], 10)
assert np.allclose(a, b)
phys.add_model(propname="throat.no_target_pressure",
model=pm.meniscus.general_toroidal,
mode="men",
num_points=1000)
phys.add_model(propname="throat.small_target_pressure",
model=pm.meniscus.general_toroidal,
mode="men",
target_Pc=1.0e-7,
num_points=1000)
a = np.around(phys["throat.no_target_pressure.radius"], 10)
b = np.around(phys["throat.small_target_pressure.radius"], 10)
assert np.allclose(a, b)
h = phys.project.check_data_health(phys)
for check in h.values():
if len(check) > 0:
assert 1 == 2
if __name__ == "__main__":
t = MeniscusTest()
self = t
t.setup_class()
for item in t.__dir__():
if item.startswith("test"):
print("running test: " + item)
t.__getattribute__(item)()
|
py | 1a3527c5f3e942ccbfeedd2216a0f945076b89a2 | #!/usr/bin/env python3
import numpy as np
import tensorflow as tf
import morpho_dataset
class Network:
def __init__(self, threads, seed=42):
# Create an empty graph and a session
graph = tf.Graph()
graph.seed = seed
self.session = tf.Session(graph=graph, config=tf.ConfigProto(inter_op_parallelism_threads=threads,
intra_op_parallelism_threads=threads))
def construct(self, args, source_chars, target_chars, bow, eow):
with self.session.graph.as_default():
if args.recodex:
tf.get_variable_scope().set_initializer(tf.glorot_uniform_initializer(seed=42))
# Inputs
self.sentence_lens = tf.placeholder(tf.int32, [None], name="sentence_lens")
self.source_ids = tf.placeholder(tf.int32, [None, None], name="source_ids")
self.source_seqs = tf.placeholder(tf.int32, [None, None], name="source_seqs")
self.source_seq_lens = tf.placeholder(tf.int32, [None], name="source_seq_lens")
self.target_ids = tf.placeholder(tf.int32, [None, None], name="target_ids")
self.target_seqs = tf.placeholder(tf.int32, [None, None], name="target_seqs")
self.target_seq_lens = tf.placeholder(tf.int32, [None], name="target_seq_lens")
# Append EOW after target_seqs
target_seqs = tf.reverse_sequence(self.target_seqs, self.target_seq_lens, 1)
target_seqs = tf.pad(target_seqs, [[0, 0], [1, 0]], constant_values=eow)
target_seq_lens = self.target_seq_lens + 1
target_seqs = tf.reverse_sequence(target_seqs, target_seq_lens, 1)
# Encoder
# Generate source embeddings for source chars, of shape [source_chars, args.char_dim].
source_embeddings = tf.get_variable("source_embeddings", [source_chars, args.char_dim])
# Embed the self.source_seqs using the source embeddings.
embedded_source_seqs = tf.nn.embedding_lookup(source_embeddings, self.source_seqs)
# Using a GRU with dimension args.rnn_dim, process the embedded self.source_seqs
# using forward RNN and store the resulting states into `source_states`.
__, source_states = tf.nn.dynamic_rnn(tf.nn.rnn_cell.GRUCell(args.rnn_dim),
embedded_source_seqs,
sequence_length=self.source_seq_lens,
dtype=tf.float32)
# Index the unique words using self.source_ids and self.target_id
sentence_mask = tf.sequence_mask(self.sentence_lens)
source_states = tf.boolean_mask(tf.nn.embedding_lookup(source_states, self.source_ids), sentence_mask)
source_lens = tf.boolean_mask(tf.nn.embedding_lookup(self.source_seq_lens, self.source_ids), sentence_mask)
target_seqs = tf.boolean_mask(tf.nn.embedding_lookup(target_seqs, self.target_ids), sentence_mask)
target_lens = tf.boolean_mask(tf.nn.embedding_lookup(target_seq_lens, self.target_ids), sentence_mask)
# Decoder
# Generate target embeddings for target chars, of shape [target_chars, args.char_dim].
target_embeddings = tf.get_variable("target_embeddings", [target_chars, args.char_dim])
# Embed the target_seqs using the target embeddings.
embedded_target_seqs = tf.nn.embedding_lookup(target_embeddings, target_seqs)
# Generate a decoder GRU with dimension args.rnn_dim.
decoder_rnn = tf.nn.rnn_cell.GRUCell(args.rnn_dim)
# Create a `decoder_layer` -- a fully connected layer with
# target_chars neurons used in the decoder to classify into target characters.
decoder_layer = tf.layers.Dense(target_chars)
# The DecoderTraining will be used during training. It will output logits for each
# target character.
class DecoderTraining(tf.contrib.seq2seq.Decoder):
@property
def batch_size(self): return tf.shape(source_states)[0] # Return size of the batch, using for example source_states size
@property
def output_dtype(self): return tf.float32 # Type for logits of target characters
@property
def output_size(self): return target_chars # Length of logits for every output
def initialize(self, name=None):
finished = tf.less_equal(target_lens, 0) # False if target_lens > 0, True otherwise
states = source_states # Initial decoder state to use
inputs = tf.nn.embedding_lookup(target_embeddings, tf.fill([self.batch_size], bow)) # embedded BOW characters of shape [self.batch_size]. You can use
# tf.fill to generate BOWs of appropriate size.
return finished, inputs, states
def step(self, time, inputs, states, name=None):
outputs, states = decoder_rnn(inputs, states) # Run the decoder GRU cell using inputs and states.
outputs = decoder_layer(outputs) # Apply the decoder_layer on outputs.
next_input = embedded_target_seqs[:, time] # Next input are words with index `time` in target_embedded.
finished = tf.less_equal(target_lens, time + 1) # False if target_lens > time + 1, True otherwise.
return outputs, states, next_input, finished
output_layer, _, _ = tf.contrib.seq2seq.dynamic_decode(DecoderTraining())
self.predictions_training = tf.argmax(output_layer, axis=2, output_type=tf.int32)
# The DecoderPrediction will be used during prediction. It will
# directly output the predicted target characters.
class DecoderPrediction(tf.contrib.seq2seq.Decoder):
@property
def batch_size(self): return tf.shape(source_states)[0] # Return size of the batch, using for example source_states size
@property
def output_dtype(self): return tf.int32 # Type for predicted target characters
@property
def output_size(self): return 1 # Will return just one output
def initialize(self, name=None):
finished = tf.fill([self.batch_size], False) # False of shape [self.batch_size].
states = source_states # Initial decoder state to use.
inputs = tf.nn.embedding_lookup(target_embeddings, tf.fill([self.batch_size], bow)) # embedded BOW characters of shape [self.batch_size]. You can use
# tf.fill to generate BOWs of appropriate size.
return finished, inputs, states
def step(self, time, inputs, states, name=None):
outputs, states = decoder_rnn(inputs, states) # Run the decoder GRU cell using inputs and states.
outputs = decoder_layer(outputs) # Apply the decoder_layer on outputs.
outputs = tf.argmax(outputs, output_type=tf.int32, axis=1) # Use tf.argmax to choose most probable class (supply parameter `output_type=tf.int32`).
next_input = tf.nn.embedding_lookup(target_embeddings, outputs) # Embed `outputs` using target_embeddings
finished = tf.equal(outputs, eow) # True where outputs==eow, False otherwise
return outputs, states, next_input, finished
self.predictions, _, self.prediction_lens = tf.contrib.seq2seq.dynamic_decode(
DecoderPrediction(), maximum_iterations=tf.reduce_max(source_lens) + 10)
# Training
weights = tf.sequence_mask(target_lens, dtype=tf.float32)
loss = tf.losses.sparse_softmax_cross_entropy(target_seqs, output_layer, weights=weights)
global_step = tf.train.create_global_step()
self.training = tf.train.AdamOptimizer().minimize(loss, global_step=global_step, name="training")
# Summaries
accuracy_training = tf.reduce_all(tf.logical_or(
tf.equal(self.predictions_training, target_seqs),
tf.logical_not(tf.sequence_mask(target_lens))), axis=1)
self.current_accuracy_training, self.update_accuracy_training = tf.metrics.mean(accuracy_training)
minimum_length = tf.minimum(tf.shape(self.predictions)[1], tf.shape(target_seqs)[1])
accuracy = tf.logical_and(
tf.equal(self.prediction_lens, target_lens),
tf.reduce_all(tf.logical_or(
tf.equal(self.predictions[:, :minimum_length], target_seqs[:, :minimum_length]),
tf.logical_not(tf.sequence_mask(target_lens, maxlen=minimum_length))), axis=1))
self.current_accuracy, self.update_accuracy = tf.metrics.mean(accuracy)
self.current_loss, self.update_loss = tf.metrics.mean(loss, weights=tf.reduce_sum(weights))
self.reset_metrics = tf.variables_initializer(tf.get_collection(tf.GraphKeys.METRIC_VARIABLES))
summary_writer = tf.contrib.summary.create_file_writer(args.logdir, flush_millis=10 * 1000)
self.summaries = {}
with summary_writer.as_default(), tf.contrib.summary.record_summaries_every_n_global_steps(10):
self.summaries["train"] = [tf.contrib.summary.scalar("train/loss", self.update_loss),
tf.contrib.summary.scalar("train/accuracy", self.update_accuracy_training)]
with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():
for dataset in ["dev", "test"]:
self.summaries[dataset] = [tf.contrib.summary.scalar(dataset + "/loss", self.current_loss),
tf.contrib.summary.scalar(dataset + "/accuracy", self.current_accuracy)]
# Initialize variables
self.session.run(tf.global_variables_initializer())
with summary_writer.as_default():
tf.contrib.summary.initialize(session=self.session, graph=self.session.graph)
def train_epoch(self, train, batch_size):
import sys
while not train.epoch_finished():
sentence_lens, _, charseq_ids, charseqs, charseq_lens = train.next_batch(batch_size,
including_charseqs=True)
self.session.run(self.reset_metrics)
predictions, _, _ = self.session.run(
[self.predictions_training, self.training, self.summaries["train"]],
{self.sentence_lens: sentence_lens,
self.source_ids: charseq_ids[train.FORMS], self.target_ids: charseq_ids[train.LEMMAS],
self.source_seqs: charseqs[train.FORMS], self.target_seqs: charseqs[train.LEMMAS],
self.source_seq_lens: charseq_lens[train.FORMS],
self.target_seq_lens: charseq_lens[train.LEMMAS]})
form, gold_lemma, system_lemma = "", "", ""
for i in range(charseq_lens[train.FORMS][0]):
form += train.factors[train.FORMS].alphabet[charseqs[train.FORMS][0][i]]
for i in range(charseq_lens[train.LEMMAS][0]):
gold_lemma += train.factors[train.LEMMAS].alphabet[charseqs[train.LEMMAS][0][i]]
system_lemma += train.factors[train.LEMMAS].alphabet[predictions[0][i]]
print("Gold form: {}, gold lemma: {}, predicted lemma: {}".format(form, gold_lemma, system_lemma),
file=sys.stderr)
def evaluate(self, dataset_name, dataset, batch_size):
self.session.run(self.reset_metrics)
while not dataset.epoch_finished():
sentence_lens, _, charseq_ids, charseqs, charseq_lens = dataset.next_batch(batch_size,
including_charseqs=True)
self.session.run([self.update_accuracy, self.update_loss],
{self.sentence_lens: sentence_lens,
                              self.source_ids: charseq_ids[dataset.FORMS], self.target_ids: charseq_ids[dataset.LEMMAS],
                              self.source_seqs: charseqs[dataset.FORMS], self.target_seqs: charseqs[dataset.LEMMAS],
                              self.source_seq_lens: charseq_lens[dataset.FORMS],
                              self.target_seq_lens: charseq_lens[dataset.LEMMAS]})
return self.session.run([self.current_accuracy, self.summaries[dataset_name]])[0]
if __name__ == "__main__":
import argparse
import datetime
import os
import re
# Fix random seed
np.random.seed(42)
# Parse arguments
parser = argparse.ArgumentParser()
parser.add_argument("--batch_size", default=10, type=int, help="Batch size.")
parser.add_argument("--char_dim", default=64, type=int, help="Character embedding dimension.")
parser.add_argument("--epochs", default=10, type=int, help="Number of epochs.")
parser.add_argument("--recodex", default=False, action="store_true", help="ReCodEx mode.")
parser.add_argument("--rnn_dim", default=64, type=int, help="Dimension of the encoder and the decoder.")
parser.add_argument("--threads", default=1, type=int, help="Maximum number of threads to use.")
args = parser.parse_args()
# Create logdir name
args.logdir = "logs/{}-{}-{}".format(
os.path.basename(__file__),
datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S"),
",".join(("{}={}".format(re.sub("(.)[^_]*_?", r"\1", key), value) for key, value in sorted(vars(args).items())))
)
if not os.path.exists("logs"): os.mkdir("logs") # TF 1.6 will do this by itself
# Load the data
train = morpho_dataset.MorphoDataset("czech-cac-train.txt", max_sentences=5000)
dev = morpho_dataset.MorphoDataset("czech-cac-dev.txt", train=train, shuffle_batches=False)
# Construct the network
network = Network(threads=args.threads)
network.construct(args, len(train.factors[train.FORMS].alphabet), len(train.factors[train.LEMMAS].alphabet),
train.factors[train.LEMMAS].alphabet_map["<bow>"],
train.factors[train.LEMMAS].alphabet_map["<eow>"])
# Train
for i in range(args.epochs):
network.train_epoch(train, args.batch_size)
accuracy = network.evaluate("dev", dev, args.batch_size)
print("{:.2f}".format(100 * accuracy))
|
py | 1a3528a75f69940b88ab287156018004d60f40ef | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2001, 2002, 2004, 2005 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Text Widget tests
"""
import datetime
import unittest
import doctest
from zope.component.testing import setUp, tearDown
from zope.interface.verify import verifyClass
from zope.schema import TextLine
from zope.publisher.browser import TestRequest
from zope.schema import Password
from zope.formlib.interfaces import IInputWidget
from zope.formlib.widgets import TextWidget
from zope.formlib.widgets import TextAreaWidget
from zope.formlib.widgets import BytesAreaWidget
from zope.formlib.widgets import PasswordWidget
from zope.formlib.widgets import FileWidget
from zope.formlib.widgets import IntWidget
from zope.formlib.widgets import FloatWidget
from zope.formlib.widgets import BytesWidget
from zope.formlib.widgets import ASCIIWidget
from zope.formlib.widgets import DateDisplayWidget
from zope.formlib.widgets import DatetimeDisplayWidget
from zope.formlib.widgets import URIDisplayWidget
from zope.formlib.tests.test_browserwidget import BrowserWidgetTest
from zope.formlib.tests.test_browserwidget import SimpleInputWidgetTest
from zope.formlib.tests.support import checker
class TextWidgetTest(SimpleInputWidgetTest):
"""Documents and tests the text widget.
>>> setUp()
>>> verifyClass(IInputWidget, TextWidget)
True
Converting Missing Values
-------------------------
String fields (TextLine, Text, etc.) values can be classified as one of the
following:
- Non-empty string
- Empty string
- None
Text browser widgets only support the first two types: non-empty strings
and empty strings. There's no facility to explicitly set a None value in a
text browser widget.
However, it is possible to interpret an empty string as None for some
applications. For example, when inputing a User Name, an empty string means
'the user hasn't provided a value'. In another application, an empty string
may mean 'the user has provided a value, specifically <empty string>'.
To support both modes, the text widget provides a 'convert_missing_value'
flag. When True, empty strings will be converted by the widget to the
field's 'missing_value' (None by default). This mode accommodates the
'user hasn't provided a value' scenario.
To illustrate this mode, we'll use an optional field, where missing_value
is None:
>>> field = TextLine(
... __name__='foo',
... missing_value=None,
... required=False)
The HTTP form submission contains an empty string for the field value:
>>> request = TestRequest(form={'field.foo':u''})
A text widget configured for the field, where convert_missing_value is True
(the default value)...
>>> widget = TextWidget(field, request)
>>> widget.convert_missing_value
True
will convert the form's empty string into the field's missing_value, which
is None:
>>> widget.getInputValue() is None
True
When 'convert_missing_value' is False, the text widget will not convert
an empty string to the field's missing_value. This supports the 'user has
provided a value, specifically <empty string>' mode:
>>> widget.convert_missing_value = False
>>> widget.getInputValue()
u''
>>> tearDown()
"""
_WidgetFactory = TextWidget
def testProperties(self):
self.assertEqual(self._widget.tag, 'input')
self.assertEqual(self._widget.type, 'text')
self.assertEqual(self._widget.cssClass, '')
self.assertEqual(self._widget.extra, '')
self.assertEqual(self._widget.default, '')
self.assertEqual(self._widget.displayWidth, 20)
self.assertEqual(self._widget.displayMaxWidth, '')
def testRender(self):
value = 'Foo Value'
self._widget.setRenderedValue(value)
check_list = ('type="text"', 'id="field.foo"', 'name="field.foo"',
'value="Foo Value"', 'size="20"')
self.verifyResult(self._widget(), check_list)
check_list = ('type="hidden"',) + check_list[1:-1]
self.verifyResult(self._widget.hidden(), check_list)
check_list = ('style="color: red"',) + check_list
self._widget.extra = 'style="color: red"'
self.verifyResult(self._widget.hidden(), check_list)
def testRenderUTF8Input(self):
value = u"☃".encode('utf-8') # results in \u2603
self._widget.setRenderedValue(value)
check_list = ('type="text"', 'id="field.foo"', 'name="field.foo"',
u'value="\u2603"', 'size="20"')
self.verifyResult(self._widget(), check_list)
class URIDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = URIDisplayWidget
def testProperties(self):
# check the default linkTarget
self.assertFalse(self._widget.linkTarget)
def testRender(self):
value = "uri:fake"
self._widget.setRenderedValue(value)
self.verifyResult(self._widget(), ["<a", 'href="uri:fake"'])
self._widget.linkTarget = "there"
self.verifyResult(self._widget(), ["<a", 'href="uri:fake"',
'target="there"'])
def testEmptyRenderReturnsEmptyString(self):
self._widget.setRenderedValue(None)
self.assertEqual(self._widget(), "")
self._widget.setRenderedValue('')
self.assertEqual(self._widget(), "")
class DateDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = DateDisplayWidget
expected_class = "date"
def setUp(self):
super(DateDisplayWidgetTest, self).setUp()
self._value = datetime.date(2004, 12, 0o1)
def testDefaultDisplayStyle(self):
self.assertFalse(self._widget.displayStyle)
def testRenderDefault(self):
self._widget.setRenderedValue(self._value)
self.verifyResult(self._widget(),
["<span",
'class="%s"' % self.expected_class,
"01.12.2004",
"</span"])
def testRenderShort(self):
self._widget.setRenderedValue(self._value)
self._widget.displayStyle = "short"
self.verifyResult(self._widget(),
["<span",
'class="%s"' % self.expected_class,
u"01.12.04",
"</span"])
def testRenderMedium(self):
self._widget.setRenderedValue(self._value)
self._widget.displayStyle = "medium"
self.verifyResult(self._widget(),
["<span",
'class="%s"' % self.expected_class,
u"01.12.2004",
"</span"])
def testRenderLong(self):
self._widget.setRenderedValue(self._value)
self._widget.displayStyle = "long"
self.verifyResult(self._widget(),
["<span",
'class="%s"' % self.expected_class,
u"1 \u0434\u0435\u043a\u0430\u0431\u0440\u044f"
u" 2004 \u0433.",
"</span"])
def testRenderFull(self):
self._widget.setRenderedValue(self._value)
self._widget.displayStyle = "full"
self.verifyResult(self._widget(),
["<span",
'class="%s"' % self.expected_class,
u"1 \u0434\u0435\u043a\u0430\u0431\u0440\u044f"
u" 2004 \u0433.",
"</span"])
class DatetimeDisplayWidgetTest(DateDisplayWidgetTest):
_WidgetFactory = DatetimeDisplayWidget
expected_class = "dateTime"
def setUp(self):
super(DatetimeDisplayWidgetTest, self).setUp()
self._value = datetime.datetime(2004, 12, 0o1, 14, 39, 0o1)
def testRenderDefault(self):
super(DatetimeDisplayWidgetTest, self).testRenderDefault()
self.verifyResult(self._widget(), ["14:39:01"])
def testRenderShort(self):
super(DatetimeDisplayWidgetTest, self).testRenderShort()
self.verifyResult(self._widget(), ["14:39"])
def testRenderMedium(self):
super(DatetimeDisplayWidgetTest, self).testRenderMedium()
self.verifyResult(self._widget(), ["14:39:01"])
def testRenderLong(self):
super(DatetimeDisplayWidgetTest, self).testRenderLong()
self.verifyResult(self._widget(), ["14:39:01 +000"])
def testRenderFull(self):
super(DatetimeDisplayWidgetTest, self).testRenderFull()
self.verifyResult(self._widget(), ["14:39:01 +000"])
class TextAreaDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = TextAreaWidget
# It uses the default DisplayWidget
def testRender(self):
value = u"""
texttexttexttexttexttextexttexttext\xE9\xE9\xE9\xE9\xE9\xE9\xE9\xE9\xE9
texttexttexttexttextte\xE9\xE9\xE9\xE9\xE9xttexttexttexttexttexttexttex
texttexttexttexttexttexttexttexttexttexttexttexttexttexttext
"""
self._widget.setRenderedValue(value)
self.assertTrue(value, self._widget._toFieldValue(value))
self.verifyResult(self._widget(), ["<textarea",
self._widget._toFormValue(value)])
check_list = (
('id', 'field.foo'),
('name', 'field.foo'),
# ('value', ), tested above
('cols', '60'),
('rows', '15'),
)
for a, v in check_list:
self.verifyResult(self._widget(), [a, v])
class BytesAreaDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = BytesAreaWidget
# It uses the default DisplayWidget
def testRender(self):
value = """
texttexttexttexttexttexttexttexttexttexttexttexttexttexttext
texttexttexttexttexttexttexttexttexttexttexttexttexttexttext
texttexttexttexttexttexttexttexttexttexttexttexttexttexttext
"""
self._widget.setRenderedValue(value)
self.assertTrue(value, self._widget._toFieldValue(value))
self.verifyResult(self._widget(), ["<textarea",
self._widget._toFormValue(value)])
check_list = (
('id', 'field.foo'),
('name', 'field.foo'),
# ('value', ), tested above
('cols', '60'),
('rows', '15'),
)
for a, v in check_list:
self.verifyResult(self._widget(), [a, v])
class BytesDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = BytesWidget
# It uses the BytesDisplayWidget
def testRender(self):
value = "Food Value"
self._widget.setRenderedValue(value)
check_list = ('type="text"', 'id="field.foo"', 'name="field.foo"',
'value="%s"' % value, 'size="20"')
self.verifyResult(self._widget(), check_list)
class ASCIIDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = ASCIIWidget
# It uses the default BytesDisplayWidget
def testRender(self):
value = "Food Value"
self._widget.setRenderedValue(value)
check_list = ('type="text"', 'id="field.foo"', 'name="field.foo"',
'value="%s"' % value, 'size="20"')
self.verifyResult(self._widget(), check_list)
class PasswordDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = PasswordWidget
_FieldFactory = Password
# It uses the default DisplayWidget
def testRender(self):
value = 'Foo Value'
self._widget.setRenderedValue(value)
check_list = ('type="password"', 'id="field.foo"', 'name="field.foo"',
'value=""', 'size="20"')
self.verifyResult(self._widget(), check_list)
def testUnchangedPassword(self):
# The password hasn't been set yet, so an empty string
# is regarded as an empty field.
self.assertEqual(None, self._widget._toFieldValue(''))
# Now the password has been filled in, so the empty string
# is regarded as the special value for UNCHANGED_PASSWORD.
self._widget.context.context.foo = u'existing password'
self.assertEqual(self._widget.context.UNCHANGED_PASSWORD,
self._widget._toFieldValue(''))
class FileDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = FileWidget
# It uses the default DisplayWidget
def testRender(self):
value = 'Foo Value'
self._widget.setRenderedValue(value)
check_list = ('type="file"', 'id="field.foo"', 'name="field.foo"',
'size="20"')
self.verifyResult(self._widget(), check_list)
check_list = ('type="hidden"',) + check_list[1:-1]
self.verifyResult(self._widget.hidden(), check_list)
check_list = ('style="color: red"',) + check_list
self._widget.extra = 'style="color: red"'
self.verifyResult(self._widget.hidden(), check_list)
class IntDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = IntWidget
# It uses the default DisplayWidget
def testRender(self):
value = 1
self._widget.setRenderedValue(value)
check_list = ('type="text"', 'id="field.foo"', 'name="field.foo"',
'size="10"', 'value="%s"' % str(value))
self.verifyResult(self._widget(), check_list)
class FloatDisplayWidgetTest(BrowserWidgetTest):
_WidgetFactory = FloatWidget
# It uses the default DisplayWidget
def testRender(self):
value = 1.2
self._widget.setRenderedValue(value)
check_list = ('type="text"', 'id="field.foo"', 'name="field.foo"',
'size="10"', 'value="%s"' % str(value))
self.verifyResult(self._widget(), check_list)
def test_w_nonrequired_and_missing_value_and_no_inout():
"""
There was a bug that caused the value attribute to be set to
'value' under these circumstances.
>>> from zope.schema import TextLine
>>> field = TextLine(__name__='foo', title=u'on',
... required=False, missing_value=u'')
>>> request = TestRequest()
>>> widget = TextWidget(field, request)
>>> def normalize(s):
... return '\\n '.join(filter(None, s.split(' ')))
>>> print(normalize( widget() ))
<input
class="textType"
id="field.foo"
name="field.foo"
size="20"
type="text"
value=""
/>
"""
def test_no_error_on_render_only():
"""This is really a test of a bug fix to SimpleInputWidget.
_error shouldn't be set due to an *internal* call to getInputValue
when rendering.
>>> from zope.schema import TextLine
>>> field = TextLine(__name__='foo')
>>> request = TestRequest(form={'field.foo': ''})
>>> widget = TextWidget(field, request)
>>> ignored = widget()
>>> str(widget.error())
u''
"""
def test_text_area_works_with_missing_value():
"""
>>> from zope.schema import Text
>>> field = Text(__name__='foo', title=u'on',
... required=False, missing_value=u'')
>>> request = TestRequest()
>>> widget = TextAreaWidget(field, request)
>>> def normalize(s):
... return '\\n '.join(filter(None, s.split(' ')))
>>> print(normalize( widget() ))
<textarea
cols="60"
id="field.foo"
name="field.foo"
rows="15"
></textarea>
>>> print(normalize( widget.hidden() ))
<input
class="hiddenType"
id="field.foo"
name="field.foo"
type="hidden"
value=""
/>
"""
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(TextWidgetTest),
unittest.makeSuite(URIDisplayWidgetTest),
unittest.makeSuite(DateDisplayWidgetTest),
unittest.makeSuite(DatetimeDisplayWidgetTest),
unittest.makeSuite(TextAreaDisplayWidgetTest),
unittest.makeSuite(BytesAreaDisplayWidgetTest),
unittest.makeSuite(PasswordDisplayWidgetTest),
unittest.makeSuite(FileDisplayWidgetTest),
unittest.makeSuite(IntDisplayWidgetTest),
unittest.makeSuite(FloatDisplayWidgetTest),
unittest.makeSuite(BytesDisplayWidgetTest),
unittest.makeSuite(ASCIIDisplayWidgetTest),
doctest.DocTestSuite(checker=checker),
))
|
py | 1a3529201eb5b151f04c3c5ed6026087ea8f81a2 | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('image01.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'red.png')
workbook.close()
self.assertExcelEqual()
def test_create_file_in_memory(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
workbook = Workbook(self.got_filename, {'in_memory': True})
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'red.png')
workbook.close()
self.assertExcelEqual()
|
py | 1a352993c42ee9ab3b7e7c4c95c60bad346646cb | '''
Created on Mar 10, 2019
@author: Burkhard A. Meier
'''
import sys
from PyQt5 import QtWidgets, QtGui
from Section4.Designer_code.Video2_2_slots_Design import Ui_MainWindow
class RunDesignerGUI():
def __init__(self):
app = QtWidgets.QApplication(sys.argv)
self.MainWindow = QtWidgets.QMainWindow()
self.ui = Ui_MainWindow()
self.ui.setupUi(self.MainWindow)
self.update_widgets()
self.widget_actions()
self.MainWindow.show()
sys.exit(app.exec_())
def widget_actions(self):
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("icons/new_icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) # correct relative path to icon
self.ui.actionNew.setIcon(icon)
self.ui.actionNew.setShortcut('Ctrl+N')
self.ui.actionExit.setStatusTip('Click to exit the application') # use ui reference to update status bar
self.ui.actionExit.triggered.connect(self.close_GUI) # connect widget to method when triggered (clicked)
self.ui.actionExit.setShortcut('Ctrl+Q') # keyboard shortcut, window has focus
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap("icons/exit_icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) # modify icon location
self.ui.actionExit.setIcon(icon1) # use: self.ui.
#-------------------------------
self.ui.pushButton.clicked.connect(self.set_label) # add functionality to second button
def set_label(self):
window_text = self.MainWindow.windowTitle()
self.ui.label.setText(window_text) # set label text to window title
def close_GUI(self):
self.MainWindow.close() # call MainWindow close method, which closes the GUI
def update_widgets(self):
self.MainWindow.setWindowTitle('PyQt5 GUI') # use: self.MainWindow
if __name__ == "__main__":
RunDesignerGUI()
|
py | 1a352afef6621302e69c00b04f7f55e4126a0603 | """
Various tools for extracting signal components from a fit of the amplitude
distribution
"""
from . import pdf
from .Classdef import Statfit
import numpy as np
import time
import random
import matplotlib.pyplot as plt
from lmfit import minimize, Parameters, report_fit
def param0(sample, method='basic'):
"""Estimate initial parameters for HK fitting
Arguments
---------
sample : sequence
amplitudes
Keywords
--------
method : string
method to compute the initial parameters
"""
    if method == 'basic':
a = np.nanmean(sample)
s = np.nanstd(sample)
mu = 1.
return {'a':a, 's':s, 'mu':mu}
def lmfit(sample, fit_model='hk', bins='auto', p0 = None,
xtol=1e-4, ftol=1e-4):
"""Lmfit
Arguments
---------
sample : sequence
amplitudes between 0 and 1.
Keywords
--------
fit_model : string
name of the function (in pdf module) to use for the fit
bins : string
method to compute the bin width (inherited from numpy.histogram)
p0 : dict
Initial parameters. If None, estimated automatically.
    xtol : float
        relative error in the approximate solution acceptable for
        convergence (forwarded to the leastsq minimizer)
    ftol : float
        relative error in the sum of squares acceptable for convergence
        (forwarded to the leastsq minimizer)
Return
------
A Statfit Class
"""
start = time.time()
winsize = len(sample)
bad = False
#--------------------------------------------------------------------------
# Clean sample
#--------------------------------------------------------------------------
sample = np.array(sample)
sample = sample[~np.isnan(sample)]
if len(sample) == 0:
bad = True
sample = [random.random() for r in np.arange(winsize)]
#--------------------------------------------------------------------------
# Make the histogram
#--------------------------------------------------------------------------
# n, edges, patches = hist(sample, bins=bins, normed=True)
n, edges = np.histogram(sample, bins=bins, density=True)
plt.clf()
x = ((np.roll(edges, -1) + edges)/2.)[0:-1]
#--------------------------------------------------------------------------
# Initial Parameters for the fit
#--------------------------------------------------------------------------
if p0 is None:
p0 = param0(sample)
prm0 = Parameters()
# (Name, Value, Vary, Min, Max, Expr)
prm0.add('a', p0['a'], True, 0, 1, None)
prm0.add('s', p0['s'], True, 0, 1, None)
prm0.add('mu', p0['mu'], True, 0, 1000, None)
prm0.add('pt', np.average(sample)**2,None, 0, 1, 'a**2+2*s**2')
#--------------------------------------------------------------------------
# Fit
#--------------------------------------------------------------------------
pdf2use = getattr(pdf, fit_model)
# use 'lbfgs' fit if error with 'leastsq' fit
try:
p = minimize(pdf2use, prm0, args=(x, n), method='leastsq',
xtol=xtol, ftol=ftol)
except KeyboardInterrupt:
raise
except:
print('!! Error with LEASTSQ fit, use L-BFGS-B instead')
p = minimize(pdf2use, prm0, args=(x, n), method='lbfgs')
#--------------------------------------------------------------------------
# Output
#--------------------------------------------------------------------------
elapsed = time.time() - start
# Identify bad results
if bad is True:
p.success = False
# Create values dict For lmfit >0.9.0 compatibility since it is no longer
# in the minimize output
values = {}
for i in p.params.keys():
values[i] = p.params[i].value
# Results
result = Statfit(sample, pdf2use, values, p.params,
p.chisqr, p.redchi, elapsed, p.nfev, p.message, p.success,
p.residual, x, n, edges, bins=bins)
# result = Statfit(sample, p.userfcn, p.kws, p.values, p.params,
# p.chisqr, p.redchi, elapsed, p.nfev, p.message, p.success,
# p.residual, x, n, edges, bins=bins)
return result
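# Minimal usage sketch (assumes `amp` is a 1-D sequence of amplitudes in
# [0, 1]; the attribute layout of the returned Statfit is assumed to follow
# the constructor call above):
#   fit = lmfit(amp, fit_model='hk', bins='auto')
#   # fit holds the fitted parameter values, chi-square and the histogram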
|
py | 1a352d87a2396853ea7e3bb5aa478f5387b57307 | def add_new_bishop(location, attack_positions, board_size):
    # count how many existing bishops attack the new square; every occurrence
    # of `location` in attack_positions is one attacking bishop, so a plain
    # membership test would undercount when several diagonals cross here
    count = attack_positions.count(location)
# add new attack positions for future bishops
new_attack_positions = list()
i, j = location
while i > 0 and j > 0:
i -= 1
j -= 1
new_attack_positions.append((i, j))
i, j = location
while i > 0 and j < board_size - 1:
i -= 1
j += 1
new_attack_positions.append((i, j))
i, j = location
while i < board_size - 1 and j > 0:
i += 1
j -= 1
new_attack_positions.append((i, j))
i, j = location
while i < board_size - 1 and j < board_size - 1:
i += 1
j += 1
new_attack_positions.append((i, j))
attack_positions.extend(new_attack_positions)
return count, attack_positions
def get_attack_vectors(bishop_locations, board_size):
attack_positions = list()
total_count = 0
for location in bishop_locations:
count, attack_positions = add_new_bishop(
location, attack_positions, board_size)
total_count += count
return total_count
assert get_attack_vectors([(0, 0), (1, 2), (2, 2), (4, 0)], 5) == 2
assert get_attack_vectors([(0, 0), (1, 2), (2, 2)], 5) == 1
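assert get_attack_vectors([(0, 0), (4, 4)], 5) == 1  # opposite corners share a diagonal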
|
py | 1a352d993a27adaed639ea5760aad46a1be83d91 | # uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\Push2\color_chooser.py
# Compiled at: 2018-11-30 15:48:11
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import liveobj_changed, liveobj_valid, nop
from ableton.v2.control_surface import Component
from ableton.v2.control_surface.control import ButtonControl, control_matrix
from pushbase.colors import Pulse
from pushbase.message_box_component import Messenger
from .colors import IndexedColor, Rgb, inverse_translate_color_index, translate_color_index
from .skin_default import SELECTION_PULSE_SPEED
COLOR_CHOOSER_LAYOUT = (
(10, 11, 12, 13, 14, 15, 16, 17),
(9, None, None, None, None, None, None, 18),
(8, None, None, None, None, None, None, 19),
(7, None, None, None, None, None, None, 20),
(5, None, None, None, None, None, None, 21),
(6, None, None, None, None, None, None, 22),
(4, None, None, None, None, None, None, 23),
(3, 2, 1, None, None, 25, 26, 24))
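# None entries are pads with no palette color; pressing one of them switches
# the edited object back to automatic coloring (see the matrix handler below).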
class ColorChooserComponent(Component, Messenger):
matrix = control_matrix(ButtonControl, dimensions=(8, 8))
def __init__(self, *a, **k):
super(ColorChooserComponent, self).__init__(is_enabled=False, *a, **k)
self._object = None
self._notification_ref = nop
for button in self.matrix:
row, column = button.coordinate
button.color_index = COLOR_CHOOSER_LAYOUT[row][column]
return
@property
def object(self):
return self._object
@object.setter
def object(self, obj):
if liveobj_changed(self._object, obj):
self._object = obj
if obj is None:
notification = self._notification_ref()
if notification:
notification.hide()
self.set_enabled(False)
else:
self._render_color_palette(translate_color_index(obj.color_index))
self.set_enabled(True)
self._notification_ref = self.show_notification(b'Select a color for: %s' % obj.name, notification_time=-1)
return
@matrix.pressed
def matrix(self, button):
if liveobj_valid(self.object):
if button.color_index is None:
if hasattr(self.object, b'is_auto_colored'):
self.object.is_auto_colored = True
self.show_notification(b'Color automatically enabled for: %s' % self.object.name)
else:
self.object.color_index = inverse_translate_color_index(button.color_index)
self.object = None
return
def _render_color_palette(self, selected_color_index):
for button in self.matrix:
color_index = button.color_index
if color_index is not None:
if color_index == selected_color_index:
button.color = Pulse(IndexedColor.from_push_index(color_index, shade_level=2), IndexedColor.from_push_index(color_index), SELECTION_PULSE_SPEED)
else:
button.color = IndexedColor.from_push_index(color_index)
else:
button.color = Rgb.BLACK
return |
py | 1a352e11e045380b85beedb91ad03df4e1a9fbed | x = 0
x += 10
x -= 10
x *= 10
x @= 10
x /= 10
x //= 10
x %= 10
x **= 10
x >>= 10
x <<= 10
x &= 10
x ^= 10
x |= 10
x += 10 + 20 * 3
x -= 10 + 20 * 3
x *= 10 + 20 * 3
x @= 10 + 20 * 3
x /= 10 + 20 * 3
x //= 10 + 20 * 3
x %= 10 + 20 * 3
x **= 10 + 20 * 3
x >>= 10 + 20 * 3
x <<= 10 + 20 * 3
x &= 10 + 20 * 3
x ^= 10 + 20 * 3
x |= 10 + 20 * 3
|
py | 1a352e12affd949a94cb6c2938f6b6c1ff988419 | import random
import time
import datetime
from panda3d.core import Vec4, TextNode, CardMaker, NodePath
from direct.distributed import DistributedObject
from direct.task.Task import Task
from direct.gui.DirectGui import DirectLabel
from direct.gui import OnscreenText
from toontown.toonbase import ToontownGlobals
from toontown.parties.PartyInfo import PartyInfo
from toontown.toonbase import TTLocalizer
from toontown.toon import Toon
from toontown.parties import PartyGlobals
from toontown.parties.Decoration import Decoration
import PartyUtils
class DistributedParty(DistributedObject.DistributedObject):
notify = directNotify.newCategory('DistributedParty')
generatedEvent = 'distributedPartyGenerated'
def __init__(self, cr):
DistributedObject.DistributedObject.__init__(self, cr)
self.partyDoneEvent = 'partyDone'
self.load()
self.avIdsAtParty = []
base.distributedParty = self
self.titleText = ''
self.isPartyEnding = False
def setPartyState(self, partyState):
self.isPartyEnding = partyState
messenger.send('partyStateChanged', [partyState])
def getPartyState(self):
return self.isPartyEnding
def setPartyClockInfo(self, x, y, h):
x = PartyUtils.convertDistanceFromPartyGrid(x, 0)
y = PartyUtils.convertDistanceFromPartyGrid(y, 1)
h = PartyUtils.convertDegreesFromPartyGrid(h)
self.partyClockInfo = (x, y, h)
self.loadPartyCountdownTimer()
def setInviteeIds(self, inviteeIds):
self.inviteeIds = inviteeIds
def setPartyInfoTuple(self, partyInfoTuple):
self.partyInfo = PartyInfo(*partyInfoTuple)
self.loadDecorations()
allActIds = [ x.activityId for x in self.partyInfo.activityList ]
base.partyHasJukebox = PartyGlobals.ActivityIds.PartyJukebox in allActIds or PartyGlobals.ActivityIds.PartyJukebox40 in allActIds
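        # 15x18 grid of party-ground squares; True marks a free square until
        # fillGrid below flags the ones occupied by activities and decorations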
        self.grid = [
            [False, False, False, False, False, True, True, True, True, True, True, True, True, True, True, False, False, False],
            [False, False, False, False, True, True, True, True, True, True, True, True, True, True, True, False, False, False],
            [False, False, False, True, True, True, True, True, True, True, True, True, True, True, True, True, False, False],
            [False, False, False, True, True, True, True, True, True, True, True, True, True, True, True, True, False, False],
            [False, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, False],
            [False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True],
            [False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True],
            [False, True, True, True, True, True, True, True, True, True, True, True, True, True, False, False, False, False],
            [False, False, False, False, True, True, True, True, True, True, True, True, True, False, False, False, False, False],
            [False, False, False, False, False, True, True, True, True, True, True, True, False, False, False, False, False, False],
            [False, False, False, False, False, False, True, True, True, True, True, False, False, False, False, False, False, False]]
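        # Flag every square covered by an activity or decoration as occupied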
def fillGrid(x, y, size):
for i in xrange(-size[1] / 2 + 1, size[1] / 2 + 1):
for j in xrange(-size[0] / 2 + 1, size[0] / 2 + 1):
self.grid[i + y][j + x] = False
for activityBase in self.partyInfo.activityList:
fillGrid(activityBase.x, activityBase.y, PartyGlobals.ActivityInformationDict[activityBase.activityId]['gridsize'])
for decorBase in self.partyInfo.decors:
fillGrid(decorBase.x, decorBase.y, PartyGlobals.DecorationInformationDict[decorBase.decorId]['gridsize'])
self.loadGrass()
def setPartyStartedTime(self, startedTime):
stime = time.strptime(startedTime, '%Y-%m-%d %H:%M:%S')
self.partyStartedTime = datetime.datetime(year=stime.tm_year, month=stime.tm_mon, day=stime.tm_mday, hour=stime.tm_hour, minute=stime.tm_min, second=stime.tm_sec, tzinfo=base.cr.toontownTimeManager.getCurServerDateTime().tzinfo)
def disable(self):
self.notify.debug('disable')
DistributedObject.DistributedObject.disable(self)
base.localAvatar.chatMgr.chatInputSpeedChat.removeInsidePartiesMenu()
def delete(self):
self.notify.debug('delete')
self.unload()
if hasattr(base, 'distributedParty'):
del base.distributedParty
DistributedObject.DistributedObject.delete(self)
def load(self):
Toon.loadMinigameAnims()
self.defaultSignModel = loader.loadModel('phase_13/models/parties/eventSign')
self.activityIconsModel = loader.loadModel('phase_4/models/parties/eventSignIcons')
model = loader.loadModel('phase_4/models/parties/partyStickerbook')
self.partyHat = model.find('**/Stickerbook_PartyIcon')
self.partyHat.setPos(0.0, 0.1, 2.5)
self.partyHat.setHpr(0.0, 0.0, -50.0)
self.partyHat.setScale(4.0)
self.partyHat.setBillboardAxis()
self.partyHat.reparentTo(hidden)
model.removeNode()
self.defaultLeverModel = loader.loadModel('phase_13/models/parties/partyLeverBase')
self.defaultStickModel = loader.loadModel('phase_13/models/parties/partyLeverStick')
def loadGrass(self):
self.grassRoot = NodePath('GrassRoot')
self.grassRoot.reparentTo(base.cr.playGame.hood.loader.geom)
grass = loader.loadModel('phase_13/models/parties/grass')
clearPositions = self.getClearSquarePositions()
numTufts = min(len(clearPositions) * 3, PartyGlobals.TuftsOfGrass)
for i in xrange(numTufts):
g = grass.copyTo(self.grassRoot)
pos = random.choice(clearPositions)
g.setPos(pos[0] + random.randint(-8, 8), pos[1] + random.randint(-8, 8), 0.0)
def loadDecorations(self):
self.decorationsList = []
for decorBase in self.partyInfo.decors:
self.decorationsList.append(Decoration(PartyGlobals.DecorationIds.getString(decorBase.decorId), PartyUtils.convertDistanceFromPartyGrid(decorBase.x, 0), PartyUtils.convertDistanceFromPartyGrid(decorBase.y, 1), PartyUtils.convertDegreesFromPartyGrid(decorBase.h)))
def unload(self):
if hasattr(self, 'decorationsList') and self.decorationsList:
for decor in self.decorationsList:
decor.unload()
del self.decorationsList
self.stopPartyClock()
self.grassRoot.removeNode()
del self.grassRoot
if hasattr(self, 'testGrid'):
self.testGrid.removeNode()
del self.testGrid
self.ignoreAll()
Toon.unloadMinigameAnims()
self.partyHat.removeNode()
del self.partyHat
if hasattr(base, 'partyHasJukebox'):
del base.partyHasJukebox
def announceGenerate(self):
DistributedObject.DistributedObject.announceGenerate(self)
self.sendUpdate('avIdEnteredParty', [base.localAvatar.doId])
globalClock.syncFrameTime()
self.startPartyClock()
base.localAvatar.chatMgr.chatInputSpeedChat.addInsidePartiesMenu()
self.spawnTitleText()
messenger.send(self.generatedEvent)
if config.GetBool('show-debug-party-grid', 0):
self.testGrid = NodePath('test_grid')
self.testGrid.reparentTo(base.cr.playGame.hood.loader.geom)
for i in xrange(len(self.grid)):
for j in xrange(len(self.grid[i])):
cm = CardMaker('gridsquare')
np = NodePath(cm.generate())
np.setScale(12)
np.setP(-90.0)
np.setPos(PartyUtils.convertDistanceFromPartyGrid(j, 0) - 6.0, PartyUtils.convertDistanceFromPartyGrid(i, 1) - 6.0, 0.1)
np.reparentTo(self.testGrid)
if self.grid[i][j]:
np.setColorScale(0.0, 1.0, 0.0, 1.0)
else:
np.setColorScale(1.0, 0.0, 0.0, 1.0)
def getClearSquarePos(self):
clearPositions = self.getClearSquarePositions()
if len(clearPositions) == 0:
raise StandardError, 'Party %s has no empty grid squares.' % self.doId
return random.choice(clearPositions)
def getClearSquarePositions(self):
clearPositions = []
for y in xrange(len(self.grid)):
for x in xrange(len(self.grid[0])):
if self.grid[y][x]:
pos = (PartyUtils.convertDistanceFromPartyGrid(x, 0), PartyUtils.convertDistanceFromPartyGrid(y, 1), 0.1)
clearPositions.append(pos)
return clearPositions
def startPartyClock(self):
self.partyClockModel.reparentTo(base.cr.playGame.hood.loader.geom)
curServerTime = base.cr.toontownTimeManager.getCurServerDateTime()
timePartyWillEnd = self.partyStartedTime + datetime.timedelta(hours=PartyGlobals.DefaultPartyDuration)
timeLeftInParty = timePartyWillEnd - curServerTime
if curServerTime < timePartyWillEnd:
self.secondsLeftInParty = timeLeftInParty.seconds
else:
self.secondsLeftInParty = 0
taskMgr.doMethodLater(0.5, self.partyClockTask, 'UpdatePartyClock')
self.partyClockSignFront = self.partyClockModel.find('**/signFrontText_locator')
self.partyClockSignBack = self.partyClockModel.find('**/signBackText_locator')
self.attachHostNameToSign(self.partyClockSignFront)
self.attachHostNameToSign(self.partyClockSignBack)
def attachHostNameToSign(self, locator):
if self.hostName == '':
return
nameText = TextNode('nameText')
nameText.setCardAsMargin(0.1, 0.1, 0.1, 0.1)
nameText.setCardDecal(True)
nameText.setCardColor(1.0, 1.0, 1.0, 0.0)
r = 232.0 / 255.0
g = 169.0 / 255.0
b = 23.0 / 255.0
nameText.setTextColor(r, g, b, 1)
nameText.setAlign(nameText.ACenter)
nameText.setFont(ToontownGlobals.getBuildingNametagFont())
nameText.setShadowColor(0, 0, 0, 1)
nameText.setBin('fixed')
if TTLocalizer.BuildingNametagShadow:
nameText.setShadow(*TTLocalizer.BuildingNametagShadow)
nameWordWrap = 11.0
nameText.setWordwrap(nameWordWrap)
scaleMult = 0.48
houseName = self.hostName
nameText.setText(houseName)
textWidth = nameText.getWidth()
xScale = 1.0 * scaleMult
if textWidth > nameWordWrap:
xScale = nameWordWrap / textWidth * scaleMult
sign_origin = locator
namePlate = sign_origin.attachNewNode(nameText)
namePlate.setDepthWrite(0)
namePlate.setPos(0, 0, 0)
namePlate.setScale(xScale)
def stopPartyClock(self):
self.partyClockModel.removeNode()
taskMgr.remove('UpdatePartyClock')
def partyClockTask(self, task):
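        # Ticks every 0.5 s: blinks the colon, updates the MM:SS labels and
        # rotates the clock hands until the party runs out of time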
self.secondsLeftInParty -= 0.5
if self.secondsLeftInParty < 0:
self.frontTimer['minute']['text'] = '--'
self.backTimer['minute']['text'] = '--'
self.frontTimer['second']['text'] = '--'
self.backTimer['second']['text'] = '--'
return
if self.frontTimer['colon'].isStashed():
self.frontTimer['colon'].unstash()
self.backTimer['colon'].unstash()
else:
self.frontTimer['colon'].stash()
self.backTimer['colon'].stash()
minutesLeft = int(int(self.secondsLeftInParty / 60) % 60)
if minutesLeft < 10:
minutesLeft = '0%d' % minutesLeft
else:
minutesLeft = '%d' % minutesLeft
secondsLeft = int(self.secondsLeftInParty % 60)
if secondsLeft < 10:
secondsLeft = '0%d' % secondsLeft
else:
secondsLeft = '%d' % secondsLeft
self.frontTimer['minute']['text'] = minutesLeft
self.backTimer['minute']['text'] = minutesLeft
self.frontTimer['second']['text'] = secondsLeft
self.backTimer['second']['text'] = secondsLeft
taskMgr.doMethodLater(0.5, self.partyClockTask, 'UpdatePartyClock')
if self.secondsLeftInParty != int(self.secondsLeftInParty):
self.partyClockModel.find('**/middleRotateFront_grp').setR(-6.0 * (self.secondsLeftInParty % 60))
self.partyClockModel.find('**/middleRotateBack_grp').setR(6.0 * (self.secondsLeftInParty % 60))
def getAvIdsAtParty(self):
return self.avIdsAtParty
def setAvIdsAtParty(self, avIdsAtParty):
self.avIdsAtParty = avIdsAtParty
def loadPartyCountdownTimer(self):
self.partyClockModel = loader.loadModel('phase_13/models/parties/partyClock')
self.partyClockModel.setPos(self.partyClockInfo[0], self.partyClockInfo[1], 0.0)
self.partyClockModel.setH(self.partyClockInfo[2])
self.partyClockModel.reparentTo(base.cr.playGame.hood.loader.geom)
self.partyClockModel.find('**/frontText_locator').setY(-1.1)
self.partyClockModel.find('**/backText_locator').setY(0.633)
self.frontTimer = self.getTimer(self.partyClockModel.find('**/frontText_locator'))
base.frontTimerLoc = self.partyClockModel.find('**/frontText_locator')
base.backTimerLoc = self.partyClockModel.find('**/backText_locator')
self.backTimer = self.getTimer(self.partyClockModel.find('**/backText_locator'))
self.partyClockModel.stash()
def getTimer(self, parent):
timeFont = ToontownGlobals.getMinnieFont()
timer = {}
timer['minute'] = DirectLabel(parent=parent, pos=TTLocalizer.DPtimerMinutePos, relief=None, text='59', text_align=TextNode.ACenter, text_font=timeFont, text_fg=(0.7, 0.3, 0.3, 1.0), scale=TTLocalizer.DPtimerMinute)
timer['colon'] = DirectLabel(parent=parent, pos=TTLocalizer.DPtimerColonPos, relief=None, text=':', text_align=TextNode.ACenter, text_font=timeFont, text_fg=(0.7, 0.3, 0.3, 1.0), scale=TTLocalizer.DPtimerColon)
timer['second'] = DirectLabel(parent=parent, relief=None, pos=TTLocalizer.DPtimerSecondPos, text='14', text_align=TextNode.ACenter, text_font=timeFont, text_fg=(0.7, 0.3, 0.3, 1.0), scale=TTLocalizer.DPtimerSecond)
timer['textLabel'] = DirectLabel(parent=parent, relief=None, pos=(0.0, 0.0, 1.15), text=TTLocalizer.PartyCountdownClockText, text_font=timeFont, text_fg=(0.7, 0.3, 0.3, 1.0), scale=TTLocalizer.DPtimerTextLabel)
return timer
def setHostName(self, hostName):
self.hostName = hostName
if hasattr(self, 'partyClockSignFront'):
self.attachHostNameToSign(self.partyClockSignFront)
if hasattr(self, 'partyClockSignBack'):
self.attachHostNameToSign(self.partyClockSignBack)
def spawnTitleText(self):
if not self.hostName:
return
partyText = TTLocalizer.PartyTitleText % TTLocalizer.GetPossesive(self.hostName)
self.doSpawnTitleText(partyText)
def doSpawnTitleText(self, text):
self.titleColor = (1.0, 0.5, 0.4, 1.0)
self.titleText = OnscreenText.OnscreenText(text, fg=self.titleColor, font=ToontownGlobals.getSignFont(), pos=(0, -0.5), scale=0.16, drawOrder=0, mayChange=1, wordwrap=16)
self.titleText.setText(text)
self.titleText.show()
self.titleText.setColor(Vec4(*self.titleColor))
self.titleText.clearColorScale()
self.titleText.setFg(self.titleColor)
seq = Task.sequence(Task.pause(0.1), Task.pause(6.0), self.titleText.lerpColorScale(Vec4(1.0, 1.0, 1.0, 1.0), Vec4(1.0, 1.0, 1.0, 0.0), 0.5), Task(self.hideTitleTextTask))
taskMgr.add(seq, 'titleText')
def hideTitleTextTask(self, task):
self.titleText.hide()
return Task.done
def hideTitleText(self):
if self.titleText:
self.titleText.hide()
|
py | 1a352e16c68dd0a5dab0399442e006b240160bb8 | """
Django settings for djangobackend project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ao5z(o(z@cvzodm99d32jkxa5e8a1!q_4sqss5-a%n6tg$#h$+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
APPEND_SLASH = True
ALLOWED_HOSTS = ["localhost","jeetgor06-djangoapp.us-south.cf.appdomain.cloud"]
# Application definition
INSTALLED_APPS = [
'djangoapp.apps.DjangoappConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djangobackend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.media',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'djangobackend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_ROOT, 'media')
MEDIA_URL = '/media/'
|
py | 1a35308e3e1033d1025ac934d26add8ef584b069 | import re, datetime
from Helpers.freezable_list import FrozenDict
from pytjson.Exceptions import ParseError
class Datatype:
    # Initializer, will be overridden below
TAGS = {}
isScalar = re.compile(r'^[a-z0-9]*$')
isBin = re.compile('^[01]{8}$')
isOnlyNumbers = re.compile('^\-?(0|[1-9][0-9]*)$')
isNonScalar = re.compile(r'^([A-Z][a-z0-9]*)\<(.*)\>$')
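    # Non-scalar tags look like "A<i>": group(1) is the container tag and
    # group(2) the inner type tag, which parse() resolves recursively.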
@staticmethod
def parse(tag):
if not isinstance(tag, (str, unicode)):
raise TypeError("expected String, got {}".format(type(tag)))
if tag == "O":
# Object
return Datatype.TAGS[tag]
elif Datatype.isNonScalar.match(tag):
tmp_inner = Datatype.isNonScalar.match(tag).group(2)
tmp_type = Datatype.isNonScalar.match(tag).group(1)
inner = Datatype.parse(tmp_inner)
if tmp_type == "A":
tmp = Array(inner)
else:
tmp = Datatype.TAGS[tmp_type]
return tmp
elif Datatype.isScalar.match(tag):
# Scalar
return Datatype.TAGS[tag]
else:
raise ParseError("couldn't parse tag: {}".format(repr(tag)))
@staticmethod
def identify_type(obj, is_bytes):
if type(obj) is dict:
return Datatype.TAGS["O"]
elif type(obj) is list:
t = Array(None)
return t._identify_type(obj)
elif isinstance(obj, (str)):
return Datatype.TAGS["s"]
elif type(obj) is int:
return Datatype.TAGS["i"]
elif type(obj) is float:
return Datatype.TAGS["f"]
elif isinstance(obj, datetime.datetime):
return Datatype.TAGS["t"]
elif is_bytes:
return Datatype.TAGS["b"]
else:
raise TypeError("don't know how to serialize #{obj.class} as TJSON")
def datatype_generate(self, obj):
        is_bytes = isinstance(obj, bytes)
return self.identify_type(obj, is_bytes).generate(obj)
class Scalar(Datatype):
@staticmethod
def isScalar():
return True
class NonScalar(Datatype):
def __init__(self, inner_type):
self.inner_type = inner_type
@staticmethod
def isScalar():
return False
class Number(Scalar):
pass
class Integer:
@staticmethod
def generate(int_data):
# Integers are serialized as strings to sidestep the limits of some JSON parsers
return str(int_data).encode("utf-8")
class Binary(Scalar):
pass
from datatypes.string import String
from datatypes.timestamp import Timestamp
from datatypes.float import Float
from datatypes.integer import SignedInt, UnsignedInt
from datatypes.array import Array
from datatypes.binary import Binary16, Binary32, Binary64
from datatypes.object import Object
class Datatype(Datatype):
Datatype.TAGS = FrozenDict(
O = Object(None),
b = Binary64(),
b16 = Binary16(),
b32 = Binary32(),
b64 = Binary64(),
f = Float(),
i = SignedInt(),
s = String(),
t = Timestamp(),
u = UnsignedInt()
)
|
py | 1a35316a5a50e18ac19f04e24440f6660c5a6c83 | import folium
import pandas
import math
import re
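# header=1: the column names are on the spreadsheet's second row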
data = pandas.read_excel("GVP_Volcano_List.xlsx",header = 1)
map = folium.Map(tiles="Mapbox Bright")
featureGroup = folium.FeatureGroup(name="Volcanoes")
#Debug
dumpfh = open('out.txt', 'w')
lonData = data["Latitude"]
latData = data["Longitude"]
nameData = data["Volcano Name"]
for lon, lat, name, i in zip(lonData, latData, nameData, range(0, len(nameData))):
if not math.isnan(lon) and not math.isnan(lat):
#Debug
dumpfh.write('{i}: {lon} {lat} {name}\n'.format(i=i, lon=lon, lat=lat, name=name))
dumpfh.flush()
name = re.sub("'", '', name)
featureGroup.add_child(folium.Marker(location=[lon,lat],popup=str(name),icon=folium.Icon(color="green")))
map.add_child(featureGroup)
map.save("VolcanoMap.html")
dumpfh.close()
#Debug
print('finished')
|
py | 1a3533459ea7dda22b7aaa5c10bdf23f7e380167 | #
# Copyright (c) 2018 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from difflib import unified_diff
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from bridge.vars import USER_ROLES, JOB_ROLES
from bridge.utils import BridgeException
import marks.SafeUtils as SafeUtils
import marks.UnsafeUtils as UnsafeUtils
import marks.UnknownUtils as UnknownUtils
from users.models import User
from reports.models import ReportUnsafe, ReportSafe, ReportUnknown
from marks.models import MarkSafe, MarkUnsafe, MarkUnknown, MarkSafeHistory, MarkUnsafeHistory,\
SafeTag, UnsafeTag, ConvertedTraces, MarkSafeReport, MarkUnsafeReport, MarkUnknownReport
STATUS_COLOR = {
'0': '#e81919',
'1': '#FF8533',
'2': '#FF8533',
'3': '#00c600',
}
UNSAFE_COLOR = {
'0': '#cb58ec',
'1': '#e81919',
'2': '#e81919',
'3': '#FF8533',
'4': '#D11919', # Incompatible marks
'5': '#000000', # Without marks
}
SAFE_COLOR = {
'0': '#cb58ec',
'1': '#FF8533',
'2': '#e81919',
'3': '#D11919', # Incompatible marks
'4': '#000000', # Without marks
}
class MarkAccess:
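    # Permission helper: decides whether a user may create, edit or delete a
    # mark (or remove one of its versions) based on role and job access rules.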
def __init__(self, user, mark=None, report=None):
self.user = user
self.mark = mark
self.report = report
def can_edit(self):
if not isinstance(self.user, User):
return False
if self.user.extended.role == USER_ROLES[2][0]:
return True
if not self.mark.is_modifiable or self.mark.version == 0:
return False
if self.user.extended.role == USER_ROLES[3][0]:
return True
if isinstance(self.mark, (MarkUnsafe, MarkSafe, MarkUnknown)):
first_vers = self.mark.versions.order_by('version').first()
else:
return False
if first_vers.author == self.user:
return True
if self.mark.job is not None:
first_v = self.mark.job.versions.order_by('version').first()
if first_v.change_author == self.user:
return True
last_v = self.mark.job.versions.get(version=self.mark.job.version)
if last_v.global_role in [JOB_ROLES[2][0], JOB_ROLES[4][0]]:
return True
try:
user_role = last_v.userrole_set.get(user=self.user)
if user_role.role in [JOB_ROLES[2][0], JOB_ROLES[4][0]]:
return True
except ObjectDoesNotExist:
return False
return False
def can_create(self):
if not isinstance(self.user, User):
return False
if isinstance(self.report, (ReportUnsafe, ReportSafe, ReportUnknown)):
if self.user.extended.role in [USER_ROLES[2][0], USER_ROLES[3][0]]:
return True
first_v = self.report.root.job.versions.order_by('version').first()
if first_v.change_author == self.user:
return True
try:
last_v = self.report.root.job.versions.get(version=self.report.root.job.version)
except ObjectDoesNotExist:
return False
if last_v.global_role in [JOB_ROLES[2][0], JOB_ROLES[4][0]]:
return True
try:
user_role = last_v.userrole_set.get(user=self.user)
if user_role.role in [JOB_ROLES[2][0], JOB_ROLES[4][0]]:
return True
except ObjectDoesNotExist:
return False
elif self.user.extended.role in [USER_ROLES[2][0], USER_ROLES[3][0]]:
return True
return False
def can_delete(self):
if not isinstance(self.user, User):
return False
if self.user.extended.role == USER_ROLES[2][0]:
return True
if not self.mark.is_modifiable or self.mark.version == 0:
return False
if self.user.extended.role == USER_ROLES[3][0]:
return True
authors = list(set(v_id for v_id, in self.mark.versions.values_list('author_id') if v_id is not None))
if len(authors) == 1 and authors[0] == self.user.id:
return True
return False
def can_remove_version(self, mark_version):
if not isinstance(self.user, User) or not isinstance(self.mark, (MarkUnsafe, MarkSafe, MarkUnknown)):
return False
# Nobody can remove first or last version. Also while mark is being deleted users can't clear versions.
if mark_version.version in {1, self.mark.version} or self.mark.version == 0:
return False
# Manager can remove all other versions
if self.user.extended.role == USER_ROLES[2][0]:
return True
# Others can't remove versions if mark is frozen.
if not self.mark.is_modifiable:
return False
# Expert can remove all versions.
if self.user.extended.role == USER_ROLES[3][0]:
return True
# Others can remove version only if they are authors of it.
if mark_version.author == self.user:
return True
return False
def can_freeze(self):
if not isinstance(self.user, User):
return False
return self.user.extended.role == USER_ROLES[2][0]
class TagsInfo:
def __init__(self, mark_type, mark=None):
self.mark = mark
self.type = mark_type
self.tags_old = []
self.tags_available = []
self.__get_tags()
def __get_tags(self):
if self.type not in ['unsafe', 'safe']:
return
if isinstance(self.mark, (MarkUnsafe, MarkSafe)):
last_v = self.mark.versions.get(version=self.mark.version)
self.tags_old = list(t['tag__tag'] for t in last_v.tags.order_by('tag__tag').values('tag__tag'))
elif isinstance(self.mark, (MarkUnsafeHistory, MarkSafeHistory)):
self.tags_old = list(t['tag__tag'] for t in self.mark.tags.order_by('tag__tag').values('tag__tag'))
if self.type == 'unsafe':
table = UnsafeTag
else:
table = SafeTag
self.tags_available = list(t['tag'] for t in table.objects.values('tag') if t['tag'] not in self.tags_old)
class NewMark:
def __init__(self, user, inst, data):
self._user = user
self._data = data
self._inst = inst
self._handler = self.__get_handler()
self.changes = {}
self.mark = None
def __get_handler(self):
if isinstance(self._inst, (ReportSafe, MarkSafe)):
return SafeUtils.NewMark(self._user, self._data)
elif isinstance(self._inst, (ReportUnsafe, MarkUnsafe)):
return UnsafeUtils.NewMark(self._user, self._data)
elif isinstance(self._inst, (ReportUnknown, MarkUnknown)):
return UnknownUtils.NewMark(self._user, self._data)
else:
raise ValueError('Unsupported type: %s' % type(self._inst))
def create_mark(self):
self.mark = self._handler.create_mark(self._inst)
self.changes = self._handler.changes
def change_mark(self):
self.mark = self._handler.change_mark(self._inst)
self.changes = self._handler.changes
class CompareMarkVersions:
def __init__(self, mark_type, version1, version2):
self.type = mark_type
self.v1 = version1
self.v2 = version2
self.verdict = self.__verdict_change()
self.status = self.__status_change()
self.tags = self.__tags_change()
self.et_func = self.__et_func_change()
self.et = self.__et_change()
self.attrs = self.__attr_change()
self.unknown_func = self.__unknown_func_change()
self.problem = self.__problem_change()
def __verdict_change(self):
if self.type == 'unknown' or self.v1.verdict == self.v2.verdict:
return None
if self.type == 'safe':
return [{'title': self.v1.get_verdict_display(), 'color': SAFE_COLOR[self.v1.verdict]},
{'title': self.v2.get_verdict_display(), 'color': SAFE_COLOR[self.v2.verdict]}]
else:
return [{'title': self.v1.get_verdict_display(), 'color': UNSAFE_COLOR[self.v1.verdict]},
{'title': self.v2.get_verdict_display(), 'color': UNSAFE_COLOR[self.v2.verdict]}]
def __status_change(self):
if self.v1.status == self.v2.status:
return None
return [{'title': self.v1.get_status_display(), 'color': STATUS_COLOR[self.v1.status]},
{'title': self.v2.get_status_display(), 'color': STATUS_COLOR[self.v2.status]}]
def __tags_change(self):
if self.type == 'unknown':
return None
tags1 = set(t for t, in self.v1.tags.values_list('tag__tag'))
tags2 = set(t for t, in self.v2.tags.values_list('tag__tag'))
if tags1 == tags2:
return None
return ['; '.join(sorted(tags1)), '; '.join(sorted(tags2))]
def __et_func_change(self):
if self.type != 'unsafe' or self.v1.function_id == self.v2.function_id:
return None
return [{
'compare_name': self.v1.function.name, 'compare_desc': self.v1.function.description,
'convert_name': self.v1.function.convert.name, 'convert_desc': self.v1.function.convert.description
}, {
'compare_name': self.v2.function.name, 'compare_desc': self.v2.function.description,
'convert_name': self.v2.function.convert.name, 'convert_desc': self.v2.function.convert.description
}]
def __et_change(self):
if self.type != 'unsafe' or self.v1.error_trace_id == self.v2.error_trace_id:
return None
diff_result = []
f1 = ConvertedTraces.objects.get(id=self.v1.error_trace_id)
f2 = ConvertedTraces.objects.get(id=self.v2.error_trace_id)
with f1.file as fp1, f2.file as fp2:
for line in unified_diff(fp1.read().decode('utf8').split('\n'), fp2.read().decode('utf8').split('\n')):
diff_result.append(line)
return '\n'.join(diff_result)
def __attr_change(self):
attrs1 = set(a_id for a_id, in self.v1.attrs.filter(is_compare=True).values_list('attr_id'))
attrs2 = set(a_id for a_id, in self.v2.attrs.filter(is_compare=True).values_list('attr_id'))
if attrs1 == attrs2:
return None
return [
list((a.attr.name.name, a.attr.value) for a in self.v1.attrs.filter(is_compare=True)
.select_related('attr', 'attr__name').order_by('id')),
list((a.attr.name.name, a.attr.value) for a in self.v2.attrs.filter(is_compare=True)
.select_related('attr', 'attr__name').order_by('id'))
]
def __unknown_func_change(self):
if self.type != 'unknown':
return None
if self.v1.is_regexp == self.v2.is_regexp and self.v1.function == self.v2.function:
return None
return [{'is_regexp': self.v1.is_regexp, 'func': self.v1.function},
{'is_regexp': self.v2.is_regexp, 'func': self.v2.function}]
def __problem_change(self):
if self.type != 'unknown':
return None
if self.v1.problem_pattern == self.v2.problem_pattern and self.v1.link == self.v2.link:
return None
return [{'pattern': self.v1.problem_pattern, 'link': self.v1.link},
{'pattern': self.v2.problem_pattern, 'link': self.v2.link}]
def delete_marks(user, marks_type, mark_ids, report_id=None):
if marks_type == 'safe':
marks = MarkSafe.objects.filter(id__in=mark_ids)
elif marks_type == 'unsafe':
marks = MarkUnsafe.objects.filter(id__in=mark_ids)
elif marks_type == 'unknown':
marks = MarkUnknown.objects.filter(id__in=mark_ids)
else:
raise ValueError('Unsupported marks type: %s' % marks_type)
if not all(MarkAccess(user, mark=mark).can_delete() for mark in marks):
if len(marks) > 1:
raise BridgeException(_("You can't delete one of the selected marks"))
elif len(marks) == 1:
raise BridgeException(_("You don't have an access to delete this mark"))
else:
raise BridgeException(_('Nothing to delete'))
if marks_type == 'safe':
SafeUtils.delete_marks(marks)
reports_model = ReportSafe
elif marks_type == 'unsafe':
UnsafeUtils.delete_marks(marks)
reports_model = ReportUnsafe
else:
UnknownUtils.delete_marks(marks)
reports_model = ReportUnknown
if report_id:
try:
report = reports_model.objects.get(id=report_id)
except ObjectDoesNotExist:
return None
return report.id if not isinstance(report, ReportUnsafe) else report.trace_id
class DownloadTags:
def __init__(self, tags_type):
self._type = tags_type
self._data = self.__get_tags_data()
def __iter__(self):
yield self._data
def file_size(self):
return len(self._data)
def __get_tags_data(self):
if self._type == 'safe':
tags_model = SafeTag
elif self._type == 'unsafe':
tags_model = UnsafeTag
else:
return b''
tags_data = []
for tag in tags_model.objects.all():
tag_data = {'name': tag.tag, 'description': tag.description}
if tag.parent is not None:
tag_data['parent'] = tag.parent.tag
tags_data.append(tag_data)
return json.dumps(tags_data, ensure_ascii=False, sort_keys=True, indent=4).encode('utf8')
class UpdateAssociationCache:
def __init__(self, association, recalc):
self._association = association
self._recalc = recalc
self.__update()
def __update(self):
if isinstance(self._association, MarkSafeReport):
self.__update_cache(SafeUtils)
elif isinstance(self._association, MarkUnsafeReport):
self.__update_cache(UnsafeUtils)
elif isinstance(self._association, MarkUnknownReport) and self._recalc:
UnknownUtils.update_unknowns_cache([self._association.report])
def __update_cache(self, leaf_lib):
if self._recalc:
changes = leaf_lib.UpdateVerdicts({self._association.mark_id: {
self._association.report: {'kind': '=', 'verdict1': self._association.report.verdict}
}}).changes.get(self._association.mark_id, {})
leaf_lib.RecalculateTags(list(changes))
leaf_lib.update_confirmed_cache([self._association.report])
|
py | 1a3533582e8382bf55858eb58c9b285f63f8dd01 | import abc
import functools
import logging
import pkg_resources
import six
import textwrap
from lymph.exceptions import Timeout, LookupFailure
logger = logging.getLogger(__name__)
docstring_format_vars = {k: textwrap.dedent(v).strip() for k, v in six.iteritems({
'COMMON_OPTIONS': """
Common Options:
--config=<file>, -c <file> Load configuration from the given path.
--help, -h Print this help message and exit.
--logfile=<file> Redirect log output to the given file.
--loglevel=<level> Set the log level to one of DEBUG, INFO, WARNING,
ERROR. [default: WARNING]
--version Show the lymph version and exit.
--color Force colored output.
--no-color Disable colored output.
--vars=<file> Load environment variables from the given path.
""",
'INSTANCE_OPTIONS': """
Instance Options:
--isolated, -i Don't register this service.
--port=<port>, -p <port> Use this port for the RPC endpoint.
--ip=<address> Use this IP for all sockets.
--guess-external-ip, -g Guess the public facing IP of this machine and
use it instead of the provided address.
--reload Automatically stop the service when imported
python files in the current working directory
change. The process will be restarted by the
node. Do not use this in production.
""",
})}
def format_docstring(doc):
return textwrap.dedent(doc).format(**docstring_format_vars).strip()
@six.add_metaclass(abc.ABCMeta)
class Command(object):
needs_config = True
short_description = ''
def __init__(self, args, config, terminal):
self.args = args
self.config = config
self.terminal = terminal
@classmethod
def get_help(cls):
return format_docstring(cls.__doc__)
@abc.abstractmethod
def run(self):
raise NotImplementedError
_command_class_cache = None
def get_command_classes():
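    # Discover subcommands registered under the 'lymph.cli' entry point group
    # once, skip duplicate names, and cache the classes for later lookups.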
global _command_class_cache
if _command_class_cache is None:
_command_class_cache, entry_points = {}, {}
for entry_point in pkg_resources.iter_entry_points('lymph.cli'):
name = entry_point.name
if name in entry_points:
logger.error('ignoring duplicate command definition for %s (already installed: %s)', entry_point, entry_points[name])
continue
entry_points[name] = entry_point
cls = entry_point.load()
cls.name = name
_command_class_cache[name] = cls
return _command_class_cache
def get_command_class(name):
return get_command_classes()[name]
def handle_request_errors(func):
@functools.wraps(func)
def decorated(*args, **kwargs):
try:
func(*args, **kwargs)
except LookupFailure as e:
logger.error("The specified service name could not be found: %s: %s" % (type(e).__name__, e))
return 1
except Timeout:
logger.error("The request timed out. Either the service is not available or busy.")
return 1
return decorated
|
py | 1a35337f833e8b4ea1c1434a01904b43c59ec85d | from pymystem3 import Mystem
import re
class Preprocessor:
def __init__(self, mapping):
self.m = Mystem()
self.mapping = mapping
def _filter(self, word):
return any(letter.isalpha() for letter in word)
    def process(self, text):
        if text is None:
            return []
        processed = re.sub('[!"#$%&()*+,-./:;<=>?@[\]^_`{|}~]', ' ', text)
        processed = re.sub('[0-9]', ' ', processed)
        processed = processed.lower()
        processed = self.m.lemmatize(processed)
        processed = list(filter(self._filter, processed))
        return processed
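# Example usage (hypothetical input): Preprocessor(mapping=None).process('Привет, мир 42!')
# should return the lemmatized alphabetic tokens, e.g. ['привет', 'мир'].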
|
py | 1a3533a122ed8825db4a37879558e890c3636d80 | # -*- coding: utf-8 -*-
from __future__ import with_statement
import warnings
from almost import Approximate
from pytest import deprecated_call, raises
from conftest import various_backends
import trueskill as t
from trueskill import (
quality, quality_1vs1, rate, rate_1vs1, Rating, setup, TrueSkill)
warnings.simplefilter('always')
inf = float('inf')
nan = float('nan')
class almost(Approximate):
def normalize(self, value):
if isinstance(value, Rating):
return self.normalize(tuple(value))
elif isinstance(value, list):
try:
if isinstance(value[0][0], Rating):
# flatten transformed ratings
return list(sum(value, ()))
except (TypeError, IndexError):
pass
return super(almost, self).normalize(value)
@classmethod
def wrap(cls, f, *args, **kwargs):
return lambda *a, **k: cls(f(*a, **k), *args, **kwargs)
_rate = almost.wrap(rate)
_rate_1vs1 = almost.wrap(rate_1vs1)
_quality = almost.wrap(quality)
_quality_1vs1 = almost.wrap(quality_1vs1)
# usage
def test_compatibility_with_another_rating_systems():
"""All rating system modules should implement ``rate_1vs1`` and
``quality_1vs1`` to provide shortcuts for 1 vs 1 simple competition games.
"""
r1, r2 = Rating(30, 3), Rating(20, 2)
assert quality_1vs1(r1, r2) == quality([(r1,), (r2,)])
rated = rate([(r1,), (r2,)])
assert rate_1vs1(r1, r2) == (rated[0][0], rated[1][0])
rated = rate([(r1,), (r2,)], [0, 0])
assert rate_1vs1(r1, r2, drawn=True) == (rated[0][0], rated[1][0])
def test_compare_ratings():
assert Rating(1, 2) == Rating(1, 2)
assert Rating(1, 2) != Rating(1, 3)
assert Rating(2, 2) > Rating(1, 2)
assert Rating(3, 2) >= Rating(1, 2)
assert Rating(0, 2) < Rating(1, 2)
assert Rating(-1, 2) <= Rating(1, 2)
def test_rating_to_number():
assert int(Rating(1, 2)) == 1
assert float(Rating(1.1, 2)) == 1.1
assert complex(Rating(1.2, 2)) == 1.2 + 0j
try:
assert long(Rating(1, 2)) == long(1)
except NameError:
# Python 3 doesn't have `long` anymore
pass
def test_unsorted_groups():
t1, t2, t3 = generate_teams([1, 1, 1])
rated = rate([t1, t2, t3], [2, 1, 0])
assert almost(rated) == \
[(18.325, 6.656), (25.000, 6.208), (31.675, 6.656)]
def test_custom_environment():
env = TrueSkill(draw_probability=.50)
t1, t2 = generate_teams([1, 1], env=env)
rated = env.rate([t1, t2])
assert almost(rated) == [(30.267, 7.077), (19.733, 7.077)]
def test_setup_global_environment():
try:
setup(draw_probability=.50)
t1, t2 = generate_teams([1, 1])
rated = rate([t1, t2])
assert almost(rated) == [(30.267, 7.077), (19.733, 7.077)]
finally:
# rollback
setup()
def test_invalid_rating_groups():
env = TrueSkill()
with raises(ValueError):
env.validate_rating_groups([])
with raises(ValueError):
env.validate_rating_groups([()])
# need multiple groups not just one
with raises(ValueError):
env.validate_rating_groups([(Rating(),)])
# empty group is not allowed
with raises(ValueError):
env.validate_rating_groups([(Rating(),), ()])
# all groups should be same structure
with raises(TypeError):
env.validate_rating_groups([(Rating(),), {0: Rating()}])
def test_deprecated_methods():
env = TrueSkill()
r1, r2, r3 = Rating(), Rating(), Rating()
deprecated_call(t.transform_ratings, [(r1,), (r2,), (r3,)])
deprecated_call(t.match_quality, [(r1,), (r2,), (r3,)])
deprecated_call(env.Rating)
deprecated_call(env.transform_ratings, [(r1,), (r2,), (r3,)])
deprecated_call(env.match_quality, [(r1,), (r2,), (r3,)])
deprecated_call(env.rate_1vs1, r1, r2)
deprecated_call(env.quality_1vs1, r1, r2)
deprecated_call(lambda: Rating().exposure)
dyn = TrueSkill(draw_probability=t.dynamic_draw_probability)
deprecated_call(dyn.rate, [(r1,), (r2,)])
def test_deprecated_individual_rating_groups():
r1, r2, r3 = Rating(50, 1), Rating(10, 5), Rating(15, 5)
with raises(TypeError):
deprecated_call(rate, [r1, r2, r3])
with raises(TypeError):
deprecated_call(quality, [r1, r2, r3])
assert t.transform_ratings([r1, r2, r3]) == rate([(r1,), (r2,), (r3,)])
assert t.match_quality([r1, r2, r3]) == quality([(r1,), (r2,), (r3,)])
deprecated_call(t.transform_ratings, [r1, r2, r3])
deprecated_call(t.match_quality, [r1, r2, r3])
def test_rating_tuples():
r1, r2, r3 = Rating(), Rating(), Rating()
rated = rate([(r1, r2), (r3,)])
assert len(rated) == 2
assert isinstance(rated[0], tuple)
assert isinstance(rated[1], tuple)
assert len(rated[0]) == 2
assert len(rated[1]) == 1
assert isinstance(rated[0][0], Rating)
def test_rating_dicts():
class Player(object):
def __init__(self, name, rating, team):
self.name = name
self.rating = rating
self.team = team
p1 = Player('Player A', Rating(), 0)
p2 = Player('Player B', Rating(), 0)
p3 = Player('Player C', Rating(), 1)
rated = rate([{p1: p1.rating, p2: p2.rating}, {p3: p3.rating}])
assert len(rated) == 2
assert isinstance(rated[0], dict)
assert isinstance(rated[1], dict)
assert len(rated[0]) == 2
assert len(rated[1]) == 1
assert p1 in rated[0]
assert p2 in rated[0]
assert p3 in rated[1]
assert p1 not in rated[1]
assert p2 not in rated[1]
assert p3 not in rated[0]
assert isinstance(rated[0][p1], Rating)
p1.rating = rated[p1.team][p1]
p2.rating = rated[p2.team][p2]
p3.rating = rated[p3.team][p3]
def test_dont_use_0_for_min_delta():
with raises(ValueError):
rate([(Rating(),), (Rating(),)], min_delta=0)
def test_list_instead_of_tuple():
r1, r2 = Rating(), Rating()
assert rate([[r1], [r2]]) == rate([(r1,), (r2,)])
assert quality([[r1], [r2]]) == quality([(r1,), (r2,)])
def test_backend():
env = TrueSkill(backend=(NotImplemented, NotImplemented, NotImplemented))
with raises(TypeError):
env.rate_1vs1(Rating(), Rating())
with raises(ValueError):
# '__not_defined__' backend is not defined
TrueSkill(backend='__not_defined__')
# algorithm
def generate_teams(sizes, env=None):
rating_cls = Rating if env is None else env.create_rating
rating_groups = []
for size in sizes:
ratings = []
for x in range(size):
ratings.append(rating_cls())
rating_groups.append(tuple(ratings))
return rating_groups
def generate_individual(size, env=None):
return generate_teams([1] * size, env=env)
@various_backends
def test_n_vs_n():
# 1 vs 1
t1, t2 = generate_teams([1, 1])
assert _quality([t1, t2]) == 0.447
assert _rate([t1, t2]) == [(29.396, 7.171), (20.604, 7.171)]
assert _rate([t1, t2], [0, 0]) == [(25.000, 6.458), (25.000, 6.458)]
# 2 vs 2
t1, t2 = generate_teams([2, 2])
assert _quality([t1, t2]) == 0.447
assert _rate([t1, t2]) == \
[(28.108, 7.774), (28.108, 7.774), (21.892, 7.774), (21.892, 7.774)]
assert _rate([t1, t2], [0, 0]) == \
[(25.000, 7.455), (25.000, 7.455), (25.000, 7.455), (25.000, 7.455)]
# 4 vs 4
t1, t2 = generate_teams([4, 4])
assert _quality([t1, t2]) == 0.447
assert _rate([t1, t2]) == \
[(27.198, 8.059), (27.198, 8.059), (27.198, 8.059), (27.198, 8.059),
(22.802, 8.059), (22.802, 8.059), (22.802, 8.059), (22.802, 8.059)]
@various_backends
def test_1_vs_n():
t1, = generate_teams([1])
# 1 vs 2
t2, = generate_teams([2])
assert _quality([t1, t2]) == 0.135
assert _rate([t1, t2]) == \
[(33.730, 7.317), (16.270, 7.317), (16.270, 7.317)]
assert _rate([t1, t2], [0, 0]) == \
[(31.660, 7.138), (18.340, 7.138), (18.340, 7.138)]
# 1 vs 3
t2, = generate_teams([3])
assert _quality([t1, t2]) == 0.012
assert _rate([t1, t2]) == \
[(36.337, 7.527), (13.663, 7.527), (13.663, 7.527), (13.663, 7.527)]
assert almost(rate([t1, t2], [0, 0]), 2) == \
[(34.990, 7.455), (15.010, 7.455), (15.010, 7.455), (15.010, 7.455)]
# 1 vs 7
t2, = generate_teams([7])
assert _quality([t1, t2]) == 0
assert _rate([t1, t2]) == \
[(40.582, 7.917), (9.418, 7.917), (9.418, 7.917), (9.418, 7.917),
(9.418, 7.917), (9.418, 7.917), (9.418, 7.917), (9.418, 7.917)]
@various_backends
def test_individual():
# 3 players
players = generate_individual(3)
assert _quality(players) == 0.200
assert _rate(players) == \
[(31.675, 6.656), (25.000, 6.208), (18.325, 6.656)]
assert _rate(players, [0] * 3) == \
[(25.000, 5.698), (25.000, 5.695), (25.000, 5.698)]
# 4 players
players = generate_individual(4)
assert _quality(players) == 0.089
assert _rate(players) == \
[(33.207, 6.348), (27.401, 5.787), (22.599, 5.787), (16.793, 6.348)]
# 5 players
players = generate_individual(5)
assert _quality(players) == 0.040
assert _rate(players) == \
[(34.363, 6.136), (29.058, 5.536), (25.000, 5.420), (20.942, 5.536),
(15.637, 6.136)]
# 8 players
players = generate_individual(8)
assert _quality(players) == 0.004
assert _rate(players, [0] * 8) == \
[(25.000, 4.592), (25.000, 4.583), (25.000, 4.576), (25.000, 4.573),
(25.000, 4.573), (25.000, 4.576), (25.000, 4.583), (25.000, 4.592)]
# 16 players
players = generate_individual(16)
assert _rate(players) == \
[(40.539, 5.276), (36.810, 4.711), (34.347, 4.524), (32.336, 4.433),
(30.550, 4.380), (28.893, 4.349), (27.310, 4.330), (25.766, 4.322),
(24.234, 4.322), (22.690, 4.330), (21.107, 4.349), (19.450, 4.380),
(17.664, 4.433), (15.653, 4.524), (13.190, 4.711), (9.461, 5.276)]
@various_backends
def test_multiple_teams():
# 2 vs 4 vs 2
t1 = (Rating(40, 4), Rating(45, 3))
t2 = (Rating(20, 7), Rating(19, 6), Rating(30, 9), Rating(10, 4))
t3 = (Rating(50, 5), Rating(30, 2))
assert _quality([t1, t2, t3]) == 0.367
assert _rate([t1, t2, t3], [0, 1, 1]) == \
[(40.877, 3.840), (45.493, 2.934), (19.609, 6.396), (18.712, 5.625),
(29.353, 7.673), (9.872, 3.891), (48.830, 4.590), (29.813, 1.976)]
# 1 vs 2 vs 1
t1 = (Rating(),)
t2 = (Rating(), Rating())
t3 = (Rating(),)
assert _quality([t1, t2, t3]) == 0.047
@various_backends
def test_upset():
# 1 vs 1
t1, t2 = (Rating(),), (Rating(50, 12.5),)
assert _quality([t1, t2]) == 0.110
assert _rate([t1, t2], [0, 0]) == [(31.662, 7.137), (35.010, 7.910)]
# 2 vs 2
t1 = (Rating(20, 8), Rating(25, 6))
t2 = (Rating(35, 7), Rating(40, 5))
assert _quality([t1, t2]) == 0.084
assert _rate([t1, t2]) == \
[(29.698, 7.008), (30.455, 5.594), (27.575, 6.346), (36.211, 4.768)]
# 3 vs 2
t1 = (Rating(28, 7), Rating(27, 6), Rating(26, 5))
t2 = (Rating(30, 4), Rating(31, 3))
assert _quality([t1, t2]) == 0.254
assert _rate([t1, t2], [0, 1]) == \
[(28.658, 6.770), (27.484, 5.856), (26.336, 4.917), (29.785, 3.958),
(30.879, 2.983)]
assert _rate([t1, t2], [1, 0]) == \
[(21.840, 6.314), (22.474, 5.575), (22.857, 4.757), (32.012, 3.877),
(32.132, 2.949)]
# 8 players
players = [(Rating(10, 8),), (Rating(15, 7),), (Rating(20, 6),),
(Rating(25, 5),), (Rating(30, 4),), (Rating(35, 3),),
(Rating(40, 2),), (Rating(45, 1),)]
assert _quality(players) == 0.000
assert _rate(players) == \
[(35.135, 4.506), (32.585, 4.037), (31.329, 3.756), (30.984, 3.453),
(31.751, 3.064), (34.051, 2.541), (38.263, 1.849), (44.118, 0.983)]
@various_backends
def test_partial_play():
t1, t2 = (Rating(),), (Rating(), Rating())
# each results from C# Skills:
assert rate([t1, t2], weights=[(1,), (1, 1)]) == rate([t1, t2])
assert _rate([t1, t2], weights=[(1,), (1, 1)]) == \
[(33.730, 7.317), (16.270, 7.317), (16.270, 7.317)]
assert _rate([t1, t2], weights=[(0.5,), (0.5, 0.5)]) == \
[(33.939, 7.312), (16.061, 7.312), (16.061, 7.312)]
assert _rate([t1, t2], weights=[(1,), (0, 1)]) == \
[(29.440, 7.166), (25.000, 8.333), (20.560, 7.166)]
assert _rate([t1, t2], weights=[(1,), (0.5, 1)]) == \
[(32.417, 7.056), (21.291, 8.033), (17.583, 7.056)]
# match quality of partial play
t1, t2, t3 = (Rating(),), (Rating(), Rating()), (Rating(),)
assert _quality([t1, t2, t3], [(1,), (0.25, 0.75), (1,)]) == 0.2
assert _quality([t1, t2, t3], [(1,), (0.8, 0.9), (1,)]) == 0.0809
@various_backends
def test_partial_play_with_weights_dict():
t1, t2 = (Rating(),), (Rating(), Rating())
assert rate([t1, t2], weights={(0, 0): 0.5, (1, 0): 0.5, (1, 1): 0.5}) == \
rate([t1, t2], weights=[[0.5], [0.5, 0.5]])
assert rate([t1, t2], weights={(1, 0): 0}) == \
rate([t1, t2], weights=[[1], [0, 1]])
assert rate([t1, t2], weights={(1, 0): 0.5}) == \
rate([t1, t2], weights=[[1], [0.5, 1]])
@various_backends
def test_microsoft_research_example():
# http://research.microsoft.com/en-us/projects/trueskill/details.aspx
alice, bob, chris, darren, eve, fabien, george, hillary = \
Rating(), Rating(), Rating(), Rating(), \
Rating(), Rating(), Rating(), Rating()
_rated = rate([{'alice': alice}, {'bob': bob}, {'chris': chris},
{'darren': darren}, {'eve': eve}, {'fabien': fabien},
{'george': george}, {'hillary': hillary}])
rated = {}
list(map(rated.update, _rated))
assert almost(rated['alice']) == (36.771, 5.749)
assert almost(rated['bob']) == (32.242, 5.133)
assert almost(rated['chris']) == (29.074, 4.943)
assert almost(rated['darren']) == (26.322, 4.874)
assert almost(rated['eve']) == (23.678, 4.874)
assert almost(rated['fabien']) == (20.926, 4.943)
assert almost(rated['george']) == (17.758, 5.133)
assert almost(rated['hillary']) == (13.229, 5.749)
@various_backends
def test_dynamic_draw_probability():
from trueskillhelpers import calc_dynamic_draw_probability as calc
def assert_predictable_draw_probability(r1, r2, drawn=False):
dyn = TrueSkill(draw_probability=t.dynamic_draw_probability)
sta = TrueSkill(draw_probability=calc((r1,), (r2,), dyn))
assert dyn.rate_1vs1(r1, r2, drawn) == sta.rate_1vs1(r1, r2, drawn)
assert_predictable_draw_probability(Rating(100), Rating(10))
assert_predictable_draw_probability(Rating(10), Rating(100))
assert_predictable_draw_probability(Rating(10), Rating(100), drawn=True)
assert_predictable_draw_probability(Rating(25), Rating(25))
assert_predictable_draw_probability(Rating(25), Rating(25), drawn=True)
assert_predictable_draw_probability(Rating(-25), Rating(125))
assert_predictable_draw_probability(Rating(125), Rating(-25))
assert_predictable_draw_probability(Rating(-25), Rating(125), drawn=True)
assert_predictable_draw_probability(Rating(25, 10), Rating(25, 0.1))
# functions
@various_backends
def test_exposure():
env = TrueSkill()
assert env.expose(env.create_rating()) == 0
env = TrueSkill(1000, 200)
assert env.expose(env.create_rating()) == 0
# mathematics
def test_valid_gaussian():
from trueskill.mathematics import Gaussian
with raises(TypeError): # sigma argument is needed
Gaussian(0)
with raises(ValueError): # sigma**2 should be greater than 0
Gaussian(0, 0)
def test_valid_matrix():
from trueskill.mathematics import Matrix
with raises(TypeError): # src must be a list or dict or callable
Matrix(None)
with raises(ValueError): # src must be a rectangular array of numbers
Matrix([])
with raises(ValueError): # src must be a rectangular array of numbers
Matrix([[1, 2, 3], [4, 5]])
with raises(TypeError):
        # A callable src must return an iterable which generates a tuple
# containing coordinate and value
Matrix(lambda: None)
def test_matrix_from_dict():
from trueskill.mathematics import Matrix
mat = Matrix({(0, 0): 1, (4, 9): 1})
assert mat.height == 5
assert mat.width == 10
assert mat[0][0] == 1
assert mat[0][1] == 0
assert mat[4][9] == 1
assert mat[4][8] == 0
def test_matrix_from_item_generator():
from trueskill.mathematics import Matrix
def gen_matrix(height, width):
yield (0, 0), 1
yield (height - 1, width - 1), 1
mat = Matrix(gen_matrix, 5, 10)
assert mat.height == 5
assert mat.width == 10
assert mat[0][0] == 1
assert mat[0][1] == 0
assert mat[4][9] == 1
assert mat[4][8] == 0
with raises(TypeError):
# A callable src must call set_height and set_width if the size is
# non-deterministic
Matrix(gen_matrix)
def gen_and_set_size_matrix(set_height, set_width):
set_height(5)
set_width(10)
return [((0, 0), 1), ((4, 9), 1)]
mat = Matrix(gen_and_set_size_matrix)
assert mat.height == 5
assert mat.width == 10
assert mat[0][0] == 1
assert mat[0][1] == 0
assert mat[4][9] == 1
assert mat[4][8] == 0
def test_matrix_operations():
from trueskill.mathematics import Matrix
assert Matrix([[1, 2], [3, 4]]).inverse() == \
Matrix([[-2.0, 1.0], [1.5, -0.5]])
assert Matrix([[1, 2], [3, 4]]).determinant() == -2
assert Matrix([[1, 2], [3, 4]]).adjugate() == Matrix([[4, -2], [-3, 1]])
with raises(ValueError): # Bad size
assert Matrix([[1, 2], [3, 4]]) * Matrix([[5, 6]])
assert Matrix([[1, 2], [3, 4]]) * Matrix([[5, 6, 7], [8, 9, 10]]) == \
Matrix([[21, 24, 27], [47, 54, 61]])
with raises(ValueError): # Must be same size
Matrix([[1, 2], [3, 4]]) + Matrix([[5, 6, 7], [8, 9, 10]])
assert Matrix([[1, 2], [3, 4]]) + Matrix([[5, 6], [7, 8]]) == \
Matrix([[6, 8], [10, 12]])
# reported bugs
@various_backends
def test_issue3():
"""The `issue #3`_, opened by @youknowone.
    These inputs led to ZeroDivisionError before 0.1.4. Other TrueSkill
    implementations cannot calculate this case either.
.. _issue #3: https://github.com/sublee/trueskill/issues/3
"""
# @konikos's case 1
t1 = (Rating(42.234, 3.728), Rating(43.290, 3.842))
t2 = (Rating(16.667, 0.500), Rating(16.667, 0.500), Rating(16.667, 0.500),
Rating(16.667, 0.500), Rating(16.667, 0.500), Rating(16.667, 0.500),
Rating(16.667, 0.500), Rating(16.667, 0.500), Rating(16.667, 0.500),
Rating(16.667, 0.500), Rating(16.667, 0.500), Rating(16.667, 0.500),
Rating(16.667, 0.500), Rating(16.667, 0.500), Rating(16.667, 0.500))
rate([t1, t2], [6, 5])
# @konikos's case 2
t1 = (Rating(25.000, 0.500), Rating(25.000, 0.500), Rating(25.000, 0.500),
Rating(25.000, 0.500), Rating(33.333, 0.500), Rating(33.333, 0.500),
Rating(33.333, 0.500), Rating(33.333, 0.500), Rating(41.667, 0.500),
Rating(41.667, 0.500), Rating(41.667, 0.500), Rating(41.667, 0.500))
t2 = (Rating(42.234, 3.728), Rating(43.291, 3.842))
rate([t1, t2], [0, 28])
@various_backends(['scipy'])
def test_issue4():
"""The `issue #4`_, opened by @sublee.
    numpy.float64 handles floating-point errors differently. For example, it
    can just emit a RuntimeWarning on an n/0 problem instead of raising
    ZeroDivisionError.
.. _issue #4: https://github.com/sublee/trueskill/issues/4
"""
import numpy
r1, r2 = Rating(105.247, 0.439), Rating(27.030, 0.901)
# make numpy to raise FloatingPointError instead of warning
# RuntimeWarning
old_settings = numpy.seterr(divide='raise')
try:
rate([(r1,), (r2,)])
finally:
numpy.seterr(**old_settings)
@various_backends([None, 'scipy'])
def test_issue5(backend):
"""The `issue #5`_, opened by @warner121.
    This error occurs when a winner has a far lower rating than a loser.
    Plain Python floats cannot calculate the correct result, but mpmath_ can.
    I added a ``backend`` option to the :class:`TrueSkill` class; if it is set
    to 'mpmath', the problem goes away.
    Microsoft's TrueSkill calculator gives N(-273.092, 2.683) and
    N(-75.830, 2.080), while Moserware's C# Skills gives N(NaN, 2.6826) and
    N(NaN, 2.0798). I chose Microsoft's result as the expectation for the
    test suite.
.. _issue #5: https://github.com/sublee/trueskill/issues/5
.. _mpmath: http://mpmath.googlecode.com/
"""
assert _quality_1vs1(Rating(-323.263, 2.965), Rating(-48.441, 2.190)) == 0
with raises(FloatingPointError):
rate_1vs1(Rating(-323.263, 2.965), Rating(-48.441, 2.190))
assert _quality_1vs1(Rating(), Rating(1000)) == 0
with raises(FloatingPointError):
rate_1vs1(Rating(), Rating(1000))
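# A minimal usage sketch of the ``backend`` option described in the docstring
# above (hedged; names follow the public trueskill API) -- constructing an
# environment with the 'mpmath' backend lets such extreme matches be rated
# without FloatingPointError:
#
#     env = TrueSkill(backend='mpmath')
#     env.rate_1vs1(Rating(-323.263, 2.965), Rating(-48.441, 2.190))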
@various_backends(['mpmath'])
def test_issue5_with_mpmath():
_rate_1vs1 = almost.wrap(rate_1vs1, 0)
assert _quality_1vs1(Rating(-323.263, 2.965), Rating(-48.441, 2.190)) == 0
assert _rate_1vs1(Rating(-323.263, 2.965), Rating(-48.441, 2.190)) == \
[(-273.361, 2.683), (-75.683, 2.080)]
assert _quality_1vs1(Rating(), Rating(1000)) == 0
assert _rate_1vs1(Rating(), Rating(1000)) == \
[(415.298, 6.455), (609.702, 6.455)]
@various_backends(['mpmath'])
def test_issue5_with_more_extreme():
"""If the input is more extreme, 'mpmath' backend also made an exception.
But we can avoid the problem with higher precision.
"""
import mpmath
try:
dps = mpmath.mp.dps
with raises(FloatingPointError):
rate_1vs1(Rating(), Rating(1000000))
mpmath.mp.dps = 50
assert almost(rate_1vs1(Rating(), Rating(1000000)), prec=-1) == \
[(400016.896, 6.455), (600008.104, 6.455)]
with raises(FloatingPointError):
rate_1vs1(Rating(), Rating(1000000000000))
mpmath.mp.dps = 100
assert almost(rate_1vs1(Rating(), Rating(1000000000000)), prec=-7) == \
[(400001600117.693, 6.455), (599998399907.307, 6.455)]
finally:
mpmath.mp.dps = dps
def test_issue9_weights_dict_with_object_keys():
"""The `issue #9`_, opened by @.
.. _issue #9: https://github.com/sublee/trueskill/issues/9
"""
class Player(object):
def __init__(self, rating, team):
self.rating = rating
self.team = team
p1 = Player(Rating(), 0)
p2 = Player(Rating(), 0)
p3 = Player(Rating(), 1)
teams = [{p1: p1.rating, p2: p2.rating}, {p3: p3.rating}]
rated = rate(teams, weights={(0, p1): 1, (0, p2): 0.5, (1, p3): 1})
assert rated[0][p1].mu > rated[0][p2].mu
assert rated[0][p1].sigma < rated[0][p2].sigma
assert rated[0][p1].sigma == rated[1][p3].sigma
|
py | 1a3533a66c166e27e70ca40196a0d8bb32eebbd4 | import itertools
import json
import os
import random
import numpy as np
from gym import spaces
from jsonmerge import Merger
# Both forms are needed: the star import supplies the bare Item/Action names
# used by the encoder below, and the module import backs the
# `constants.Item...` references used throughout the rest of this file.
from utils import constants
from utils.constants import *
class PommermanJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.ndarray):
return obj.tolist()
elif isinstance(obj, Item):
return obj.value
elif isinstance(obj, Action):
return obj.value
elif isinstance(obj, np.int64):
return int(obj)
elif hasattr(obj, 'to_json'):
return obj.to_json()
elif isinstance(obj, spaces.Discrete):
return obj.n
elif isinstance(obj, spaces.Tuple):
return [space.n for space in obj.spaces]
return json.JSONEncoder.default(self, obj)
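# Usage sketch: the encoder is passed to the standard library via the ``cls``
# hook, so state dicts mixing numpy arrays, Item/Action enums, and gym spaces
# serialize cleanly (``obs`` here is a hypothetical observation dict):
#
#     payload = json.dumps(obs, cls=PommermanJSONEncoder)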
def make_board(size, num_rigid=0, num_wood=0):
"""Make the random but symmetric board.
The numbers refer to the Item enum in constants. This is:
0 - passage
1 - rigid wall
2 - wood wall
3 - bomb
4 - flames
5 - fog
6 - extra bomb item
7 - extra firepower item
8 - kick
9 - skull
10 - 13: agents
Args:
      size: The dimension of the board, i.e. it is size x size.
num_rigid: The number of rigid walls on the board. This should be even.
num_wood: Similar to above but for wood walls.
Returns:
board: The resulting random board.
"""
def lay_wall(value, num_left, coordinates, board):
x, y = random.sample(coordinates, 1)[0]
coordinates.remove((x, y))
coordinates.remove((y, x))
board[x, y] = value
board[y, x] = value
num_left -= 2
return num_left
def make(size, num_rigid, num_wood):
# Initialize everything as a passage.
board = np.ones(
(size, size)).astype(np.uint8) * constants.Item.Passage.value
# Gather all the possible coordinates to use for walls.
coordinates = set([
(x, y) for x, y in \
itertools.product(range(size), range(size)) \
if x != y])
# Set the players down. Exclude them from coordinates.
# Agent0 is in top left. Agent1 is in bottom left.
# Agent2 is in bottom right. Agent 3 is in top right.
board[1, 1] = constants.Item.Agent0.value
board[size - 2, 1] = constants.Item.Agent1.value
board[size - 2, size - 2] = constants.Item.Agent2.value
board[1, size - 2] = constants.Item.Agent3.value
agents = [(1, 1), (size - 2, 1), (1, size - 2), (size - 2, size - 2)]
for position in agents:
if position in coordinates:
coordinates.remove(position)
# Exclude breathing room on either side of the agents.
for i in range(2, 4):
coordinates.remove((1, i))
coordinates.remove((i, 1))
coordinates.remove((1, size - i - 1))
coordinates.remove((size - i - 1, 1))
coordinates.remove((size - 2, size - i - 1))
coordinates.remove((size - i - 1, size - 2))
coordinates.remove((i, size - 2))
coordinates.remove((size - 2, i))
# Lay down wooden walls providing guaranteed passage to other agents.
wood = constants.Item.Wood.value
for i in range(4, size - 4):
board[1, i] = wood
board[size - i - 1, 1] = wood
board[size - 2, size - i - 1] = wood
board[size - i - 1, size - 2] = wood
coordinates.remove((1, i))
coordinates.remove((size - i - 1, 1))
coordinates.remove((size - 2, size - i - 1))
coordinates.remove((size - i - 1, size - 2))
num_wood -= 4
# Lay down the rigid walls.
while num_rigid > 0:
num_rigid = lay_wall(constants.Item.Rigid.value, num_rigid,
coordinates, board)
# Lay down the wooden walls.
while num_wood > 0:
num_wood = lay_wall(constants.Item.Wood.value, num_wood,
coordinates, board)
return board, agents
assert (num_rigid % 2 == 0)
assert (num_wood % 2 == 0)
board, agents = make(size, num_rigid, num_wood)
# Make sure it's possible to reach most of the passages.
while len(inaccessible_passages(board, agents)) > 4:
board, agents = make(size, num_rigid, num_wood)
return board
def make_items(board, num_items):
item_positions = {}
while num_items > 0:
row = random.randint(0, len(board) - 1)
col = random.randint(0, len(board[0]) - 1)
if board[row, col] != constants.Item.Wood.value:
continue
if (row, col) in item_positions:
continue
item_positions[(row, col)] = random.choice([
constants.Item.ExtraBomb, constants.Item.IncrRange,
constants.Item.Kick
]).value
num_items -= 1
return item_positions
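# A minimal board-construction sketch. The sizes below are assumptions for
# illustration (Pommerman's classic config uses an 11x11 board), not values
# mandated by this module:
#
#     board = make_board(11, num_rigid=36, num_wood=36)
#     items = make_items(board, num_items=20)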
def inaccessible_passages(board, agent_positions):
"""Return inaccessible passages on this board."""
seen = set()
agent_position = agent_positions.pop()
passage_positions = np.where(board == constants.Item.Passage.value)
positions = list(zip(passage_positions[0], passage_positions[1]))
    # Stack-based flood fill from that agent's position; every passage we
    # reach is removed from `positions`, so whatever remains is inaccessible.
    Q = [agent_position]
while Q:
row, col = Q.pop()
for (i, j) in [(1, 0), (-1, 0), (0, 1), (0, -1)]:
next_position = (row + i, col + j)
if next_position in seen:
continue
if not position_on_board(board, next_position):
continue
if position_is_rigid(board, next_position):
continue
if next_position in positions:
positions.pop(positions.index(next_position))
if not len(positions):
return []
seen.add(next_position)
Q.append(next_position)
return positions
def is_valid_direction(board, position, direction, invalid_values=None):
row, col = position
if invalid_values is None:
invalid_values = [item.value for item in \
[constants.Item.Rigid, constants.Item.Wood]]
if constants.Action(direction) == constants.Action.Stop:
return True
if constants.Action(direction) == constants.Action.Up:
return row - 1 >= 0 and board[row - 1][col] not in invalid_values
if constants.Action(direction) == constants.Action.Down:
return row + 1 < len(board) and board[row +
1][col] not in invalid_values
if constants.Action(direction) == constants.Action.Left:
return col - 1 >= 0 and board[row][col - 1] not in invalid_values
if constants.Action(direction) == constants.Action.Right:
return col + 1 < len(board[0]) and \
board[row][col + 1] not in invalid_values
raise constants.InvalidAction("We did not receive a valid direction: ",
direction)
def _position_is_item(board, position, item):
return board[position] == item.value
def position_is_flames(board, position):
return _position_is_item(board, position, constants.Item.Flames)
def position_is_bomb(bombs, position):
"""Check if a given position is a bomb.
We don't check the board because that is an unreliable source. An agent
may be obscuring the bomb on the board.
"""
for bomb in bombs:
if position == bomb.position:
return True
return False
def position_is_powerup(board, position):
powerups = [
constants.Item.ExtraBomb, constants.Item.IncrRange, constants.Item.Kick
]
item_values = [item.value for item in powerups]
return board[position] in item_values
def position_is_wall(board, position):
return position_is_rigid(board, position) or \
position_is_wood(board, position)
def position_is_passage(board, position):
return _position_is_item(board, position, constants.Item.Passage)
def position_is_rigid(board, position):
return _position_is_item(board, position, constants.Item.Rigid)
def position_is_wood(board, position):
return _position_is_item(board, position, constants.Item.Wood)
def position_is_agent(board, position):
return board[position] in [
constants.Item.Agent0.value, constants.Item.Agent1.value,
constants.Item.Agent2.value, constants.Item.Agent3.value
]
def position_is_enemy(board, position, enemies):
return constants.Item(board[position]) in enemies
# TODO: Fix this so that it includes the teammate.
def position_is_passable(board, position, enemies):
    # Passable iff the cell holds an agent, a powerup, or a passage, and the
    # occupant is not an enemy.
    return all([
any([
position_is_agent(board, position),
position_is_powerup(board, position),
position_is_passage(board, position)
]), not position_is_enemy(board, position, enemies)
])
def position_is_fog(board, position):
return _position_is_item(board, position, constants.Item.Fog)
def agent_value(id_):
return getattr(constants.Item, 'Agent%d' % id_).value
def position_in_items(board, position, items):
return any([_position_is_item(board, position, item) for item in items])
def position_on_board(board, position):
x, y = position
return all([len(board) > x, len(board[0]) > y, x >= 0, y >= 0])
def get_direction(position, next_position):
"""Get the direction such that position --> next_position.
We assume that they are adjacent.
"""
x, y = position
nx, ny = next_position
if x == nx:
if y < ny:
return constants.Action.Right
else:
return constants.Action.Left
elif y == ny:
if x < nx:
return constants.Action.Down
else:
return constants.Action.Up
raise constants.InvalidAction(
"We did not receive a valid position transition.")
def get_next_position(position, direction):
x, y = position
if direction == constants.Action.Right:
return (x, y + 1)
elif direction == constants.Action.Left:
return (x, y - 1)
elif direction == constants.Action.Down:
return (x + 1, y)
elif direction == constants.Action.Up:
return (x - 1, y)
elif direction == constants.Action.Stop:
return (x, y)
raise constants.InvalidAction("We did not receive a valid direction.")
def make_np_float(feature):
return np.array(feature).astype(np.float32)
def join_json_state(record_json_dir, agents, finished_at, config):
jsonSchema = {
"properties": {
"state": {
"mergeStrategy": "append"
}
}
}
jsonTemplate = {
"agents": agents,
"finished_at": finished_at,
"config": config,
"state": []
}
merger = Merger(jsonSchema)
base = merger.merge({}, jsonTemplate)
for root, dirs, files in os.walk(record_json_dir):
for name in files:
path = os.path.join(record_json_dir, name)
if name.endswith('.json') and "game_state" not in name:
with open(path) as data_file:
data = json.load(data_file)
head = {"state": [data]}
base = merger.merge(base, head)
with open(os.path.join(record_json_dir, 'game_state.json'), 'w') as f:
f.write(json.dumps(base, sort_keys=True, indent=4))
for root, dirs, files in os.walk(record_json_dir):
for name in files:
if "game_state" not in name:
os.remove(os.path.join(record_json_dir, name))
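# Usage sketch (the path, agent names, timestamp, and config string below are
# hypothetical): after a recorded game, collapse the per-step JSON files into
# a single game_state.json:
#
#     join_json_state('/tmp/record', agents=['agent0', 'agent1'],
#                     finished_at='2018-01-01T00:00:00', config='PommeFFA-v0')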
|
py | 1a35366840dff487fdc870424339669db2e7ea7e | coordinates_0009FF = ((109, 96),
(109, 97), (109, 98), (109, 99), (109, 100), (109, 101), (109, 102), (109, 103), (109, 107), (109, 109), (109, 110), (110, 91), (110, 92), (110, 93), (110, 94), (110, 95), (110, 104), (110, 105), (110, 106), (110, 111), (110, 112), (111, 88), (111, 90), (111, 96), (111, 97), (111, 98), (111, 99), (111, 100), (111, 101), (111, 102), (111, 103), (111, 107), (111, 108), (111, 109), (111, 110), (111, 114), (111, 115), (112, 85), (112, 86), (112, 87), (112, 91), (112, 92), (112, 93), (112, 94), (112, 95), (112, 96), (112, 97), (112, 98), (112, 99), (112, 100), (112, 101), (112, 102), (112, 103), (112, 104), (112, 105), (112, 106), (112, 107), (112, 108), (112, 109), (112, 110), (112, 111), (112, 112), (112, 113), (112, 117), (112, 118), (113, 82), (113, 84), (113, 88), (113, 89), (113, 90), (113, 91), (113, 92), (113, 93),
(113, 94), (113, 95), (113, 96), (113, 97), (113, 98), (113, 99), (113, 100), (113, 101), (113, 102), (113, 103), (113, 104), (113, 105), (113, 106), (113, 107), (113, 108), (113, 109), (113, 110), (113, 111), (113, 112), (113, 113), (113, 114), (113, 120), (114, 80), (114, 81), (114, 85), (114, 86), (114, 87), (114, 88), (114, 89), (114, 90), (114, 91), (114, 92), (114, 93), (114, 94), (114, 95), (114, 96), (114, 97), (114, 98), (114, 99), (114, 100), (114, 101), (114, 102), (114, 103), (114, 104), (114, 105), (114, 110), (114, 111), (114, 115), (114, 116), (114, 117), (114, 122), (115, 77), (115, 82), (115, 83), (115, 84), (115, 85), (115, 86), (115, 87), (115, 88), (115, 89), (115, 90), (115, 91), (115, 92), (115, 93), (115, 94), (115, 95), (115, 96), (115, 97), (115, 98), (115, 99), (115, 100), (115, 101),
(115, 102), (115, 106), (115, 107), (115, 108), (115, 109), (115, 112), (115, 113), (115, 114), (115, 118), (115, 121), (115, 123), (116, 75), (116, 79), (116, 80), (116, 81), (116, 82), (116, 83), (116, 84), (116, 85), (116, 86), (116, 87), (116, 88), (116, 89), (116, 90), (116, 91), (116, 92), (116, 93), (116, 94), (116, 95), (116, 96), (116, 97), (116, 98), (116, 99), (116, 100), (116, 104), (116, 105), (116, 111), (116, 122), (116, 124), (117, 73), (117, 77), (117, 78), (117, 79), (117, 80), (117, 81), (117, 82), (117, 83), (117, 84), (117, 85), (117, 86), (117, 87), (117, 88), (117, 89), (117, 90), (117, 91), (117, 92), (117, 93), (117, 94), (117, 95), (117, 96), (117, 97), (117, 98), (117, 102), (117, 121), (117, 123), (117, 125), (118, 71), (118, 75), (118, 76), (118, 77), (118, 78), (118, 79),
(118, 80), (118, 81), (118, 82), (118, 83), (118, 84), (118, 85), (118, 86), (118, 87), (118, 88), (118, 89), (118, 90), (118, 91), (118, 92), (118, 93), (118, 94), (118, 95), (118, 96), (118, 100), (118, 125), (119, 69), (119, 73), (119, 74), (119, 75), (119, 76), (119, 77), (119, 78), (119, 79), (119, 80), (119, 81), (119, 82), (119, 83), (119, 84), (119, 85), (119, 86), (119, 87), (119, 88), (119, 98), (119, 122), (119, 124), (119, 126), (120, 68), (120, 70), (120, 71), (120, 72), (120, 73), (120, 74), (120, 75), (120, 76), (120, 77), (120, 78), (120, 79), (120, 80), (120, 81), (120, 82), (120, 83), (120, 84), (120, 89), (120, 90), (120, 91), (120, 92), (120, 93), (120, 94), (120, 96), (120, 123), (120, 125), (120, 126), (121, 67), (121, 69), (121, 70), (121, 71), (121, 72), (121, 73),
(121, 74), (121, 75), (121, 76), (121, 77), (121, 78), (121, 79), (121, 80), (121, 81), (121, 85), (121, 86), (121, 87), (121, 88), (121, 124), (121, 127), (122, 66), (122, 68), (122, 69), (122, 70), (122, 71), (122, 72), (122, 73), (122, 74), (122, 75), (122, 76), (122, 77), (122, 78), (122, 82), (122, 84), (122, 125), (122, 128), (123, 66), (123, 68), (123, 69), (123, 70), (123, 71), (123, 72), (123, 73), (123, 74), (123, 79), (123, 81), (123, 125), (123, 128), (124, 65), (124, 67), (124, 68), (124, 69), (124, 70), (124, 71), (124, 75), (124, 76), (124, 77), (124, 78), (124, 126), (124, 129), (125, 64), (125, 66), (125, 67), (125, 68), (125, 72), (125, 73), (125, 127), (125, 130), (126, 64), (126, 66), (126, 69), (126, 70), (126, 127), (126, 130), (127, 63), (127, 65), (127, 66), (127, 68),
(127, 128), (127, 131), (128, 63), (128, 65), (128, 66), (128, 128), (128, 131), (129, 62), (129, 64), (129, 66), (129, 129), (129, 132), (130, 61), (130, 63), (130, 65), (130, 129), (130, 132), (131, 61), (131, 63), (131, 65), (131, 130), (131, 133), (132, 60), (132, 62), (132, 63), (132, 65), (132, 130), (132, 133), (133, 60), (133, 62), (133, 64), (133, 131), (133, 134), (134, 60), (134, 62), (134, 64), (134, 131), (134, 134), (135, 59), (135, 61), (135, 62), (135, 64), (135, 131), (135, 134), (136, 59), (136, 61), (136, 62), (136, 63), (136, 64), (136, 131), (136, 133), (136, 135), (137, 59), (137, 61), (137, 63), (137, 132), (137, 135), (138, 59), (138, 61), (138, 63), (138, 132), (138, 135), (139, 59), (139, 62), (139, 132), (139, 135), (140, 59), (140, 62), (140, 131), (140, 133), (140, 135), (141, 59),
(141, 62), (141, 131), (141, 133), (141, 135), (142, 59), (142, 61), (142, 62), (142, 63), (142, 130), (142, 132), (142, 133), (142, 135), (143, 59), (143, 61), (143, 63), (143, 129), (143, 131), (143, 132), (143, 134), (144, 59), (144, 61), (144, 63), (144, 129), (144, 131), (144, 132), (144, 134), (145, 59), (145, 61), (145, 63), (145, 128), (145, 130), (145, 133), (146, 59), (146, 61), (146, 63), (146, 127), (146, 129), (146, 132), (147, 59), (147, 61), (147, 63), (147, 124), (147, 125), (147, 130), (148, 59), (148, 61), (148, 62), (148, 63), (148, 64), (148, 120), (148, 121), (148, 122), (148, 123), (148, 127), (148, 129), (149, 60), (149, 62), (149, 64), (149, 116), (149, 117), (149, 118), (149, 119), (149, 125), (149, 126), (150, 60), (150, 62), (150, 64), (150, 112), (150, 113), (150, 114), (150, 115), (150, 122),
(150, 123), (150, 124), (151, 60), (151, 62), (151, 64), (151, 109), (151, 111), (151, 116), (151, 120), (151, 121), (152, 60), (152, 62), (152, 63), (152, 64), (152, 65), (152, 107), (152, 112), (152, 117), (152, 118), (152, 119), (153, 61), (153, 63), (153, 65), (153, 105), (153, 113), (153, 114), (153, 116), (154, 61), (154, 63), (154, 65), (154, 103), (154, 107), (154, 108), (154, 109), (154, 110), (154, 111), (154, 112), (155, 62), (155, 64), (155, 65), (155, 101), (155, 103), (155, 104), (155, 105), (156, 63), (156, 66), (156, 98), (156, 99), (156, 102), (157, 64), (157, 66), (157, 95), (157, 96), (157, 97), (158, 65), (158, 67), (158, 92), (158, 94), (158, 99), (159, 67), (159, 88), (159, 89), (159, 90), (159, 91), (159, 95), (159, 96), (160, 68), (160, 84), (160, 85), (160, 86), (160, 87), (160, 92),
(160, 93), (160, 95), (161, 69), (161, 74), (161, 75), (161, 76), (161, 77), (161, 78), (161, 79), (161, 80), (161, 81), (161, 82), (161, 83), (161, 88), (161, 89), (161, 90), (161, 91), (161, 92), (161, 93), (161, 94), (161, 96), (162, 70), (162, 72), (162, 73), (162, 74), (162, 89), (162, 90), (162, 91), (162, 92), (162, 93), (162, 94), (162, 96), (163, 70), (163, 74), (163, 75), (163, 76), (163, 77), (163, 78), (163, 82), (163, 83), (163, 84), (163, 85), (163, 86), (163, 87), (163, 88), (163, 89), (163, 93), (163, 95), (164, 70), (164, 79), (164, 81), (164, 90), (164, 91), (164, 92), (164, 96), (165, 69), (165, 72), (165, 73), (165, 74), (165, 75), (165, 76), (165, 77), (165, 78), (165, 94), (165, 95), (165, 98), (165, 99), (166, 68), (166, 70), (166, 71), (166, 97), (166, 98),
(166, 101), (166, 102), (166, 103), (167, 67), (167, 69), (167, 99), (167, 100), (167, 104), (167, 105), (167, 106), (167, 107), (168, 66), (168, 102), (168, 103), (168, 108), (168, 109), (168, 110), (168, 111), (168, 112), (169, 65), (169, 104), (169, 105), (169, 113), (169, 114), (169, 115), (169, 116), (170, 64), (170, 107), (170, 108), (170, 109), (170, 110), (170, 111), (170, 112), (170, 113), (170, 114), (170, 115), (170, 116), (170, 118), (170, 119), (170, 120), (170, 121), (171, 65), (171, 118), (171, 122), (171, 123), (171, 125), (172, 63), (172, 64), (172, 119), (172, 120), (172, 126), (173, 63), (173, 64), (173, 121), (173, 122), (173, 125), (173, 127), (174, 63), (174, 64), (174, 123), (174, 124), (174, 129), (175, 63), (175, 64), (175, 125), (175, 126), (175, 130), (176, 63), (176, 64), (176, 127), (176, 128), (176, 131),
(177, 62), (177, 64), (177, 129), (177, 132), (178, 61), (178, 64), (178, 131), (178, 133), (179, 60), (179, 62), (179, 64), (179, 131), (179, 133), (180, 60), (180, 62), (180, 64), (180, 131), (180, 132), (180, 134), (181, 60), (181, 62), (181, 64), (181, 132), (181, 134), (182, 60), (182, 62), (182, 64), (182, 132), (182, 135), (183, 60), (183, 62), (183, 64), (183, 132), (183, 135), (184, 60), (184, 63), (184, 132), (184, 134), (184, 136), (185, 60), (185, 63), (185, 132), (185, 134), (185, 136), (186, 60), (186, 63), (186, 131), (186, 133), (186, 135), (187, 61), (187, 64), (187, 131), (187, 133), (187, 135), (188, 61), (188, 64), (188, 131), (188, 133), (188, 135), (189, 61), (189, 64), (189, 131), (189, 134), (190, 62), (190, 64), (190, 130), (190, 132), (190, 134), (191, 62), (191, 65), (191, 130), (191, 133),
(192, 62), (192, 65), (192, 130), (192, 133), (193, 63), (193, 66), (193, 129), (193, 131), (193, 133), (194, 63), (194, 65), (194, 67), (194, 129), (194, 131), (194, 132), (194, 134), (195, 64), (195, 67), (195, 128), (195, 130), (195, 131), (195, 132), (195, 134), (196, 64), (196, 66), (196, 68), (196, 128), (196, 130), (196, 131), (196, 132), (196, 134), (197, 65), (197, 67), (197, 70), (197, 127), (197, 129), (197, 130), (197, 131), (197, 133), (198, 65), (198, 67), (198, 68), (198, 71), (198, 127), (198, 129), (198, 130), (198, 132), (199, 66), (199, 68), (199, 69), (199, 70), (199, 73), (199, 126), (199, 128), (199, 129), (199, 131), (200, 66), (200, 68), (200, 69), (200, 70), (200, 71), (200, 74), (200, 75), (200, 125), (200, 127), (200, 128), (201, 67), (201, 69), (201, 70), (201, 71), (201, 72), (201, 73),
(201, 76), (201, 77), (201, 78), (201, 124), (201, 126), (201, 127), (201, 128), (201, 130), (202, 70), (202, 71), (202, 72), (202, 73), (202, 74), (202, 75), (202, 80), (202, 81), (202, 82), (202, 83), (202, 84), (202, 126), (202, 127), (202, 129), (203, 68), (203, 71), (203, 72), (203, 73), (203, 74), (203, 75), (203, 76), (203, 77), (203, 78), (203, 79), (203, 85), (203, 86), (203, 87), (203, 88), (203, 89), (203, 90), (203, 91), (203, 92), (203, 93), (203, 94), (203, 95), (203, 96), (203, 97), (203, 98), (203, 99), (203, 100), (203, 101), (203, 102), (203, 103), (203, 104), (203, 105), (203, 106), (203, 107), (203, 122), (203, 125), (203, 126), (203, 128), (204, 69), (204, 72), (204, 73), (204, 74), (204, 75), (204, 76), (204, 77), (204, 78), (204, 79), (204, 80), (204, 81), (204, 82), (204, 83),
(204, 84), (204, 108), (204, 109), (204, 110), (204, 111), (204, 121), (204, 124), (204, 125), (204, 127), (205, 71), (205, 74), (205, 75), (205, 76), (205, 77), (205, 78), (205, 79), (205, 80), (205, 81), (205, 82), (205, 83), (205, 84), (205, 85), (205, 86), (205, 87), (205, 88), (205, 89), (205, 90), (205, 91), (205, 92), (205, 93), (205, 94), (205, 95), (205, 96), (205, 97), (205, 98), (205, 99), (205, 100), (205, 101), (205, 102), (205, 103), (205, 104), (205, 105), (205, 106), (205, 107), (205, 112), (205, 113), (205, 114), (205, 115), (205, 116), (205, 117), (205, 118), (205, 119), (205, 122), (205, 123), (205, 124), (205, 126), (206, 73), (206, 77), (206, 78), (206, 79), (206, 80), (206, 81), (206, 82), (206, 83), (206, 84), (206, 85), (206, 86), (206, 87), (206, 88), (206, 89), (206, 90), (206, 91),
(206, 92), (206, 93), (206, 94), (206, 95), (206, 96), (206, 97), (206, 98), (206, 99), (206, 100), (206, 101), (206, 102), (206, 103), (206, 104), (206, 105), (206, 106), (206, 107), (206, 108), (206, 109), (206, 110), (206, 111), (206, 120), (206, 121), (206, 122), (206, 123), (206, 125), (207, 75), (207, 79), (207, 80), (207, 81), (207, 82), (207, 83), (207, 84), (207, 85), (207, 86), (207, 87), (207, 88), (207, 89), (207, 90), (207, 91), (207, 92), (207, 93), (207, 94), (207, 95), (207, 96), (207, 97), (207, 98), (207, 99), (207, 100), (207, 101), (207, 102), (207, 103), (207, 104), (207, 105), (207, 106), (207, 107), (207, 108), (207, 109), (207, 110), (207, 111), (207, 112), (207, 113), (207, 114), (207, 115), (207, 116), (207, 117), (207, 118), (207, 119), (207, 120), (207, 121), (207, 122), (207, 124), (208, 77),
(208, 78), (208, 81), (208, 82), (208, 83), (208, 84), (208, 85), (208, 86), (208, 87), (208, 88), (208, 89), (208, 90), (208, 91), (208, 92), (208, 93), (208, 94), (208, 95), (208, 96), (208, 97), (208, 98), (208, 99), (208, 100), (208, 101), (208, 102), (208, 103), (208, 104), (208, 105), (208, 106), (208, 107), (208, 108), (208, 109), (208, 110), (208, 111), (208, 112), (208, 113), (208, 114), (208, 115), (208, 116), (208, 117), (208, 118), (208, 119), (208, 120), (208, 121), (208, 123), (209, 79), (209, 80), (209, 83), (209, 84), (209, 85), (209, 86), (209, 87), (209, 88), (209, 89), (209, 90), (209, 91), (209, 92), (209, 93), (209, 94), (209, 95), (209, 96), (209, 97), (209, 98), (209, 99), (209, 100), (209, 101), (209, 102), (209, 103), (209, 104), (209, 105), (209, 106), (209, 107), (209, 108), (209, 109),
(209, 110), (209, 111), (209, 112), (209, 113), (209, 114), (209, 115), (209, 116), (209, 117), (209, 118), (209, 119), (209, 120), (209, 122), (210, 82), (210, 85), (210, 86), (210, 87), (210, 88), (210, 89), (210, 90), (210, 91), (210, 92), (210, 93), (210, 94), (210, 95), (210, 96), (210, 97), (210, 98), (210, 99), (210, 100), (210, 101), (210, 102), (210, 103), (210, 104), (210, 105), (210, 106), (210, 107), (210, 108), (210, 109), (210, 110), (210, 111), (210, 112), (210, 113), (210, 117), (210, 118), (210, 121), (211, 83), (211, 84), (211, 87), (211, 88), (211, 89), (211, 90), (211, 91), (211, 92), (211, 93), (211, 94), (211, 95), (211, 96), (211, 97), (211, 98), (211, 99), (211, 100), (211, 101), (211, 102), (211, 103), (211, 104), (211, 105), (211, 106), (211, 107), (211, 108), (211, 109), (211, 110), (211, 111),
(211, 112), (211, 115), (211, 116), (211, 120), (212, 85), (212, 88), (212, 89), (212, 90), (212, 91), (212, 92), (212, 93), (212, 94), (212, 95), (212, 96), (212, 97), (212, 98), (212, 99), (212, 100), (212, 101), (212, 102), (212, 103), (212, 104), (212, 105), (212, 106), (212, 107), (212, 108), (212, 109), (212, 110), (212, 111), (212, 113), (212, 117), (212, 119), (213, 87), (213, 90), (213, 91), (213, 92), (213, 93), (213, 94), (213, 95), (213, 96), (213, 97), (213, 98), (213, 99), (213, 100), (213, 101), (213, 102), (213, 103), (213, 104), (213, 105), (213, 106), (213, 107), (213, 108), (213, 109), (213, 110), (213, 112), (214, 88), (214, 92), (214, 93), (214, 94), (214, 95), (214, 96), (214, 97), (214, 98), (214, 99), (214, 100), (214, 101), (214, 102), (214, 103), (214, 104), (214, 105), (214, 106), (214, 107),
(214, 108), (214, 111), (215, 90), (215, 93), (215, 94), (215, 95), (215, 96), (215, 97), (215, 98), (215, 99), (215, 100), (215, 101), (215, 102), (215, 103), (215, 104), (215, 105), (215, 106), (215, 107), (215, 110), (216, 92), (216, 95), (216, 96), (216, 97), (216, 98), (216, 99), (216, 100), (216, 101), (216, 102), (216, 103), (216, 104), (216, 105), (216, 106), (216, 109), (217, 93), (217, 94), (217, 98), (217, 99), (217, 100), (217, 101), (217, 102), (217, 103), (217, 104), (217, 107), (218, 96), (218, 97), (218, 106), (219, 99), (219, 100), (219, 101), (219, 102), (219, 104), )
coordinates_42FF00 = ((117, 106),
(117, 108), (117, 113), (117, 114), (117, 115), (117, 116), (117, 118), (118, 104), (118, 109), (118, 110), (118, 111), (118, 119), (119, 102), (119, 106), (119, 107), (119, 108), (119, 109), (119, 112), (119, 113), (119, 114), (119, 115), (119, 116), (119, 117), (119, 118), (119, 120), (120, 100), (120, 104), (120, 105), (120, 106), (120, 107), (120, 108), (120, 109), (120, 110), (120, 111), (120, 112), (120, 113), (120, 114), (120, 115), (120, 116), (120, 117), (120, 118), (120, 119), (120, 121), (121, 98), (121, 102), (121, 103), (121, 104), (121, 105), (121, 106), (121, 107), (121, 108), (121, 109), (121, 110), (121, 111), (121, 112), (121, 113), (121, 114), (121, 115), (121, 116), (121, 117), (121, 118), (121, 119), (122, 92), (122, 93), (122, 94), (122, 95), (122, 96), (122, 100), (122, 101), (122, 102), (122, 103), (122, 104), (122, 105),
(122, 106), (122, 107), (122, 108), (122, 109), (122, 110), (122, 111), (122, 112), (122, 113), (122, 114), (122, 115), (122, 116), (122, 117), (122, 118), (122, 119), (122, 120), (122, 122), (123, 86), (123, 87), (123, 88), (123, 89), (123, 90), (123, 91), (123, 97), (123, 98), (123, 99), (123, 100), (123, 101), (123, 102), (123, 103), (123, 104), (123, 105), (123, 106), (123, 107), (123, 108), (123, 109), (123, 110), (123, 111), (123, 112), (123, 113), (123, 114), (123, 115), (123, 116), (123, 117), (123, 118), (123, 119), (123, 120), (123, 121), (123, 123), (124, 83), (124, 84), (124, 92), (124, 93), (124, 94), (124, 95), (124, 96), (124, 97), (124, 98), (124, 99), (124, 100), (124, 101), (124, 102), (124, 103), (124, 104), (124, 105), (124, 106), (124, 107), (124, 108), (124, 109), (124, 110), (124, 111), (124, 112), (124, 113),
(124, 114), (124, 115), (124, 116), (124, 117), (124, 118), (124, 119), (124, 120), (124, 121), (124, 122), (124, 124), (125, 80), (125, 81), (125, 82), (125, 85), (125, 86), (125, 87), (125, 88), (125, 89), (125, 90), (125, 91), (125, 92), (125, 93), (125, 94), (125, 95), (125, 96), (125, 97), (125, 98), (125, 99), (125, 100), (125, 101), (125, 102), (125, 103), (125, 104), (125, 105), (125, 106), (125, 107), (125, 108), (125, 109), (125, 110), (125, 111), (125, 112), (125, 113), (125, 114), (125, 115), (125, 116), (125, 117), (125, 118), (125, 119), (125, 120), (125, 121), (125, 122), (125, 124), (126, 76), (126, 77), (126, 78), (126, 79), (126, 83), (126, 84), (126, 85), (126, 86), (126, 87), (126, 88), (126, 89), (126, 90), (126, 91), (126, 92), (126, 93), (126, 94), (126, 95), (126, 96), (126, 97), (126, 98),
(126, 99), (126, 100), (126, 101), (126, 102), (126, 103), (126, 104), (126, 105), (126, 106), (126, 107), (126, 108), (126, 109), (126, 110), (126, 111), (126, 112), (126, 113), (126, 114), (126, 115), (126, 116), (126, 117), (126, 118), (126, 119), (126, 120), (126, 121), (126, 122), (126, 123), (126, 125), (127, 72), (127, 73), (127, 74), (127, 75), (127, 80), (127, 81), (127, 82), (127, 83), (127, 84), (127, 85), (127, 86), (127, 87), (127, 88), (127, 89), (127, 90), (127, 91), (127, 92), (127, 93), (127, 94), (127, 95), (127, 96), (127, 97), (127, 98), (127, 99), (127, 100), (127, 101), (127, 102), (127, 103), (127, 104), (127, 105), (127, 106), (127, 107), (127, 108), (127, 109), (127, 110), (127, 111), (127, 112), (127, 113), (127, 114), (127, 115), (127, 116), (127, 117), (127, 118), (127, 119), (127, 120), (127, 121),
(127, 122), (127, 123), (127, 125), (128, 70), (128, 71), (128, 76), (128, 77), (128, 78), (128, 79), (128, 80), (128, 81), (128, 82), (128, 83), (128, 84), (128, 85), (128, 86), (128, 87), (128, 88), (128, 89), (128, 90), (128, 91), (128, 92), (128, 93), (128, 94), (128, 95), (128, 96), (128, 97), (128, 98), (128, 99), (128, 100), (128, 101), (128, 102), (128, 103), (128, 104), (128, 105), (128, 106), (128, 107), (128, 108), (128, 109), (128, 110), (128, 111), (128, 112), (128, 113), (128, 114), (128, 115), (128, 116), (128, 117), (128, 118), (128, 119), (128, 120), (128, 121), (128, 122), (128, 123), (128, 124), (128, 126), (129, 68), (129, 72), (129, 73), (129, 74), (129, 75), (129, 76), (129, 77), (129, 78), (129, 79), (129, 80), (129, 81), (129, 82), (129, 83), (129, 84), (129, 85), (129, 86), (129, 87),
(129, 88), (129, 89), (129, 90), (129, 91), (129, 92), (129, 93), (129, 94), (129, 95), (129, 96), (129, 97), (129, 98), (129, 99), (129, 100), (129, 101), (129, 102), (129, 103), (129, 104), (129, 105), (129, 106), (129, 107), (129, 108), (129, 109), (129, 110), (129, 111), (129, 112), (129, 113), (129, 114), (129, 115), (129, 116), (129, 117), (129, 118), (129, 119), (129, 120), (129, 121), (129, 122), (129, 123), (129, 124), (129, 126), (130, 68), (130, 70), (130, 71), (130, 72), (130, 73), (130, 74), (130, 75), (130, 76), (130, 77), (130, 78), (130, 79), (130, 80), (130, 81), (130, 82), (130, 83), (130, 84), (130, 85), (130, 86), (130, 87), (130, 88), (130, 89), (130, 90), (130, 91), (130, 92), (130, 93), (130, 94), (130, 95), (130, 96), (130, 97), (130, 98), (130, 99), (130, 100), (130, 101), (130, 102),
(130, 103), (130, 104), (130, 105), (130, 106), (130, 107), (130, 108), (130, 109), (130, 110), (130, 111), (130, 112), (130, 113), (130, 114), (130, 115), (130, 116), (130, 117), (130, 118), (130, 119), (130, 120), (130, 121), (130, 122), (130, 123), (130, 124), (130, 125), (130, 127), (131, 67), (131, 69), (131, 70), (131, 71), (131, 72), (131, 73), (131, 74), (131, 75), (131, 76), (131, 77), (131, 78), (131, 79), (131, 80), (131, 81), (131, 82), (131, 83), (131, 84), (131, 85), (131, 86), (131, 87), (131, 88), (131, 89), (131, 90), (131, 91), (131, 92), (131, 93), (131, 94), (131, 95), (131, 96), (131, 97), (131, 98), (131, 99), (131, 100), (131, 101), (131, 102), (131, 103), (131, 104), (131, 105), (131, 106), (131, 107), (131, 108), (131, 109), (131, 110), (131, 111), (131, 112), (131, 113), (131, 114), (131, 115),
(131, 116), (131, 117), (131, 118), (131, 119), (131, 120), (131, 121), (131, 122), (131, 123), (131, 124), (131, 125), (131, 127), (132, 67), (132, 69), (132, 70), (132, 71), (132, 72), (132, 73), (132, 74), (132, 75), (132, 76), (132, 77), (132, 78), (132, 79), (132, 80), (132, 81), (132, 82), (132, 83), (132, 84), (132, 85), (132, 86), (132, 87), (132, 88), (132, 89), (132, 90), (132, 91), (132, 92), (132, 93), (132, 94), (132, 95), (132, 96), (132, 97), (132, 98), (132, 99), (132, 100), (132, 101), (132, 102), (132, 103), (132, 104), (132, 105), (132, 106), (132, 107), (132, 108), (132, 109), (132, 110), (132, 111), (132, 112), (132, 113), (132, 114), (132, 115), (132, 116), (132, 117), (132, 118), (132, 119), (132, 120), (132, 121), (132, 122), (132, 123), (132, 124), (132, 125), (132, 126), (132, 128), (133, 67),
(133, 69), (133, 70), (133, 71), (133, 72), (133, 73), (133, 74), (133, 75), (133, 76), (133, 77), (133, 78), (133, 79), (133, 80), (133, 81), (133, 82), (133, 83), (133, 84), (133, 85), (133, 86), (133, 87), (133, 88), (133, 89), (133, 90), (133, 91), (133, 92), (133, 93), (133, 94), (133, 95), (133, 96), (133, 97), (133, 98), (133, 99), (133, 100), (133, 101), (133, 102), (133, 103), (133, 104), (133, 105), (133, 106), (133, 107), (133, 108), (133, 109), (133, 110), (133, 111), (133, 112), (133, 113), (133, 114), (133, 115), (133, 116), (133, 117), (133, 118), (133, 119), (133, 120), (133, 121), (133, 122), (133, 123), (133, 124), (133, 125), (133, 126), (133, 128), (134, 66), (134, 68), (134, 69), (134, 70), (134, 71), (134, 72), (134, 73), (134, 74), (134, 75), (134, 76), (134, 77), (134, 78), (134, 79),
(134, 80), (134, 81), (134, 82), (134, 83), (134, 84), (134, 85), (134, 86), (134, 87), (134, 88), (134, 89), (134, 90), (134, 91), (134, 92), (134, 93), (134, 94), (134, 95), (134, 96), (134, 97), (134, 98), (134, 99), (134, 100), (134, 101), (134, 102), (134, 103), (134, 104), (134, 105), (134, 106), (134, 107), (134, 108), (134, 109), (134, 110), (134, 111), (134, 112), (134, 113), (134, 114), (134, 115), (134, 116), (134, 117), (134, 118), (134, 119), (134, 120), (134, 121), (134, 122), (134, 123), (134, 124), (134, 125), (134, 126), (134, 127), (134, 129), (135, 66), (135, 68), (135, 69), (135, 70), (135, 71), (135, 72), (135, 73), (135, 74), (135, 75), (135, 76), (135, 77), (135, 78), (135, 79), (135, 80), (135, 81), (135, 82), (135, 83), (135, 84), (135, 85), (135, 86), (135, 87), (135, 88), (135, 89),
(135, 90), (135, 91), (135, 92), (135, 93), (135, 94), (135, 95), (135, 96), (135, 97), (135, 98), (135, 99), (135, 100), (135, 101), (135, 102), (135, 103), (135, 104), (135, 105), (135, 106), (135, 107), (135, 108), (135, 109), (135, 110), (135, 111), (135, 112), (135, 113), (135, 114), (135, 115), (135, 116), (135, 117), (135, 118), (135, 119), (135, 120), (135, 121), (135, 122), (135, 123), (135, 124), (135, 125), (135, 126), (135, 127), (135, 129), (136, 66), (136, 68), (136, 69), (136, 70), (136, 71), (136, 72), (136, 73), (136, 74), (136, 75), (136, 76), (136, 77), (136, 78), (136, 79), (136, 80), (136, 81), (136, 82), (136, 83), (136, 84), (136, 85), (136, 86), (136, 87), (136, 88), (136, 89), (136, 90), (136, 91), (136, 92), (136, 93), (136, 94), (136, 95), (136, 96), (136, 97), (136, 98), (136, 99),
(136, 100), (136, 101), (136, 102), (136, 103), (136, 104), (136, 105), (136, 106), (136, 107), (136, 108), (136, 109), (136, 110), (136, 111), (136, 112), (136, 113), (136, 114), (136, 115), (136, 116), (136, 117), (136, 118), (136, 119), (136, 120), (136, 121), (136, 122), (136, 123), (136, 124), (136, 125), (136, 126), (136, 127), (136, 129), (137, 66), (137, 68), (137, 69), (137, 70), (137, 71), (137, 72), (137, 73), (137, 74), (137, 75), (137, 76), (137, 77), (137, 78), (137, 79), (137, 80), (137, 81), (137, 82), (137, 83), (137, 84), (137, 85), (137, 86), (137, 87), (137, 88), (137, 89), (137, 90), (137, 91), (137, 92), (137, 93), (137, 94), (137, 95), (137, 96), (137, 97), (137, 98), (137, 99), (137, 100), (137, 101), (137, 102), (137, 103), (137, 104), (137, 105), (137, 106), (137, 107), (137, 108), (137, 109),
(137, 110), (137, 111), (137, 112), (137, 113), (137, 114), (137, 115), (137, 116), (137, 117), (137, 118), (137, 119), (137, 120), (137, 121), (137, 122), (137, 123), (137, 124), (137, 125), (137, 126), (137, 127), (137, 129), (138, 65), (138, 67), (138, 68), (138, 69), (138, 70), (138, 71), (138, 72), (138, 73), (138, 74), (138, 75), (138, 76), (138, 77), (138, 78), (138, 79), (138, 80), (138, 81), (138, 82), (138, 83), (138, 84), (138, 85), (138, 86), (138, 87), (138, 88), (138, 89), (138, 90), (138, 91), (138, 92), (138, 93), (138, 94), (138, 95), (138, 96), (138, 97), (138, 98), (138, 99), (138, 100), (138, 101), (138, 102), (138, 103), (138, 104), (138, 105), (138, 106), (138, 107), (138, 108), (138, 109), (138, 110), (138, 111), (138, 112), (138, 113), (138, 114), (138, 115), (138, 116), (138, 117), (138, 118),
(138, 119), (138, 120), (138, 121), (138, 122), (138, 123), (138, 124), (138, 125), (138, 126), (138, 127), (138, 129), (139, 65), (139, 67), (139, 68), (139, 69), (139, 70), (139, 71), (139, 72), (139, 73), (139, 74), (139, 75), (139, 76), (139, 77), (139, 78), (139, 79), (139, 80), (139, 81), (139, 82), (139, 83), (139, 84), (139, 85), (139, 86), (139, 87), (139, 88), (139, 89), (139, 90), (139, 91), (139, 92), (139, 93), (139, 94), (139, 95), (139, 96), (139, 97), (139, 98), (139, 99), (139, 100), (139, 101), (139, 102), (139, 103), (139, 104), (139, 105), (139, 106), (139, 107), (139, 108), (139, 109), (139, 110), (139, 111), (139, 112), (139, 113), (139, 114), (139, 115), (139, 116), (139, 117), (139, 118), (139, 119), (139, 120), (139, 121), (139, 122), (139, 123), (139, 124), (139, 125), (139, 126), (139, 127),
(139, 129), (140, 65), (140, 67), (140, 68), (140, 69), (140, 70), (140, 71), (140, 72), (140, 73), (140, 74), (140, 75), (140, 76), (140, 77), (140, 78), (140, 79), (140, 80), (140, 81), (140, 82), (140, 83), (140, 84), (140, 85), (140, 86), (140, 87), (140, 88), (140, 89), (140, 90), (140, 91), (140, 92), (140, 93), (140, 94), (140, 95), (140, 96), (140, 97), (140, 98), (140, 99), (140, 100), (140, 101), (140, 102), (140, 103), (140, 104), (140, 105), (140, 106), (140, 107), (140, 108), (140, 109), (140, 110), (140, 111), (140, 112), (140, 113), (140, 114), (140, 115), (140, 116), (140, 117), (140, 118), (140, 119), (140, 120), (140, 121), (140, 122), (140, 123), (140, 124), (140, 125), (140, 126), (140, 127), (140, 129), (141, 65), (141, 67), (141, 68), (141, 69), (141, 70), (141, 71), (141, 72), (141, 73),
(141, 74), (141, 75), (141, 76), (141, 77), (141, 78), (141, 79), (141, 80), (141, 81), (141, 82), (141, 83), (141, 84), (141, 85), (141, 86), (141, 87), (141, 88), (141, 89), (141, 90), (141, 91), (141, 92), (141, 93), (141, 94), (141, 95), (141, 96), (141, 97), (141, 98), (141, 99), (141, 100), (141, 101), (141, 102), (141, 103), (141, 104), (141, 105), (141, 106), (141, 107), (141, 108), (141, 109), (141, 110), (141, 111), (141, 112), (141, 113), (141, 114), (141, 115), (141, 116), (141, 117), (141, 118), (141, 119), (141, 120), (141, 121), (141, 122), (141, 123), (141, 124), (141, 125), (141, 126), (141, 128), (142, 65), (142, 67), (142, 68), (142, 69), (142, 70), (142, 71), (142, 72), (142, 73), (142, 74), (142, 75), (142, 76), (142, 77), (142, 78), (142, 79), (142, 80), (142, 81), (142, 82), (142, 83),
(142, 84), (142, 85), (142, 86), (142, 87), (142, 88), (142, 89), (142, 90), (142, 91), (142, 92), (142, 93), (142, 94), (142, 95), (142, 96), (142, 97), (142, 98), (142, 99), (142, 100), (142, 101), (142, 102), (142, 103), (142, 104), (142, 105), (142, 106), (142, 107), (142, 108), (142, 109), (142, 110), (142, 111), (142, 112), (142, 113), (142, 114), (142, 115), (142, 116), (142, 117), (142, 118), (142, 119), (142, 120), (142, 121), (142, 122), (142, 123), (142, 124), (142, 125), (142, 126), (142, 128), (143, 65), (143, 67), (143, 68), (143, 69), (143, 70), (143, 71), (143, 72), (143, 73), (143, 74), (143, 75), (143, 76), (143, 77), (143, 78), (143, 79), (143, 80), (143, 81), (143, 82), (143, 83), (143, 84), (143, 85), (143, 86), (143, 87), (143, 88), (143, 89), (143, 90), (143, 91), (143, 92), (143, 93),
(143, 94), (143, 95), (143, 96), (143, 97), (143, 98), (143, 99), (143, 100), (143, 101), (143, 102), (143, 103), (143, 104), (143, 105), (143, 106), (143, 107), (143, 108), (143, 109), (143, 110), (143, 111), (143, 112), (143, 113), (143, 114), (143, 115), (143, 116), (143, 117), (143, 118), (143, 119), (143, 120), (143, 121), (143, 122), (143, 123), (143, 124), (143, 125), (143, 127), (144, 65), (144, 67), (144, 68), (144, 69), (144, 70), (144, 71), (144, 72), (144, 73), (144, 74), (144, 75), (144, 76), (144, 77), (144, 78), (144, 79), (144, 80), (144, 81), (144, 82), (144, 83), (144, 84), (144, 85), (144, 86), (144, 87), (144, 88), (144, 89), (144, 90), (144, 91), (144, 92), (144, 93), (144, 94), (144, 95), (144, 96), (144, 97), (144, 98), (144, 99), (144, 100), (144, 101), (144, 102), (144, 103), (144, 104),
(144, 105), (144, 106), (144, 107), (144, 108), (144, 109), (144, 110), (144, 111), (144, 112), (144, 113), (144, 114), (144, 115), (144, 116), (144, 117), (144, 118), (144, 119), (144, 120), (144, 121), (144, 122), (144, 126), (145, 65), (145, 67), (145, 68), (145, 69), (145, 70), (145, 71), (145, 72), (145, 73), (145, 74), (145, 75), (145, 76), (145, 77), (145, 78), (145, 79), (145, 80), (145, 81), (145, 82), (145, 83), (145, 84), (145, 85), (145, 86), (145, 87), (145, 88), (145, 89), (145, 90), (145, 91), (145, 92), (145, 93), (145, 94), (145, 95), (145, 96), (145, 97), (145, 98), (145, 99), (145, 100), (145, 101), (145, 102), (145, 103), (145, 104), (145, 105), (145, 106), (145, 107), (145, 108), (145, 109), (145, 110), (145, 111), (145, 112), (145, 113), (145, 114), (145, 115), (145, 116), (145, 117), (145, 118),
(145, 123), (145, 125), (146, 65), (146, 67), (146, 68), (146, 69), (146, 70), (146, 71), (146, 72), (146, 73), (146, 74), (146, 75), (146, 76), (146, 77), (146, 78), (146, 79), (146, 80), (146, 81), (146, 82), (146, 83), (146, 84), (146, 85), (146, 86), (146, 87), (146, 88), (146, 89), (146, 90), (146, 91), (146, 92), (146, 93), (146, 94), (146, 95), (146, 96), (146, 97), (146, 98), (146, 99), (146, 100), (146, 101), (146, 102), (146, 103), (146, 104), (146, 105), (146, 106), (146, 107), (146, 108), (146, 109), (146, 110), (146, 111), (146, 112), (146, 113), (146, 114), (146, 119), (146, 120), (146, 121), (146, 122), (147, 66), (147, 68), (147, 69), (147, 70), (147, 71), (147, 72), (147, 73), (147, 74), (147, 75), (147, 76), (147, 77), (147, 78), (147, 79), (147, 80), (147, 81), (147, 82), (147, 83),
(147, 84), (147, 85), (147, 86), (147, 87), (147, 88), (147, 89), (147, 90), (147, 91), (147, 92), (147, 93), (147, 94), (147, 95), (147, 96), (147, 97), (147, 98), (147, 99), (147, 100), (147, 101), (147, 102), (147, 103), (147, 104), (147, 105), (147, 106), (147, 107), (147, 108), (147, 109), (147, 110), (147, 115), (147, 116), (147, 117), (147, 118), (148, 66), (148, 68), (148, 69), (148, 70), (148, 71), (148, 72), (148, 73), (148, 74), (148, 75), (148, 76), (148, 77), (148, 78), (148, 79), (148, 80), (148, 81), (148, 82), (148, 83), (148, 84), (148, 85), (148, 86), (148, 87), (148, 88), (148, 89), (148, 90), (148, 91), (148, 92), (148, 93), (148, 94), (148, 95), (148, 96), (148, 97), (148, 98), (148, 99), (148, 100), (148, 101), (148, 102), (148, 103), (148, 104), (148, 105), (148, 106), (148, 107),
(148, 108), (148, 111), (148, 112), (148, 113), (148, 114), (149, 66), (149, 68), (149, 69), (149, 70), (149, 71), (149, 72), (149, 73), (149, 74), (149, 75), (149, 76), (149, 77), (149, 78), (149, 79), (149, 80), (149, 81), (149, 82), (149, 83), (149, 84), (149, 85), (149, 86), (149, 87), (149, 88), (149, 89), (149, 90), (149, 91), (149, 92), (149, 93), (149, 94), (149, 95), (149, 96), (149, 97), (149, 98), (149, 99), (149, 100), (149, 101), (149, 102), (149, 103), (149, 104), (149, 105), (149, 106), (149, 109), (149, 110), (150, 66), (150, 68), (150, 69), (150, 70), (150, 71), (150, 72), (150, 73), (150, 74), (150, 75), (150, 76), (150, 77), (150, 78), (150, 79), (150, 80), (150, 81), (150, 82), (150, 83), (150, 84), (150, 85), (150, 86), (150, 87), (150, 88), (150, 89), (150, 90), (150, 91),
(150, 92), (150, 93), (150, 94), (150, 95), (150, 96), (150, 97), (150, 98), (150, 99), (150, 100), (150, 101), (150, 102), (150, 103), (150, 107), (151, 67), (151, 69), (151, 70), (151, 71), (151, 72), (151, 73), (151, 74), (151, 75), (151, 76), (151, 77), (151, 78), (151, 79), (151, 80), (151, 81), (151, 82), (151, 83), (151, 84), (151, 85), (151, 86), (151, 87), (151, 88), (151, 89), (151, 90), (151, 91), (151, 92), (151, 93), (151, 94), (151, 95), (151, 96), (151, 97), (151, 98), (151, 99), (151, 100), (151, 101), (151, 105), (152, 67), (152, 69), (152, 70), (152, 71), (152, 72), (152, 73), (152, 74), (152, 75), (152, 76), (152, 77), (152, 78), (152, 79), (152, 80), (152, 81), (152, 82), (152, 83), (152, 84), (152, 85), (152, 86), (152, 87), (152, 88), (152, 89), (152, 90), (152, 91),
(152, 92), (152, 93), (152, 94), (152, 95), (152, 96), (152, 97), (152, 98), (152, 99), (152, 103), (153, 67), (153, 69), (153, 70), (153, 71), (153, 72), (153, 73), (153, 74), (153, 75), (153, 76), (153, 77), (153, 78), (153, 79), (153, 80), (153, 81), (153, 82), (153, 83), (153, 84), (153, 85), (153, 86), (153, 87), (153, 88), (153, 89), (153, 90), (153, 91), (153, 92), (153, 93), (153, 94), (153, 95), (153, 96), (153, 100), (153, 101), (154, 67), (154, 69), (154, 70), (154, 71), (154, 72), (154, 73), (154, 74), (154, 75), (154, 76), (154, 77), (154, 78), (154, 79), (154, 80), (154, 81), (154, 82), (154, 83), (154, 84), (154, 85), (154, 86), (154, 87), (154, 88), (154, 89), (154, 90), (154, 91), (154, 92), (154, 93), (154, 97), (154, 99), (155, 68), (155, 70), (155, 71), (155, 72),
(155, 73), (155, 74), (155, 75), (155, 76), (155, 77), (155, 78), (155, 79), (155, 80), (155, 81), (155, 82), (155, 83), (155, 84), (155, 85), (155, 86), (155, 87), (155, 88), (155, 89), (155, 90), (155, 94), (155, 95), (155, 96), (156, 68), (156, 70), (156, 71), (156, 72), (156, 73), (156, 74), (156, 75), (156, 76), (156, 77), (156, 78), (156, 79), (156, 80), (156, 81), (156, 82), (156, 83), (156, 84), (156, 85), (156, 86), (156, 91), (156, 92), (156, 93), (157, 69), (157, 71), (157, 72), (157, 73), (157, 74), (157, 75), (157, 76), (157, 77), (157, 78), (157, 79), (157, 80), (157, 81), (157, 87), (157, 88), (157, 89), (157, 90), (158, 69), (158, 82), (158, 83), (158, 84), (158, 85), (158, 86), (159, 70), (159, 72), (159, 73), (159, 74), (159, 75), (159, 76), (159, 77), (159, 78),
(159, 79), (159, 80), (159, 81), (165, 83), (165, 85), (165, 86), (165, 87), (166, 80), (166, 81), (166, 88), (166, 89), (166, 90), (166, 91), (167, 74), (167, 75), (167, 76), (167, 77), (167, 78), (167, 79), (167, 83), (167, 84), (167, 85), (167, 86), (167, 87), (167, 93), (167, 94), (167, 95), (168, 70), (168, 72), (168, 73), (168, 80), (168, 81), (168, 82), (168, 83), (168, 84), (168, 85), (168, 86), (168, 87), (168, 88), (168, 89), (168, 90), (168, 91), (168, 92), (168, 96), (168, 97), (169, 69), (169, 74), (169, 75), (169, 76), (169, 77), (169, 78), (169, 79), (169, 80), (169, 81), (169, 82), (169, 83), (169, 84), (169, 85), (169, 86), (169, 87), (169, 88), (169, 89), (169, 90), (169, 91), (169, 92), (169, 93), (169, 94), (169, 95), (169, 99), (169, 100), (170, 68), (170, 70),
(170, 71), (170, 72), (170, 73), (170, 74), (170, 75), (170, 76), (170, 77), (170, 78), (170, 79), (170, 80), (170, 81), (170, 82), (170, 83), (170, 84), (170, 85), (170, 86), (170, 87), (170, 88), (170, 89), (170, 90), (170, 91), (170, 92), (170, 93), (170, 94), (170, 95), (170, 96), (170, 97), (170, 98), (170, 101), (170, 102), (170, 103), (171, 67), (171, 69), (171, 70), (171, 71), (171, 72), (171, 73), (171, 74), (171, 75), (171, 76), (171, 77), (171, 78), (171, 79), (171, 80), (171, 81), (171, 82), (171, 83), (171, 84), (171, 85), (171, 86), (171, 87), (171, 88), (171, 89), (171, 90), (171, 91), (171, 92), (171, 93), (171, 94), (171, 95), (171, 96), (171, 97), (171, 98), (171, 99), (171, 100), (171, 104), (171, 105), (172, 67), (172, 69), (172, 70), (172, 71), (172, 72), (172, 73),
(172, 74), (172, 75), (172, 76), (172, 77), (172, 78), (172, 79), (172, 80), (172, 81), (172, 82), (172, 83), (172, 84), (172, 85), (172, 86), (172, 87), (172, 88), (172, 89), (172, 90), (172, 91), (172, 92), (172, 93), (172, 94), (172, 95), (172, 96), (172, 97), (172, 98), (172, 99), (172, 100), (172, 101), (172, 102), (172, 103), (172, 106), (172, 107), (172, 108), (172, 109), (172, 110), (172, 111), (172, 112), (172, 113), (172, 114), (172, 116), (173, 66), (173, 68), (173, 69), (173, 70), (173, 71), (173, 72), (173, 73), (173, 74), (173, 75), (173, 76), (173, 77), (173, 78), (173, 79), (173, 80), (173, 81), (173, 82), (173, 83), (173, 84), (173, 85), (173, 86), (173, 87), (173, 88), (173, 89), (173, 90), (173, 91), (173, 92), (173, 93), (173, 94), (173, 95), (173, 96), (173, 97), (173, 98),
(173, 99), (173, 100), (173, 101), (173, 102), (173, 103), (173, 104), (173, 105), (173, 118), (174, 66), (174, 68), (174, 69), (174, 70), (174, 71), (174, 72), (174, 73), (174, 74), (174, 75), (174, 76), (174, 77), (174, 78), (174, 79), (174, 80), (174, 81), (174, 82), (174, 83), (174, 84), (174, 85), (174, 86), (174, 87), (174, 88), (174, 89), (174, 90), (174, 91), (174, 92), (174, 93), (174, 94), (174, 95), (174, 96), (174, 97), (174, 98), (174, 99), (174, 100), (174, 101), (174, 102), (174, 103), (174, 104), (174, 105), (174, 106), (174, 107), (174, 108), (174, 109), (174, 110), (174, 111), (174, 112), (174, 113), (174, 114), (174, 115), (174, 116), (174, 119), (174, 120), (175, 66), (175, 68), (175, 69), (175, 70), (175, 71), (175, 72), (175, 73), (175, 74), (175, 75), (175, 76), (175, 77), (175, 78),
(175, 79), (175, 80), (175, 81), (175, 82), (175, 83), (175, 84), (175, 85), (175, 86), (175, 87), (175, 88), (175, 89), (175, 90), (175, 91), (175, 92), (175, 93), (175, 94), (175, 95), (175, 96), (175, 97), (175, 98), (175, 99), (175, 100), (175, 101), (175, 102), (175, 103), (175, 104), (175, 105), (175, 106), (175, 107), (175, 108), (175, 109), (175, 110), (175, 111), (175, 112), (175, 113), (175, 114), (175, 115), (175, 116), (175, 117), (175, 118), (175, 121), (175, 122), (176, 66), (176, 68), (176, 69), (176, 70), (176, 71), (176, 72), (176, 73), (176, 74), (176, 75), (176, 76), (176, 77), (176, 78), (176, 79), (176, 80), (176, 81), (176, 82), (176, 83), (176, 84), (176, 85), (176, 86), (176, 87), (176, 88), (176, 89), (176, 90), (176, 91), (176, 92), (176, 93), (176, 94), (176, 95), (176, 96),
(176, 97), (176, 98), (176, 99), (176, 100), (176, 101), (176, 102), (176, 103), (176, 104), (176, 105), (176, 106), (176, 107), (176, 108), (176, 109), (176, 110), (176, 111), (176, 112), (176, 113), (176, 114), (176, 115), (176, 116), (176, 117), (176, 118), (176, 119), (176, 120), (176, 123), (176, 124), (177, 67), (177, 69), (177, 70), (177, 71), (177, 72), (177, 73), (177, 74), (177, 75), (177, 76), (177, 77), (177, 78), (177, 79), (177, 80), (177, 81), (177, 82), (177, 83), (177, 84), (177, 85), (177, 86), (177, 87), (177, 88), (177, 89), (177, 90), (177, 91), (177, 92), (177, 93), (177, 94), (177, 95), (177, 96), (177, 97), (177, 98), (177, 99), (177, 100), (177, 101), (177, 102), (177, 103), (177, 104), (177, 105), (177, 106), (177, 107), (177, 108), (177, 109), (177, 110), (177, 111), (177, 112), (177, 113),
(177, 114), (177, 115), (177, 116), (177, 117), (177, 118), (177, 119), (177, 120), (177, 121), (177, 122), (177, 125), (177, 126), (178, 67), (178, 69), (178, 70), (178, 71), (178, 72), (178, 73), (178, 74), (178, 75), (178, 76), (178, 77), (178, 78), (178, 79), (178, 80), (178, 81), (178, 82), (178, 83), (178, 84), (178, 85), (178, 86), (178, 87), (178, 88), (178, 89), (178, 90), (178, 91), (178, 92), (178, 93), (178, 94), (178, 95), (178, 96), (178, 97), (178, 98), (178, 99), (178, 100), (178, 101), (178, 102), (178, 103), (178, 104), (178, 105), (178, 106), (178, 107), (178, 108), (178, 109), (178, 110), (178, 111), (178, 112), (178, 113), (178, 114), (178, 115), (178, 116), (178, 117), (178, 118), (178, 119), (178, 120), (178, 121), (178, 122), (178, 123), (178, 124), (178, 128), (179, 67), (179, 69), (179, 70),
(179, 71), (179, 72), (179, 73), (179, 74), (179, 75), (179, 76), (179, 77), (179, 78), (179, 79), (179, 80), (179, 81), (179, 82), (179, 83), (179, 84), (179, 85), (179, 86), (179, 87), (179, 88), (179, 89), (179, 90), (179, 91), (179, 92), (179, 93), (179, 94), (179, 95), (179, 96), (179, 97), (179, 98), (179, 99), (179, 100), (179, 101), (179, 102), (179, 103), (179, 104), (179, 105), (179, 106), (179, 107), (179, 108), (179, 109), (179, 110), (179, 111), (179, 112), (179, 113), (179, 114), (179, 115), (179, 116), (179, 117), (179, 118), (179, 119), (179, 120), (179, 121), (179, 122), (179, 123), (179, 124), (179, 125), (179, 126), (179, 129), (180, 67), (180, 69), (180, 70), (180, 71), (180, 72), (180, 73), (180, 74), (180, 75), (180, 76), (180, 77), (180, 78), (180, 79), (180, 80), (180, 81), (180, 82),
(180, 83), (180, 84), (180, 85), (180, 86), (180, 87), (180, 88), (180, 89), (180, 90), (180, 91), (180, 92), (180, 93), (180, 94), (180, 95), (180, 96), (180, 97), (180, 98), (180, 99), (180, 100), (180, 101), (180, 102), (180, 103), (180, 104), (180, 105), (180, 106), (180, 107), (180, 108), (180, 109), (180, 110), (180, 111), (180, 112), (180, 113), (180, 114), (180, 115), (180, 116), (180, 117), (180, 118), (180, 119), (180, 120), (180, 121), (180, 122), (180, 123), (180, 124), (180, 125), (180, 126), (180, 127), (180, 129), (181, 67), (181, 69), (181, 70), (181, 71), (181, 72), (181, 73), (181, 74), (181, 75), (181, 76), (181, 77), (181, 78), (181, 79), (181, 80), (181, 81), (181, 82), (181, 83), (181, 84), (181, 85), (181, 86), (181, 87), (181, 88), (181, 89), (181, 90), (181, 91), (181, 92), (181, 93),
(181, 94), (181, 95), (181, 96), (181, 97), (181, 98), (181, 99), (181, 100), (181, 101), (181, 102), (181, 103), (181, 104), (181, 105), (181, 106), (181, 107), (181, 108), (181, 109), (181, 110), (181, 111), (181, 112), (181, 113), (181, 114), (181, 115), (181, 116), (181, 117), (181, 118), (181, 119), (181, 120), (181, 121), (181, 122), (181, 123), (181, 124), (181, 125), (181, 126), (181, 127), (181, 129), (182, 67), (182, 69), (182, 70), (182, 71), (182, 72), (182, 73), (182, 74), (182, 75), (182, 76), (182, 77), (182, 78), (182, 79), (182, 80), (182, 81), (182, 82), (182, 83), (182, 84), (182, 85), (182, 86), (182, 87), (182, 88), (182, 89), (182, 90), (182, 91), (182, 92), (182, 93), (182, 94), (182, 95), (182, 96), (182, 97), (182, 98), (182, 99), (182, 100), (182, 101), (182, 102), (182, 103), (182, 104),
(182, 105), (182, 106), (182, 107), (182, 108), (182, 109), (182, 110), (182, 111), (182, 112), (182, 113), (182, 114), (182, 115), (182, 116), (182, 117), (182, 118), (182, 119), (182, 120), (182, 121), (182, 122), (182, 123), (182, 124), (182, 125), (182, 126), (182, 127), (182, 129), (183, 66), (183, 68), (183, 69), (183, 70), (183, 71), (183, 72), (183, 73), (183, 74), (183, 75), (183, 76), (183, 77), (183, 78), (183, 79), (183, 80), (183, 81), (183, 82), (183, 83), (183, 84), (183, 85), (183, 86), (183, 87), (183, 88), (183, 89), (183, 90), (183, 91), (183, 92), (183, 93), (183, 94), (183, 95), (183, 96), (183, 97), (183, 98), (183, 99), (183, 100), (183, 101), (183, 102), (183, 103), (183, 104), (183, 105), (183, 106), (183, 107), (183, 108), (183, 109), (183, 110), (183, 111), (183, 112), (183, 113), (183, 114),
(183, 115), (183, 116), (183, 117), (183, 118), (183, 119), (183, 120), (183, 121), (183, 122), (183, 123), (183, 124), (183, 125), (183, 126), (183, 127), (183, 129), (184, 66), (184, 68), (184, 69), (184, 70), (184, 71), (184, 72), (184, 73), (184, 74), (184, 75), (184, 76), (184, 77), (184, 78), (184, 79), (184, 80), (184, 81), (184, 82), (184, 83), (184, 84), (184, 85), (184, 86), (184, 87), (184, 88), (184, 89), (184, 90), (184, 91), (184, 92), (184, 93), (184, 94), (184, 95), (184, 96), (184, 97), (184, 98), (184, 99), (184, 100), (184, 101), (184, 102), (184, 103), (184, 104), (184, 105), (184, 106), (184, 107), (184, 108), (184, 109), (184, 110), (184, 111), (184, 112), (184, 113), (184, 114), (184, 115), (184, 116), (184, 117), (184, 118), (184, 119), (184, 120), (184, 121), (184, 122), (184, 123), (184, 124),
(184, 125), (184, 126), (184, 127), (184, 129), (185, 66), (185, 68), (185, 69), (185, 70), (185, 71), (185, 72), (185, 73), (185, 74), (185, 75), (185, 76), (185, 77), (185, 78), (185, 79), (185, 80), (185, 81), (185, 82), (185, 83), (185, 84), (185, 85), (185, 86), (185, 87), (185, 88), (185, 89), (185, 90), (185, 91), (185, 92), (185, 93), (185, 94), (185, 95), (185, 96), (185, 97), (185, 98), (185, 99), (185, 100), (185, 101), (185, 102), (185, 103), (185, 104), (185, 105), (185, 106), (185, 107), (185, 108), (185, 109), (185, 110), (185, 111), (185, 112), (185, 113), (185, 114), (185, 115), (185, 116), (185, 117), (185, 118), (185, 119), (185, 120), (185, 121), (185, 122), (185, 123), (185, 124), (185, 125), (185, 126), (185, 127), (185, 129), (186, 66), (186, 68), (186, 69), (186, 70), (186, 71), (186, 72),
(186, 73), (186, 74), (186, 75), (186, 76), (186, 77), (186, 78), (186, 79), (186, 80), (186, 81), (186, 82), (186, 83), (186, 84), (186, 85), (186, 86), (186, 87), (186, 88), (186, 89), (186, 90), (186, 91), (186, 92), (186, 93), (186, 94), (186, 95), (186, 96), (186, 97), (186, 98), (186, 99), (186, 100), (186, 101), (186, 102), (186, 103), (186, 104), (186, 105), (186, 106), (186, 107), (186, 108), (186, 109), (186, 110), (186, 111), (186, 112), (186, 113), (186, 114), (186, 115), (186, 116), (186, 117), (186, 118), (186, 119), (186, 120), (186, 121), (186, 122), (186, 123), (186, 124), (186, 125), (186, 126), (186, 127), (186, 129), (187, 66), (187, 68), (187, 69), (187, 70), (187, 71), (187, 72), (187, 73), (187, 74), (187, 75), (187, 76), (187, 77), (187, 78), (187, 79), (187, 80), (187, 81), (187, 82),
(187, 83), (187, 84), (187, 85), (187, 86), (187, 87), (187, 88), (187, 89), (187, 90), (187, 91), (187, 92), (187, 93), (187, 94), (187, 95), (187, 96), (187, 97), (187, 98), (187, 99), (187, 100), (187, 101), (187, 102), (187, 103), (187, 104), (187, 105), (187, 106), (187, 107), (187, 108), (187, 109), (187, 110), (187, 111), (187, 112), (187, 113), (187, 114), (187, 115), (187, 116), (187, 117), (187, 118), (187, 119), (187, 120), (187, 121), (187, 122), (187, 123), (187, 124), (187, 125), (187, 126), (187, 127), (187, 129), (188, 66), (188, 68), (188, 69), (188, 70), (188, 71), (188, 72), (188, 73), (188, 74), (188, 75), (188, 76), (188, 77), (188, 78), (188, 79), (188, 80), (188, 81), (188, 82), (188, 83), (188, 84), (188, 85), (188, 86), (188, 87), (188, 88), (188, 89), (188, 90), (188, 91), (188, 92),
(188, 93), (188, 94), (188, 95), (188, 96), (188, 97), (188, 98), (188, 99), (188, 100), (188, 101), (188, 102), (188, 103), (188, 104), (188, 105), (188, 106), (188, 107), (188, 108), (188, 109), (188, 110), (188, 111), (188, 112), (188, 113), (188, 114), (188, 115), (188, 116), (188, 117), (188, 118), (188, 119), (188, 120), (188, 121), (188, 122), (188, 123), (188, 124), (188, 125), (188, 126), (188, 127), (188, 129), (189, 66), (189, 68), (189, 69), (189, 70), (189, 71), (189, 72), (189, 73), (189, 74), (189, 75), (189, 76), (189, 77), (189, 78), (189, 79), (189, 80), (189, 81), (189, 82), (189, 83), (189, 84), (189, 85), (189, 86), (189, 87), (189, 88), (189, 89), (189, 90), (189, 91), (189, 92), (189, 93), (189, 94), (189, 95), (189, 96), (189, 97), (189, 98), (189, 99), (189, 100), (189, 101), (189, 102),
(189, 103), (189, 104), (189, 105), (189, 106), (189, 107), (189, 108), (189, 109), (189, 110), (189, 111), (189, 112), (189, 113), (189, 114), (189, 115), (189, 116), (189, 117), (189, 118), (189, 119), (189, 120), (189, 121), (189, 122), (189, 123), (189, 124), (189, 125), (189, 126), (189, 128), (190, 67), (190, 69), (190, 70), (190, 71), (190, 72), (190, 73), (190, 74), (190, 75), (190, 76), (190, 77), (190, 78), (190, 79), (190, 80), (190, 81), (190, 82), (190, 83), (190, 84), (190, 85), (190, 86), (190, 87), (190, 88), (190, 89), (190, 90), (190, 91), (190, 92), (190, 93), (190, 94), (190, 95), (190, 96), (190, 97), (190, 98), (190, 99), (190, 100), (190, 101), (190, 102), (190, 103), (190, 104), (190, 105), (190, 106), (190, 107), (190, 108), (190, 109), (190, 110), (190, 111), (190, 112), (190, 113), (190, 114),
(190, 115), (190, 116), (190, 117), (190, 118), (190, 119), (190, 120), (190, 121), (190, 122), (190, 123), (190, 124), (190, 125), (190, 126), (190, 128), (191, 67), (191, 69), (191, 70), (191, 71), (191, 72), (191, 73), (191, 74), (191, 75), (191, 76), (191, 77), (191, 78), (191, 79), (191, 80), (191, 81), (191, 82), (191, 83), (191, 84), (191, 85), (191, 86), (191, 87), (191, 88), (191, 89), (191, 90), (191, 91), (191, 92), (191, 93), (191, 94), (191, 95), (191, 96), (191, 97), (191, 98), (191, 99), (191, 100), (191, 101), (191, 102), (191, 103), (191, 104), (191, 105), (191, 106), (191, 107), (191, 108), (191, 109), (191, 110), (191, 111), (191, 112), (191, 113), (191, 114), (191, 115), (191, 116), (191, 117), (191, 118), (191, 119), (191, 120), (191, 121), (191, 122), (191, 123), (191, 124), (191, 125), (191, 126),
(191, 128), (192, 68), (192, 70), (192, 71), (192, 72), (192, 73), (192, 74), (192, 75), (192, 76), (192, 77), (192, 78), (192, 79), (192, 80), (192, 81), (192, 82), (192, 83), (192, 84), (192, 85), (192, 86), (192, 87), (192, 88), (192, 89), (192, 90), (192, 91), (192, 92), (192, 93), (192, 94), (192, 95), (192, 96), (192, 97), (192, 98), (192, 99), (192, 100), (192, 101), (192, 102), (192, 103), (192, 104), (192, 105), (192, 106), (192, 107), (192, 108), (192, 109), (192, 110), (192, 111), (192, 112), (192, 113), (192, 114), (192, 115), (192, 116), (192, 117), (192, 118), (192, 119), (192, 120), (192, 121), (192, 122), (192, 123), (192, 124), (192, 125), (192, 127), (193, 68), (193, 70), (193, 71), (193, 72), (193, 73), (193, 74), (193, 75), (193, 76), (193, 77), (193, 78), (193, 79), (193, 80), (193, 81),
(193, 82), (193, 83), (193, 84), (193, 85), (193, 86), (193, 87), (193, 88), (193, 89), (193, 90), (193, 91), (193, 92), (193, 93), (193, 94), (193, 95), (193, 96), (193, 97), (193, 98), (193, 99), (193, 100), (193, 101), (193, 102), (193, 103), (193, 104), (193, 105), (193, 106), (193, 107), (193, 108), (193, 109), (193, 110), (193, 111), (193, 112), (193, 113), (193, 114), (193, 115), (193, 116), (193, 117), (193, 118), (193, 119), (193, 120), (193, 121), (193, 122), (193, 123), (193, 124), (193, 125), (193, 127), (194, 69), (194, 71), (194, 72), (194, 73), (194, 74), (194, 75), (194, 76), (194, 77), (194, 78), (194, 79), (194, 80), (194, 81), (194, 82), (194, 83), (194, 84), (194, 85), (194, 86), (194, 87), (194, 88), (194, 89), (194, 90), (194, 91), (194, 92), (194, 93), (194, 94), (194, 95), (194, 96),
(194, 97), (194, 98), (194, 99), (194, 100), (194, 101), (194, 102), (194, 103), (194, 104), (194, 105), (194, 106), (194, 107), (194, 108), (194, 109), (194, 110), (194, 111), (194, 112), (194, 113), (194, 114), (194, 115), (194, 116), (194, 117), (194, 118), (194, 119), (194, 120), (194, 121), (194, 122), (194, 123), (194, 124), (194, 126), (195, 70), (195, 73), (195, 74), (195, 75), (195, 76), (195, 77), (195, 78), (195, 79), (195, 80), (195, 81), (195, 82), (195, 83), (195, 84), (195, 85), (195, 86), (195, 87), (195, 88), (195, 89), (195, 90), (195, 91), (195, 92), (195, 93), (195, 94), (195, 95), (195, 96), (195, 97), (195, 98), (195, 99), (195, 100), (195, 101), (195, 102), (195, 103), (195, 104), (195, 105), (195, 106), (195, 107), (195, 108), (195, 109), (195, 110), (195, 111), (195, 112), (195, 113), (195, 114),
(195, 115), (195, 116), (195, 117), (195, 118), (195, 119), (195, 120), (195, 121), (195, 122), (195, 123), (195, 124), (195, 126), (196, 71), (196, 74), (196, 75), (196, 76), (196, 77), (196, 78), (196, 79), (196, 80), (196, 81), (196, 82), (196, 83), (196, 84), (196, 85), (196, 86), (196, 87), (196, 88), (196, 89), (196, 90), (196, 91), (196, 92), (196, 93), (196, 94), (196, 95), (196, 96), (196, 97), (196, 98), (196, 99), (196, 100), (196, 101), (196, 102), (196, 103), (196, 104), (196, 105), (196, 106), (196, 107), (196, 108), (196, 109), (196, 110), (196, 111), (196, 112), (196, 113), (196, 114), (196, 115), (196, 116), (196, 117), (196, 118), (196, 119), (196, 120), (196, 121), (196, 122), (196, 123), (196, 125), (197, 73), (197, 77), (197, 78), (197, 79), (197, 80), (197, 81), (197, 82), (197, 83), (197, 84),
(197, 85), (197, 86), (197, 87), (197, 88), (197, 89), (197, 90), (197, 91), (197, 92), (197, 93), (197, 94), (197, 95), (197, 96), (197, 97), (197, 98), (197, 99), (197, 100), (197, 101), (197, 102), (197, 103), (197, 104), (197, 105), (197, 106), (197, 107), (197, 108), (197, 109), (197, 110), (197, 111), (197, 112), (197, 113), (197, 114), (197, 115), (197, 116), (197, 117), (197, 118), (197, 119), (197, 120), (197, 121), (197, 122), (197, 123), (197, 125), (198, 75), (198, 76), (198, 80), (198, 81), (198, 82), (198, 83), (198, 84), (198, 85), (198, 86), (198, 87), (198, 88), (198, 89), (198, 90), (198, 91), (198, 92), (198, 93), (198, 94), (198, 95), (198, 96), (198, 97), (198, 98), (198, 99), (198, 100), (198, 101), (198, 102), (198, 103), (198, 104), (198, 105), (198, 106), (198, 107), (198, 108), (198, 109),
(198, 110), (198, 111), (198, 112), (198, 113), (198, 114), (198, 115), (198, 116), (198, 117), (198, 118), (198, 119), (198, 120), (198, 121), (198, 122), (198, 124), (199, 77), (199, 79), (199, 80), (199, 87), (199, 88), (199, 89), (199, 90), (199, 91), (199, 92), (199, 93), (199, 94), (199, 95), (199, 96), (199, 97), (199, 98), (199, 99), (199, 100), (199, 101), (199, 102), (199, 103), (199, 104), (199, 105), (199, 106), (199, 107), (199, 108), (199, 109), (199, 110), (199, 111), (199, 112), (199, 113), (199, 114), (199, 115), (199, 116), (199, 117), (199, 118), (199, 119), (199, 120), (199, 121), (199, 122), (199, 124), (200, 81), (200, 82), (200, 83), (200, 84), (200, 85), (200, 86), (200, 109), (200, 110), (200, 111), (200, 112), (200, 113), (200, 114), (200, 115), (200, 116), (200, 117), (200, 118), (200, 119), (200, 120),
(200, 121), (200, 123), (201, 87), (201, 88), (201, 89), (201, 90), (201, 91), (201, 92), (201, 93), (201, 94), (201, 95), (201, 96), (201, 97), (201, 98), (201, 99), (201, 100), (201, 101), (201, 102), (201, 103), (201, 104), (201, 105), (201, 106), (201, 107), (201, 108), (201, 113), (201, 114), (201, 115), (201, 116), (201, 117), (201, 118), (201, 119), (201, 122), (202, 110), (202, 111), (202, 112), (202, 121), (203, 113), (203, 114), (203, 115), (203, 116), (203, 117), (203, 118), (203, 119), )
coordinates_D2681C = ((124, 110),
(124, 111), (124, 112), (124, 113), (125, 108), (125, 114), (125, 115), (125, 116), (126, 106), (126, 109), (126, 110), (126, 111), (126, 112), (126, 113), (126, 117), (126, 119), (127, 104), (127, 107), (127, 108), (127, 109), (127, 110), (127, 111), (127, 112), (127, 113), (127, 114), (127, 115), (127, 116), (127, 120), (128, 98), (128, 100), (128, 101), (128, 102), (128, 106), (128, 107), (128, 108), (128, 109), (128, 110), (128, 111), (128, 112), (128, 113), (128, 114), (128, 115), (128, 116), (128, 117), (128, 118), (128, 119), (128, 121), (129, 98), (129, 103), (129, 104), (129, 105), (129, 106), (129, 107), (129, 108), (129, 109), (129, 110), (129, 111), (129, 112), (129, 113), (129, 114), (129, 115), (129, 116), (129, 117), (129, 118), (129, 119), (129, 120), (130, 89), (130, 90), (130, 91), (130, 92), (130, 93), (130, 94), (130, 95),
(130, 96), (130, 99), (130, 100), (130, 101), (130, 102), (130, 103), (130, 104), (130, 105), (130, 106), (130, 107), (130, 108), (130, 109), (130, 110), (130, 111), (130, 112), (130, 113), (130, 114), (130, 115), (130, 116), (130, 117), (130, 118), (130, 119), (130, 120), (130, 122), (131, 86), (131, 87), (131, 88), (131, 98), (131, 99), (131, 100), (131, 101), (131, 102), (131, 103), (131, 104), (131, 105), (131, 106), (131, 107), (131, 108), (131, 109), (131, 110), (131, 111), (131, 112), (131, 113), (131, 114), (131, 115), (131, 116), (131, 117), (131, 118), (131, 119), (131, 120), (131, 122), (132, 82), (132, 83), (132, 84), (132, 89), (132, 90), (132, 91), (132, 92), (132, 93), (132, 94), (132, 95), (132, 96), (132, 97), (132, 98), (132, 99), (132, 100), (132, 101), (132, 102), (132, 103), (132, 104), (132, 105), (132, 106),
(132, 107), (132, 108), (132, 109), (132, 110), (132, 111), (132, 112), (132, 113), (132, 114), (132, 115), (132, 116), (132, 117), (132, 118), (132, 119), (132, 120), (132, 121), (132, 122), (132, 123), (133, 77), (133, 79), (133, 80), (133, 81), (133, 86), (133, 87), (133, 88), (133, 89), (133, 90), (133, 91), (133, 92), (133, 93), (133, 94), (133, 95), (133, 96), (133, 97), (133, 98), (133, 99), (133, 100), (133, 101), (133, 102), (133, 103), (133, 104), (133, 105), (133, 106), (133, 107), (133, 108), (133, 109), (133, 110), (133, 111), (133, 112), (133, 113), (133, 114), (133, 115), (133, 116), (133, 117), (133, 118), (133, 119), (133, 120), (133, 121), (133, 123), (134, 75), (134, 82), (134, 83), (134, 84), (134, 85), (134, 86), (134, 87), (134, 88), (134, 89), (134, 90), (134, 91), (134, 92), (134, 93), (134, 94),
(134, 95), (134, 96), (134, 97), (134, 98), (134, 99), (134, 100), (134, 101), (134, 102), (134, 103), (134, 104), (134, 105), (134, 106), (134, 107), (134, 108), (134, 109), (134, 110), (134, 111), (134, 112), (134, 113), (134, 114), (134, 115), (134, 116), (134, 117), (134, 118), (134, 119), (134, 120), (134, 121), (134, 123), (135, 74), (135, 77), (135, 78), (135, 79), (135, 80), (135, 81), (135, 82), (135, 83), (135, 84), (135, 85), (135, 86), (135, 87), (135, 88), (135, 89), (135, 90), (135, 91), (135, 92), (135, 93), (135, 94), (135, 95), (135, 96), (135, 97), (135, 98), (135, 99), (135, 100), (135, 101), (135, 102), (135, 103), (135, 104), (135, 105), (135, 106), (135, 107), (135, 108), (135, 109), (135, 110), (135, 111), (135, 112), (135, 113), (135, 114), (135, 115), (135, 116), (135, 117), (135, 118), (135, 119),
(135, 120), (135, 121), (135, 123), (136, 74), (136, 76), (136, 77), (136, 78), (136, 79), (136, 80), (136, 81), (136, 82), (136, 83), (136, 84), (136, 85), (136, 86), (136, 87), (136, 88), (136, 89), (136, 90), (136, 91), (136, 92), (136, 93), (136, 94), (136, 95), (136, 96), (136, 97), (136, 98), (136, 99), (136, 100), (136, 101), (136, 102), (136, 103), (136, 104), (136, 105), (136, 106), (136, 107), (136, 108), (136, 109), (136, 110), (136, 111), (136, 112), (136, 113), (136, 114), (136, 115), (136, 116), (136, 117), (136, 118), (136, 119), (136, 120), (136, 121), (136, 123), (137, 73), (137, 75), (137, 76), (137, 77), (137, 78), (137, 79), (137, 80), (137, 81), (137, 82), (137, 83), (137, 84), (137, 85), (137, 86), (137, 87), (137, 88), (137, 89), (137, 90), (137, 91), (137, 92), (137, 93), (137, 94),
(137, 95), (137, 96), (137, 97), (137, 98), (137, 99), (137, 100), (137, 101), (137, 102), (137, 103), (137, 104), (137, 105), (137, 106), (137, 107), (137, 108), (137, 109), (137, 110), (137, 111), (137, 112), (137, 113), (137, 114), (137, 115), (137, 116), (137, 117), (137, 118), (137, 119), (137, 120), (137, 122), (138, 72), (138, 74), (138, 75), (138, 76), (138, 77), (138, 78), (138, 79), (138, 80), (138, 81), (138, 82), (138, 83), (138, 84), (138, 85), (138, 86), (138, 87), (138, 88), (138, 89), (138, 90), (138, 91), (138, 92), (138, 93), (138, 94), (138, 95), (138, 96), (138, 97), (138, 98), (138, 99), (138, 100), (138, 101), (138, 102), (138, 103), (138, 104), (138, 105), (138, 106), (138, 107), (138, 108), (138, 109), (138, 110), (138, 111), (138, 112), (138, 113), (138, 114), (138, 115), (138, 116), (138, 117),
(138, 118), (138, 119), (138, 120), (138, 122), (139, 72), (139, 74), (139, 75), (139, 76), (139, 77), (139, 78), (139, 79), (139, 80), (139, 81), (139, 82), (139, 83), (139, 84), (139, 85), (139, 86), (139, 87), (139, 88), (139, 89), (139, 90), (139, 91), (139, 92), (139, 93), (139, 94), (139, 95), (139, 96), (139, 97), (139, 98), (139, 99), (139, 100), (139, 101), (139, 102), (139, 103), (139, 104), (139, 105), (139, 106), (139, 107), (139, 108), (139, 109), (139, 110), (139, 111), (139, 112), (139, 113), (139, 114), (139, 115), (139, 116), (139, 117), (139, 118), (139, 119), (139, 120), (139, 122), (140, 71), (140, 73), (140, 74), (140, 75), (140, 76), (140, 77), (140, 78), (140, 79), (140, 80), (140, 81), (140, 82), (140, 83), (140, 84), (140, 85), (140, 86), (140, 87), (140, 88), (140, 89), (140, 90),
(140, 91), (140, 92), (140, 93), (140, 94), (140, 95), (140, 96), (140, 97), (140, 98), (140, 99), (140, 100), (140, 101), (140, 102), (140, 103), (140, 104), (140, 105), (140, 106), (140, 107), (140, 108), (140, 109), (140, 110), (140, 111), (140, 112), (140, 113), (140, 114), (140, 115), (140, 116), (140, 117), (140, 118), (140, 119), (140, 121), (141, 71), (141, 73), (141, 74), (141, 75), (141, 76), (141, 77), (141, 78), (141, 79), (141, 80), (141, 81), (141, 82), (141, 83), (141, 84), (141, 85), (141, 86), (141, 87), (141, 88), (141, 89), (141, 90), (141, 91), (141, 92), (141, 93), (141, 94), (141, 95), (141, 96), (141, 97), (141, 98), (141, 99), (141, 100), (141, 101), (141, 102), (141, 103), (141, 104), (141, 105), (141, 106), (141, 107), (141, 108), (141, 109), (141, 110), (141, 111), (141, 112), (141, 113),
(141, 114), (141, 115), (141, 116), (141, 117), (141, 118), (141, 119), (141, 121), (142, 70), (142, 72), (142, 73), (142, 74), (142, 75), (142, 76), (142, 77), (142, 78), (142, 79), (142, 80), (142, 81), (142, 82), (142, 83), (142, 84), (142, 85), (142, 86), (142, 87), (142, 88), (142, 89), (142, 90), (142, 91), (142, 92), (142, 93), (142, 94), (142, 95), (142, 96), (142, 97), (142, 98), (142, 99), (142, 100), (142, 101), (142, 102), (142, 103), (142, 104), (142, 105), (142, 106), (142, 107), (142, 108), (142, 109), (142, 110), (142, 111), (142, 112), (142, 113), (142, 114), (142, 115), (142, 116), (142, 117), (142, 120), (143, 70), (143, 72), (143, 73), (143, 74), (143, 75), (143, 76), (143, 77), (143, 78), (143, 79), (143, 80), (143, 81), (143, 82), (143, 83), (143, 84), (143, 85), (143, 86), (143, 87),
(143, 88), (143, 89), (143, 90), (143, 91), (143, 92), (143, 93), (143, 94), (143, 95), (143, 96), (143, 97), (143, 98), (143, 99), (143, 100), (143, 101), (143, 102), (143, 103), (143, 104), (143, 105), (143, 106), (143, 107), (143, 108), (143, 109), (143, 110), (143, 111), (143, 112), (143, 113), (143, 114), (143, 119), (144, 70), (144, 72), (144, 73), (144, 74), (144, 75), (144, 76), (144, 77), (144, 78), (144, 79), (144, 80), (144, 81), (144, 82), (144, 83), (144, 84), (144, 85), (144, 86), (144, 87), (144, 88), (144, 89), (144, 90), (144, 91), (144, 92), (144, 93), (144, 94), (144, 95), (144, 96), (144, 97), (144, 98), (144, 99), (144, 100), (144, 101), (144, 102), (144, 103), (144, 115), (144, 117), (145, 70), (145, 72), (145, 73), (145, 74), (145, 75), (145, 76), (145, 77), (145, 78), (145, 79),
(145, 80), (145, 81), (145, 82), (145, 83), (145, 84), (145, 85), (145, 86), (145, 87), (145, 88), (145, 89), (145, 90), (145, 91), (145, 92), (145, 93), (145, 94), (145, 95), (145, 96), (145, 97), (145, 98), (145, 99), (145, 104), (145, 105), (145, 106), (145, 107), (145, 108), (145, 109), (145, 110), (145, 111), (145, 112), (145, 113), (145, 114), (146, 70), (146, 72), (146, 73), (146, 74), (146, 75), (146, 76), (146, 77), (146, 78), (146, 79), (146, 80), (146, 81), (146, 82), (146, 83), (146, 84), (146, 85), (146, 86), (146, 87), (146, 88), (146, 89), (146, 90), (146, 91), (146, 92), (146, 93), (146, 94), (146, 95), (146, 96), (146, 97), (146, 100), (146, 101), (146, 102), (146, 103), (147, 71), (147, 72), (147, 73), (147, 74), (147, 75), (147, 76), (147, 77), (147, 78), (147, 79), (147, 80),
(147, 81), (147, 82), (147, 83), (147, 84), (147, 85), (147, 86), (147, 87), (147, 88), (147, 89), (147, 90), (147, 91), (147, 92), (147, 93), (147, 94), (147, 95), (147, 98), (147, 99), (148, 71), (148, 73), (148, 74), (148, 75), (148, 76), (148, 77), (148, 78), (148, 79), (148, 80), (148, 81), (148, 82), (148, 83), (148, 84), (148, 85), (148, 86), (148, 87), (148, 88), (148, 89), (148, 90), (148, 91), (148, 92), (148, 93), (148, 94), (148, 96), (149, 71), (149, 73), (149, 74), (149, 75), (149, 76), (149, 77), (149, 78), (149, 79), (149, 80), (149, 81), (149, 82), (149, 83), (149, 84), (149, 85), (149, 86), (149, 87), (149, 88), (149, 89), (149, 90), (149, 91), (149, 92), (149, 95), (150, 72), (150, 74), (150, 75), (150, 76), (150, 77), (150, 78), (150, 79), (150, 80), (150, 81),
(150, 82), (150, 83), (150, 84), (150, 85), (150, 86), (150, 87), (150, 88), (150, 89), (150, 90), (150, 91), (150, 94), (151, 73), (151, 75), (151, 76), (151, 77), (151, 78), (151, 79), (151, 80), (151, 81), (151, 82), (151, 83), (151, 84), (151, 85), (151, 86), (151, 87), (151, 88), (151, 89), (151, 92), (152, 73), (152, 75), (152, 76), (152, 77), (152, 78), (152, 79), (152, 80), (152, 81), (152, 82), (152, 83), (152, 84), (152, 85), (152, 86), (152, 87), (152, 88), (152, 91), (153, 74), (153, 76), (153, 77), (153, 78), (153, 79), (153, 80), (153, 81), (153, 82), (153, 83), (153, 84), (153, 85), (153, 86), (153, 89), (154, 75), (154, 78), (154, 79), (154, 80), (154, 81), (154, 82), (154, 83), (154, 84), (154, 88), (155, 76), (155, 86), (156, 78), (156, 80), (156, 81), (156, 82),
(156, 84), (171, 82), (171, 83), (171, 84), (171, 85), (171, 86), (171, 87), (171, 88), (171, 89), (171, 90), (172, 79), (172, 92), (172, 93), (173, 77), (173, 81), (173, 82), (173, 83), (173, 84), (173, 85), (173, 86), (173, 87), (173, 88), (173, 89), (173, 90), (173, 91), (173, 95), (173, 96), (174, 75), (174, 79), (174, 80), (174, 81), (174, 82), (174, 83), (174, 84), (174, 85), (174, 86), (174, 87), (174, 88), (174, 89), (174, 90), (174, 91), (174, 92), (174, 93), (174, 98), (175, 74), (175, 77), (175, 78), (175, 79), (175, 80), (175, 81), (175, 82), (175, 83), (175, 84), (175, 85), (175, 86), (175, 87), (175, 88), (175, 89), (175, 90), (175, 91), (175, 92), (175, 93), (175, 94), (175, 95), (175, 96), (175, 100), (176, 74), (176, 76), (176, 77), (176, 78), (176, 79), (176, 80),
(176, 81), (176, 82), (176, 83), (176, 84), (176, 85), (176, 86), (176, 87), (176, 88), (176, 89), (176, 90), (176, 91), (176, 92), (176, 93), (176, 94), (176, 95), (176, 96), (176, 97), (176, 98), (176, 101), (176, 102), (177, 73), (177, 75), (177, 76), (177, 77), (177, 78), (177, 79), (177, 80), (177, 81), (177, 82), (177, 83), (177, 84), (177, 85), (177, 86), (177, 87), (177, 88), (177, 89), (177, 90), (177, 91), (177, 92), (177, 93), (177, 94), (177, 95), (177, 96), (177, 97), (177, 98), (177, 99), (177, 100), (177, 103), (177, 104), (177, 105), (178, 73), (178, 75), (178, 76), (178, 77), (178, 78), (178, 79), (178, 80), (178, 81), (178, 82), (178, 83), (178, 84), (178, 85), (178, 86), (178, 87), (178, 88), (178, 89), (178, 90), (178, 91), (178, 92), (178, 93), (178, 94), (178, 95),
(178, 96), (178, 97), (178, 98), (178, 99), (178, 100), (178, 101), (178, 102), (178, 106), (178, 107), (178, 108), (178, 109), (179, 72), (179, 74), (179, 75), (179, 76), (179, 77), (179, 78), (179, 79), (179, 80), (179, 81), (179, 82), (179, 83), (179, 84), (179, 85), (179, 86), (179, 87), (179, 88), (179, 89), (179, 90), (179, 91), (179, 92), (179, 93), (179, 94), (179, 95), (179, 96), (179, 97), (179, 98), (179, 99), (179, 100), (179, 101), (179, 102), (179, 103), (179, 104), (179, 105), (179, 110), (179, 111), (179, 112), (179, 113), (180, 72), (180, 74), (180, 75), (180, 76), (180, 77), (180, 78), (180, 79), (180, 80), (180, 81), (180, 82), (180, 83), (180, 84), (180, 85), (180, 86), (180, 87), (180, 88), (180, 89), (180, 90), (180, 91), (180, 92), (180, 93), (180, 94), (180, 95), (180, 96),
(180, 97), (180, 98), (180, 99), (180, 100), (180, 101), (180, 102), (180, 103), (180, 104), (180, 105), (180, 106), (180, 107), (180, 108), (180, 109), (180, 114), (180, 115), (180, 116), (180, 117), (180, 118), (181, 72), (181, 74), (181, 75), (181, 76), (181, 77), (181, 78), (181, 79), (181, 80), (181, 81), (181, 82), (181, 83), (181, 84), (181, 85), (181, 86), (181, 87), (181, 88), (181, 89), (181, 90), (181, 91), (181, 92), (181, 93), (181, 94), (181, 95), (181, 96), (181, 97), (181, 98), (181, 99), (181, 100), (181, 101), (181, 102), (181, 103), (181, 104), (181, 105), (181, 106), (181, 107), (181, 108), (181, 109), (181, 110), (181, 111), (181, 112), (181, 113), (181, 119), (181, 122), (182, 72), (182, 74), (182, 75), (182, 76), (182, 77), (182, 78), (182, 79), (182, 80), (182, 81), (182, 82), (182, 83),
(182, 84), (182, 85), (182, 86), (182, 87), (182, 88), (182, 89), (182, 90), (182, 91), (182, 92), (182, 93), (182, 94), (182, 95), (182, 96), (182, 97), (182, 98), (182, 99), (182, 100), (182, 101), (182, 102), (182, 103), (182, 104), (182, 105), (182, 106), (182, 107), (182, 108), (182, 109), (182, 110), (182, 111), (182, 112), (182, 113), (182, 114), (182, 115), (182, 116), (182, 117), (182, 118), (182, 122), (183, 72), (183, 74), (183, 75), (183, 76), (183, 77), (183, 78), (183, 79), (183, 80), (183, 81), (183, 82), (183, 83), (183, 84), (183, 85), (183, 86), (183, 87), (183, 88), (183, 89), (183, 90), (183, 91), (183, 92), (183, 93), (183, 94), (183, 95), (183, 96), (183, 97), (183, 98), (183, 99), (183, 100), (183, 101), (183, 102), (183, 103), (183, 104), (183, 105), (183, 106), (183, 107), (183, 108),
(183, 109), (183, 110), (183, 111), (183, 112), (183, 113), (183, 114), (183, 115), (183, 116), (183, 117), (183, 118), (183, 119), (183, 120), (183, 121), (183, 123), (184, 72), (184, 74), (184, 75), (184, 76), (184, 77), (184, 78), (184, 79), (184, 80), (184, 81), (184, 82), (184, 83), (184, 84), (184, 85), (184, 86), (184, 87), (184, 88), (184, 89), (184, 90), (184, 91), (184, 92), (184, 93), (184, 94), (184, 95), (184, 96), (184, 97), (184, 98), (184, 99), (184, 100), (184, 101), (184, 102), (184, 103), (184, 104), (184, 105), (184, 106), (184, 107), (184, 108), (184, 109), (184, 110), (184, 111), (184, 112), (184, 113), (184, 114), (184, 115), (184, 116), (184, 117), (184, 118), (184, 119), (184, 120), (184, 121), (184, 123), (185, 72), (185, 74), (185, 75), (185, 76), (185, 77), (185, 78), (185, 79), (185, 80),
(185, 81), (185, 82), (185, 83), (185, 84), (185, 85), (185, 86), (185, 87), (185, 88), (185, 89), (185, 90), (185, 91), (185, 92), (185, 93), (185, 94), (185, 95), (185, 96), (185, 97), (185, 98), (185, 99), (185, 100), (185, 101), (185, 102), (185, 103), (185, 104), (185, 105), (185, 106), (185, 107), (185, 108), (185, 109), (185, 110), (185, 111), (185, 112), (185, 113), (185, 114), (185, 115), (185, 116), (185, 117), (185, 118), (185, 119), (185, 120), (185, 121), (185, 123), (186, 72), (186, 74), (186, 75), (186, 76), (186, 77), (186, 78), (186, 79), (186, 80), (186, 81), (186, 82), (186, 83), (186, 84), (186, 85), (186, 86), (186, 87), (186, 88), (186, 89), (186, 90), (186, 91), (186, 92), (186, 93), (186, 94), (186, 95), (186, 96), (186, 97), (186, 98), (186, 99), (186, 100), (186, 101), (186, 102),
(186, 103), (186, 104), (186, 105), (186, 106), (186, 107), (186, 108), (186, 109), (186, 110), (186, 111), (186, 112), (186, 113), (186, 114), (186, 115), (186, 116), (186, 117), (186, 118), (186, 119), (186, 120), (186, 121), (186, 123), (187, 72), (187, 74), (187, 75), (187, 76), (187, 77), (187, 78), (187, 79), (187, 80), (187, 81), (187, 82), (187, 83), (187, 84), (187, 85), (187, 86), (187, 87), (187, 88), (187, 89), (187, 90), (187, 91), (187, 92), (187, 93), (187, 94), (187, 95), (187, 96), (187, 97), (187, 98), (187, 99), (187, 100), (187, 101), (187, 102), (187, 103), (187, 104), (187, 105), (187, 106), (187, 107), (187, 108), (187, 109), (187, 110), (187, 111), (187, 112), (187, 113), (187, 114), (187, 115), (187, 116), (187, 117), (187, 118), (187, 119), (187, 120), (187, 121), (187, 123), (188, 73), (188, 75),
(188, 76), (188, 77), (188, 78), (188, 79), (188, 80), (188, 81), (188, 82), (188, 83), (188, 84), (188, 85), (188, 86), (188, 87), (188, 88), (188, 89), (188, 90), (188, 91), (188, 92), (188, 93), (188, 94), (188, 95), (188, 96), (188, 97), (188, 98), (188, 99), (188, 100), (188, 101), (188, 102), (188, 103), (188, 104), (188, 105), (188, 106), (188, 107), (188, 108), (188, 109), (188, 110), (188, 111), (188, 112), (188, 113), (188, 114), (188, 115), (188, 116), (188, 117), (188, 118), (188, 119), (188, 120), (188, 121), (188, 123), (189, 73), (189, 75), (189, 76), (189, 77), (189, 78), (189, 79), (189, 80), (189, 81), (189, 82), (189, 83), (189, 84), (189, 85), (189, 86), (189, 87), (189, 88), (189, 89), (189, 90), (189, 91), (189, 92), (189, 93), (189, 94), (189, 95), (189, 96), (189, 97), (189, 98),
(189, 99), (189, 100), (189, 101), (189, 102), (189, 103), (189, 104), (189, 105), (189, 106), (189, 107), (189, 108), (189, 109), (189, 110), (189, 111), (189, 112), (189, 113), (189, 114), (189, 115), (189, 116), (189, 117), (189, 118), (189, 119), (189, 120), (189, 121), (189, 123), (190, 74), (190, 76), (190, 77), (190, 78), (190, 79), (190, 80), (190, 81), (190, 82), (190, 83), (190, 84), (190, 85), (190, 86), (190, 87), (190, 88), (190, 89), (190, 90), (190, 91), (190, 92), (190, 93), (190, 94), (190, 95), (190, 96), (190, 97), (190, 98), (190, 99), (190, 100), (190, 101), (190, 102), (190, 103), (190, 104), (190, 105), (190, 106), (190, 107), (190, 108), (190, 109), (190, 110), (190, 111), (190, 112), (190, 113), (190, 114), (190, 115), (190, 116), (190, 117), (190, 118), (190, 119), (190, 120), (190, 121), (190, 123),
(191, 78), (191, 79), (191, 80), (191, 81), (191, 82), (191, 83), (191, 84), (191, 85), (191, 86), (191, 87), (191, 88), (191, 89), (191, 90), (191, 91), (191, 92), (191, 97), (191, 98), (191, 99), (191, 100), (191, 101), (191, 102), (191, 103), (191, 104), (191, 105), (191, 106), (191, 107), (191, 108), (191, 109), (191, 110), (191, 111), (191, 112), (191, 113), (191, 114), (191, 115), (191, 116), (191, 117), (191, 118), (191, 119), (191, 120), (191, 121), (191, 123), (192, 76), (192, 92), (192, 93), (192, 94), (192, 95), (192, 96), (192, 97), (192, 102), (192, 103), (192, 104), (192, 105), (192, 106), (192, 107), (192, 108), (192, 109), (192, 110), (192, 111), (192, 112), (192, 113), (192, 114), (192, 115), (192, 116), (192, 117), (192, 118), (192, 119), (192, 120), (192, 122), (193, 78), (193, 80), (193, 81), (193, 82),
(193, 83), (193, 84), (193, 85), (193, 86), (193, 87), (193, 88), (193, 89), (193, 90), (193, 91), (193, 92), (193, 97), (193, 98), (193, 99), (193, 100), (193, 101), (193, 104), (193, 105), (193, 106), (193, 107), (193, 108), (193, 109), (193, 110), (193, 111), (193, 112), (193, 113), (193, 114), (193, 115), (193, 116), (193, 117), (193, 118), (193, 122), (194, 102), (194, 103), (194, 119), (194, 121), (195, 104), (195, 105), (195, 106), (195, 107), (195, 108), (195, 109), (195, 110), (195, 111), (195, 112), (195, 113), (195, 114), (195, 115), (195, 116), (195, 117), (195, 118), )
coordinates_F0FFF0 = ((150, 128),
(151, 127), (152, 123), (152, 125), (153, 121), (153, 124), (154, 118), (154, 119), (154, 122), (155, 115), (155, 116), (155, 117), (155, 121), (156, 108), (156, 109), (156, 110), (156, 111), (156, 112), (156, 113), (156, 114), (156, 119), (157, 103), (157, 105), (157, 106), (157, 107), (157, 115), (157, 116), (157, 118), (158, 102), (158, 108), (158, 109), (158, 110), (158, 111), (158, 112), (158, 113), (158, 114), (158, 115), (158, 117), (159, 101), (159, 103), (159, 104), (159, 105), (159, 106), (159, 107), (159, 108), (159, 109), (159, 110), (159, 111), (159, 112), (159, 113), (159, 114), (159, 115), (159, 117), (160, 102), (160, 103), (160, 104), (160, 105), (160, 106), (160, 107), (160, 108), (160, 109), (160, 110), (160, 111), (160, 112), (160, 113), (160, 114), (160, 115), (160, 117), (161, 98), (161, 101), (161, 102), (161, 103), (161, 104),
(161, 105), (161, 106), (161, 107), (161, 108), (161, 109), (161, 110), (161, 111), (161, 112), (161, 113), (161, 114), (161, 115), (161, 116), (161, 118), (162, 98), (162, 101), (162, 102), (162, 103), (162, 104), (162, 105), (162, 106), (162, 107), (162, 108), (162, 109), (162, 110), (162, 111), (162, 112), (162, 113), (162, 114), (162, 115), (162, 116), (162, 118), (163, 98), (163, 100), (163, 105), (163, 106), (163, 107), (163, 108), (163, 109), (163, 110), (163, 111), (163, 112), (163, 113), (163, 114), (163, 115), (163, 116), (163, 117), (163, 119), (164, 101), (164, 102), (164, 103), (164, 104), (164, 109), (164, 110), (164, 111), (164, 112), (164, 113), (164, 114), (164, 115), (164, 116), (164, 117), (164, 118), (164, 120), (165, 105), (165, 106), (165, 107), (165, 108), (165, 114), (165, 115), (165, 116), (165, 117), (165, 118), (165, 119),
(165, 122), (166, 109), (166, 110), (166, 111), (166, 112), (166, 113), (166, 119), (166, 120), (166, 123), (166, 124), (167, 114), (167, 115), (167, 116), (167, 117), (167, 118), (167, 125), (168, 119), (168, 120), (168, 121), (168, 122), (168, 127), (169, 123), (169, 124), (169, 125), (169, 129), (170, 126), (170, 130), (171, 128), (171, 131), (172, 129), (172, 131), (173, 130), (173, 132), (174, 131), (174, 132), (175, 132), )
|
py | 1a35374cb6daaf6897b0b8323518d2168ffa9f0c | # Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import sys
import glob
import math
import textwrap
from apps import apps
class HplCpuAppConf(apps.AppConf):
@staticmethod
def name():
return 'hpl_cpu'
def __init__(self, num_nodes, mach, perc_dram_per_node=0.9, cores_per_node=None):
'''
num_nodes: Number of MPI ranks (1 node per rank) -- 2, 4, 8 or 16.
perc_dram_per_node: Ratio of the total node DRAM that should be used for the
HPL matrix (assuming DP).
80-90% is a good amount to maximize efficiency.
Default is 0.9.
cores_per_node: Number of Xeon cores that each MPI process can offload to via OMP.
Total number of physical cores will be selected if this is not set
(default=None).
'''
dram_for_app = num_nodes * mach.total_node_memory_bytes() * perc_dram_per_node
if cores_per_node is None:
cores_per_node = mach.num_core()
benchmark_dir = os.path.dirname(os.path.abspath(__file__))
self.exe_path = os.path.join(benchmark_dir, 'hpl-2.3/bin/Linux_Intel64/xhpl')
self.NBs=384 # This is the recommended size for Intel Scalable Xeon family.
process_grid_ratios = {
1: {'P': 1, 'Q': 1},
2: {'P': 1, 'Q': 2},
4: {'P': 2, 'Q': 2},
8: {'P': 2, 'Q': 4},
16: {'P': 4, 'Q': 4}
}
if num_nodes not in process_grid_ratios:
raise RuntimeError("Number of nodes {num_nodes} is not defined for HPL.".format(num_nodes=num_nodes))
self.P = process_grid_ratios[num_nodes]['P']
self.Q = process_grid_ratios[num_nodes]['Q']
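# An N x N double-precision matrix occupies 8 * N^2 bytes, so solving
# 8 * N^2 = dram_for_app gives the largest problem size that fits.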
self.N = int(round(math.sqrt(dram_for_app / 8)))
self._cpu_per_rank = cores_per_node
sys.stdout.write('DRAM reserved for APP: {dram_for_app:0.2f}GB\n'.format(dram_for_app=dram_for_app/2**30))
sys.stdout.write('Cores for app: {cores_per_node}\n'.format(cores_per_node=cores_per_node))
sys.stdout.write('N={N}\n'.format(N=self.N))
def get_bash_setup_commands(self):
input_file = textwrap.dedent('''
HPLinpack benchmark input file
Innovative Computing Laboratory, University of Tennessee
HPL.out output file name (if any)
6 device out (6=stdout,7=stderr,file)
1 # of problems sizes (N)
{N} Ns
1 # of NBs
{NBs} NBs
0 PMAP process mapping (0=Row-,1=Column-major)
1 # of process grids (P x Q)
{P} Ps
{Q} Qs
16.0 threshold
1 # of panel fact
1 PFACTs (0=left, 1=Crout, 2=Right)
1 # of recursive stopping criterium
4 NBMINs (>= 1)
1 # of panels in recursion
2 NDIVs
1 # of recursive panel fact.
1 RFACTs (0=left, 1=Crout, 2=Right)
1 # of broadcast
0 BCASTs (0=1rg,1=1rM,2=2rg,3=2rM,4=Lng,5=LnM)
1 # of lookahead depth
0 DEPTHs (>=0)
2 SWAP (0=bin-exch,1=long,2=mix)
64 swapping threshold
0 L1 in (0=transposed,1=no-transposed) form
0 U in (0=transposed,1=no-transposed) form
1 Equilibration (0=no,1=yes)
8 memory alignment in double (> 0)
EOF
'''.format(N=self.N, NBs=self.NBs, P=self.P, Q=self.Q))
setup_commands = 'export MKL_NUM_THREADS={cpu_per_rank}\n'.format(cpu_per_rank=self._cpu_per_rank)
setup_commands += 'cat > ./HPL.dat << EOF {input_file}\n'.format(input_file=input_file)
return setup_commands
def get_rank_per_node(self):
return 1
def get_cpu_per_rank(self):
return self._cpu_per_rank
def get_bash_exec_path(self):
return self.exe_path
def get_bash_exec_args(self):
return ''
def get_custom_geopm_args(self):
# See README.md for an explanation of why
# HPL cannot start in process control mode.
# Also hyperthreading does not benefit HPL and
# it is turned off.
return ['--geopm-ctl=application',
'--geopm-hyperthreads-disable']
def parse_fom(self, log_path):
result = None
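# HPL prints one result row per run whose leading token starts with 'WR'
# (e.g. 'WR00...'); the final space-separated field of that row is taken
# here as the figure of merit (GFLOPS).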
key = 'WR00'
with open(log_path) as fid:
for line in fid.readlines():
if key in line:
result = float(line.split(' ')[-1])
break
return result
|
py | 1a3537f8336c81011f0822717da952511efb45c6 | __description__ = \
"""
Class for generating simulated epistasis maps with options for various
distributions of values.
"""
__author__ = "Zach Sailer"
from functools import wraps
from epistasis.mapping import EpistasisMap
from numpy import random
class DistributionSimulation(EpistasisMap):
"""
Just like an epistasis map, but with extra methods for setting epistatic
coefficients
"""
def __init__(self, gpm, df=None, sites=None, values=None, uncertainties=None):
super().__init__(df=df, sites=sites, values=values, uncertainties=uncertainties)
self._gpm = gpm
@property
def avail_distributions(self):
return random.__all__
def set_order_from_distribution(self, orders, dist="normal", **kwargs):
"""
Sets epistatic coefficients to values drawn from a statistical
distribution.
Distributions are found in NumPy's `random` module. Kwargs are passed
directly to these methods.
"""
# Get distribution
try:
method = getattr(random, dist)
except AttributeError:
err = "Distribution now found. Check the `avail_distribution` \n"
err += "attribute for available distributions.\n"
raise ValueError(err)
idx = self.data.orders.isin(orders)
self.data.loc[idx, "values"] = method(
size=sum(idx),
**kwargs
)
self._gpm.build()
@wraps(EpistasisMap.set_values)
def set_values(self, values, filter=None):
super().set_values(values, filter=filter)
self._gpm.build()
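# Usage sketch (hypothetical; assumes `gpm` is a genotype-phenotype-map-like
# object exposing a build() method, as the constructor above requires):
#   sim = DistributionSimulation(gpm, sites=[[0], [1], [0, 1]])
#   sim.set_order_from_distribution(orders=[2], dist="normal", loc=0, scale=1)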
|
py | 1a35384395def00ded1b05a113ebda21b13192ef | """A transformer for the gen3 project: reads genetrails_variants bcc data and writes to DEFAULT_OUTPUT_DIR."""
import hashlib
import os
import json
from gen3_etl.utils.ioutils import reader
from defaults import DEFAULT_OUTPUT_DIR, DEFAULT_EXPERIMENT_CODE, DEFAULT_PROJECT_ID, default_parser, emitter, obscure_dates
from gen3_etl.utils.schema import generate, template
LOOKUP_PATHS = """
source/bcc/genetrails_classification.json
source/bcc/genetrails_copy_number_result_type.json
source/bcc/genetrails_protein_variant_type.json
source/bcc/genetrails_result_significance.json
source/bcc/genetrails_result_type.json
source/bcc/genetrails_run_status.json
source/bcc/genetrails_transcript_priority.json
source/bcc/genetrails_variant_type.json
source/bcc/chromosome.json
source/bcc/assay_categories.json
source/bcc/assay_version.json
source/bcc/gene.json
source/bcc/genome_build.json
""".strip().split()
def transform(item_paths, output_dir, experiment_code, compresslevel=0, callback=None):
"""Read bcc labkey json and writes gen3 json."""
genetrails_emitter = emitter('wes_result', output_dir=output_dir)
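# gene_lookup.tsv is assumed to hold two whitespace-separated columns per
# line: a gene symbol and the reference submitter_id it maps to.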
with open('output/reference/gene_lookup.tsv') as f:
gene_lookup = {k: v for k,v in (line.split() for line in f) }
for p in item_paths:
source = os.path.splitext(os.path.basename(p))[0]
for line in reader(p):
line['source'] = source
if callback:
line = callback(line)
submitter_id = line.get('participantid', line.get('ParticipantID', None))
aliquot_id = '{}-sample-aliquot'.format(submitter_id)
genetrails_variant = {
'type': 'wes_result',
'project_id': DEFAULT_PROJECT_ID,
'aliquot': {'submitter_id': aliquot_id},
'submitter_id': line['lsid']}
if 'gene_symbol' in line and line['gene_symbol'].lower() in gene_lookup:
line['gene'] = {'submitter_id': gene_lookup[line['gene_symbol'].lower()], 'project_id': 'smmart-reference'}
genetrails_variant.update(line)
genetrails_emitter.write(genetrails_variant)
genetrails_emitter.close()
def lookups():
look_ups = {}
for p in LOOKUP_PATHS:
c = p.replace('source/bcc/','').replace('genetrails_','').replace('.json','')
look_ups[c] = {}
print(p, c)
for line in reader(p):
name = line['display_name']
val = [line[k] for k in line if not k.startswith('_') and k.endswith('_id')][0]
look_ups[c][val] = name
return look_ups
LOOKUPS = lookups()
def my_callback(line):
"""Remove fields that start with _, fix key names with embedded /, fix id lookups """
for k in [k for k in line if k.startswith('_')]:
del line[k]
for k in [k for k in line if '/' in k]:
line[k.split('/')[1]] = line[k]
del line[k]
for k in [k for k in line if k.endswith('_id')]:
if k in ['project_id', 'submitter_id']:
continue
lup = k.replace('_id', '')
if line[k]:
try:
line[lup] = LOOKUPS[lup][line[k]]
except Exception as e:
print(lup, k, line[k])
print('******')
print(LOOKUPS[lup])
print('******')
raise e
del line[k]
if 'chromosome' in line:
line['chromosome'] = str(line['chromosome'].replace('chr',''))
if 'gene' in line:
line['gene_symbol'] = line['gene']
del line['gene']
return line
def my_schema_callback(schema):
"""Remove fields that start with _, fix key names with embedded /, fix id lookups """
for k in [k for k in schema['properties'] if k.startswith('_')]:
del schema['properties'][k]
for k in [k for k in schema['properties'] if '/' in k]:
schema['properties'][k.split('/')[1]] = schema['properties'][k]
del schema['properties'][k]
for k in [k for k in schema['properties'] if k.endswith('_id')]:
if k in ['project_id', 'submitter_id']:
continue
schema['properties'][k.replace('_id', '')] = {'type': ['string', "'null'"]} # schema['properties'][k]
del schema['properties'][k]
# adds the source property
schema['category'] = 'bcc extention'
schema['properties']['aliquot'] = {'$ref': '_definitions.yaml#/to_one'}
return schema
if __name__ == "__main__":
item_paths = ['source/bcc/WESResults.json']
args = default_parser(DEFAULT_OUTPUT_DIR, DEFAULT_EXPERIMENT_CODE, DEFAULT_PROJECT_ID).parse_args()
transform(item_paths, output_dir=args.output_dir, experiment_code=args.experiment_code, callback=my_callback)
item_paths = ['output/bcc/wes_result.json']
link = {'name':'aliquot', 'backref':'wes_result', 'label':'derived_from', 'target_type':'aliquot', 'multiplicity': 'many_to_one', 'required': False }
schema_path = generate(item_paths,'wes_result', output_dir='output/bcc', links=[link], callback=my_schema_callback)
assert os.path.isfile(schema_path), 'should have an schema file {}'.format(schema_path)
print(schema_path)
|
py | 1a35385f62f7f1e775677e9104390b81b83d6c54 | import sys
import pytest
from astropy.samp import conf
from astropy.samp.hub_script import hub_script
def setup_module(module):
conf.use_internet = False
def setup_function(function):
function.sys_argv_orig = sys.argv
sys.argv = ["samp_hub"]
def teardown_function(function):
sys.argv = function.sys_argv_orig
@pytest.mark.slow
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
|
py | 1a35391bfb646cb4e49505c79a971c3c50cad29e |
import skeleton
def main_window():
skeleton.window_screen.title("Welcome to GitInit")
skeleton.window_screen.geometry('400x320')
tabs()
skeleton.window_screen.mainloop()
def tabs():
skeleton.tab_control.add(skeleton.tab1, text='Choose directory')
skeleton.tab_control.add(skeleton.tab2, text='Initialize the repo')
skeleton.tab_control.add(skeleton.tab3, text='Tutorial')
skeleton.tab1_skeleton()
skeleton.tab2_skeleton()
skeleton.tab3_skeleton()
skeleton.tab_control.pack(expand=1, fill='both')
|
py | 1a353968f4501666a641c6885684654c662ae09a | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer
from ._configuration import HDInsightManagementClientConfiguration
from .operations import ClustersOperations
from .operations import ApplicationsOperations
from .operations import LocationsOperations
from .operations import ConfigurationsOperations
from .operations import ExtensionsOperations
from .operations import ScriptActionsOperations
from .operations import ScriptExecutionHistoryOperations
from .operations import Operations
from .operations import VirtualMachinesOperations
from . import models
class HDInsightManagementClient(SDKClient):
"""HDInsight Management Client
:ivar config: Configuration for client.
:vartype config: HDInsightManagementClientConfiguration
:ivar clusters: Clusters operations
:vartype clusters: azure.mgmt.hdinsight.operations.ClustersOperations
:ivar applications: Applications operations
:vartype applications: azure.mgmt.hdinsight.operations.ApplicationsOperations
:ivar locations: Locations operations
:vartype locations: azure.mgmt.hdinsight.operations.LocationsOperations
:ivar configurations: Configurations operations
:vartype configurations: azure.mgmt.hdinsight.operations.ConfigurationsOperations
:ivar extensions: Extensions operations
:vartype extensions: azure.mgmt.hdinsight.operations.ExtensionsOperations
:ivar script_actions: ScriptActions operations
:vartype script_actions: azure.mgmt.hdinsight.operations.ScriptActionsOperations
:ivar script_execution_history: ScriptExecutionHistory operations
:vartype script_execution_history: azure.mgmt.hdinsight.operations.ScriptExecutionHistoryOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.hdinsight.operations.Operations
:ivar virtual_machines: VirtualMachines operations
:vartype virtual_machines: azure.mgmt.hdinsight.operations.VirtualMachinesOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: The subscription credentials which uniquely
identify Microsoft Azure subscription. The subscription ID forms part of
the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
self.config = HDInsightManagementClientConfiguration(credentials, subscription_id, base_url)
super(HDInsightManagementClient, self).__init__(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self.api_version = '2018-06-01-preview'
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.clusters = ClustersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.applications = ApplicationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.locations = LocationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.configurations = ConfigurationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.extensions = ExtensionsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.script_actions = ScriptActionsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.script_execution_history = ScriptExecutionHistoryOperations(
self._client, self.config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_machines = VirtualMachinesOperations(
self._client, self.config, self._serialize, self._deserialize)
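# Usage sketch (hedged example; the credential class shown comes from
# azure.common.credentials, which this generated client does not import):
#   from azure.common.credentials import ServicePrincipalCredentials
#   creds = ServicePrincipalCredentials(client_id='...', secret='...', tenant='...')
#   client = HDInsightManagementClient(creds, subscription_id='...')
#   clusters = client.clusters.list()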
|
py | 1a353a141daa4f48b1d782ed3334eb3bd3bb5494 | import numba
import numpy as np
from scipy.sparse import csr_matrix
from .base import BasePointer, GraphBlasContainer
from .context import handle_panic, return_error
from .exceptions import GrB_Info
class MatrixPtr(BasePointer):
def set_matrix(self, matrix):
self.instance = matrix
class Matrix(GraphBlasContainer):
def __init__(self, matrix):
assert isinstance(matrix, csr_matrix)
self.matrix = matrix
@classmethod
def new_from_dtype(cls, dtype, nrows, ncols):
matrix = csr_matrix((nrows, ncols), dtype=dtype)
return cls(matrix)
@classmethod
def new_from_existing(cls, other):
matrix = csr_matrix(other)
return cls(matrix)
@classmethod
def get_pointer(cls):
return MatrixPtr()
@handle_panic
def Matrix_new(A: MatrixPtr, dtype: type, nrows: int, ncols: int):
if nrows <= 0:
return_error(GrB_Info.GrB_INVALID_VALUE, "nrows must be > 0")
if ncols <= 0:
return_error(GrB_Info.GrB_INVALID_VALUE, "ncols must be > 0")
matrix = Matrix.new_from_dtype(dtype, nrows, ncols)
A.set_matrix(matrix)
return GrB_Info.GrB_SUCCESS
@handle_panic
def Matrix_dup(C: MatrixPtr, A: Matrix):
matrix = Matrix.new_from_existing(A)
C.set_matrix(matrix)
return GrB_Info.GrB_SUCCESS
@handle_panic
def Matrix_resize(C: Matrix, nrows: int, ncols: int):
if nrows <= 0:
return_error(GrB_Info.GrB_INVALID_VALUE, "nrows must be > 0")
if ncols <= 0:
return_error(GrB_Info.GrB_INVALID_VALUE, "ncols must be > 0")
C.matrix.resize((nrows, ncols))
return GrB_Info.GrB_SUCCESS
# TODO: this is just the essential code; it needs to handle descriptors, masks, accumulators, etc
@handle_panic
def mxm(C, A, B, semiring):
cr, cc = C.shape
ar, ac = A.shape
br, bc = B.shape
if cr != ar:
return_error(GrB_Info.GrB_DIMENSION_MISMATCH, "C.nrows != A.nrows")
if cc != bc:
return_error(GrB_Info.GrB_DIMENSION_MISMATCH, "C.ncols != B.ncols")
if ac != br:
return_error(GrB_Info.GrB_DIMENSION_MISMATCH, "A.nrows != B.ncols")
b = B.tocsc()
d, i, ip = _sparse_matmul(
A.data,
A.indices,
A.indptr,
b.data,
b.indices,
b.indptr,
semiring.plus.op,
semiring.times,
semiring.plus.identity,
C.dtype,
)
C.data = d
C.indices = i
C.indptr = ip
return GrB_Info.GrB_SUCCESS
@numba.njit
def _sparse_matmul(
a_data,
a_indices,
a_indptr,
b_data,
b_indices,
b_indptr,
plus,
times,
identity,
dtype,
):
# Final array size is unknown, so we give ourselves room and then adjust on the fly
tmp_output_size = a_data.size * 2
data = np.empty((tmp_output_size,), dtype=dtype)
indices = np.empty((tmp_output_size,), dtype=a_indices.dtype)
indptr = np.empty((a_indptr.size,), dtype=a_indptr.dtype)
output_counter = 0
for iptr in range(a_indptr.size - 1):
indptr[iptr] = output_counter
for jptr in range(b_indptr.size - 1):
a_counter = a_indptr[iptr]
a_stop = a_indptr[iptr + 1]
b_counter = b_indptr[jptr]
b_stop = b_indptr[jptr + 1]
val = identity
nonempty = False
while a_counter < a_stop and b_counter < b_stop:
a_k = a_indices[a_counter]
b_k = b_indices[b_counter]
if a_k == b_k:
val = plus(val, times(a_data[a_counter], b_data[b_counter]))
nonempty = True
a_counter += 1
b_counter += 1
elif a_k < b_k:
a_counter += 1
else:
b_counter += 1
if nonempty:
if output_counter >= tmp_output_size:
# We filled up the allocated space; copy existing data to a larger array
tmp_output_size *= 2
new_data = np.empty((tmp_output_size,), dtype=data.dtype)
new_indices = np.empty((tmp_output_size,), dtype=indices.dtype)
new_data[:output_counter] = data[:output_counter]
new_indices[:output_counter] = indices[:output_counter]
data = new_data
indices = new_indices
data[output_counter] = val
indices[output_counter] = jptr
output_counter += 1
# Record the cumulative nonzero count for this row; after the last row it equals the total nnz
nnz = output_counter
indptr[iptr + 1] = nnz
# Trim output arrays
data = data[:nnz]
indices = indices[:nnz]
return (data, indices, indptr)
|
py | 1a353a48a2ba4ab6a4311ff5a86ce37f2c84fc88 | import json
data_files = {
'colors.json',
'default_key_mappings.txt',
'unicode_names.json'
}
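# Each data file is embedded as a JSON string literal assigned into
# Sk.builtinFiles, so Skulpt can serve it from its virtual filesystem.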
with open('dist-js/skulpt-designer-files.js', 'w') as output_file:
for filename in data_files:
with open(f'designer/data/{filename}') as data_file:
data = data_file.read()
line = f"Sk.builtinFiles['files']['src/lib/designer/data/{filename}']={json.dumps(data)};\n"
output_file.write(line) |
py | 1a353b81e17aa6ace578ac756bc9a2130fe4bcbb | import logging
import os
import re
from collections import namedtuple
from tigertag import Pluggable
from tigertag.util import str2bool
logger = logging.getLogger(__name__)
FileInfo = namedtuple('FileInfo', 'name path hash temp ext_id')
class Scanner(Pluggable):
RESERVED_PROPS = ['NAME', 'ENABLED']
def __init__(self, name, enabled):
self.name = name
self.enabled = enabled
self.props = {}
self.listeners = [] # ScannerListeners
def scan(self):
raise NotImplementedError('The {} scanner has not implemented the scan method.'.format(self.name))
class ScannerListener:
def on_file(self, scanner: Scanner, file_info: FileInfo):
pass
class ScannerManager:
def __init__(self):
self.scanners = {}
self.listeners = [] # ScannerListener array
def add(self, scanner):
self.scanners[scanner.name] = scanner
def scan(self):
if len(self.scanners) == 0:
logger.warning('No scanners configured. Please check your configuration')
for scanner_name, scanner in self.scanners.items():
if scanner.enabled:
scanner.listeners = []
for scanner_listener in self.listeners:
scanner.listeners.append(scanner_listener)
scanner.scan()
class ScannerManagerBuilder:
def __init__(self, scanner_manager_klass):
self.scanner_manager_klass = scanner_manager_klass
def build(self):
raise NotImplementedError
class EnvironmentScannerManagerBuilder(ScannerManagerBuilder):
def __init__(self, scanner_manager_klass):
super().__init__(scanner_manager_klass)
def get_class(self, klass_name):
parts = klass_name.split('.')
module = ".".join(parts[:-1])
m = __import__(module)
for comp in parts[1:]:
m = getattr(m, comp)
return m
def build(self):
sm = self.scanner_manager_klass()
scanner_detect = re.compile('^SCANNER_(?P<scanner>[A-Z0-9]*)_NAME')
# Find and create the scanners
for env_name, env_value in os.environ.items():
match = scanner_detect.match(env_name)
if match is not None:
logger.debug('Configuring scanner {}:{}'.format(env_name, env_value))
scanner_env_name = match.group('scanner')
scanner_klass_name = env_value
enabled = False
if os.environ.get('SCANNER_{}_ENABLED'.format(scanner_env_name)) is not None:
enabled = str2bool(os.environ['SCANNER_{}_ENABLED'.format(scanner_env_name)])
scanner_klass = self.get_class(scanner_klass_name)
scanner = scanner_klass(scanner_env_name, enabled)
# Collect all the scanner properties
prop_detect = re.compile('^SCANNER_{}_(?P<prop>[A-Z0-9_]*)'.format(scanner_env_name))
for env_prop_name, env_prop_value in os.environ.items():
prop_match = prop_detect.match(env_prop_name)
if prop_match is not None:
prop_name = prop_match.group('prop')
if prop_name not in Scanner.RESERVED_PROPS:
scanner.props[prop_name] = env_prop_value
sm.add(scanner)
return sm
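# minimal usage sketch (the scanner class and env var values here are hypothetical):
#   os.environ['SCANNER_FS_NAME'] = 'mypkg.scanners.FileSystemScanner'
#   os.environ['SCANNER_FS_ENABLED'] = 'True'
#   manager = EnvironmentScannerManagerBuilder(ScannerManager).build()
#   manager.scan()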
|
py | 1a353b96691eb19152b268006e329930b5397fad | # coding=utf-8
from __future__ import unicode_literals
from .. import Provider as PersonProvider
from collections import OrderedDict
class Provider(PersonProvider):
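# each plain '{{first_name_*}} {{last_name}}' pattern appears four times, so
# hyphenated double surnames are generated roughly once in five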
formats_female = (
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}-{{last_name}}',
)
formats_male = (
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}-{{last_name}}',
)
formats = formats_female + formats_male
prefixes = (
'Herra', 'hra', 'Rouva', 'rva', 'Tohtori', 'tri', 'prof.', 'arkkit.',
)
suffixes = ('DI', 'PhD', 'MSc', 'BSc')
# List of most popular given names in Finland:
# https://www.avoindata.fi/data/en/dataset/none/resource/d35f8973-53da-4b66-8a49-bc2fee1a2996
first_names_female = OrderedDict((
('Aada', 0.001877),
('Aila', 0.002778),
('Aili', 0.001542),
('Aino', 0.00803),
('Airi', 0.001591),
('Aleksandra', 0.002884),
('Alexandra', 0.002718),
('Alina', 0.002114),
('Alisa', 0.001607),
('Amanda', 0.004584),
('Anita', 0.005061),
('Anja', 0.005627),
('Anna', 0.010413),
('Anna-Liisa', 0.00232),
('Anne', 0.007823),
('Anneli', 0.0277),
('Anni', 0.004397),
('Anniina', 0.003366),
('Annika', 0.005193),
('Annikki', 0.013414),
('Annukka', 0.001424),
('Anu', 0.003005),
('Arja', 0.004539),
('Aulikki', 0.002522),
('Aune', 0.00208),
('Aurora', 0.003709),
('Birgitta', 0.003324),
('Carita', 0.001574),
('Christina', 0.001534),
('Eeva', 0.006403),
('Eija', 0.00491),
('Eila', 0.005377),
('Eliisa', 0.00163),
('Elina', 0.014353),
('Elisa', 0.00424),
('Elisabet', 0.005925),
('Elisabeth', 0.004305),
('Ella', 0.002958),
('Ellen', 0.002243),
('Elli', 0.002258),
('Elsa', 0.002284),
('Emilia', 0.014649),
('Emma', 0.003571),
('Emmi', 0.002183),
('Erika', 0.002084),
('Essi', 0.001576),
('Esteri', 0.001672),
('Eveliina', 0.005899),
('Hanna', 0.005409),
('Hannele', 0.0193),
('Heidi', 0.005315),
('Helena', 0.028118),
('Heli', 0.003711),
('Helinä', 0.002151),
('Hellevi', 0.002416),
('Helmi', 0.003888),
('Helvi', 0.001462),
('Henna', 0.002525),
('Hilkka', 0.003745),
('Hillevi', 0.001577),
('Ida', 0.003067),
('Iida', 0.003699),
('Iiris', 0.001461),
('Ilona', 0.004816),
('Inkeri', 0.009444),
('Irene', 0.005164),
('Irja', 0.002691),
('Irma', 0.002772),
('Irmeli', 0.006537),
('Jaana', 0.005125),
('Jasmin', 0.001789),
('Jenna', 0.002567),
('Jenni', 0.004011),
('Johanna', 0.025061),
('Jonna', 0.002053),
('Josefiina', 0.001757),
('Julia', 0.004716),
('Juulia', 0.001411),
('Kaarina', 0.022441),
('Kaija', 0.003216),
('Kaisa', 0.004424),
('Karoliina', 0.006727),
('Katariina', 0.010602),
('Kati', 0.002463),
('Katja', 0.00381),
('Katri', 0.00337),
('Katriina', 0.004651),
('Kerttu', 0.002839),
('Kirsi', 0.004856),
('Kirsti', 0.003699),
('Krista', 0.001465),
('Kristiina', 0.016656),
('Kristina', 0.002653),
('Kyllikki', 0.008537),
('Laura', 0.005985),
('Lea', 0.002827),
('Leena', 0.011052),
('Leila', 0.00267),
('Liisa', 0.015791),
('Lilja', 0.001584),
('Linda', 0.001706),
('Linnea', 0.004089),
('Lotta', 0.002416),
('Maaria', 0.00335),
('Maarit', 0.012853),
('Maija', 0.00721),
('Maire', 0.001814),
('Margareta', 0.002525),
('Margit', 0.002057),
('Mari', 0.005431),
('Maria', 0.044412),
('Marianne', 0.00481),
('Marika', 0.005912),
('Marita', 0.005339),
('Maritta', 0.002299),
('Marja', 0.010093),
('Marja-Leena', 0.002611),
('Marja-Liisa', 0.002389),
('Marjaana', 0.004377),
('Marjatta', 0.020442),
('Marjo', 0.002613),
('Marjukka', 0.001486),
('Marjut', 0.003021),
('Marketta', 0.004413),
('Martta', 0.001663),
('Matilda', 0.004284),
('Merja', 0.004704),
('Mervi', 0.002193),
('Mia', 0.001736),
('Miia', 0.002146),
('Milla', 0.002204),
('Minna', 0.006615),
('Mira', 0.001706),
('Mirja', 0.003558),
('Mirjam', 0.002435),
('Mirjami', 0.003726),
('Nea', 0.001605),
('Niina', 0.002776),
('Nina', 0.003539),
('Noora', 0.002609),
('Olivia', 0.00384),
('Oona', 0.001707),
('Orvokki', 0.007473),
('Outi', 0.002278),
('Päivi', 0.007556),
('Päivikki', 0.002189),
('Paula', 0.004438),
('Pauliina', 0.006648),
('Petra', 0.001455),
('Pia', 0.002752),
('Piia', 0.00155),
('Pirjo', 0.006778),
('Pirkko', 0.005904),
('Raija', 0.005237),
('Raili', 0.003592),
('Riikka', 0.00301),
('Riitta', 0.008817),
('Ritva', 0.007408),
('Roosa', 0.001641),
('Saara', 0.002931),
('Sanna', 0.005027),
('Sanni', 0.001827),
('Sara', 0.003165),
('Sari', 0.00656),
('Satu', 0.005599),
('Seija', 0.005422),
('Siiri', 0.002066),
('Sini', 0.002038),
('Sinikka', 0.010005),
('Sirkka', 0.004487),
('Sirpa', 0.005252),
('Sisko', 0.005153),
('Sofia', 0.012669),
('Sonja', 0.001978),
('Susanna', 0.012647),
('Suvi', 0.003093),
('Taina', 0.002224),
('Tanja', 0.002577),
('Tarja', 0.005886),
('Taru', 0.001492),
('Teija', 0.001634),
('Tellervo', 0.007298),
('Terhi', 0.001779),
('Terttu', 0.004408),
('Tiia', 0.002003),
('Tiina', 0.006154),
('Tuija', 0.002932),
('Tuula', 0.007947),
('Tuuli', 0.001425),
('Tuulia', 0.004341),
('Tuulikki', 0.013373),
('Ulla', 0.004552),
('Veera', 0.002453),
('Venla', 0.001985),
('Viivi', 0.001505),
('Vilhelmiina', 0.002004),
('Vilma', 0.001724),
('Virpi', 0.00213),
('Vuokko', 0.001466),
))
first_names_male = OrderedDict((
('Aapo', 0.001263),
('Aarne', 0.001939),
('Aaro', 0.001601),
('Aaron', 0.001246),
('Aatos', 0.001552),
('Ahti', 0.001192),
('Aimo', 0.001399),
('Aki', 0.001881),
('Akseli', 0.002333),
('Aleksanteri', 0.002618),
('Aleksi', 0.008346),
('Alexander', 0.002728),
('Allan', 0.00227),
('Anders', 0.001411),
('Anssi', 0.001464),
('Antero', 0.029891),
('Anton', 0.002652),
('Antti', 0.011971),
('Ari', 0.006403),
('Armas', 0.003609),
('Arto', 0.004059),
('Arttu', 0.00228),
('Artturi', 0.001853),
('Arvo', 0.001578),
('Asko', 0.001363),
('Atte', 0.001392),
('Aukusti', 0.002011),
('Aulis', 0.002725),
('Benjamin', 0.002089),
('Christian', 0.002142),
('Daniel', 0.002919),
('Edvard', 0.001248),
('Eelis', 0.001359),
('Eemeli', 0.004734),
('Eemil', 0.002606),
('Eerik', 0.001629),
('Eero', 0.005572),
('Eetu', 0.003098),
('Einari', 0.002263),
('Eino', 0.004304),
('Elias', 0.005129),
('Elmeri', 0.001817),
('Emil', 0.003422),
('Ensio', 0.006508),
('Erik', 0.005296),
('Erkki', 0.007568),
('Esa', 0.0043),
('Esko', 0.004194),
('Hannu', 0.007429),
('Harri', 0.004739),
('Heikki', 0.011301),
('Henri', 0.003282),
('Henrik', 0.007534),
('Henrikki', 0.001325),
('Henry', 0.001412),
('Hermanni', 0.00167),
('Iisakki', 0.001193),
('Ilari', 0.002866),
('Ilkka', 0.003098),
('Ilmari', 0.015056),
('Ismo', 0.00148),
('Jaakko', 0.008225),
('Jalmari', 0.002645),
('Jan', 0.002011),
('Jani', 0.005117),
('Janne', 0.006361),
('Jari', 0.008664),
('Jarkko', 0.002672),
('Jarmo', 0.004396),
('Jarno', 0.001681),
('Jere', 0.002255),
('Jesse', 0.002586),
('Joel', 0.002105),
('Johan', 0.003528),
('Johannes', 0.028915),
('Joni', 0.003244),
('Joona', 0.002503),
('Joonas', 0.003828),
('Joonatan', 0.001565),
('Jorma', 0.005147),
('Jouko', 0.003962),
('Jouni', 0.004093),
('Juha', 0.011567),
('Juhana', 0.001862),
('Juhani', 0.061356),
('Juho', 0.005642),
('Jukka', 0.008652),
('Julius', 0.00209),
('Jussi', 0.004772),
('Juuso', 0.002224),
('Jyrki', 0.002127),
('Kaarlo', 0.002073),
('Kai', 0.001942),
('Kalervo', 0.008502),
('Kalevi', 0.021057),
('Kalle', 0.003829),
('Kari', 0.009761),
('Karl', 0.001779),
('Kasper', 0.001177),
('Kauko', 0.002169),
('Keijo', 0.002259),
('Kim', 0.001172),
('Kimmo', 0.003441),
('Kristian', 0.011096),
('Kullervo', 0.002234),
('Kustaa', 0.001144),
('Lasse', 0.002197),
('Lassi', 0.001214),
('Lauri', 0.00755),
('Leevi', 0.002015),
('Leo', 0.003319),
('Markku', 0.00843),
('Marko', 0.006297),
('Markus', 0.009181),
('Martti', 0.005521),
('Matias', 0.013377),
('Matti', 0.01756),
('Mauno', 0.001189),
('Mauri', 0.002098),
('Miika', 0.001845),
('Mika', 0.007765),
('Mikael', 0.021621),
('Mikko', 0.009719),
('Miro', 0.001274),
('Niilo', 0.002094),
('Niklas', 0.002024),
('Niko', 0.003908),
('Oiva', 0.001202),
('Olavi', 0.030903),
('Oliver', 0.003026),
('Olli', 0.003921),
('Onni', 0.004513),
('Oskar', 0.001185),
('Oskari', 0.007745),
('Osmo', 0.001531),
('Ossi', 0.001591),
('Otto', 0.002902),
('Paavo', 0.00381),
('Pasi', 0.004109),
('Patrik', 0.001474),
('Pauli', 0.003105),
('Pekka', 0.017016),
('Pentti', 0.006344),
('Pertti', 0.004406),
('Peter', 0.001704),
('Petri', 0.00786),
('Petteri', 0.015518),
('Raimo', 0.004575),
('Rainer', 0.001478),
('Rasmus', 0.001715),
('Rauno', 0.001688),
('Reijo', 0.003919),
('Reino', 0.002166),
('Riku', 0.001803),
('Risto', 0.004678),
('Robert', 0.001478),
('Roope', 0.001412),
('Sakari', 0.013891),
('Sami', 0.00587),
('Samu', 0.001237),
('Samuel', 0.00403),
('Samuli', 0.004994),
('Santeri', 0.00346),
('Sebastian', 0.002863),
('Seppo', 0.007305),
('Simo', 0.002313),
('Taisto', 0.001514),
('Taneli', 0.00129),
('Tapani', 0.02906),
('Tapio', 0.024776),
('Tauno', 0.001795),
('Teemu', 0.004605),
('Tero', 0.003188),
('Teuvo', 0.001714),
('Timo', 0.010557),
('Toivo', 0.003649),
('Tomi', 0.00341),
('Tommi', 0.003191),
('Toni', 0.003723),
('Topias', 0.001645),
('Tuomas', 0.005948),
('Tuomo', 0.002739),
('Tuukka', 0.001175),
('Uolevi', 0.002879),
('Väinö', 0.003176),
('Valdemar', 0.00152),
('Valtteri', 0.006312),
('Veeti', 0.001673),
('Veijo', 0.001517),
('Veikko', 0.007525),
('Veli', 0.004415),
('Verneri', 0.001164),
('Vesa', 0.003926),
('Vilhelm', 0.001591),
('Vilho', 0.002303),
('Viljami', 0.003563),
('Viljo', 0.00154),
('Ville', 0.007025),
('Yrjö', 0.001912),
))
first_names = first_names_male.copy()
first_names.update(first_names_female)
# List of most popular last names in Finland:
# https://www.avoindata.fi/data/en/dataset/none/resource/d25831d1-82a9-476f-8f7c-374c348efc14
last_names = OrderedDict((
('Aalto', 0.004189),
('Aaltonen', 0.004828),
('Aho', 0.003566),
('Ahokas', 0.001182),
('Ahola', 0.003697),
('Ahonen', 0.005301),
('Airaksinen', 0.001075),
('Alanen', 0.001124),
('Alanko', 0.001131),
('Alatalo', 0.001424),
('Andersson', 0.002447),
('Antikainen', 0.001061),
('Anttila', 0.004683),
('Anttonen', 0.00121),
('Aro', 0.00105),
('Asikainen', 0.002),
('Autio', 0.002187),
('Auvinen', 0.001732),
('Backman', 0.001331),
('Berg', 0.001362),
('Blomqvist', 0.001545),
('Eklund', 0.001737),
('Elo', 0.00113),
('Eloranta', 0.00109),
('Eriksson', 0.002454),
('Erkkilä', 0.001406),
('Eronen', 0.001765),
('Eskelinen', 0.002041),
('Eskola', 0.001747),
('Forsman', 0.001077),
('Grönroos', 0.001054),
('Gustafsson', 0.001571),
('Haapala', 0.001736),
('Haapanen', 0.00132),
('Haapaniemi', 0.001056),
('Haataja', 0.001222),
('Haavisto', 0.001782),
('Hakala', 0.004682),
('Hakkarainen', 0.00272),
('Häkkinen', 0.002513),
('Halme', 0.001566),
('Halonen', 0.003495),
('Hämäläinen', 0.009001),
('Hänninen', 0.003986),
('Hannula', 0.001522),
('Harju', 0.003153),
('Härkönen', 0.002434),
('Hartikainen', 0.002868),
('Hautala', 0.001909),
('Hautamäki', 0.00165),
('Haverinen', 0.001289),
('Heikkilä', 0.006931),
('Heikkinen', 0.008519),
('Heino', 0.00296),
('Heinonen', 0.007026),
('Heiskanen', 0.003335),
('Helenius', 0.001874),
('Helin', 0.001682),
('Helminen', 0.001458),
('Henriksson', 0.001408),
('Hietala', 0.002444),
('Hietanen', 0.00184),
('Hiltunen', 0.004889),
('Hirvonen', 0.004428),
('Hokkanen', 0.002165),
('Holappa', 0.00105),
('Holm', 0.001459),
('Holmberg', 0.001217),
('Holmström', 0.001188),
('Holopainen', 0.002501),
('Honkanen', 0.00323),
('Huhtala', 0.002066),
('Huotari', 0.001845),
('Huovinen', 0.001733),
('Huttunen', 0.003632),
('Huuskonen', 0.001163),
('Hytönen', 0.001515),
('Hyttinen', 0.001835),
('Hyvärinen', 0.002703),
('Hyvönen', 0.002406),
('Ihalainen', 0.001044),
('Ikonen', 0.00358),
('Immonen', 0.003231),
('Jaakkola', 0.002386),
('Jääskeläinen', 0.002913),
('Jaatinen', 0.001308),
('Jalonen', 0.001474),
('Jansson', 0.00146),
('Jäntti', 0.00125),
('Järvelä', 0.001204),
('Järvenpää', 0.001797),
('Järvi', 0.001061),
('Järvinen', 0.007928),
('Jauhiainen', 0.001305),
('Johansson', 0.003434),
('Jokela', 0.002356),
('Jokinen', 0.005951),
('Juntunen', 0.002955),
('Jussila', 0.002127),
('Juvonen', 0.001677),
('Kähkönen', 0.00158),
('Kaikkonen', 0.001253),
('Kainulainen', 0.001727),
('Kallio', 0.004876),
('Kämäräinen', 0.001118),
('Kanerva', 0.001436),
('Kangas', 0.002883),
('Kankaanpää', 0.001337),
('Kantola', 0.001513),
('Karhu', 0.00234),
('Karhunen', 0.001157),
('Kari', 0.001082),
('Karjalainen', 0.006036),
('Kärki', 0.001268),
('Kärkkäinen', 0.003561),
('Karlsson', 0.002809),
('Karppinen', 0.003072),
('Karttunen', 0.001799),
('Karvinen', 0.001394),
('Karvonen', 0.002385),
('Kauppila', 0.00126),
('Kauppinen', 0.003787),
('Keinänen', 0.001261),
('Kemppainen', 0.003777),
('Keränen', 0.002874),
('Keskinen', 0.001651),
('Keskitalo', 0.00109),
('Ketola', 0.001792),
('Kettunen', 0.003871),
('Kilpeläinen', 0.001374),
('Kinnunen', 0.006796),
('Kiuru', 0.001089),
('Kivelä', 0.002164),
('Kivimäki', 0.001619),
('Kivinen', 0.0013),
('Kiviniemi', 0.001402),
('Kivistö', 0.001447),
('Koistinen', 0.001988),
('Koivisto', 0.004667),
('Koivula', 0.002017),
('Koivunen', 0.001881),
('Kokko', 0.002672),
('Kokkonen', 0.003128),
('Kolehmainen', 0.002155),
('Komulainen', 0.001657),
('Konttinen', 0.001132),
('Koponen', 0.003424),
('Korhonen', 0.011042),
('Korpela', 0.002431),
('Korpi', 0.001281),
('Kortelainen', 0.001539),
('Koskela', 0.003733),
('Koski', 0.003231),
('Koskinen', 0.008414),
('Kosonen', 0.00231),
('Kovanen', 0.001198),
('Kuisma', 0.001348),
('Kujala', 0.002234),
('Kukkonen', 0.002415),
('Kulmala', 0.001901),
('Kumpulainen', 0.001781),
('Kuosmanen', 0.001577),
('Kurki', 0.001386),
('Kuronen', 0.001149),
('Kuusela', 0.001972),
('Kuusisto', 0.002479),
('Kyllönen', 0.001904),
('Laakkonen', 0.00201),
('Laakso', 0.00436),
('Laaksonen', 0.004505),
('Lähteenmäki', 0.001609),
('Lahti', 0.00373),
('Lahtinen', 0.005427),
('Laiho', 0.001374),
('Laine', 0.008802),
('Laitinen', 0.006223),
('Lammi', 0.00109),
('Lampinen', 0.002147),
('Lankinen', 0.001053),
('Lappalainen', 0.003902),
('Lassila', 0.001343),
('Latvala', 0.001139),
('Laukkanen', 0.002981),
('Laurila', 0.00268),
('Lehikoinen', 0.001339),
('Lehtimäki', 0.001726),
('Lehtinen', 0.007344),
('Lehto', 0.004389),
('Lehtola', 0.001536),
('Lehtonen', 0.00786),
('Leino', 0.002813),
('Leinonen', 0.004891),
('Lepistö', 0.001981),
('Leppänen', 0.005224),
('Leskinen', 0.002572),
('Liimatainen', 0.001943),
('Lilja', 0.00115),
('Lindberg', 0.001978),
('Lindfors', 0.001504),
('Lindgren', 0.00175),
('Lindholm', 0.003367),
('Lindqvist', 0.002171),
('Lindroos', 0.002225),
('Lindström', 0.002755),
('Linna', 0.001114),
('Lipponen', 0.00129),
('Liukkonen', 0.001696),
('Luoma', 0.00193),
('Luukkonen', 0.001845),
('Määttä', 0.003095),
('Mäenpää', 0.00279),
('Mäkelä', 0.009299),
('Mäki', 0.003044),
('Mäkinen', 0.009918),
('Makkonen', 0.002549),
('Malinen', 0.002249),
('Manninen', 0.004752),
('Männistö', 0.001155),
('Mäntylä', 0.001364),
('Markkanen', 0.001624),
('Martikainen', 0.002756),
('Marttila', 0.001834),
('Marttinen', 0.001083),
('Matikainen', 0.00149),
('Matilainen', 0.001526),
('Mattila', 0.005845),
('Mattsson', 0.001349),
('Meriläinen', 0.001503),
('Miettinen', 0.004877),
('Mikkola', 0.003284),
('Mikkonen', 0.00345),
('Moilanen', 0.004065),
('Moisio', 0.001273),
('Mononen', 0.001237),
('Muhonen', 0.001141),
('Mustonen', 0.004238),
('Myllymäki', 0.001733),
('Nevala', 0.001071),
('Nevalainen', 0.002639),
('Niemelä', 0.004065),
('Niemi', 0.006993),
('Nieminen', 0.009851),
('Niiranen', 0.001315),
('Nikula', 0.001193),
('Niskanen', 0.003346),
('Nissinen', 0.002092),
('Nousiainen', 0.002075),
('Nurmi', 0.004112),
('Nurminen', 0.003196),
('Nuutinen', 0.001781),
('Nyberg', 0.001381),
('Nykänen', 0.002561),
('Nylund', 0.001545),
('Nyman', 0.003435),
('Oikarinen', 0.00114),
('Oinonen', 0.001349),
('Ojala', 0.005237),
('Ojanen', 0.001396),
('Oksanen', 0.003372),
('Ollikainen', 0.001631),
('Ollila', 0.001614),
('Pääkkönen', 0.001404),
('Paananen', 0.002837),
('Paavilainen', 0.001028),
('Paavola', 0.001687),
('Pajunen', 0.001396),
('Pakarinen', 0.001818),
('Palomäki', 0.001161),
('Parkkinen', 0.001273),
('Partanen', 0.003879),
('Parviainen', 0.002908),
('Pasanen', 0.002364),
('Pehkonen', 0.001178),
('Pekkala', 0.001172),
('Pekkarinen', 0.0011),
('Pelkonen', 0.001933),
('Peltola', 0.003401),
('Peltonen', 0.004111),
('Peltoniemi', 0.001325),
('Pennanen', 0.001857),
('Penttilä', 0.001723),
('Penttinen', 0.001875),
('Perälä', 0.001592),
('Pesonen', 0.003534),
('Pietilä', 0.001874),
('Piirainen', 0.001336),
('Pirinen', 0.001318),
('Pitkänen', 0.004831),
('Pohjola', 0.001266),
('Pöllänen', 0.001097),
('Puhakka', 0.001413),
('Pulkkinen', 0.003995),
('Puranen', 0.001053),
('Puustinen', 0.001385),
('Raatikainen', 0.001244),
('Räisänen', 0.002146),
('Rajala', 0.002963),
('Ranta', 0.002422),
('Rantala', 0.004243),
('Rantanen', 0.006076),
('Räsänen', 0.004444),
('Räty', 0.001319),
('Rauhala', 0.001391),
('Rautiainen', 0.00292),
('Rautio', 0.002231),
('Reinikainen', 0.001112),
('Repo', 0.001805),
('Riihimäki', 0.001097),
('Riikonen', 0.001838),
('Rinne', 0.002123),
('Rintala', 0.001596),
('Rissanen', 0.003116),
('Ronkainen', 0.001757),
('Rönkkö', 0.001111),
('Rossi', 0.001203),
('Ruotsalainen', 0.002752),
('Ruuskanen', 0.001251),
('Rytkönen', 0.00144),
('Ryynänen', 0.00112),
('Saarela', 0.002292),
('Saari', 0.003871),
('Saarinen', 0.007247),
('Saastamoinen', 0.001741),
('Sainio', 0.001224),
('Sallinen', 0.001148),
('Salmela', 0.002572),
('Salmi', 0.003705),
('Salminen', 0.007146),
('Salo', 0.006336),
('Salomaa', 0.001099),
('Salonen', 0.006757),
('Savolainen', 0.005448),
('Seppä', 0.001142),
('Seppälä', 0.004007),
('Seppänen', 0.003731),
('Sihvonen', 0.001053),
('Sillanpää', 0.002264),
('Silvennoinen', 0.001614),
('Simola', 0.001116),
('Simonen', 0.001049),
('Sipilä', 0.001582),
('Sirén', 0.001129),
('Sirviö', 0.001089),
('Sjöblom', 0.001119),
('Soini', 0.001102),
('Soininen', 0.001422),
('Suhonen', 0.001834),
('Suomalainen', 0.001609),
('Suominen', 0.003582),
('Sutinen', 0.001056),
('Syrjälä', 0.001196),
('Tähtinen', 0.001028),
('Taipale', 0.001378),
('Takala', 0.001797),
('Tamminen', 0.002461),
('Tanskanen', 0.001536),
('Tarvainen', 0.001396),
('Taskinen', 0.001633),
('Tervo', 0.001419),
('Tiainen', 0.00234),
('Tiihonen', 0.001149),
('Tikka', 0.001325),
('Tikkanen', 0.00266),
('Timonen', 0.002211),
('Tirkkonen', 0.001193),
('Toivanen', 0.002668),
('Toivonen', 0.004311),
('Tolonen', 0.002122),
('Tolvanen', 0.001917),
('Tuomi', 0.001608),
('Tuominen', 0.006098),
('Tuovinen', 0.001894),
('Turpeinen', 0.001528),
('Turunen', 0.006523),
('Uotila', 0.001053),
('Uusitalo', 0.002687),
('Väänänen', 0.002319),
('Vainio', 0.003358),
('Väisänen', 0.004904),
('Välimäki', 0.001587),
('Valkama', 0.001139),
('Valkonen', 0.001248),
('Valtonen', 0.002171),
('Varis', 0.001436),
('Vartiainen', 0.002039),
('Väyrynen', 0.001426),
('Venäläinen', 0.001262),
('Vesterinen', 0.001259),
('Viitala', 0.001642),
('Viitanen', 0.002647),
('Viljanen', 0.001859),
('Virta', 0.002228),
('Virtanen', 0.01083),
('Voutilainen', 0.001853),
('Vuorela', 0.001156),
('Vuori', 0.001701),
('Vuorinen', 0.003188),
('Ylinen', 0.00105),
('Ylitalo', 0.001438),
('Ylönen', 0.00125),
))
|
py | 1a353da70c05666c37120696c76f7b47b7740257 | import psycopg2
import psycopg2.extras
from ..models.user import User
from twatter import config
def get_connection():
return psycopg2.connect(config.db['db_string'])
# GET STUFF
def get_one_with_id(table, id):
assert table
assert id
query = "SELECT * FROM {} WHERE id = %s".format(table)
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute(query, (id,))
result = cur.fetchone()
cur.close()
conn.close()
return result
def get_fields_from_table_with_id(fields, table, id_name, id):
assert fields
assert table
assert id_name
assert id
# column/table names cannot be bound as query parameters, so interpolate them (trusted callers only)
query = '''select {} from {} where {} = %s'''.format(fields, table, id_name)
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute(query, (id,))
results = cur.fetchall()
cur.close()
conn.close()
return results
def get_all_from_table(table):
assert table
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# table names cannot be bound as query parameters, so interpolate (trusted callers only)
cur.execute("SELECT * FROM {}".format(table))
results = cur.fetchall()
cur.close()
conn.close()
return results
def get_custom_query(query, vars=None):
assert query
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute(query, vars)
results = cur.fetchall()
cur.close()
conn.close()
return results
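# example usage (values are always bound as parameters, never interpolated):
#   twaats = get_custom_query("SELECT * FROM twaat WHERE user_id = %s", (user_id,))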
def get_favorited_twaats_for_user(id):
assert id
query = """
SELECT * FROM favorited_twaats WHERE who_id = %s
"""
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute(query, (id,))
twaats = [dict(record) for record in cur.fetchall()]  # fetchall() walks the cursor, effectively calling .fetchone() in a loop
cur.close()
conn.close()
return twaats
def get_search_results(type, term):
term = str(term.lower())
query = ''
if type.lower() == 'users':
query = """SELECT * FROM users WHERE lower(full_name) like %s """
if type.lower() == 'tags':
return ['not implemented yet']
try:
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor()
cur.execute(query, ('%' + term + '%',))
search_results = cur.fetchall()
# some problems with psycopg2; it doesn't accept a loop here
cur.close()
conn.close()
return [User(x[0], x[1], x[2], x[3], x[4], x[5], x[6]) for x in search_results]
except Exception as e:
print(e)
return e
# INSERT stuff
def insert_new_favorite_twaat_for_id(twaat_id, user_id):
try:
conn = psycopg2.connect(config.db['db_string'])
cur = conn.cursor()
cur.execute("""
INSERT INTO favorited_twaats (who_id, twaat_id) VALUES (%s, %s)
""", (user_id, twaat_id))
conn.commit()
cur.close()
conn.close()
except Exception as e:
if getattr(e, 'pgerror', None) and 'duplicate' in e.pgerror:
return 'Twaat already favorited'
print(e)
return e
def insert_new_twaat(user_id, text, parent_id):
try:
conn = get_connection()
cur = conn.cursor()
cur.execute("""
INSERT INTO twaat (user_id, text, parent_id) VALUES (%s, %s, %s)
""", (user_id, text, parent_id))
conn.commit()
cur.close()
conn.close()
# session['username'] = request.form['username']
except Exception as e:
print(e)
return e
# UPDATE STUFF
def update_custom_query(query, vars=None):
try:
conn = get_connection()
cur = conn.cursor()
cur.execute(query, vars)
conn.commit()
cur.close()
conn.close()
except Exception as e:
if getattr(e, 'pgerror', None) and 'duplicate' in e.pgerror:
return 'Twaat already favorited'
print(e)
return e
def update_user_avatar(id, path):
try:
conn = get_connection()
cur = conn.cursor()
cur.execute("""UPDATE users SET avatar = %s WHERE id = %s""", (path, id))
conn.commit()
cur.close()
conn.close()
except Exception as e:
if getattr(e, 'pgerror', None) and 'duplicate' in e.pgerror:
return 'Twaat already favorited'
print(e)
return e |
py | 1a353e2d04c23251b3a1f06dd695936c2cf12490 | from . import ClientConstants as CC
from . import ClientDefaults
from . import ClientNetworkingContexts
from . import ClientNetworkingDomain
from . import ClientNetworkingJobs
from . import ClientParsing
from . import ClientThreading
from . import HydrusConstants as HC
from . import HydrusGlobals as HG
from . import HydrusData
from . import HydrusExceptions
from . import HydrusSerialisable
import itertools
import os
import json
import requests
import re
import threading
import time
import urllib.parse
VALIDITY_VALID = 0
VALIDITY_UNTESTED = 1
VALIDITY_INVALID = 2
validity_str_lookup = {}
validity_str_lookup[ VALIDITY_VALID ] = 'valid'
validity_str_lookup[ VALIDITY_UNTESTED ] = 'untested'
validity_str_lookup[ VALIDITY_INVALID ] = 'invalid'
LOGIN_ACCESS_TYPE_EVERYTHING = 0
LOGIN_ACCESS_TYPE_NSFW = 1
LOGIN_ACCESS_TYPE_SPECIAL = 2
LOGIN_ACCESS_TYPE_USER_PREFS_ONLY = 3
login_access_type_str_lookup = {}
login_access_type_str_lookup[ LOGIN_ACCESS_TYPE_EVERYTHING ] = 'Everything'
login_access_type_str_lookup[ LOGIN_ACCESS_TYPE_NSFW ] = 'NSFW'
login_access_type_str_lookup[ LOGIN_ACCESS_TYPE_SPECIAL ] = 'Special'
login_access_type_str_lookup[ LOGIN_ACCESS_TYPE_USER_PREFS_ONLY ] = 'User prefs'
login_access_type_default_description_lookup = {}
login_access_type_default_description_lookup[ LOGIN_ACCESS_TYPE_EVERYTHING ] = 'Login required to access any content.'
login_access_type_default_description_lookup[ LOGIN_ACCESS_TYPE_NSFW ] = 'Login required to access NSFW content.'
login_access_type_default_description_lookup[ LOGIN_ACCESS_TYPE_SPECIAL ] = 'Login required to access special content.'
login_access_type_default_description_lookup[ LOGIN_ACCESS_TYPE_USER_PREFS_ONLY ] = 'Login only required to access user preferences.'
PIXIV_NETWORK_CONTEXT = ClientNetworkingContexts.NetworkContext( CC.NETWORK_CONTEXT_DOMAIN, 'pixiv.net' )
HENTAI_FOUNDRY_NETWORK_CONTEXT = ClientNetworkingContexts.NetworkContext( CC.NETWORK_CONTEXT_DOMAIN, 'hentai-foundry.com' )
class NetworkLoginManager( HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_LOGIN_MANAGER
SERIALISABLE_NAME = 'Login Manager'
SERIALISABLE_VERSION = 1
SESSION_TIMEOUT = 60 * 45
def __init__( self ):
HydrusSerialisable.SerialisableBase.__init__( self )
# needs _dirty and setdirty and be on that serialisation check and so on
self.engine = None
self._dirty = False
self._lock = threading.Lock()
self._login_scripts = HydrusSerialisable.SerialisableList()
self._domains_to_login_info = {}
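# login_domain -> ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )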
self._login_script_keys_to_login_scripts = {}
self._login_script_names_to_login_scripts = {}
self._hydrus_login_script = LoginScriptHydrus()
self._error_names = set()
def _GetBestLoginScript( self, login_domain ):
self._login_scripts.sort( key = lambda ls: len( ls.GetCredentialDefinitions() ) )
for login_script in self._login_scripts:
if login_domain in login_script.GetExampleDomains():
return login_script
return None
def _GetLoginDomainStatus( self, network_context ):
login_domain = None
login_expected = False
login_possible = True
login_error_text = ''
domain = network_context.context_data
potential_login_domains = ClientNetworkingDomain.ConvertDomainIntoAllApplicableDomains( domain, discard_www = False )
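# the first candidate domain with stored login info wins (candidates presumably run from most specific, e.g. 'sub.site.com', to least, 'site.com')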
for potential_login_domain in potential_login_domains:
if potential_login_domain in self._domains_to_login_info:
login_domain = potential_login_domain
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
if active or login_access_type == LOGIN_ACCESS_TYPE_EVERYTHING:
login_expected = True
if not active:
login_possible = False
login_error_text = 'Not active - ' + login_access_text
elif validity == VALIDITY_INVALID:
login_possible = False
login_error_text = validity_error_text
elif not HydrusData.TimeHasPassed( no_work_until ):
login_possible = False
login_error_text = no_work_until_reason
break
return ( login_domain, login_expected, login_possible, login_error_text )
def _GetLoginScriptAndCredentials( self, login_domain ):
if login_domain in self._domains_to_login_info:
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
( login_script_key, login_script_name ) = login_script_key_and_name
if login_script_key in self._login_script_keys_to_login_scripts:
login_script = self._login_script_keys_to_login_scripts[ login_script_key ]
elif login_script_name in self._login_script_names_to_login_scripts:
login_script = self._login_script_names_to_login_scripts[ login_script_name ]
login_script_key_and_name = login_script.GetLoginScriptKeyAndName()
self._SetDirty()
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
else:
validity = VALIDITY_INVALID
validity_error_text = 'Could not find the login script for "' + login_domain + '"!'
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._SetDirty()
raise HydrusExceptions.ValidationException( validity_error_text )
try:
login_script.CheckCanLogin( credentials )
except HydrusExceptions.ValidationException as e:
validity = VALIDITY_INVALID
validity_error_text = str( e )
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._SetDirty()
raise
if validity == VALIDITY_UNTESTED and validity_error_text != '':
# clean up any leftover 'restart the dialog to test validity' text in cases where the login is actually valid
validity_error_text = ''
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
return ( login_script, credentials )
else:
raise HydrusExceptions.ValidationException( 'Could not find any login entry for "' + login_domain + '"!' )
def _GetSerialisableInfo( self ):
serialisable_login_scripts = self._login_scripts.GetSerialisableTuple()
serialisable_domains_to_login_info = {}
for ( login_domain, ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) ) in list(self._domains_to_login_info.items()):
( login_script_key, login_script_name ) = login_script_key_and_name
serialisable_login_script_key_and_name = ( login_script_key.hex(), login_script_name )
serialisable_domains_to_login_info[ login_domain ] = ( serialisable_login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
return ( serialisable_login_scripts, serialisable_domains_to_login_info )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_login_scripts, serialisable_domains_to_login_info ) = serialisable_info
self._login_scripts = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_login_scripts )
self._domains_to_login_info = {}
for ( login_domain, ( serialisable_login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) ) in list(serialisable_domains_to_login_info.items()):
( serialisable_login_script_key, login_script_name ) = serialisable_login_script_key_and_name
login_script_key_and_name = ( bytes.fromhex( serialisable_login_script_key ), login_script_name )
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
def _RecalcCache( self ):
self._login_script_keys_to_login_scripts = { login_script.GetLoginScriptKey() : login_script for login_script in self._login_scripts }
self._login_script_names_to_login_scripts = { login_script.GetName() : login_script for login_script in self._login_scripts }
self._RevalidateCache()
def _RevalidateCache( self ):
for login_domain in list(self._domains_to_login_info.keys()):
try:
self._GetLoginScriptAndCredentials( login_domain )
except HydrusExceptions.ValidationException:
pass
def _SetDirty( self ):
self._dirty = True
def AlreadyHaveExactlyThisLoginScript( self, new_login_script ):
with self._lock:
# absent irrelevant variables, do we have the exact same object already in?
login_script_key_and_name = new_login_script.GetLoginScriptKeyAndName()
dupe_login_scripts = [ login_script.Duplicate() for login_script in self._login_scripts ]
for dupe_login_script in dupe_login_scripts:
dupe_login_script.SetLoginScriptKeyAndName( login_script_key_and_name )
if dupe_login_script.DumpToString() == new_login_script.DumpToString():
return True
return False
def AutoAddLoginScripts( self, login_scripts ):
with self._lock:
next_login_scripts = list( self._login_scripts )
for login_script in login_scripts:
login_script.RegenerateLoginScriptKey()
next_login_scripts.extend( login_scripts )
self.SetLoginScripts( next_login_scripts )
def CheckCanLogin( self, network_context ):
with self._lock:
if network_context.context_type == CC.NETWORK_CONTEXT_DOMAIN:
( login_domain, login_expected, login_possible, login_error_text ) = self._GetLoginDomainStatus( network_context )
if login_domain is None or not login_expected:
raise HydrusExceptions.ValidationException( 'The domain ' + str( login_domain ) + ' has no active login script--has it just been turned off?' )
elif not login_possible:
raise HydrusExceptions.ValidationException( 'The domain ' + login_domain + ' cannot log in: ' + login_error_text )
elif network_context.context_type == CC.NETWORK_CONTEXT_HYDRUS:
service_key = network_context.context_data
services_manager = self.engine.controller.services_manager
if not services_manager.ServiceExists( service_key ):
raise HydrusExceptions.ValidationException( 'Service does not exist!' )
service = services_manager.GetService( service_key )
try:
service.CheckFunctional( including_bandwidth = False, including_account = False )
except Exception as e:
message = 'Service has had a recent error or is otherwise not functional! Specific error was:'
message += os.linesep * 2
message += str( e )
message += os.linesep * 2
message += 'You might like to try refreshing its account in \'review services\'.'
raise HydrusExceptions.ValidationException( message )
def DelayLoginScript( self, login_domain, login_script_key, reason ):
with self._lock:
if login_domain not in self._domains_to_login_info:
return
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
if login_script_key != login_script_key_and_name[0]:
return
no_work_until = HydrusData.GetNow() + 3600 * 4
no_work_until_reason = reason
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._SetDirty()
def DeleteLoginDomain( self, login_domain ):
with self._lock:
if login_domain in self._domains_to_login_info:
del self._domains_to_login_info[ login_domain ]
self._RecalcCache()
self._SetDirty()
def DeleteLoginScripts( self, login_script_names ):
with self._lock:
login_scripts = [ login_script for login_script in self._login_scripts if login_script.GetName() not in login_script_names ]
self.SetLoginScripts( login_scripts )
def GenerateLoginProcess( self, network_context ):
with self._lock:
if network_context.context_type == CC.NETWORK_CONTEXT_DOMAIN:
( login_domain, login_expected, login_possible, login_error_text ) = self._GetLoginDomainStatus( network_context )
if login_domain is None or not login_expected:
raise HydrusExceptions.ValidationException( 'The domain ' + str( login_domain ) + ' has no active login script--has it just been turned off?' )
elif not login_possible:
raise HydrusExceptions.ValidationException( 'The domain ' + login_domain + ' cannot log in: ' + login_error_text )
else:
login_network_context = ClientNetworkingContexts.NetworkContext( context_type = CC.NETWORK_CONTEXT_DOMAIN, context_data = login_domain )
( login_script, credentials ) = self._GetLoginScriptAndCredentials( login_domain )
login_process = LoginProcessDomain( self.engine, login_network_context, login_script, credentials )
return login_process
elif network_context.context_type == CC.NETWORK_CONTEXT_HYDRUS:
login_process = LoginProcessHydrus( self.engine, network_context, self._hydrus_login_script )
return login_process
def GenerateLoginProcessForDomain( self, login_domain ):
network_context = ClientNetworkingContexts.NetworkContext.STATICGenerateForDomain( login_domain )
return self.GenerateLoginProcess( network_context )
def GetDomainsToLoginInfo( self ):
with self._lock:
self._RevalidateCache()
return dict( self._domains_to_login_info )
def GetLoginScripts( self ):
with self._lock:
return list( self._login_scripts )
def Initialise( self ):
self._RecalcCache()
def InvalidateLoginScript( self, login_domain, login_script_key, reason ):
with self._lock:
if login_domain not in self._domains_to_login_info:
return
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
if login_script_key != login_script_key_and_name[0]:
return
validity = VALIDITY_INVALID
validity_error_text = reason
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
HydrusData.ShowText( 'The login for "' + login_domain + '" failed! It will not be reattempted until the problem is fixed. The failure reason was:' + os.linesep * 2 + validity_error_text )
self._SetDirty()
def IsDirty( self ):
with self._lock:
return self._dirty
def NeedsLogin( self, network_context ):
with self._lock:
if network_context.context_type == CC.NETWORK_CONTEXT_DOMAIN:
( login_domain, login_expected, login_possible, login_error_text ) = self._GetLoginDomainStatus( network_context )
if login_domain is None or not login_expected:
return False # no login required, no problem
else:
try:
( login_script, credentials ) = self._GetLoginScriptAndCredentials( login_domain )
except HydrusExceptions.ValidationException:
# couldn't find the script or similar; assume we need a login so the error surfaces at the CheckCanLogin trigger phase
return True
login_network_context = ClientNetworkingContexts.NetworkContext( context_type = CC.NETWORK_CONTEXT_DOMAIN, context_data = login_domain )
return not login_script.IsLoggedIn( self.engine, login_network_context )
elif network_context.context_type == CC.NETWORK_CONTEXT_HYDRUS:
return not self._hydrus_login_script.IsLoggedIn( self.engine, network_context )
def OverwriteDefaultLoginScripts( self, login_script_names ):
with self._lock:
default_login_scripts = ClientDefaults.GetDefaultLoginScripts()
for login_script in default_login_scripts:
login_script.RegenerateLoginScriptKey()
existing_login_scripts = list( self._login_scripts )
new_login_scripts = [ login_script for login_script in existing_login_scripts if login_script.GetName() not in login_script_names ]
new_login_scripts.extend( [ login_script for login_script in default_login_scripts if login_script.GetName() in login_script_names ] )
self.SetLoginScripts( new_login_scripts )
def SetClean( self ):
with self._lock:
self._dirty = False
def SetCredentialsAndActivate( self, login_domain, new_credentials ):
with self._lock:
if login_domain not in self._domains_to_login_info:
return
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
credentials = new_credentials
active = True
validity = VALIDITY_UNTESTED
validity_error_text = ''
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._SetDirty()
def SetDomainsToLoginInfo( self, domains_to_login_info ):
with self._lock:
self._domains_to_login_info = dict( domains_to_login_info )
self._RecalcCache()
self._SetDirty()
def SetLoginScripts( self, login_scripts ):
with self._lock:
self._login_scripts = HydrusSerialisable.SerialisableList( login_scripts )
# start with simple stuff first
self._login_scripts.sort( key = lambda ls: len( ls.GetCredentialDefinitions() ) )
for login_script in self._login_scripts:
login_script_key_and_name = login_script.GetLoginScriptKeyAndName()
example_domains_info = login_script.GetExampleDomainsInfo()
for ( login_domain, login_access_type, login_access_text ) in example_domains_info:
if '.' in login_domain:
# looks good, so let's see if we can update/add some info
if login_domain in self._domains_to_login_info:
( old_login_script_key_and_name, credentials, old_login_access_type, old_login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
if old_login_script_key_and_name[1] == login_script_key_and_name[1]:
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
else:
credentials = {}
# if there is nothing to enter, turn it on by default, like HF click-through
active = len( login_script.GetCredentialDefinitions() ) == 0
validity = VALIDITY_UNTESTED
validity_error_text = ''
no_work_until = 0
no_work_until_reason = ''
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._RecalcCache()
self._SetDirty()
def ValidateLoginScript( self, login_domain, login_script_key ):
with self._lock:
if login_domain not in self._domains_to_login_info:
return
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
if login_script_key != login_script_key_and_name[0]:
return
validity = VALIDITY_VALID
validity_error_text = ''
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._SetDirty()
def TryToLinkMissingLoginScripts( self, login_domains ):
with self._lock:
for login_domain in login_domains:
try:
( existing_login_script, existing_credentials ) = self._GetLoginScriptAndCredentials( login_domain )
continue # already seems to have a good login script, so nothing to fix
except HydrusExceptions.ValidationException:
pass
( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason ) = self._domains_to_login_info[ login_domain ]
login_script = self._GetBestLoginScript( login_domain )
if login_script is None:
continue
login_script_key_and_name = login_script.GetLoginScriptKeyAndName()
self._domains_to_login_info[ login_domain ] = ( login_script_key_and_name, credentials, login_access_type, login_access_text, active, validity, validity_error_text, no_work_until, no_work_until_reason )
self._SetDirty()
def LoginTumblrGDPR( self ):
# t-thanks, EU
# this is cribbed from poking around here https://github.com/johanneszab/TumblThree/commit/3563d6cebf1a467151d6b8d6eee9806ddd6e6364
network_job = ClientNetworkingJobs.NetworkJob( 'GET', 'http://www.tumblr.com/' )
network_job.SetForLogin( True )
self.engine.AddJob( network_job )
network_job.WaitUntilDone()
html = network_job.GetContentText()
formula = ClientParsing.ParseFormulaHTML( tag_rules = [ ClientParsing.ParseRuleHTML( rule_type = ClientParsing.HTML_RULE_TYPE_DESCENDING, tag_name = 'meta', tag_attributes = { 'id' : 'tumblr_form_key' } ) ], content_to_fetch = ClientParsing.HTML_CONTENT_ATTRIBUTE, attribute_to_fetch = "content" )
results = formula.Parse( {}, html )
if len( results ) != 1:
raise HydrusExceptions.ParseException( 'Could not figure out the tumblr form key for the GDPR click-through.' )
tumblr_form_key = results[0]
#
body = '{\"eu_resident\":true,\"gdpr_is_acceptable_age\":true,\"gdpr_consent_core\":true,\"gdpr_consent_first_party_ads\":true,\"gdpr_consent_third_party_ads\":true,\"gdpr_consent_search_history\":true,\"redirect_to\":\"\"}'
referral_url = 'https://www.tumblr.com/privacy/consent?redirect='
network_job = ClientNetworkingJobs.NetworkJob( 'POST', 'https://www.tumblr.com/svc/privacy/consent', body = body, referral_url = referral_url )
network_job.SetForLogin( True )
network_job.AddAdditionalHeader( 'Accept', 'application/json, text/javascript, */*; q=0.01')
network_job.AddAdditionalHeader( 'Content-Type', 'application/json' )
network_job.AddAdditionalHeader( 'X-Requested-With', 'XMLHttpRequest' )
network_job.AddAdditionalHeader( 'X-tumblr-form-key', tumblr_form_key )
self.engine.AddJob( network_job )
network_job.WaitUntilDone()
# test cookies here or something
HydrusData.ShowText( 'Looks like tumblr GDPR click-through worked! You should be good for a year, at which point we should have an automatic solution for this!' )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_LOGIN_MANAGER ] = NetworkLoginManager
CREDENTIAL_TYPE_TEXT = 0
CREDENTIAL_TYPE_PASS = 1
credential_type_str_lookup = {}
credential_type_str_lookup[ CREDENTIAL_TYPE_TEXT ] = 'normal'
credential_type_str_lookup[ CREDENTIAL_TYPE_PASS ] = 'hidden (password)'
class LoginCredentialDefinition( HydrusSerialisable.SerialisableBaseNamed ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_LOGIN_CREDENTIAL_DEFINITION
SERIALISABLE_NAME = 'Login Credential Definition'
SERIALISABLE_VERSION = 1
def __init__( self, name = 'username', credential_type = CREDENTIAL_TYPE_TEXT, string_match = None ):
if string_match is None:
string_match = ClientParsing.StringMatch()
HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
self._credential_type = credential_type
self._string_match = string_match
def _GetSerialisableInfo( self ):
serialisable_string_match = self._string_match.GetSerialisableTuple()
return ( self._credential_type, serialisable_string_match )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( self._credential_type, serialisable_string_match ) = serialisable_info
self._string_match = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_match )
def GetStringMatch( self ):
return self._string_match
def GetType( self ):
return self._credential_type
def SetStringMatch( self, string_match ):
self._string_match = string_match
def SetType( self, credential_type ):
self._credential_type = credential_type
def ShouldHide( self ):
return self._credential_type == CREDENTIAL_TYPE_PASS
def Test( self, text ):
if self._string_match is not None:
try:
self._string_match.Test( text )
except HydrusExceptions.StringMatchException as e:
raise HydrusExceptions.ValidationException( 'Could not validate "' + self._name + '" credential: ' + str( e ) )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_LOGIN_CREDENTIAL_DEFINITION ] = LoginCredentialDefinition
class LoginProcess( object ):
def __init__( self, engine, network_context, login_script ):
self.engine = engine
self.network_context = network_context
self.login_script = login_script
self._done = False
def _Start( self ):
raise NotImplementedError()
def IsDone( self ):
return self._done
def Start( self ):
try:
self._Start()
finally:
self._done = True
class LoginProcessDomain( LoginProcess ):
def __init__( self, engine, network_context, login_script, credentials ):
LoginProcess.__init__( self, engine, network_context, login_script )
self.credentials = credentials
def _Start( self ):
login_domain = self.network_context.context_data
job_key = ClientThreading.JobKey( cancellable = True )
job_key.SetVariable( 'popup_title', 'Logging in ' + login_domain )
HG.client_controller.pub( 'message', job_key )
HydrusData.Print( 'Starting login for ' + login_domain )
result = self.login_script.Start( self.engine, self.network_context, self.credentials, job_key = job_key )
HydrusData.Print( 'Finished login for ' + self.network_context.context_data + '. Result was: ' + result )
job_key.SetVariable( 'popup_text_1', result )
job_key.Finish()
job_key.Delete( 4 )
class LoginProcessHydrus( LoginProcess ):
def _Start( self ):
self.login_script.Start( self.engine, self.network_context )
class LoginScriptHydrus( object ):
def _IsLoggedIn( self, engine, network_context ):
session = engine.session_manager.GetSession( network_context )
cookies = session.cookies
cookies.clear_expired_cookies()
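# the hydrus client is considered logged in while an unexpired 'session_key' cookie is present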
return 'session_key' in cookies
def IsLoggedIn( self, engine, network_context ):
return self._IsLoggedIn( engine, network_context )
def Start( self, engine, network_context ):
service_key = network_context.context_data
service = engine.controller.services_manager.GetService( service_key )
base_url = service.GetBaseURL()
url = base_url + 'session_key'
access_key = service.GetCredentials().GetAccessKey()
network_job = ClientNetworkingJobs.NetworkJobHydrus( service_key, 'GET', url )
network_job.SetForLogin( True )
network_job.AddAdditionalHeader( 'Hydrus-Key', access_key.hex() )
engine.AddJob( network_job )
try:
network_job.WaitUntilDone()
if self._IsLoggedIn( engine, network_context ):
HydrusData.Print( 'Successfully logged into ' + service.GetName() + '.' )
else:
service.DelayFutureRequests( 'Could not log in for unknown reason.' )
except Exception as e:
e_string = str( e )
service.DelayFutureRequests( e_string )
class LoginScriptDomain( HydrusSerialisable.SerialisableBaseNamed ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_LOGIN_SCRIPT_DOMAIN
SERIALISABLE_NAME = 'Login Script - Domain'
SERIALISABLE_VERSION = 2
def __init__( self, name = 'login script', login_script_key = None, required_cookies_info = None, credential_definitions = None, login_steps = None, example_domains_info = None ):
if required_cookies_info is None:
required_cookies_info = {}
required_cookies_info = HydrusSerialisable.SerialisableDictionary( required_cookies_info )
if credential_definitions is None:
credential_definitions = []
credential_definitions = HydrusSerialisable.SerialisableList( credential_definitions )
if login_steps is None:
login_steps = []
login_steps = HydrusSerialisable.SerialisableList( login_steps )
if example_domains_info is None:
example_domains_info = []
HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
self._login_script_key = HydrusData.GenerateKey()
self._required_cookies_info = required_cookies_info # string match : string match
self._credential_definitions = credential_definitions
self._login_steps = login_steps
self._example_domains_info = example_domains_info # domain | login_access_type | login_access_text
def _GetSerialisableInfo( self ):
serialisable_login_script_key = self._login_script_key.hex()
serialisable_required_cookies = self._required_cookies_info.GetSerialisableTuple()
serialisable_credential_definitions = self._credential_definitions.GetSerialisableTuple()
serialisable_login_steps = self._login_steps.GetSerialisableTuple()
return ( serialisable_login_script_key, serialisable_required_cookies, serialisable_credential_definitions, serialisable_login_steps, self._example_domains_info )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_login_script_key, serialisable_required_cookies, serialisable_credential_definitions, serialisable_login_steps, self._example_domains_info ) = serialisable_info
self._login_script_key = bytes.fromhex( serialisable_login_script_key )
self._required_cookies_info = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_required_cookies )
self._credential_definitions = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_credential_definitions )
self._login_steps = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_login_steps )
# convert lists to tups for listctrl data hashing
self._example_domains_info = [ tuple( l ) for l in self._example_domains_info ]
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
if version == 1:
( serialisable_login_script_key, serialisable_required_cookies, serialisable_credential_definitions, serialisable_login_steps, example_domains_info ) = old_serialisable_info
old_required_cookies_info = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_required_cookies )
new_required_cookies_info = HydrusSerialisable.SerialisableDictionary()
for ( name, value_string_match ) in list(old_required_cookies_info.items()):
key_string_match = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = name, example_string = name )
new_required_cookies_info[ key_string_match ] = value_string_match
serialisable_required_cookies = new_required_cookies_info.GetSerialisableTuple()
new_serialisable_info = ( serialisable_login_script_key, serialisable_required_cookies, serialisable_credential_definitions, serialisable_login_steps, example_domains_info )
return ( 2, new_serialisable_info )
def _IsLoggedIn( self, engine, network_context, validation_check = False ):
session = engine.session_manager.GetSession( network_context )
cookies = session.cookies
cookies.clear_expired_cookies()
search_domain = network_context.context_data
for ( cookie_name_string_match, value_string_match ) in list(self._required_cookies_info.items()):
try:
cookie = ClientNetworkingDomain.GetCookie( cookies, search_domain, cookie_name_string_match )
except HydrusExceptions.DataMissing as e:
if validation_check:
raise HydrusExceptions.ValidationException( 'Missing cookie "' + cookie_name_string_match.ToString() + '"!' )
return False
cookie_text = cookie.value
try:
value_string_match.Test( cookie_text )
except HydrusExceptions.StringMatchException as e:
if validation_check:
raise HydrusExceptions.ValidationException( 'Cookie "' + cookie_name_string_match.ToString() + '" failed: ' + str( e ) + '!' )
return False
return True
def CheckCanLogin( self, given_credentials ):
self.CheckIsValid()
given_cred_names = set( given_credentials.keys() )
required_cred_names = { name for name in itertools.chain.from_iterable( ( step.GetRequiredCredentials() for step in self._login_steps ) ) }
missing_givens = required_cred_names.difference( given_cred_names )
if len( missing_givens ) > 0:
missing_givens = list( missing_givens )
missing_givens.sort()
raise HydrusExceptions.ValidationException( 'Missing required credentials: ' + ', '.join( missing_givens ) )
#
cred_names_to_definitions = { credential_definition.GetName() : credential_definition for credential_definition in self._credential_definitions }
for ( pretty_name, text ) in given_credentials.items():
if pretty_name not in cred_names_to_definitions:
continue
credential_definition = cred_names_to_definitions[ pretty_name ]
credential_definition.Test( text )
def CheckIsValid( self ):
defined_cred_names = { credential_definition.GetName() for credential_definition in self._credential_definitions }
required_cred_names = { name for name in itertools.chain.from_iterable( ( step.GetRequiredCredentials() for step in self._login_steps ) ) }
missing_definitions = required_cred_names.difference( defined_cred_names )
if len( missing_definitions ) > 0:
missing_definitions = list( missing_definitions )
missing_definitions.sort()
raise HydrusExceptions.ValidationException( 'Missing required credential definitions: ' + ', '.join( missing_definitions ) )
#
temp_vars = set()
for login_step in self._login_steps:
( required_vars, set_vars ) = login_step.GetRequiredAndSetTempVariables()
missing_vars = required_vars.difference( temp_vars )
if len( missing_vars ) > 0:
missing_vars = list( missing_vars )
missing_vars.sort()
raise HydrusExceptions.ValidationException( 'Missing temp variables for login step "' + login_step.GetName() + '": ' + ', '.join( missing_vars ) )
temp_vars.update( set_vars )
def GetCredentialDefinitions( self ):
return self._credential_definitions
def GetExampleDomains( self ):
return [ domain for ( domain, login_access_type, login_access_text ) in self._example_domains_info ]
def GetExampleDomainsInfo( self ):
return self._example_domains_info
def GetExampleDomainInfo( self, given_domain ):
for ( domain, login_access_type, login_access_text ) in self._example_domains_info:
if domain == given_domain:
return ( login_access_type, login_access_text )
raise HydrusExceptions.DataMissing( 'Could not find that domain!' )
def GetRequiredCookiesInfo( self ):
return self._required_cookies_info
def GetLoginExpiry( self, engine, network_context ):
session = engine.session_manager.GetSession( network_context )
cookies = session.cookies
cookies.clear_expired_cookies()
search_domain = network_context.context_data
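# a required cookie with no expiry is a session cookie, so the login's expiry cannot be predicted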
session_cookies = False
expiry_timestamps = []
for cookie_name_string_match in list(self._required_cookies_info.keys()):
try:
cookie = ClientNetworkingDomain.GetCookie( cookies, search_domain, cookie_name_string_match )
except HydrusExceptions.DataMissing as e:
return None
expiry = cookie.expires
if expiry is None:
session_cookies = True
else:
expiry_timestamps.append( expiry )
if session_cookies:
return None
else:
return min( expiry_timestamps )
def GetLoginScriptKey( self ):
return self._login_script_key
def GetLoginScriptKeyAndName( self ):
return ( self._login_script_key, self._name )
def GetLoginSteps( self ):
return self._login_steps
def GetRequiredCredentials( self ):
required_creds = []
for login_step in self._login_steps:
required_creds.extend( login_step.GetRequiredCredentials() ) # name with an order
return required_creds
def GetSafeSummary( self ):
return 'Login Script "' + self._name + '" - ' + ', '.join( self.GetExampleDomains() )
def IsLoggedIn( self, engine, network_context ):
return self._IsLoggedIn( engine, network_context )
def RegenerateLoginScriptKey( self ):
self._login_script_key = HydrusData.GenerateKey()
def SetLoginScriptKey( self, login_script_key ):
self._login_script_key = login_script_key
def SetLoginScriptKeyAndName( self, login_script_key_and_name ):
( login_script_key, name ) = login_script_key_and_name
self._login_script_key = login_script_key
self._name = name
def Start( self, engine, network_context, given_credentials, network_job_presentation_context_factory = None, test_result_callable = None, job_key = None ):
# don't mess with the domain--assume that we are given precisely the right domain
login_domain = network_context.context_data
temp_variables = {}
last_url_used = None
for login_step in self._login_steps:
if job_key is not None:
if job_key.IsCancelled():
message = 'User cancelled the login process.'
engine.login_manager.DelayLoginScript( login_domain, self._login_script_key, message )
return message
job_key.SetVariable( 'popup_text_1', login_step.GetName() )
try:
last_url_used = login_step.Start( engine, login_domain, given_credentials, temp_variables, referral_url = last_url_used, network_job_presentation_context_factory = network_job_presentation_context_factory, test_result_callable = test_result_callable )
except HydrusExceptions.ValidationException as e:
if test_result_callable is not None:
HydrusData.ShowException( e )
message = str( e )
engine.login_manager.InvalidateLoginScript( login_domain, self._login_script_key, message )
return 'Verification error: ' + message
except HydrusExceptions.NetworkException as e:
if test_result_callable is not None:
HydrusData.ShowException( e )
message = str( e )
engine.login_manager.DelayLoginScript( login_domain, self._login_script_key, message )
return 'Network error: ' + message
except Exception as e:
if test_result_callable is not None:
HydrusData.ShowException( e )
message = str( e )
engine.login_manager.InvalidateLoginScript( login_domain, self._login_script_key, message )
return 'Unusual error: ' + message
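# brief pause between steps, presumably to avoid hammering the server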
time.sleep( 2 )
try:
self._IsLoggedIn( engine, network_context, validation_check = True )
except Exception as e:
if test_result_callable is not None:
HydrusData.ShowException( e )
message = str( e )
engine.login_manager.InvalidateLoginScript( login_domain, self._login_script_key, message )
return 'Final cookie check failed: ' + message
engine.login_manager.ValidateLoginScript( login_domain, self._login_script_key )
return 'Login OK!'
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_LOGIN_SCRIPT_DOMAIN ] = LoginScriptDomain
LOGIN_PARAMETER_TYPE_PARAMETER = 0
LOGIN_PARAMETER_TYPE_COOKIE = 1
LOGIN_PARAMETER_TYPE_HEADER = 2
class LoginStep( HydrusSerialisable.SerialisableBaseNamed ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_LOGIN_STEP
SERIALISABLE_NAME = 'Login Step'
SERIALISABLE_VERSION = 2
def __init__( self, name = 'hit home page to establish session', scheme = 'https', method = 'GET', subdomain = None, path = '/' ):
HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
self._scheme = scheme
self._method = method
self._subdomain = subdomain
self._path = path
self._CleanseSubdomainAndPath()
self._required_credentials = {} # pretty_name : arg name
self._static_args = {} # arg name : string
self._temp_args = {} # temp arg name : arg name
self._required_cookies_info = HydrusSerialisable.SerialisableDictionary() # string match : string match
self._content_parsers = HydrusSerialisable.SerialisableList()
def _CleanseSubdomainAndPath( self ):
if self._subdomain is not None:
self._subdomain = re.sub( r'[^a-z\.]+', '', self._subdomain ) # raw string avoids the invalid '\.' escape warning
if not self._path.startswith( '/' ):
self._path = '/' + self._path
def _GetSerialisableInfo( self ):
serialisable_required_cookies = self._required_cookies_info.GetSerialisableTuple()
serialisable_content_parsers = self._content_parsers.GetSerialisableTuple()
return ( self._scheme, self._method, self._subdomain, self._path, self._required_credentials, self._static_args, self._temp_args, serialisable_required_cookies, serialisable_content_parsers )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( self._scheme, self._method, self._subdomain, self._path, self._required_credentials, self._static_args, self._temp_args, serialisable_required_cookies, serialisable_content_parsers ) = serialisable_info
self._CleanseSubdomainAndPath()
self._required_cookies_info = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_required_cookies )
self._content_parsers = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_content_parsers )
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
if version == 1:
( scheme, method, subdomain, path, required_credentials, static_args, temp_args, serialisable_required_cookies, serialisable_content_parsers ) = old_serialisable_info
old_required_cookies_info = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_required_cookies )
new_required_cookies_info = HydrusSerialisable.SerialisableDictionary()
for ( name, value_string_match ) in list(old_required_cookies_info.items()):
key_string_match = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = name, example_string = name )
new_required_cookies_info[ key_string_match ] = value_string_match
serialisable_required_cookies = new_required_cookies_info.GetSerialisableTuple()
new_serialisable_info = ( scheme, method, subdomain, path, required_credentials, static_args, temp_args, serialisable_required_cookies, serialisable_content_parsers )
return ( 2, new_serialisable_info )
def GetRequiredCredentials( self ):
return [ pretty_name for ( pretty_name, arg_name ) in list(self._required_credentials.items()) ]
def GetRequiredAndSetTempVariables( self ):
required_temp_variables = set( self._temp_args.keys() )
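# each content parser is expected to yield exactly one ( name, content_type, additional_info ) tuple; additional_info names the temp variable that parser sets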
set_temp_variables = { additional_info for [ ( name, content_type, additional_info ) ] in [ content_parser.GetParsableContent() for content_parser in self._content_parsers ] }
return ( required_temp_variables, set_temp_variables )
def SetComplicatedVariables( self, required_credentials, static_args, temp_args, required_cookies_info, content_parsers ):
self._required_credentials = required_credentials
self._static_args = static_args
self._temp_args = temp_args
self._required_cookies_info = HydrusSerialisable.SerialisableDictionary( required_cookies_info )
self._content_parsers = HydrusSerialisable.SerialisableList( content_parsers )
def Start( self, engine, domain, given_credentials, temp_variables, referral_url = None, network_job_presentation_context_factory = None, test_result_callable = None ):
def session_to_cookie_strings( sess ):
cookie_strings = set()
for cookie in sess.cookies:
s = cookie.name + ': ' + cookie.value + ' | ' + cookie.domain + ' | '
expiry = cookie.expires
if expiry is None:
expiry = -1
pretty_expiry = 'session'
else:
pretty_expiry = HydrusData.ConvertTimestampToPrettyExpires( expiry )
s += pretty_expiry
cookie_strings.add( s )
return cookie_strings
url = 'Did not make a url.'
test_result_body = None
downloaded_data = 'Did not download data.'
new_temp_variables = {}
original_cookie_strings = session_to_cookie_strings( engine.session_manager.GetSessionForDomain( domain ) )
test_script_result = 'Did not start.'
try:
domain_to_hit = domain
if self._subdomain is not None:
if domain.startswith( 'www.' ):
domain = domain[4:]
domain_to_hit = self._subdomain + '.' + domain
query_dict = {}
query_dict.update( self._static_args )
for ( pretty_name, arg_name ) in list(self._required_credentials.items()):
query_dict[ arg_name ] = given_credentials[ pretty_name ]
for ( temp_name, arg_name ) in list(self._temp_args.items()):
if temp_name not in temp_variables:
raise HydrusExceptions.ValidationException( 'The temporary variable \'' + temp_name + '\' was not found!' )
query_dict[ arg_name ] = temp_variables[ temp_name ]
scheme = self._scheme
netloc = domain_to_hit
path = self._path
params = ''
fragment = ''
if self._method == 'GET':
query = ClientNetworkingDomain.ConvertQueryDictToText( query_dict )
body = None
test_result_body = ''
elif self._method == 'POST':
query = ''
body = query_dict
test_result_body = ClientNetworkingDomain.ConvertQueryDictToText( query_dict )
r = urllib.parse.ParseResult( scheme, netloc, path, params, query, fragment )
url = r.geturl()
network_job = ClientNetworkingJobs.NetworkJob( self._method, url, body = body, referral_url = referral_url )
if self._method == 'POST' and referral_url is not None:
p = urllib.parse.urlparse( referral_url )
r = urllib.parse.ParseResult( p.scheme, p.netloc, '', '', '', '' )
origin = r.geturl() # https://accounts.pixiv.net
network_job.AddAdditionalHeader( 'origin', origin ) # GET/POST forms are supposed to have this for CSRF. we'll try it just with POST for now
network_job.SetForLogin( True )
engine.AddJob( network_job )
if network_job_presentation_context_factory is not None:
with network_job_presentation_context_factory( network_job ) as njpc:
network_job.WaitUntilDone()
else:
network_job.WaitUntilDone()
session = network_job.GetSession()
cookies = session.cookies
for ( cookie_name_string_match, string_match ) in list(self._required_cookies_info.items()):
try:
cookie = ClientNetworkingDomain.GetCookie( cookies, domain, cookie_name_string_match )
except HydrusExceptions.DataMissing as e:
raise HydrusExceptions.ValidationException( 'Missing cookie "' + cookie_name_string_match.ToString() + '" on step "' + self._name + '"!' )
cookie_text = cookie.value
try:
string_match.Test( cookie_text )
except HydrusExceptions.StringMatchException as e:
raise HydrusExceptions.ValidationException( 'Cookie "' + cookie_name_string_match.ToString() + '" failed on step "' + self._name + '": ' + str( e ) + '!' )
downloaded_text = network_job.GetContentText()
parsing_context = {}
parsing_context[ 'url' ] = url
for content_parser in self._content_parsers:
try:
parse_results = content_parser.Parse( parsing_context, downloaded_text )
except HydrusExceptions.VetoException as e:
raise HydrusExceptions.ValidationException( str( e ) )
result = ClientParsing.GetVariableFromParseResults( parse_results )
if result is not None:
( temp_name, value ) = result
new_temp_variables[ temp_name ] = value
temp_variables.update( new_temp_variables )
test_script_result = 'OK!'
return url
except Exception as e:
test_script_result = str( e )
raise
finally:
if test_result_callable is not None:
current_cookie_strings = session_to_cookie_strings( engine.session_manager.GetSessionForDomain( domain ) )
new_cookie_strings = tuple( current_cookie_strings.difference( original_cookie_strings ) )
new_temp_strings = tuple( ( key + ': ' + value for ( key, value ) in list(new_temp_variables.items()) ) )
test_result = ( self._name, url, test_result_body, downloaded_data, new_temp_strings, new_cookie_strings, test_script_result )
test_result_callable( test_result )
def ToTuple( self ):
return ( self._scheme, self._method, self._subdomain, self._path, self._required_credentials, self._static_args, self._temp_args, self._required_cookies_info, self._content_parsers )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_LOGIN_STEP ] = LoginStep
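# Rough usage sketch (hypothetical construction and engine/network_context objects--in practice the client's login manager drives this):
#
# script = LoginScriptDomain( name = 'my login' )
# result = script.Start( engine, network_context, { 'username' : 'u', 'password' : 'p' } )
# if script.IsLoggedIn( engine, network_context ):
# print( result ) # 'Login OK!'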
|
py | 1a3542af68309565296c149c2d329fa050a9463f | import numpy as np
from numpy import random
SF = 0xF0
EF = 0x0F
VAL_MIN = 0
VAL_MAX = 255
DU_LEN_MAX = 1024
NUM_OF_VECTORS = 10
VECTOR_HEADER = 'test_vectors.hpp'
VECTOR_SRC = 'test_vectors.cpp'
VECTOR_PY = 'test_vectors.py'
vectors = []
for i in range(NUM_OF_VECTORS):
du = random.randint(VAL_MIN, VAL_MAX + 1, size=random.randint(1, DU_LEN_MAX)) # numpy's upper bound is exclusive, so +1 keeps 0xFF reachable
vectors.append(du.tolist())
with open(VECTOR_SRC, 'w') as fw:
fw.write('#include <cstdint>\n')
fw.write('#include <vector>\n\n')
fw.write('#include "{}"\n\n'.format(VECTOR_HEADER))
vectors_txt = 'std::vector<const uint8_t *> vectors = {'
vectors_sizes_txt = 'std::vector<size_t> vectors_sizes = {'
for i, v in enumerate(vectors):
fw.write('const uint8_t v' + str(i) + '[] = {\n')
tmp = ''
for j, val in enumerate(v):
tmp += '0x{0:02X},'.format(val)
if (0 == ((j + 1) % 8)):
tmp += '\n '
else:
tmp += ' '
tmp = tmp.strip(' ,')
fw.write(' ' + tmp)
fw.write('\n};\n\n')
vectors_txt += 'v' + str(i) + ' ,'
vectors_sizes_txt += str(len(v)) + ' ,'
vectors_txt = vectors_txt.strip(',') + '};\n'
fw.write(vectors_txt)
vectors_sizes_txt = vectors_sizes_txt.strip(',') + '};\n'
fw.write(vectors_sizes_txt)
with open(VECTOR_HEADER, 'w') as fw:
fw.write('#ifndef _TEST_VECTORS_HPP_\n')
fw.write('#define _TEST_VECTORS_HPP_\n\n')
fw.write('#include <cstdint>\n\n')
fw.write('#include <vector>\n\n')
fw.write('extern std::vector<const uint8_t *> vectors;\n')
fw.write('extern std::vector<size_t> vectors_sizes;\n\n')
fw.write('#endif // _TEST_VECTORS_HPP_\n')
with open(VECTOR_PY, 'w') as fw:
fw.write('vectors = []\n\n')
for i, v in enumerate(vectors):
fw.write('# Vector[{}]: {} bytes\n'.format(i, len(v)))
fw.write('vectors.append(bytes([\n')
tmp = ''
for j, val in enumerate(v):
tmp += '0x{0:02X},'.format(val)
if (0 == ((j + 1) % 8)):
tmp += '\n '
else:
tmp += ' '
tmp = tmp.strip(' ,')
fw.write(' ' + tmp + '\n')
fw.write(']))\n\n')
|
py | 1a3544dbc7c16bc0fcd076f538e5595637532f46 | import hashlib
import logging
import random
from django.conf import settings
from django.contrib.auth.models import Group
from uniauth.processors import (BaseProcessor,
NameIdBuilder)
from . unical_attributes_generator import UnicalAttributeGenerator
logger = logging.getLogger(__name__)
if 'ldap_peoples' in settings.INSTALLED_APPS:
from ldap_peoples.models import LdapAcademiaUser
if 'multildap' in settings.INSTALLED_APPS:
from multildap.client import LdapClient
class GroupProcessor(BaseProcessor):
"""
Example implementation of access control for users:
- superusers are allowed
- staff is allowed
- they have to belong to a certain group
"""
group = "ExampleGroup"
def has_access(self, user): # pragma: no cover
return user.is_superuser or \
user.is_staff or \
user.groups.filter(name=self.group).exists()
class LdapAcademiaProcessor(BaseProcessor):
""" Processor class used to retrieve attribute from LDAP server
and user nameID (userID) with standard formats
"""
def get_identity(self, user):
if isinstance(user, str):
username = user
else:
username = user.username
return LdapAcademiaUser.objects.filter(uid=username).first()
def create_identity(self, user, sp={}):
""" Generate an identity dictionary of the user based on the
given mapping of desired user attributes by the SP
"""
default_mapping = {'username': 'username'}
sp_mapping = sp['config'].get('attribute_mapping',
default_mapping)
# get ldap user
lu = self.get_identity(user)
#logging.info("{} doesn't have a valid computed ePPN in LDAP, please fix it!".format(user.username))
results = self.process_attributes(user, sp_mapping)
if not lu:
return results
results = self.process_attributes(lu, sp_mapping)
# add custom/legacy attribute made by processing
results = self.extra_attr_processing(results, sp_mapping)
# if targetedID is available give it to sp
if self.eduPersonTargetedID:
results['eduPersonTargetedID'] = [self.eduPersonTargetedID]
return results
class LdapUnicalAcademiaProcessor(LdapAcademiaProcessor):
"""
The same as its parent, but with custom attribute processing
for legacy support of strange SPs
"""
def extra_attr_processing(self, results, sp_mapping):
return UnicalAttributeGenerator.process(results, sp_mapping)
class LdapUnicalMultiAcademiaProcessor(LdapUnicalAcademiaProcessor):
"""
Uses pyMultiLDAP to gather an uid from multiple sources.
It will stop on the first occurrence.
"""
def get_identity(self, user):
if hasattr(self, 'request') and hasattr(self.request, 'session'):
if self.request.session.get('identity_attributes'):
return type('', (object,), self.request.session['identity_attributes'])()
if isinstance(user, str):
username = user
else:
username = user.username
# otherwise do another query ...
identity = None
for lc in settings.LDAP_CONNECTIONS: # pragma: no cover
ldapfilter = '(uid={})'.format(username)
logging.debug("Processor {} searches for {} in {}".format(self.__class__,
username,
lc))
identity = lc.get(search=ldapfilter, format='object')
if identity:
return identity
|
py | 1a3544deed4f6579493920acbaa09683aff0b5dd | import os
import subprocess
from string import Template
from damn_at import logger
from damn_at.transcoder import TranscoderException
from damn_at.pluginmanager import ITranscoder
from damn_at.options import IntVectorOption, IntOption, expand_path_template
from damn_at.utilities import script_path, run_blender
class BlenderTranscoder(ITranscoder):
options = [
IntVectorOption(
name='size',
description='The target size of the image',
size=2,
min=1,
max=4096,
default=(128, 128)
),
IntOption(
name='pages',
description='Total number of frames.',
min=1,
max=4096,
default=1
),
]
convert_map = {
"application/x-blender.text": {
"image/jpg": options,
"image/png": options
},
}
def __init__(self):
ITranscoder.__init__(self)
def activate(self):
pass
def transcode(self, dest_path, file_descr,
asset_id, target_mimetype, **options):
path_template = expand_path_template(
target_mimetype.template,
target_mimetype.mimetype,
asset_id,
**options
)
abs_file_path = os.path.join(dest_path, path_template)
abs_file_path_txt = abs_file_path+'.txt'
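# arguments after '--' are passed through to the helper script executed inside Blender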
arguments = [
'--',
asset_id.mimetype,
asset_id.subname,
abs_file_path_txt
]
logger.debug(abs_file_path)
stdoutdata, stderrdata, returncode = run_blender(
file_descr.file.filename,
script_path(__file__),
arguments
)
logger.debug(stdoutdata)
logger.debug(stderrdata)
logger.debug(returncode)
# TODO: check the Blender return code
arguments = [
'convert',
'-pointsize',
'26',
'-resize',
str(options['size'][0]),
abs_file_path_txt + '[0]',
abs_file_path
]
# print arguments
pro = subprocess.Popen(
arguments,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
stdoutdata, stderrdata = pro.communicate()
logger.debug(stdoutdata)
logger.debug(stderrdata)
logger.debug(pro.returncode)
return [path_template]
|
py | 1a35451d5130c0f749a8b04061bc56e2fdcc0608 | # terrascript/split/__init__.py
# Automatically generated by tools/makecode.py ()
import warnings
warnings.warn(
"using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2
)
import terrascript
class split(terrascript.Provider):
pass
|
py | 1a354544c23bb0cb5d796cdd8f99085e3b987283 |
import random
import pytest
from conftest import get_api_data
from assemblyline.common import forge
from assemblyline.odm.random_data import create_users, wipe_users, create_heuristics, wipe_heuristics
@pytest.fixture(scope="module")
def datastore(datastore_connection):
try:
create_users(datastore_connection)
create_heuristics(datastore_connection)
yield datastore_connection
finally:
wipe_users(datastore_connection)
wipe_heuristics(datastore_connection)
def test_get_heuristics(datastore, login_session):
_, session, host = login_session
heuristic = random.choice(datastore.heuristic.search("id:*", rows=100, as_obj=False)['items'])
resp = get_api_data(session, f"{host}/api/v4/heuristics/{heuristic['heur_id']}/")
assert resp['classification'] == heuristic['classification']
assert resp['description'] == heuristic['description']
assert resp['filetype'] == heuristic['filetype']
assert resp['heur_id'] == heuristic['heur_id']
assert resp['name'] == heuristic['name']
def test_heuristic_stats(datastore, login_session):
_, session, host = login_session
cache = forge.get_statistics_cache()
cache.delete()
resp = get_api_data(session, f"{host}/api/v4/heuristics/stats/")
assert len(resp) == 0
stats = datastore.calculate_heuristic_stats()
cache.set('heuristics', stats)
heuristic_count = datastore.heuristic.search("id:*", rows=0)['total']
resp = get_api_data(session, f"{host}/api/v4/heuristics/stats/")
assert len(resp) == heuristic_count
for sig_stat in resp:
assert sorted(list(sig_stat.keys())) == ['avg', 'classification', 'count', 'heur_id', 'max', 'min', 'name']
|
py | 1a35458476173b65b0246970ffb44a4036a346d4 | from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0070_remove_importtask_github_counters'),
]
operations = [
migrations.RemoveField(
model_name='category',
name='original_name',
),
migrations.RemoveField(
model_name='cloudplatform',
name='original_name',
),
migrations.RemoveField(
model_name='content',
name='original_name',
),
migrations.RemoveField(
model_name='contentversion',
name='original_name',
),
migrations.RemoveField(
model_name='namespace',
name='original_name',
),
migrations.RemoveField(
model_name='platform',
name='original_name',
),
migrations.RemoveField(
model_name='provider',
name='original_name',
),
migrations.RemoveField(
model_name='tag',
name='original_name',
),
]
|
py | 1a3545ad17a3b98e6307a89a3963daed10821bc3 | import os
from ats.attributedict import AttributeDict
statuses = AttributeDict()
_StatusCodesAbr = dict(
CREATED = "INIT",
INVALID = "INVD",
PASSED = "PASS",
FAILED = "FAIL",
SKIPPED = "SKIP",
RUNNING = 'EXEC',
FILTERED = 'FILT',
TIMEDOUT = 'TIME',
BATCHED = "BACH",
HALTED = "HALT",
EXPECTED = "EXPT",
LSFERROR = "LSFE",
)
class _StatusCode:
def __init__(self, name):
self.name = name
self.abr = _StatusCodesAbr[name]
def __str__(self):
return self.abr
def __eq__(self, other):
if isinstance(other, _StatusCode):
return self.name == other.name
elif isinstance(other, str):
return self.name == other or self.abr == other
else:
return False
def __ne__(self, other):
# mirror __eq__, which also accepts plain strings
return not self.__eq__(other)
def __repr__(self):
return "StatusCode(%s)" % repr(self.name)
def StatusCode(name):
"Return a status code so that they compare with 'is'. "
try:
return statuses[name]
except KeyError:
new = _StatusCode(name)
statuses[name] = new
return new
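# Because instances are interned in `statuses`, repeated lookups return the same object, so identity comparison is safe. A quick illustration:
#
# assert StatusCode("PASSED") is StatusCode("PASSED")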
CREATED = StatusCode("CREATED")
INVALID = StatusCode("INVALID")
PASSED = StatusCode("PASSED")
FAILED = StatusCode("FAILED")
SKIPPED = StatusCode("SKIPPED")
RUNNING = StatusCode("RUNNING")
FILTERED = StatusCode("FILTERED")
TIMEDOUT = StatusCode("TIMEDOUT")
BATCHED = StatusCode("BATCHED")
HALTED = StatusCode("HALTED")
EXPECTED = StatusCode("EXPECTED")
LSFERROR = StatusCode("LSFERROR")
class AtsError (Exception):
"Exception class for Ats."
def __init__ (self, msg):
Exception.__init__ (self, msg)
def expandpath (path):
"Return a normalized, variable and ~-expanded version of path"
path = str(path)
path = os.path.expanduser(path)
path = os.path.expandvars(path)
path = os.path.normpath(path)
return path
def abspath(path):
"Return an absolute, expanded path."
return os.path.abspath(expandpath(path))
_debug = 0
def debug(value=None):
"Return the debug flag; if value given, set it."
global _debug
if value is None:
return _debug
else:
_debug = int(value)
def is_valid_file (path):
"Does path represent a valid file?"
path = abspath(path)
return os.path.isfile(path)
def is_valid_executable (path):
"Does path represent a valid executable?"
path = abspath(path)
return is_valid_file(path) and os.access(path, os.X_OK)
if __name__ == "__main__":
print(locals())
|
py | 1a3547c92b5ea917f0e340f0a7cfa1792567c505 | from distutils.version import LooseVersion
import os
import json
import pytest
import numpy as np
import pandas as pd
from sklearn import datasets
import xgboost as xgb
import matplotlib as mpl
import yaml
import mlflow
import mlflow.xgboost
from mlflow.models import Model
from mlflow.models.utils import _read_example
from mlflow.utils.autologging import BatchMetricsLogger
from unittest.mock import patch
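# render plots with the non-interactive Agg backend so the tests can run headless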
mpl.use("Agg")
def get_latest_run():
client = mlflow.tracking.MlflowClient()
return client.get_run(client.list_run_infos(experiment_id="0")[0].run_id)
def get_model_conf(artifact_uri, model_subpath="model"):
model_conf_path = os.path.join(artifact_uri, model_subpath, "MLmodel")
return Model.load(model_conf_path)
@pytest.fixture(scope="session")
def bst_params():
return {
"objective": "multi:softprob",
"num_class": 3,
"eval_metric": "mlogloss",
}
@pytest.fixture(scope="session")
def dtrain():
iris = datasets.load_iris()
X = pd.DataFrame(iris.data[:, :2], columns=iris.feature_names[:2])
y = iris.target
return xgb.DMatrix(X, y)
@pytest.mark.large
def test_xgb_autolog_ends_auto_created_run(bst_params, dtrain):
mlflow.xgboost.autolog()
xgb.train(bst_params, dtrain)
assert mlflow.active_run() is None
@pytest.mark.large
def test_xgb_autolog_persists_manually_created_run(bst_params, dtrain):
mlflow.xgboost.autolog()
with mlflow.start_run() as run:
xgb.train(bst_params, dtrain)
assert mlflow.active_run()
assert mlflow.active_run().info.run_id == run.info.run_id
@pytest.mark.large
def test_xgb_autolog_logs_default_params(bst_params, dtrain):
mlflow.xgboost.autolog()
xgb.train(bst_params, dtrain)
run = get_latest_run()
params = run.data.params
expected_params = {
"num_boost_round": 10,
# In xgboost >= 1.3.0, the default value for `maximize` in `xgboost.train` is None:
# https://xgboost.readthedocs.io/en/latest/python/python_api.html#xgboost.train
# In < 1.3.0, it's False:
# https://xgboost.readthedocs.io/en/release_1.2.0/python/python_api.html#xgboost.train
"maximize": None if LooseVersion(xgb.__version__) >= LooseVersion("1.3.0") else False,
"early_stopping_rounds": None,
"verbose_eval": True,
}
expected_params.update(bst_params)
for key, val in expected_params.items():
assert key in params
assert params[key] == str(val)
unlogged_params = [
"dtrain",
"evals",
"obj",
"feval",
"evals_result",
"xgb_model",
"callbacks",
"learning_rates",
]
for param in unlogged_params:
assert param not in params
@pytest.mark.large
def test_xgb_autolog_logs_specified_params(bst_params, dtrain):
mlflow.xgboost.autolog()
expected_params = {
"num_boost_round": 20,
"early_stopping_rounds": 5,
"verbose_eval": False,
}
xgb.train(bst_params, dtrain, evals=[(dtrain, "train")], **expected_params)
run = get_latest_run()
params = run.data.params
expected_params.update(bst_params)
for key, val in expected_params.items():
assert key in params
assert params[key] == str(val)
unlogged_params = [
"dtrain",
"evals",
"obj",
"feval",
"evals_result",
"xgb_model",
"callbacks",
"learning_rates",
]
for param in unlogged_params:
assert param not in params
@pytest.mark.large
def test_xgb_autolog_logs_metrics_with_validation_data(bst_params, dtrain):
mlflow.xgboost.autolog()
evals_result = {}
xgb.train(
bst_params, dtrain, num_boost_round=20, evals=[(dtrain, "train")], evals_result=evals_result
)
run = get_latest_run()
data = run.data
metric_key = "train-mlogloss"
client = mlflow.tracking.MlflowClient()
metric_history = [x.value for x in client.get_metric_history(run.info.run_id, metric_key)]
assert metric_key in data.metrics
assert len(metric_history) == 20
assert metric_history == evals_result["train"]["mlogloss"]
@pytest.mark.large
def test_xgb_autolog_logs_metrics_with_multi_validation_data(bst_params, dtrain):
mlflow.xgboost.autolog()
evals_result = {}
evals = [(dtrain, "train"), (dtrain, "valid")]
xgb.train(bst_params, dtrain, num_boost_round=20, evals=evals, evals_result=evals_result)
run = get_latest_run()
data = run.data
client = mlflow.tracking.MlflowClient()
for eval_name in [e[1] for e in evals]:
metric_key = "{}-mlogloss".format(eval_name)
metric_history = [x.value for x in client.get_metric_history(run.info.run_id, metric_key)]
assert metric_key in data.metrics
assert len(metric_history) == 20
assert metric_history == evals_result[eval_name]["mlogloss"]
@pytest.mark.large
def test_xgb_autolog_logs_metrics_with_multi_metrics(bst_params, dtrain):
mlflow.xgboost.autolog()
evals_result = {}
params = {**bst_params, "eval_metric": ["merror", "mlogloss"]}
xgb.train(
params, dtrain, num_boost_round=20, evals=[(dtrain, "train")], evals_result=evals_result
)
run = get_latest_run()
data = run.data
client = mlflow.tracking.MlflowClient()
for metric_name in params["eval_metric"]:
metric_key = "train-{}".format(metric_name)
metric_history = [x.value for x in client.get_metric_history(run.info.run_id, metric_key)]
assert metric_key in data.metrics
assert len(metric_history) == 20
assert metric_history == evals_result["train"][metric_name]
@pytest.mark.large
def test_xgb_autolog_logs_metrics_with_multi_validation_data_and_metrics(bst_params, dtrain):
mlflow.xgboost.autolog()
evals_result = {}
params = {**bst_params, "eval_metric": ["merror", "mlogloss"]}
evals = [(dtrain, "train"), (dtrain, "valid")]
xgb.train(params, dtrain, num_boost_round=20, evals=evals, evals_result=evals_result)
run = get_latest_run()
data = run.data
client = mlflow.tracking.MlflowClient()
for eval_name in [e[1] for e in evals]:
for metric_name in params["eval_metric"]:
metric_key = "{}-{}".format(eval_name, metric_name)
metric_history = [
x.value for x in client.get_metric_history(run.info.run_id, metric_key)
]
assert metric_key in data.metrics
assert len(metric_history) == 20
assert metric_history == evals_result[eval_name][metric_name]
@pytest.mark.large
def test_xgb_autolog_logs_metrics_with_early_stopping(bst_params, dtrain):
mlflow.xgboost.autolog()
evals_result = {}
params = {**bst_params, "eval_metric": ["merror", "mlogloss"]}
evals = [(dtrain, "train"), (dtrain, "valid")]
model = xgb.train(
params,
dtrain,
num_boost_round=20,
early_stopping_rounds=5,
evals=evals,
evals_result=evals_result,
)
run = get_latest_run()
data = run.data
assert "best_iteration" in data.metrics
assert int(data.metrics["best_iteration"]) == model.best_iteration
assert "stopped_iteration" in data.metrics
assert int(data.metrics["stopped_iteration"]) == len(evals_result["train"]["merror"]) - 1
client = mlflow.tracking.MlflowClient()
for eval_name in [e[1] for e in evals]:
for metric_name in params["eval_metric"]:
metric_key = "{}-{}".format(eval_name, metric_name)
metric_history = [
x.value for x in client.get_metric_history(run.info.run_id, metric_key)
]
assert metric_key in data.metrics
assert len(metric_history) == 20 + 1
best_metrics = evals_result[eval_name][metric_name][model.best_iteration]
assert metric_history == evals_result[eval_name][metric_name] + [best_metrics]
@pytest.mark.large
def test_xgb_autolog_batch_metrics_logger_logs_expected_metrics(bst_params, dtrain):
patched_metrics_data = []
# Mock patching BatchMetricsLogger.record_metrics()
# to ensure that expected metrics are being logged.
original = BatchMetricsLogger.record_metrics
with patch(
"mlflow.utils.autologging.BatchMetricsLogger.record_metrics", autospec=True
) as record_metrics_mock:
def record_metrics_side_effect(self, metrics, step=None):
patched_metrics_data.extend(metrics.items())
original(self, metrics, step)
record_metrics_mock.side_effect = record_metrics_side_effect
mlflow.xgboost.autolog()
evals_result = {}
params = {**bst_params, "eval_metric": ["merror", "mlogloss"]}
evals = [(dtrain, "train"), (dtrain, "valid")]
model = xgb.train(
params,
dtrain,
num_boost_round=20,
early_stopping_rounds=5,
evals=evals,
evals_result=evals_result,
)
patched_metrics_data = dict(patched_metrics_data)
run = get_latest_run()
original_metrics = run.data.metrics
for metric_name in original_metrics:
assert metric_name in patched_metrics_data
assert original_metrics[metric_name] == patched_metrics_data[metric_name]
assert int(patched_metrics_data["best_iteration"]) == model.best_iteration
assert int(original_metrics["best_iteration"]) == model.best_iteration
@pytest.mark.large
def test_xgb_autolog_logs_feature_importance(bst_params, dtrain):
mlflow.xgboost.autolog()
model = xgb.train(bst_params, dtrain)
run = get_latest_run()
run_id = run.info.run_id
artifacts_dir = run.info.artifact_uri.replace("file://", "")
client = mlflow.tracking.MlflowClient()
artifacts = [x.path for x in client.list_artifacts(run_id)]
importance_type = "weight"
plot_name = "feature_importance_{}.png".format(importance_type)
assert plot_name in artifacts
json_name = "feature_importance_{}.json".format(importance_type)
assert json_name in artifacts
json_path = os.path.join(artifacts_dir, json_name)
with open(json_path, "r") as f:
loaded_imp = json.load(f)
assert loaded_imp == model.get_score(importance_type=importance_type)
@pytest.mark.large
def test_xgb_autolog_logs_specified_feature_importance(bst_params, dtrain):
importance_types = ["weight", "total_gain"]
mlflow.xgboost.autolog(importance_types=importance_types)
model = xgb.train(bst_params, dtrain)
run = get_latest_run()
run_id = run.info.run_id
artifacts_dir = run.info.artifact_uri.replace("file://", "")
client = mlflow.tracking.MlflowClient()
artifacts = [x.path for x in client.list_artifacts(run_id)]
for imp_type in importance_types:
plot_name = "feature_importance_{}.png".format(imp_type)
assert plot_name in artifacts
json_name = "feature_importance_{}.json".format(imp_type)
assert json_name in artifacts
json_path = os.path.join(artifacts_dir, json_name)
with open(json_path, "r") as f:
loaded_imp = json.load(f)
assert loaded_imp == model.get_score(importance_type=imp_type)
@pytest.mark.large
def test_no_figure_is_opened_after_logging(bst_params, dtrain):
mlflow.xgboost.autolog()
xgb.train(bst_params, dtrain)
assert mpl.pyplot.get_fignums() == []
@pytest.mark.large
def test_xgb_autolog_loads_model_from_artifact(bst_params, dtrain):
mlflow.xgboost.autolog()
model = xgb.train(bst_params, dtrain)
run = get_latest_run()
run_id = run.info.run_id
loaded_model = mlflow.xgboost.load_model("runs:/{}/model".format(run_id))
np.testing.assert_array_almost_equal(model.predict(dtrain), loaded_model.predict(dtrain))
@pytest.mark.large
def test_xgb_autolog_does_not_throw_if_importance_values_not_supported(dtrain):
# the gblinear booster does not support calling get_score on it,
# where get_score is used to create the importance values plot.
bst_params = {"objective": "multi:softprob", "num_class": 3, "booster": "gblinear"}
mlflow.xgboost.autolog()
# we make sure here that we do not throw while attempting to plot
# importance values on a model with a linear booster.
model = xgb.train(bst_params, dtrain)
with pytest.raises(Exception):
model.get_score(importance_type="weight")
@pytest.mark.large
def test_xgb_autolog_gets_input_example(bst_params):
mlflow.xgboost.autolog(log_input_examples=True)
# we cannot use dtrain fixture, as the dataset must be constructed
# after the call to autolog() in order to get the input example
iris = datasets.load_iris()
X = pd.DataFrame(iris.data[:, :2], columns=iris.feature_names[:2])
y = iris.target
dataset = xgb.DMatrix(X, y)
xgb.train(bst_params, dataset)
run = get_latest_run()
model_path = os.path.join(run.info.artifact_uri, "model")
model_conf = Model.load(os.path.join(model_path, "MLmodel"))
input_example = _read_example(model_conf, model_path)
assert input_example.equals(X[:5])
pyfunc_model = mlflow.pyfunc.load_model(os.path.join(run.info.artifact_uri, "model"))
# make sure reloading the input_example and predicting on it does not error
pyfunc_model.predict(input_example)
@pytest.mark.large
def test_xgb_autolog_infers_model_signature_correctly(bst_params):
mlflow.xgboost.autolog(log_model_signatures=True)
# we cannot use dtrain fixture, as the dataset must be constructed
# after the call to autolog() in order to get the input example
iris = datasets.load_iris()
X = pd.DataFrame(iris.data[:, :2], columns=iris.feature_names[:2])
y = iris.target
dataset = xgb.DMatrix(X, y)
xgb.train(bst_params, dataset)
run = get_latest_run()
run_id = run.info.run_id
artifacts_dir = run.info.artifact_uri.replace("file://", "")
client = mlflow.tracking.MlflowClient()
artifacts = [x.path for x in client.list_artifacts(run_id, "model")]
ml_model_filename = "MLmodel"
assert str(os.path.join("model", ml_model_filename)) in artifacts
ml_model_path = os.path.join(artifacts_dir, "model", ml_model_filename)
data = None
with open(ml_model_path, "r") as f:
data = yaml.load(f, Loader=yaml.FullLoader)
assert data is not None
assert "signature" in data
signature = data["signature"]
assert signature is not None
assert "inputs" in signature
assert json.loads(signature["inputs"]) == [
{"name": "sepal length (cm)", "type": "double"},
{"name": "sepal width (cm)", "type": "double"},
]
assert "outputs" in signature
assert json.loads(signature["outputs"]) == [
{"type": "tensor", "tensor-spec": {"dtype": "float32", "shape": [-1, 3]}},
]
@pytest.mark.large
def test_xgb_autolog_does_not_throw_if_importance_values_are_empty(bst_params, tmpdir):
tmp_csv = tmpdir.join("data.csv")
tmp_csv.write("1,0.3,1.2\n")
tmp_csv.write("0,2.4,5.2\n")
tmp_csv.write("1,0.3,-1.2\n")
mlflow.xgboost.autolog()
dataset = xgb.DMatrix(tmp_csv.strpath + "?format=csv&label_column=0")
# we make sure here that we do not throw while attempting to plot
# importance values on a dataset that returns no importance values.
model = xgb.train(bst_params, dataset)
assert model.get_score(importance_type="weight") == {}
@pytest.mark.large
def test_xgb_autolog_continues_logging_even_if_signature_inference_fails(bst_params, tmpdir):
tmp_csv = tmpdir.join("data.csv")
tmp_csv.write("1,0.3,1.2\n")
tmp_csv.write("0,2.4,5.2\n")
tmp_csv.write("1,0.3,-1.2\n")
mlflow.xgboost.autolog(importance_types=[], log_model_signatures=True)
# signature and input example inference should fail here since the dataset is given
# as a file path
dataset = xgb.DMatrix(tmp_csv.strpath + "?format=csv&label_column=0")
xgb.train(bst_params, dataset)
run = get_latest_run()
run_id = run.info.run_id
artifacts_dir = run.info.artifact_uri.replace("file://", "")
client = mlflow.tracking.MlflowClient()
artifacts = [x.path for x in client.list_artifacts(run_id, "model")]
ml_model_filename = "MLmodel"
assert os.path.join("model", ml_model_filename) in artifacts
ml_model_path = os.path.join(artifacts_dir, "model", ml_model_filename)
data = None
with open(ml_model_path, "r") as f:
data = yaml.load(f, Loader=yaml.FullLoader)
assert data is not None
assert "run_id" in data
assert "signature" not in data
@pytest.mark.large
def test_xgb_autolog_does_not_break_dmatrix_serialization(bst_params, tmpdir):
mlflow.xgboost.autolog()
# we cannot use dtrain fixture, as the dataset must be constructed
# after the call to autolog() in order to test the serialization
iris = datasets.load_iris()
X = pd.DataFrame(iris.data[:, :2], columns=iris.feature_names[:2])
y = iris.target
dataset = xgb.DMatrix(X, y)
xgb.train(bst_params, dataset)
save_path = tmpdir.join("dataset_serialization_test").strpath
dataset.save_binary(save_path) # serialization should not throw
xgb.DMatrix(save_path) # deserialization also should not throw
@pytest.mark.large
@pytest.mark.parametrize("log_input_examples", [True, False])
@pytest.mark.parametrize("log_model_signatures", [True, False])
def test_xgb_autolog_configuration_options(bst_params, log_input_examples, log_model_signatures):
iris = datasets.load_iris()
X = pd.DataFrame(iris.data[:, :2], columns=iris.feature_names[:2])
y = iris.target
with mlflow.start_run() as run:
mlflow.xgboost.autolog(
log_input_examples=log_input_examples, log_model_signatures=log_model_signatures
)
dataset = xgb.DMatrix(X, y)
xgb.train(bst_params, dataset)
model_conf = get_model_conf(run.info.artifact_uri)
assert ("saved_input_example_info" in model_conf.to_dict()) == log_input_examples
assert ("signature" in model_conf.to_dict()) == log_model_signatures
@pytest.mark.large
@pytest.mark.parametrize("log_models", [True, False])
def test_xgb_autolog_log_models_configuration(bst_params, log_models):
iris = datasets.load_iris()
X = pd.DataFrame(iris.data[:, :2], columns=iris.feature_names[:2])
y = iris.target
with mlflow.start_run() as run:
mlflow.xgboost.autolog(log_models=log_models)
dataset = xgb.DMatrix(X, y)
xgb.train(bst_params, dataset)
run_id = run.info.run_id
client = mlflow.tracking.MlflowClient()
artifacts = [f.path for f in client.list_artifacts(run_id)]
assert ("model" in artifacts) == log_models
def test_xgb_autolog_does_not_break_dmatrix_instantiation_with_data_none():
"""
This test verifies that `xgboost.DMatrix(None)` doesn't fail after patching.
XGBoost internally calls `xgboost.DMatrix(None)` to create a blank `DMatrix` object.
Example: https://github.com/dmlc/xgboost/blob/v1.2.1/python-package/xgboost/core.py#L701
"""
mlflow.xgboost.autolog()
xgb.DMatrix(None)
|
py | 1a3547f1b6dedaf915e7467aab0e72738a93ca65 | import os
from dodo_commands.framework import ramda as R
from dodo_commands.framework.config_io import ConfigIO
class Layers:
def __init__(self):
self.config_io = ConfigIO()
self.root_layer_path = None
self.root_layer = None
self.layer_by_target_path = {}
self.selected_layer_by_path = {}
self.metadata_by_layer_name = None
def get_ordered_layer_paths(self):
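# root layer path first, then every selected/target layer path, de-duplicated, sorted by basename and expanded via the config glob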
root_layer_path = self.config_io.glob([self.root_layer_path])[0]
x = R.concat(
self.selected_layer_by_path.keys(), self.layer_by_target_path.keys()
)
x = R.uniq(x)
x = sorted(x, key=os.path.basename)
x = self.config_io.glob(x)
x = R.filter(lambda x: x != root_layer_path)(x)
x = R.concat([root_layer_path], x)
return x
@staticmethod
def get(ctr):
return ctr.layers
def init_layers(self, root_layer_path):
self.root_layer_path = root_layer_path
return self
|
py | 1a3548c611f417e421fbf8f978518f13da9e1a4c | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# -*- coding: utf-8 -*-
"""
# @Time : 2019/5/25
# @Author : Jiaqi&Zecheng
# @File : train.py
# @Software: PyCharm
"""
import time
import traceback
import os
import torch
import torch.optim as optim
import tqdm
import copy
from src import args as arg
from src import utils
from src.models.model import IRNet
from src.rule import semQL
def train(args):
"""
:param args:
:return:
"""
grammar = semQL.Grammar()
sql_data, table_data, val_sql_data,\
val_table_data= utils.load_dataset(args.dataset, use_small=args.toy)
model = IRNet(args, grammar)
if args.cuda: model.cuda()
# now get the optimizer
optimizer_cls = eval('torch.optim.%s' % args.optimizer)
optimizer = optimizer_cls(model.parameters(), lr=args.lr)
print('Enable Learning Rate Scheduler: ', args.lr_scheduler)
if args.lr_scheduler:
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[21, 41], gamma=args.lr_scheduler_gammar)
else:
scheduler = None
print('Loss epoch threshold: %d' % args.loss_epoch_threshold)
print('Sketch loss coefficient: %f' % args.sketch_loss_coefficient)
if args.load_model:
print('load pretrained model from %s' % args.load_model)
pretrained_model = torch.load(args.load_model,
map_location=lambda storage, loc: storage)
pretrained_modeled = copy.deepcopy(pretrained_model)
for k in pretrained_model.keys():
if k not in model.state_dict().keys():
del pretrained_modeled[k]
model.load_state_dict(pretrained_modeled)
model.word_emb = utils.load_word_emb(args.glove_embed_path)
# begin train
model_save_path = utils.init_log_checkpoint_path(args)
utils.save_args(args, os.path.join(model_save_path, 'config.json'))
best_dev_acc = .0
try:
with open(os.path.join(model_save_path, 'epoch.log'), 'w') as epoch_fd:
for epoch in tqdm.tqdm(range(args.epoch)):
if args.lr_scheduler:
scheduler.step()
epoch_begin = time.time()
loss = utils.epoch_train(model, optimizer, args.batch_size, sql_data, table_data, args,
loss_epoch_threshold=args.loss_epoch_threshold,
sketch_loss_coefficient=args.sketch_loss_coefficient)
epoch_end = time.time()
json_datas = utils.epoch_acc(model, args.batch_size, val_sql_data, val_table_data,
beam_size=args.beam_size)
acc = utils.eval_acc(json_datas, val_sql_data)
if acc > best_dev_acc:
utils.save_checkpoint(model, os.path.join(model_save_path, 'best_model.model'))
best_dev_acc = acc
utils.save_checkpoint(model, os.path.join(model_save_path, '{%s}_{%s}.model') % (epoch, acc))
log_str = 'Epoch: %d, Loss: %f, Sketch Acc: %f, Acc: %f, time: %f\n' % (
epoch + 1, loss, acc, acc, epoch_end - epoch_begin)
tqdm.tqdm.write(log_str)
epoch_fd.write(log_str)
epoch_fd.flush()
except Exception as e:
# Save model
utils.save_checkpoint(model, os.path.join(model_save_path, 'end_model.model'))
print(e)
tb = traceback.format_exc()
print(tb)
else:
utils.save_checkpoint(model, os.path.join(model_save_path, 'end_model.model'))
json_datas = utils.epoch_acc(model, args.batch_size, val_sql_data, val_table_data,
beam_size=args.beam_size)
acc = utils.eval_acc(json_datas, val_sql_data)
print("Sketch Acc: %f, Acc: %f, Beam Acc: %f" % (acc, acc, acc,))
if __name__ == '__main__':
arg_parser = arg.init_arg_parser()
args = arg.init_config(arg_parser)
print(args)
train(args) |
py | 1a354933ad535c31a88d40cac297fb79bb434fe6 | def factorial_recursive(n):
"""
:param n: Integer
:return: n * n-1 * n-2 * n-3..........1
"""
if n <= 1: # covers n == 0 as well, so factorial(0) == 1 and the recursion always terminates
return 1
else:
return n * factorial_recursive(n-1)
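# Worked example: factorial_recursive(4) -> 4 * factorial_recursive(3) -> 4 * 3 * 2 * 1 == 24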
number = int(input("Enter the number : "))
print("factorial using recursion: ", factorial_recursive(number))
|
py | 1a35498f0af4bbb13a915e488812aa0d6bab0490 | #!/usr/bin/env python3
# imports go here
#
# Free Coding session for 2015-01-15
# Written by Matt Warren
#
if __name__ == '__main__':
print("No time for free coding today :(")
print(":("[::-1])
|
py | 1a354b9ab734f3f3b58a0d6a3efbb7ec861b7751 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Contains common helpers for working with Android manifests."""
import hashlib
import os
import re
import shlex
import xml.dom.minidom as minidom
from util import build_utils
from xml.etree import ElementTree
ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
DIST_NAMESPACE = 'http://schemas.android.com/apk/distribution'
EMPTY_ANDROID_MANIFEST_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', 'AndroidManifest.xml'))
# When normalizing for expectation matching, wrap these tags when they are long
# or else they become very hard to read.
_WRAP_CANDIDATES = (
'<manifest',
'<application',
'<activity',
'<provider',
'<receiver',
'<service',
)
# Don't wrap lines shorter than this.
_WRAP_LINE_LENGTH = 100
_xml_namespace_initialized = False
def _RegisterElementTreeNamespaces():
global _xml_namespace_initialized
if _xml_namespace_initialized:
return
_xml_namespace_initialized = True
ElementTree.register_namespace('android', ANDROID_NAMESPACE)
ElementTree.register_namespace('tools', TOOLS_NAMESPACE)
ElementTree.register_namespace('dist', DIST_NAMESPACE)
def ParseManifest(path):
"""Parses an AndroidManifest.xml using ElementTree.
Registers required namespaces, creates application node if missing, adds any
missing namespaces for 'android', 'tools' and 'dist'.
Returns tuple of:
doc: Root xml document.
manifest_node: the <manifest> node.
app_node: the <application> node.
"""
_RegisterElementTreeNamespaces()
doc = ElementTree.parse(path)
# ElementTree.find does not work if the required tag is the root.
if doc.getroot().tag == 'manifest':
manifest_node = doc.getroot()
else:
manifest_node = doc.find('manifest')
app_node = doc.find('application')
if app_node is None:
app_node = ElementTree.SubElement(manifest_node, 'application')
return doc, manifest_node, app_node
def SaveManifest(doc, path):
with build_utils.AtomicOutput(path) as f:
f.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))
def GetPackage(manifest_node):
return manifest_node.get('package')
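# Usage sketch (hypothetical manifest path; relies only on the helpers above):
#
# doc, manifest_node, app_node = ParseManifest('AndroidManifest.xml')
# print(GetPackage(manifest_node))
# SaveManifest(doc, 'AndroidManifest.out.xml')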
def AssertUsesSdk(manifest_node,
min_sdk_version=None,
target_sdk_version=None,
max_sdk_version=None,
fail_if_not_exist=False):
"""Asserts values of attributes of <uses-sdk> element.
Unless |fail_if_not_exist| is true, will only assert if both the passed value
is not None and the value of attribute exist. If |fail_if_not_exist| is true
will fail if passed value is not None but attribute does not exist.
"""
uses_sdk_node = manifest_node.find('./uses-sdk')
if uses_sdk_node is None:
return
for prefix, sdk_version in (('min', min_sdk_version), ('target',
target_sdk_version),
('max', max_sdk_version)):
value = uses_sdk_node.get('{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix))
if fail_if_not_exist and not value and sdk_version:
assert False, (
'%sSdkVersion in Android manifest does not exist but we expect %s' %
(prefix, sdk_version))
if not value or not sdk_version:
continue
assert value == sdk_version, (
'%sSdkVersion in Android manifest is %s but we expect %s' %
(prefix, value, sdk_version))
def AssertPackage(manifest_node, package):
"""Asserts that manifest package has desired value.
Will only assert if both |package| is not None and the package is set in the
manifest.
"""
package_value = GetPackage(manifest_node)
if package_value is None or package is None:
return
assert package_value == package, (
'Package in Android manifest is %s but we expect %s' % (package_value,
package))
def _SortAndStripElementTree(root):
def sort_key(node):
ret = ElementTree.tostring(node)
# ElementTree.tostring inserts namespace attributes for any that are needed
# for the node or any of its descendants. Remove them so as to prevent a
# change to a child that adds/removes a namespace usage from changing sort
# order.
return re.sub(r' xmlns:.*?".*?"', '', ret.decode('utf8'))
def helper(node):
for child in node:
if child.text and child.text.isspace():
child.text = None
helper(child)
node[:] = sorted(node, key=sort_key)
def rename_attrs(node, from_name, to_name):
value = node.attrib.get(from_name)
if value is not None:
node.attrib[to_name] = value
del node.attrib[from_name]
for child in node:
rename_attrs(child, from_name, to_name)
# Sort alphabetically with two exceptions:
# 1) Put <application> node last (since it's giant).
# 2) Pretend android:name appears before other attributes.
app_node = root.find('application')
app_node.tag = 'zz'
rename_attrs(root, '{%s}name' % ANDROID_NAMESPACE, '__name__')
helper(root)
rename_attrs(root, '__name__', '{%s}name' % ANDROID_NAMESPACE)
app_node.tag = 'application'
def _SplitElement(line):
"""Parses a one-line xml node into ('<tag', ['a="b"', ...]], '/>')."""
# Shlex splits nicely, but removes quotes. Need to put them back.
def restore_quotes(value):
return value.replace('=', '="', 1) + '"'
# Simplify restore_quotes by separating />.
assert line.endswith('>'), line
end_tag = '>'
if line.endswith('/>'):
end_tag = '/>'
line = line[:-len(end_tag)]
# Use shlex to avoid having to re-encode ", etc.
parts = shlex.split(line)
start_tag = parts[0]
attrs = parts[1:]
return start_tag, [restore_quotes(x) for x in attrs], end_tag
def _CreateNodeHash(lines):
"""Computes a hash (md5) for the first XML node found in |lines|.
Args:
lines: List of strings containing pretty-printed XML.
Returns:
Positive 32-bit integer hash of the node (including children).
"""
target_indent = lines[0].find('<')
tag_closed = False
for i, l in enumerate(lines[1:]):
cur_indent = l.find('<')
if cur_indent != -1 and cur_indent <= target_indent:
tag_lines = lines[:i + 1]
break
elif not tag_closed and 'android:name="' in l:
# To reduce noise of node tags changing, use android:name as the
# basis the hash since they usually unique.
tag_lines = [l]
break
tag_closed = tag_closed or '>' in l
else:
assert False, 'Did not find end of node:\n' + '\n'.join(lines)
# Insecure and truncated hash as it only needs to be unique vs. its neighbors.
return hashlib.md5(('\n'.join(tag_lines)).encode('utf8')).hexdigest()[:8]
def _IsSelfClosing(lines):
"""Given pretty-printed xml, returns whether first node is self-closing."""
for l in lines:
idx = l.find('>')
if idx != -1:
return l[idx - 1] == '/'
assert False, 'Did not find end of tag:\n' + '\n'.join(lines)
def _AddDiffTags(lines):
# When multiple identical tags appear sequentially, XML diffs can look like:
# + </tag>
# + <tag>
# rather than:
# + <tag>
# + </tag>
# To reduce confusion, add hashes to tags.
# This also ensures changed tags show up with outer <tag> elements rather than
# showing only changed attributes.
hash_stack = []
for i, l in enumerate(lines):
stripped = l.lstrip()
# Ignore non-indented tags and lines that are not the start/end of a node.
if l[0] != ' ' or stripped[0] != '<':
continue
# Ignore self-closing nodes that fit on one line.
if l[-2:] == '/>':
continue
# Ignore <application> since diff tag changes with basically any change.
if stripped.lstrip('</').startswith('application'):
continue
# Check for the closing tag (</foo>).
if stripped[1] != '/':
cur_hash = _CreateNodeHash(lines[i:])
if not _IsSelfClosing(lines[i:]):
hash_stack.append(cur_hash)
else:
cur_hash = hash_stack.pop()
lines[i] += ' # DIFF-ANCHOR: {}'.format(cur_hash)
assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
def NormalizeManifest(manifest_contents):
_RegisterElementTreeNamespaces()
# This also strips comments and sorts node attributes alphabetically.
root = ElementTree.fromstring(manifest_contents)
package = GetPackage(root)
app_node = root.find('application')
if app_node is not None:
# android:debuggable is added when !is_official_build. Strip it out to avoid
# expectation diffs caused by not adding is_official_build. Play store
# blocks uploading apps with it set, so there's no risk of it slipping in.
debuggable_name = '{%s}debuggable' % ANDROID_NAMESPACE
if debuggable_name in app_node.attrib:
del app_node.attrib[debuggable_name]
# Trichrome's static library version number is updated daily. To avoid
# frequent manifest check failures, we remove the exact version number
# during normalization.
for node in app_node.getchildren():
if (node.tag in ['uses-static-library', 'static-library']
and '{%s}version' % ANDROID_NAMESPACE in node.keys()
and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')
# We also remove the exact package name (except the one at the root level)
# to avoid noise during manifest comparison.
def blur_package_name(node):
for key in node.keys():
node.set(key, node.get(key).replace(package, '$PACKAGE'))
for child in node.getchildren():
blur_package_name(child)
# We only blur the package names of non-root nodes because they generate a lot
# of diffs when doing manifest checks for upstream targets. We still want to
# have 1 piece of package name not blurred just in case the package name is
# mistakenly changed.
for child in root.getchildren():
blur_package_name(child)
_SortAndStripElementTree(root)
# Fix up whitespace/indentation.
dom = minidom.parseString(ElementTree.tostring(root))
out_lines = []
for l in dom.toprettyxml(indent=' ').splitlines():
if not l or l.isspace():
continue
if len(l) > _WRAP_LINE_LENGTH and any(x in l for x in _WRAP_CANDIDATES):
indent = ' ' * l.find('<')
start_tag, attrs, end_tag = _SplitElement(l)
out_lines.append('{}{}'.format(indent, start_tag))
for attribute in attrs:
out_lines.append('{} {}'.format(indent, attribute))
out_lines[-1] += '>'
# Heuristic: Do not allow multi-line tags to be self-closing since these
# can generally be allowed to have nested elements. When diffing, it adds
# noise if the base file is self-closing and the non-base file is not
# self-closing.
if end_tag == '/>':
out_lines.append('{}{}>'.format(indent, start_tag.replace('<', '</')))
else:
out_lines.append(l)
# Make output more diff-friendly.
_AddDiffTags(out_lines)
return '\n'.join(out_lines) + '\n'
|
py | 1a354c94f4941c5f77e8a5469387079c1dae7d30 | """A Python Wrapper for accessing the ZeroTier API."""
import asyncio
import logging
import aiohttp
import async_timeout
from . import exceptions
_LOGGER = logging.getLogger(__name__)
WRITABLE_NETWORK = [
'name',
'private',
'enableBroadcast',
'v4AssignMode',
'v6AssignMode',
'mtu',
'multicastLimit',
'routes',
'ipAssignmentPools',
'rules',
'capabilities',
'tags',
'remoteTraceTarget',
'remoteTraceLevel',
]
WRITABLE_MEMBER = ['authorized', 'activeBridge', 'ipAssignments']
class ZeroTier(object):
"""A class for handling the data retrieval."""
def __init__(self, api_token, loop, session, host='localhost', port=9993):
"""Initialize the connection."""
self._loop = loop
self._session = session
self.headers = {'X-ZT1-Auth': api_token}
self.data = None
self.url = '{}:{}'.format(host, port)
async def get_data(self, endpoint):
"""Retrieve the data."""
try:
with async_timeout.timeout(5, loop=self._loop):
response = await self._session.get(
'http://{}/{}'.format(self.url, endpoint),
headers=self.headers)
_LOGGER.debug("Response status: %s", response.status)
self.data = await response.json()
_LOGGER.debug(self.data)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Can not load data from ZeroTier controller")
raise exceptions.ZeroTierConnectionError()
async def set_value(self, key, variable, endpoint):
"""Send a POST request toa controller."""
payload = {key: variable}
        _LOGGER.debug("Payload: %s", payload)
try:
with async_timeout.timeout(5, loop=self._loop):
response = await self._session.post(
'http://{}/{}'.format(self.url, endpoint),
headers=self.headers, data=payload)
_LOGGER.debug("Response status: %s", response.status)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Can not update entry of ZeroTier controller")
raise exceptions.ZeroTierConnectionError()
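# --- Illustrative usage sketch (added; not part of the original wrapper). ---
# 'YOUR_API_TOKEN' is a placeholder and 'status' is one of the controller
# service's endpoints; a local ZeroTier service must be running for this call
# to succeed.
if __name__ == '__main__':
    async def _example(loop):
        async with aiohttp.ClientSession() as session:
            client = ZeroTier('YOUR_API_TOKEN', loop, session)
            await client.get_data('status')
            print(client.data)
    _loop = asyncio.get_event_loop()
    _loop.run_until_complete(_example(_loop))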
|
py | 1a354d80dbc2aa4b5e73d5101a622ee356446938 | #!/usr/bin/env python
import json
from random import randint
import momoko
import tornado.ioloop
import tornado.web
from tornado import gen
import tornado.options
from tornado.options import options
import tornado.httpserver
from commons import JsonHandler, JsonHelloWorldHandler, PlaintextHelloWorldHandler, BaseHandler
tornado.options.define('port', default=8888, type=int, help="Server port")
tornado.options.define('postgres', default="localhost",
type=str, help="PostgreSQL host")
tornado.options.define('backlog', default=8192, type=int,
help="Server backlog")
class SingleQueryHandler(JsonHandler):
SQL = "SELECT id, randomNumber FROM World WHERE id=%s"
@gen.coroutine
def get(self):
random_id = randint(1, 10000)
cursor = yield db.execute(self.SQL, (random_id,))
row = cursor.fetchone()
response = json.dumps({self.ID: row[0], self.RANDOM_NUMBER: row[1]})
self.finish(response)
class MultipleQueriesHandler(JsonHandler):
SQL = "SELECT id, randomNumber FROM World WHERE id=%s"
@gen.coroutine
def get(self):
queries = self.get_argument(self.QUERIES, "1")
try:
queries = int(queries.strip())
except ValueError:
queries = 1
queries = min(max(1, queries), 500)
worlds = []
        cursors = yield [db.execute(self.SQL, (randint(1, 10000),)) for _ in range(queries)]
for cursor in cursors:
row = cursor.fetchone()
worlds.append({self.ID: row[0], self.RANDOM_NUMBER: row[1]})
response = json.dumps(worlds)
self.finish(response)
application = tornado.web.Application([
(r"/json", JsonHelloWorldHandler),
(r"/plaintext", PlaintextHelloWorldHandler),
(r"/db", SingleQueryHandler),
(r"/queries", MultipleQueriesHandler)
],
template_path="templates"
)
application.ui_modules = {}
if __name__ == "__main__":
tornado.options.parse_command_line()
server = tornado.httpserver.HTTPServer(application)
server.bind(options.port, backlog=options.backlog)
server.start(0)
ioloop = tornado.ioloop.IOLoop.instance()
dsn = "user=benchmarkdbuser password=benchmarkdbpass dbname=hello_world host=%s" % options.postgres
db = momoko.Pool(dsn, size=100, max_size=200)
ioloop.run_sync(db.connect)
ioloop.start()
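# Illustrative check (added): with the server running, the handlers above can
# be exercised with e.g.
#   curl http://localhost:8888/json
#   curl http://localhost:8888/queries?queries=5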
|
py | 1a354db82b72090ec63afd8a5d09686f8b5fde74 | import typing
from ..base import BaseModel
from vkbottle.types import objects
Get = typing.List[objects.stats.Period]
class GetModel(BaseModel):
response: Get = None
GetPostReach = typing.List[objects.stats.WallpostStat]
class GetPostReachModel(BaseModel):
response: GetPostReach = None
|
py | 1a355121f256a5b74cadb9e599fc99807587d674 | import logging
import os
import random
import sys
import time
from threading import Thread
from termcolor import cprint
from core import OpenLeecher
from kbhit import KBHit
# Core class
# Handles the core, can be threaded
# Args : None
class Core(Thread):
def __init__(self):
Thread.__init__(self, target=self.run)
self.core = OpenLeecher()
def run(self):
self.core.run()
ol = Core()
# Show Help
# Display help in console
# Args : None
def show_help():
cprint("OpenLeecher v" + ol.core.VERSION, 'blue', attrs=['bold'])
cprint("Made by lwsk", 'blue')
cprint("https://openleecher.tk/\n", 'blue')
cprint("Command line arguments:", attrs=['bold'])
cprint("\t--gui : launch graphic user interface")
cprint("\t-b 0 : set behavior")
cprint("\t-g IPGenerator : set generator(s)")
cprint("\t-s WEBScanner FTPScanner : set scanner(s)")
cprint("\t-t 8 : set thread limit at 8")
cprint("\t-h : display help")
cprint("\nBehaviors", attrs=['bold'])
cprint("\t0 : Random")
cprint("\t1 : Intelligent")
cprint("\t2 : Complete")
cprint("\nAvailable generator(s):", attrs=['bold'])
for g in ol.core.generator.g:
cprint("\t" + g.__class__.__name__)
cprint("\nAvailable scanner(s):", attrs=['bold'])
for s in ol.core.scanner.s:
cprint("\t" + s.__class__.__name__)
# Reset UI
# Clear console screen and re-draw UI
# Args : None
def reset_UI():
os.system('cls' if os.name == 'nt' else 'clear')
print(""".-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-.
| |
| OpenLeecher v"""+ol.core.VERSION+""" |
| |
| Generators : |""")
for g in ol.core.generator.g:
if g.active is True:
print("""
| """+str(g.__class__.__name__).strip()+""" |""")
print("""
! !
: :
: Scanners : :""")
for s in ol.core.scanner.s:
if s.active is True:
print("""
. """+str(s.__class__.__name__).strip()+""" .""")
print("""
: :
: :
! !
| Controls : |
| 'o' Raise maximum thread limit |
| 'l' Lower maximum thread limit |
| space Pause/Resume generator |
| 'q' / esc Quit |
| |
`-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-'
""")
print("\n")
def run_core():
running = True
reset_UI()
kb = KBHit()
while running:
if kb.kbhit():
c = kb.getch()
if len(c) > 0:
                if c == 'q' or ord(c) == 27:  # esc; identity checks on ints are unreliable
running = False
elif c == 'o':
ol.core.maxthreads += 1
elif c == 'l':
if ol.core.maxthreads > 0:
ol.core.maxthreads -= 1
                elif ord(c) == 32:  # space
ol.core.paused = not ol.core.paused
#else:
#sys.stdout.write('\r' + 'Keycode not registered ' + str(ord(c)) + '\n')
pre = ""
if ol.core.paused is True:
pre += '[PAUSED] '
sys.stdout.write("\r" + pre + str(len(ol.core.threads)) + "/" + str(ol.core.maxthreads) + " threads running" + ('.' * random.randint(1, 3)) + " ")
# Notifications
if len(ol.core.scanner.results) > 0:
x = ol.core.scanner.results.pop(0)
sys.stdout.write("\r" + "Found " + str(x.p) + " @ " + str(x.v) + '\n')
time.sleep(0.04)
sys.stdout.write("\r" + "Quitting OpenLeecher..." + "\n")
logging.info("Quitting OpenLeecher...")
sys.exit(0)
def start():
if os.name == 'nt':
import gui
gui.launch()
else:
for i in range(1, len(sys.argv)):
a = sys.argv[i]
if str(a) == '-h':
show_help()
run = False
logging.info("Quitting OpenLeecher...")
sys.exit(0)
elif str(a) == '--gui':
import gui
gui.launch()
elif str(a) == '-t':
try:
ol.core.maxthreads = int(sys.argv[i + 1])
except:
run = False
cprint("Error !", 'red', attrs=['bold']),
print("Thread limit argument (-t) value is invalid")
elif str(a) == '-b':
try:
ol.core.behavior = int(sys.argv[i + 1])
except:
run = False
cprint("Error !", 'red', attrs=['bold']),
print("Thread limit argument (-t) value is invalid")
elif str(a) == '-g':
j = 1
try:
while str(sys.argv[i + j]).startswith('-') is False:
if ol.core.generator.activate(str(sys.argv[i + j])) is False:
cprint("Error !", 'red', attrs=['bold']),
print("Generator " + str(sys.argv[i + j]) + " cannot be loaded.")
logging.error("Error : Generator " + str(sys.argv[i + j]) + " cannot be loaded.")
break
j += 1
except:
pass
elif str(a) == '-s':
j = 1
try:
while str(sys.argv[i + j]).startswith('-') is False:
if ol.core.scanner.activate(str(sys.argv[i + j])) is False:
cprint("Error !", 'red', attrs=['bold']),
print("Scanner " + str(sys.argv[i + j]) + " cannot be loaded.")
logging.error("Error : Scanner " + str(sys.argv[i + j]) + " cannot be loaded.")
break
j += 1
except:
pass
if ol.core.can_run() is True:
ol.start()
run_core()
else:
cprint("Error !", 'red', attrs=['bold'])
print("There must be at least one generator and one scanner active.")
logging.error("Error : There must be at least one generator and one scanner active.")
logging.warning("Cannot start session.")
logging.info("Quitting OpenLeecher...")
sys.exit(0)
if __name__ == '__main__':
start()
sys.exit(0) |
py | 1a35512ed80df08923c153b1ea3120c825c79639 | import argparse
parser = argparse.ArgumentParser(description='Voice Activity Detection (VAD) using pytorch')
# mode to run the program
parser.add_argument('--mode', default='prediction', type=str, choices=['creating_data', 'training', 'prediction'])
# a set of dataset paths
parser.add_argument('--training-speech-dir', type=str, default='./dataset/raw_data/data', help='path where to find training speech data')
parser.add_argument('--test-speech-dir', type=str, default='./dataset/test_data', help='path where to find test speech data')
parser.add_argument('--label-dir', type=str, default='./dataset/raw_data/labels', help='path where to find segmented label of raw data')
parser.add_argument('--features-dir', type=str, default='./dataset/features', help='path where to save the feature and to load that')
parser.add_argument('--model-dir', type=str, default='./model', help='pretrained model directory')
parser.add_argument('--fig-path', type=str, default='./result/', help='path where to save the result figures')
# hyperparameters of creating the input images
parser.add_argument('--parallel', type=bool, default=False, help='using the cpu parallel processing')
parser.add_argument('--test_img_ratio', type=float, default=0.1, help='ratio dividing Nb of training dataset and Nb of test dataset')
# hyperparameters of model
parser.add_argument('--train_val_ratio', type=float, default=0.8, help='ratio dividing Nb of training dataset and Nb of validation dataset')
parser.add_argument('--baseline_val_loss', type=float, default=0.01, help='early stopping parameter to stop the training')
parser.add_argument('--batch-size', '-bs', type=int, default=32, help='batch size of training: (?, 16, 65)')
parser.add_argument('--epochs', '-e', type=int, default=20, help='Nb of epochs for training')
parser.add_argument('--learning-rate', '-lr', type=float, default=0.00001, help='learning rate for training')
parser.add_argument('--n-filters', type=str, default='32-64-128')
parser.add_argument('--n-kernels', type=str, default='8-5-3')
parser.add_argument('--n-fc-units', type=str, default='2048-2048')
parser.add_argument('--n-classes', '-n', type=int, default=2, help='the number of classes')
# post-processing and visualization options
parser.add_argument('--smoothing', type=bool, default=True, help='apply smoothing feature')
parser.add_argument('--visualize', type=bool, default=False, help='visualization') |
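# Minimal self-check sketch (added; an assumption about usage -- the actual
# training/prediction entry points are expected to import `parser` and call
# parse_args() themselves):
if __name__ == '__main__':
    print(parser.parse_args())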
py | 1a35516363b5e910638aa6be23b33978ed5feea5 | import numpy as np
import cv2
class UISketch:
def __init__(self, img_size, img_path, scale, accu=True, nc=3):
self.img_size = img_size
self.scale = scale
self.nc = nc
if img_path is not "":
self.img = cv2.imread(img_path)
self.mask = cv2.imread(img_path,cv2.IMREAD_GRAYSCALE)
self.mask = np.expand_dims(self.mask,axis=2)
else:
self.img = np.zeros((img_size, img_size, self.nc), np.uint8)
self.mask = np.zeros((img_size, img_size, 1), np.uint8)
if self.nc == 1: # [hack]
self.width = 2
else:
self.width = 1
def update(self, points, color):
num_pnts = len(points)
c = 255 - int(color.red())
if c > 0:
c = 255
for i in range(0, num_pnts - 1):
pnt1 = (int(points[i].x()/self.scale), int(points[i].y()/self.scale))
pnt2 = (int(points[i + 1].x()/self.scale), int(points[i + 1].y()/self.scale))
if self.nc == 3:
cv2.line(self.img, pnt1, pnt2, (c,c,c), self.width)
else:
cv2.line(self.img, pnt1, pnt2, c, self.width)
cv2.line(self.mask, pnt1, pnt2, 255, self.width)
def update_width(self, d, color):
self.width = min(20, max(1, self.width+ d))
return self.width
def get_constraints(self):
return self.img, self.mask
def reset(self):
self.img = np.zeros((self.img_size, self.img_size, self.nc), np.uint8)
self.mask = np.zeros((self.img_size, self.img_size, 1), np.uint8)
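# --- Illustrative usage sketch (added; not part of the original class). ---
# update() expects Qt-style point objects (with .x()/.y() methods) and a
# QColor-like object (with .red()); _Pt and _Black below are hypothetical
# stand-ins so the sketch runs without a GUI toolkit. Note the inverted pen:
# a black color (red() == 0) paints white strokes onto the canvas.
if __name__ == '__main__':
    class _Pt(object):
        def __init__(self, x, y):
            self._x, self._y = x, y
        def x(self):
            return self._x
        def y(self):
            return self._y
    class _Black(object):
        def red(self):
            return 0
    sketch = UISketch(img_size=128, img_path="", scale=1.0)
    sketch.update([_Pt(10, 10), _Pt(100, 100)], _Black())
    img, mask = sketch.get_constraints()
    print(img.shape, mask.shape)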
|
py | 1a35517e223d5b736e5178edf4865f11d271f64d | from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import get_object_or_404, render, redirect
from django.urls import reverse_lazy
from django.forms import formset_factory
from django.views.generic import (
CreateView, DetailView,
FormView,
ListView,
TemplateView,
UpdateView,
RedirectView,
)
from django.conf import settings
from django.db.models import Count
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.decorators import login_required
import json
from .forms import (
CommentCreateForm,
DocumentUploadForm, LogbookCreateForm,
MarkForm,
BaseMarkFormSet,
StudentGroupForm,
StudentGroupJoinForm,
)
from .mixins import (
StudentGroupContextMixin, UserHasGroupAccessMixin, UserIsStudentMixin,
UserIsTeacherMixin)
from .models import Batch, Comment, Document, Logbook, StudentGroup, Notification
from ..registration.models import User
class GroupCreateJoinView(
LoginRequiredMixin, UserIsStudentMixin, TemplateView):
http_method_names = ['get']
template_name = 'thesis/group_create_join.html'
class GroupCreateView(LoginRequiredMixin, UserIsStudentMixin, CreateView):
model = StudentGroup
form_class = StudentGroupForm
success_url = reverse_lazy('thesis:document_list')
template_name = 'thesis/group_create.html'
http_method_names = ['get', 'post']
def get_form(self, form_class=None):
if form_class is None:
form_class = self.get_form_class()
return form_class(user=self.request.user, **self.get_form_kwargs())
def form_valid(self, form):
self.object = studentgroup = form.save()
user = self.request.user
user.studentgroup = studentgroup
user.save()
messages.success(
self.request,
'Created Group Successfully!',
extra_tags='is-success')
return HttpResponseRedirect(self.get_success_url())
class GroupJoinView(LoginRequiredMixin, UserIsStudentMixin, FormView):
model = StudentGroup
form_class = StudentGroupJoinForm
success_url = reverse_lazy('thesis:document_list')
template_name = 'thesis/group_join.html'
http_method_names = ['get', 'post']
def form_valid(self, form):
md5hash = form.cleaned_data.get('md5hash')
studentgroup = get_object_or_404(StudentGroup, md5hash=md5hash)
if studentgroup.status != 'Pending':
messages.error(
self.request,
"The Group has already been approved by admin. You can not join this group.",
extra_tags='is-danger',
)
return HttpResponseRedirect('/group/join/')
batch = studentgroup.batch
students_count = studentgroup.students.all().count()
if students_count >= batch.max_students_per_group:
messages.error(
self.request,
'The Group has already reached maximum capacity',
extra_tags='is-danger'
)
return HttpResponseRedirect('/group/join/')
user = self.request.user
user.studentgroup = studentgroup
user.save()
messages.success(
self.request,
'You joined the Group successfully!',
extra_tags='is-success')
return HttpResponseRedirect(self.get_success_url())
class DocumentListView(
LoginRequiredMixin, UserHasGroupAccessMixin,
StudentGroupContextMixin, ListView):
template_name = 'thesis/document_list.html'
http_method_names = ['get']
context_object_name = 'proposal_documents'
def filter_by_document_type(self, document_type):
return self.studentgroup.documents.filter(
document_type=document_type,
).order_by(
'-is_accepted', '-upload_time',
)
def get_queryset(self):
return self.filter_by_document_type(Document.DocumentType.PROPOSAL.value)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['comments'] = self.studentgroup.comments.order_by(
'-created_at')
context['pre_defense_documents'] = self.filter_by_document_type(
Document.DocumentType.PRE_DEFENSE.value)
context['defense_documents'] = self.filter_by_document_type(
Document.DocumentType.DEFENSE.value)
context['logbooks'] = self.studentgroup.logbooks.all().order_by('-time')
return context
def get(self, request, *args, **kwargs):
notifications = Notification.objects.filter(
user=request.user,
studentgroup=self.studentgroup,
is_viewed=False,
)
for notification in notifications:
notification.is_viewed = True
Notification.objects.bulk_update(notifications, ['is_viewed'])
response = super().get(request, *args, **kwargs)
return response
class DocumentUploadView(
LoginRequiredMixin, UserIsStudentMixin, UserHasGroupAccessMixin,
StudentGroupContextMixin, CreateView):
model = Document
template_name = 'thesis/document_upload.html'
form_class = DocumentUploadForm
success_url = reverse_lazy('thesis:document_list')
http_method_names = ['get', 'post']
def form_valid(self, form):
self.object = document = form.save(commit=False)
document.studentgroup = self.studentgroup
document.save()
messages.success(
self.request,
'Document Uploaded successfully!',
extra_tags='is-success')
return HttpResponseRedirect(self.get_success_url())
class LogbookCreateView(
LoginRequiredMixin, UserIsStudentMixin, UserHasGroupAccessMixin,
StudentGroupContextMixin, CreateView):
model = Logbook
template_name = 'thesis/logbook_upload.html'
form_class = LogbookCreateForm
success_url = reverse_lazy('thesis:document_list')
http_method_names = ['get', 'post']
def get_form(self, *args, **kwargs):
return self.form_class(
studentgroup=self.studentgroup,
**self.get_form_kwargs(),
)
def form_valid(self, form):
messages.success(
self.request,
'Logbook Created Successfully!',
extra_tags='is-success',
)
return super().form_valid(form)
class LogbookDetailView(
LoginRequiredMixin, UserHasGroupAccessMixin, StudentGroupContextMixin,
DetailView):
model = Logbook
context_object_name = 'logbook'
template_name = 'thesis/logbook_details.html'
class LogbookApprovedToggleView(
LoginRequiredMixin,
UserIsTeacherMixin,
StudentGroupContextMixin,
RedirectView):
def get_redirect_url(self, *args, **kwargs):
logbook = get_object_or_404(
Logbook,
studentgroup=self.studentgroup,
id=self.kwargs['logbook_id'],
)
logbook.approved = not logbook.approved
logbook.save()
if logbook.approved:
messages.success(
self.request,
f'Logbook {logbook.id} has been approved',
extra_tags='is-success',
)
else:
messages.error(
self.request,
f'Logbook #{logbook.id} has been disapproved',
extra_tags='is-danger',
)
return reverse_lazy(
'thesis:group_detail',
args=(self.studentgroup.md5hash,),
)
class DocumentAcceptedToggleView(
LoginRequiredMixin,
UserIsTeacherMixin,
StudentGroupContextMixin,
RedirectView):
def get_redirect_url(self, *args, **kwargs):
document = get_object_or_404(
Document,
studentgroup=self.studentgroup,
id=self.kwargs['document_id'],
)
document.is_accepted = not document.is_accepted
document.save()
if document.is_accepted:
messages.success(
self.request, 'Document has been approved', extra_tags='is-success')
else:
messages.error(
self.request, 'Document has been disapproved', extra_tags='is-danger')
return reverse_lazy('thesis:group_detail', args=(self.studentgroup.md5hash,))
class GroupInviteView(
LoginRequiredMixin, UserIsStudentMixin, UserHasGroupAccessMixin,
StudentGroupContextMixin, TemplateView):
http_method_names = ['get']
template_name = "thesis/group_invite.html"
class BaseGroupListView(LoginRequiredMixin, UserIsTeacherMixin, ListView):
template_name = "thesis/group_list.html"
http_method_names = ['get']
context_object_name = 'groups'
def get_context_data(self, *args, object_list=None, **kwargs):
context_data = super().get_context_data(
*args, object_list=object_list, **kwargs)
batch_number = self.kwargs.get('batch_number', '')
context_data['batches'] = Batch.objects.all()
context_data['batch_number'] = int(
batch_number) if batch_number else ''
return context_data
def get_studentgroups(self, studentgroup_related_name):
user = self.request.user
queryset = getattr(user, studentgroup_related_name).filter(
approved=True).order_by('id')
batch_number = self.kwargs.get('batch_number', '')
if batch_number:
return queryset.filter(batch__number=batch_number)
return queryset
class GroupListView(BaseGroupListView):
def get_queryset(self):
return self.get_studentgroups('studentgroups')
class InternalGroupListView(BaseGroupListView):
def get_queryset(self):
return self.get_studentgroups('internal_studentgroups')
class ExternalGroupListView(BaseGroupListView):
def get_queryset(self):
return self.get_studentgroups('external_studentgroups')
class NotificationListView(LoginRequiredMixin, ListView):
template_name = "thesis/notification_list.html"
http_method_names = ['get']
context_object_name = 'notifications'
def get_queryset(self):
return Notification.objects.filter(user=self.request.user, is_viewed=False).order_by('-created_at')
class GroupUpdateView(
LoginRequiredMixin, UserIsStudentMixin, UserHasGroupAccessMixin,
UpdateView):
model = StudentGroup
template_name = "thesis/group_update.html"
http_method_names = ['get', 'post']
form_class = StudentGroupForm
success_url = reverse_lazy('thesis:document_list')
def get_form(self, form_class=None):
if form_class is None:
form_class = self.get_form_class()
return form_class(user=self.request.user, **self.get_form_kwargs())
def get_object(self, *args, **kwargs):
return self.request.user.studentgroup
def form_valid(self, form):
response = super().form_valid(form)
messages.success(
self.request,
'Group Updated Successfully!',
extra_tags='is-success')
return response
class CommentCreateView(
LoginRequiredMixin, UserHasGroupAccessMixin, StudentGroupContextMixin,
CreateView):
model = Comment
http_method_names = ['post']
form_class = CommentCreateForm
success_url = reverse_lazy('thesis:document_list')
def get_success_url(self, *args, **kwargs):
if self.request.user.is_teacher:
return reverse_lazy(
'thesis:group_detail',
kwargs={'group_code': self.studentgroup.md5hash})
return self.success_url
def form_valid(self, form):
self.object = comment = form.save(commit=False)
comment.user = self.request.user
comment.studentgroup = self.studentgroup
comment.save()
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form):
messages.error(
self.request,
'Comment can not be empty.',
extra_tags='is-danger'
)
return HttpResponseRedirect(self.get_success_url())
class StudentGroupApproveView(
LoginRequiredMixin, UserIsTeacherMixin, StudentGroupContextMixin,
TemplateView):
http_method_names = ['get', 'post']
template_name = 'thesis/group_approve.html'
def post(self, request, *args, **kwargs):
if self.studentgroup.approved:
self.studentgroup.approved = False
if self.studentgroup.progress == 100:
self.studentgroup.progress = 90
messages.success(
request,
                "The StudentGroup's proposal has been disapproved!",
extra_tags='is-success')
else:
self.studentgroup.approved = True
self.studentgroup.progress = 100
messages.success(
request,
                "The StudentGroup's proposal has been approved!",
extra_tags='is-success')
self.studentgroup.save()
return HttpResponseRedirect(
reverse_lazy(
'thesis:group_detail',
kwargs={'group_code': self.studentgroup.md5hash}))
class StudentGroupProgressUpdateView(
LoginRequiredMixin, UserIsTeacherMixin, StudentGroupContextMixin,
TemplateView):
http_method_names = ['post']
def post(self, request, *args, **kwargs):
data = json.loads(str(request.body.decode('utf-8')))
progress_value = int(data.get('progress_value'))
if progress_value > 100:
progress_value = 100
elif progress_value < 0:
progress_value = 0
self.studentgroup.progress = progress_value
self.studentgroup.save()
return JsonResponse({'progress_value': progress_value})
@login_required
def get_teachers_list_by_field_json(request, field_id):
available_teachers = User.objects.values(
'id', 'username', 'full_name',
group_count=Count('studentgroups')).filter(
fields__id=field_id,
group_count__lt=settings.MAXIMUM_GROUPS_UNDER_TEACHER
)
data = json.dumps(list(available_teachers), cls=DjangoJSONEncoder)
return JsonResponse(data, safe=False,)
@login_required
def grade_students(request, group_code):
studentgroup = get_object_or_404(StudentGroup, md5hash=group_code)
user = request.user
students = studentgroup.students.all().order_by('username')
students_count = students.count()
MarkFormSet = formset_factory(
MarkForm,
extra=students_count,
min_num=students_count,
max_num=students_count,
validate_min=True,
validate_max=True,
formset=BaseMarkFormSet,
)
formset_initial = {
"form_kwargs": {
'user': user,
'studentgroup': studentgroup,
},
"initial": [{"student_choice": student.id} for student in students],
}
if request.method == 'POST':
formset = MarkFormSet(request.POST, **formset_initial)
if formset.is_valid():
formset.save()
messages.success(
request,
'Grades have been submitted successfully.',
extra_tags='is-success')
return redirect(
reverse_lazy(
'thesis:group_detail',
args=(group_code,),
),
)
else:
return render(
request,
'thesis/create-mark.html',
context={
'studentgroup': studentgroup,
'user': user,
'formset': formset,
},
)
return render(
request,
'thesis/create-mark.html',
context={
'studentgroup': studentgroup,
'user': user,
'formset': MarkFormSet(**formset_initial)
},
)
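# Illustrative URL wiring (a sketch, not part of this module; the route names
# mirror the reverse_lazy() calls above, but this project's real urls.py may
# differ):
#   from django.urls import path
#   from . import views
#   app_name = 'thesis'
#   urlpatterns = [
#       path('documents/', views.DocumentListView.as_view(),
#            name='document_list'),
#       path('group/<str:group_code>/grade/', views.grade_students,
#            name='grade_students'),
#   ]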
|
py | 1a355200fb137878bc6d8745d038faecd2afc9e8 | # src/lyrical/ovh.py
"""Client for the lyrics.ovh REST API."""
from concurrent.futures import as_completed
from dataclasses import dataclass
from typing import List
from urllib.parse import unquote, urlparse
import click
import desert
import marshmallow
import requests
from requests.adapters import HTTPAdapter
from requests_futures.sessions import FuturesSession
from urllib3.util.retry import Retry
from . import __version__
USER_AGENT: str = f"Lyrical/{__version__} ( https://github.com/openfinch/lyrical )"
LYRICS_API_URL: str = "https://api.lyrics.ovh/v1/{artist}/{title}"
@dataclass
class LyricsCorpus:
"""LyricsCorpus resource.
Attributes:
title: Title of the track
artist: Name of the artist
lyrics: lyrics of the track
"""
title: str
artist: str
lyrics: str
lyrics_schema = desert.schema(LyricsCorpus, meta={"unknown": marshmallow.EXCLUDE})
def build_corpus(artist: str, tracklist: List) -> List[LyricsCorpus]:
"""Build a lyrics corpus.
    Performs GET requests against the lyrics.ovh /v1/{artist}/{title} endpoint.
Args:
artist: The name of the artist
tracklist: A list of track names
Returns:
A LyricsCorpus resource
Raises:
ClickException: The HTTP request failed or the HTTP response
contained an invalid body.
"""
urls = []
batch_size = 10
corpus = []
try:
        # Generate one lyrics API URL per track
for track in tracklist:
urls.append(LYRICS_API_URL.format(artist=artist, title=track))
# Generate and iterate over a set of request futures
with FuturesSession(max_workers=batch_size) as session:
retries = 5
status_forcelist = [503]
retry = Retry(
total=retries,
read=retries,
connect=retries,
respect_retry_after_header=True,
backoff_factor=1,
status_forcelist=status_forcelist,
)
adapter = HTTPAdapter(max_retries=retry)
session.mount("https://", adapter)
futures = [
session.get(url, headers={"User-Agent": USER_AGENT}) for url in urls
]
for future in as_completed(futures):
resp = future.result()
if resp.status_code == 200:
resp_json = resp.json()
url = urlparse(resp.url)
path = url.path.split("/")
                    # Skip instrumental tracks and tracks with empty lyrics.
                    if (
                        resp_json["lyrics"] != "Instrumental"
                        and len(resp_json["lyrics"]) > 0
                    ):
lyric = lyrics_schema.load(
{
"artist": unquote(path[2]),
"title": unquote(path[3]),
"lyrics": resp_json["lyrics"],
}
)
corpus.append(lyric)
return corpus
except (requests.RequestException, marshmallow.ValidationError) as error:
message: str = str(error)
raise click.ClickException(message)
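# --- Illustrative usage sketch (added): the artist and track names are
# arbitrary examples, and the call performs live requests against lyrics.ovh.
if __name__ == '__main__':
    corpus = build_corpus('Daft Punk', ['Around the World', 'One More Time'])
    for entry in corpus:
        print('{} - {}: {} characters of lyrics'.format(
            entry.artist, entry.title, len(entry.lyrics)))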
|
py | 1a3553ae9e25908aa31290fd7a63f69e85bb244f | # qubit number=4
# total number=46
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += CNOT(0,3) # number=13
prog += CNOT(0,3) # number=17
prog += X(3) # number=18
prog += CNOT(0,3) # number=19
prog += CNOT(0,3) # number=15
prog += H(1) # number=2
prog += H(2) # number=3
prog += H(3) # number=4
prog += Y(3) # number=12
prog += H(0) # number=5
prog += H(1) # number=6
prog += H(2) # number=7
prog += H(3) # number=37
prog += CZ(0,3) # number=38
prog += H(3) # number=39
prog += CNOT(0,3) # number=40
prog += X(3) # number=41
prog += H(3) # number=43
prog += CZ(0,3) # number=44
prog += H(3) # number=45
prog += H(3) # number=30
prog += CZ(0,3) # number=31
prog += H(3) # number=32
prog += H(0) # number=33
prog += CZ(3,0) # number=34
prog += RX(0.33300882128051834,2) # number=36
prog += H(0) # number=35
prog += CNOT(3,0) # number=23
prog += Z(3) # number=24
prog += CNOT(3,0) # number=25
prog += CNOT(3,0) # number=22
prog += H(3) # number=8
prog += H(0) # number=9
prog += Y(2) # number=10
prog += Y(2) # number=11
# circuit end
return prog
def summarise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil3083.csv","w")
    print(summarise_results(bitstrings), file=writefile)
writefile.close()
|
py | 1a3553cf5c0c0172ce7f5f4351b47b97f2896f39 | #!/usr/bin/env python3
import pandas as pd
import seaborn as sns
import sys
import matplotlib.pyplot as plt
import numpy as np
from macros import colors
def algorithm_font(algorithm):
return r'\textsf{{{}}}'.format(algorithm)
def combined(algorithm, regularity):
return '{}-{}'.format(algorithm, regularity)
def plot_tw_vs_simtime(data_filename, plot_filename, verbose):
# Use latex font
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
# Set up Seaborn style
sns.set(style="darkgrid")
# Import the dataframe
dataframe = pd.read_csv(data_filename)
dataframe = dataframe.sort_values(by=['algorithm'])
# Keep the data we want for the large facet
dataframe = dataframe.dropna(subset=['tree-decomp-width'])
dataframe = dataframe.loc[(dataframe['algorithm'] != 'quickbb')]
dataframe = dataframe.loc[dataframe['vertices'].isin([10, 14, 18,
22, 26, 30])]
dataframe['tree-decomp-width'] =\
pd.to_numeric(dataframe['tree-decomp-width'], downcast='integer')
dataframe['algorithm'] =\
np.vectorize(algorithm_font)(dataframe['algorithm'])
# If we want to have a different color for algorithm + regularity
# dataframe['combined'] =\
# np.vectorize(combined)(dataframe['algorithm'], dataframe['regularity'])
plot = sns.stripplot(x="tree-decomp-width",
y="simulation-time",
hue="algorithm",
data=dataframe,
dodge=True,
size=4,
jitter=True,
alpha=0.7,
linewidth=0.1,
palette=[colors[x] for x in ['freetdi', 'meiji']],
hue_order=['\\textsf{freetdi}', '\\textsf{meiji-e}'])
for i in range(len(dataframe["tree-decomp-width"].unique()) - 1):
plot.axvline(x=i+.5, c="white", dashes=(2, 1))
plot.set(ylim=(.01, 10000), yscale="log")
plot.set(xlabel="Contraction Complexity",
ylabel="Simulation Time (sec)")
# Add legend
plot.legend(loc="upper right")
# Save figure
for extension in ['.pdf', '.png']:
plt.savefig(plot_filename + extension)
if __name__ == '__main__':
data_filename = sys.argv[1]
plot_filename = sys.argv[2]
plot_tw_vs_simtime(data_filename, plot_filename, False)
|
py | 1a35542d3bce225a136eb16b5ae8169f6ff51e85 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Dataset is currently unstable. APIs subject to change without notice."""
import pyarrow as pa
from pyarrow.util import _stringify_path, _is_path_like
from pyarrow._dataset import ( # noqa
AndExpression,
CastExpression,
CompareOperator,
ComparisonExpression,
Dataset,
DatasetFactory,
DirectoryPartitioning,
Expression,
FieldExpression,
FileFormat,
FileSystemSource,
FileSystemSourceFactory,
FileSystemFactoryOptions,
HivePartitioning,
InExpression,
IpcFileFormat,
IsValidExpression,
NotExpression,
OrExpression,
ParquetFileFormat,
Partitioning,
PartitioningFactory,
ScalarExpression,
Scanner,
ScanTask,
Source,
TreeSource,
SourceFactory
)
def partitioning(schema=None, field_names=None, flavor=None):
"""
Specify a partitioning scheme.
The supported schemes include:
- "DirectoryPartitioning": this scheme expects one segment in the file path
for each field in the specified schema (all fields are required to be
present). For example given schema<year:int16, month:int8> the path
"/2009/11" would be parsed to ("year"_ == 2009 and "month"_ == 11).
- "HivePartitioning": a scheme for "/$key=$value/" nested directories as
found in Apache Hive. This is a multi-level, directory based partitioning
scheme. Data is partitioned by static values of a particular column in
the schema. Partition keys are represented in the form $key=$value in
directory names. Field order is ignored, as are missing or unrecognized
field names.
For example, given schema<year:int16, month:int8, day:int8>, a possible
path would be "/year=2009/month=11/day=15" (but the field order does not
need to match).
Parameters
----------
schema : pyarrow.Schema, default None
The schema that describes the partitions present in the file path.
If not specified, and `field_names` and/or `flavor` are specified,
the schema will be inferred from the file path (and a
PartitioningFactory is returned).
field_names : list of str, default None
A list of strings (field names). If specified, the schema's types are
inferred from the file paths (only valid for DirectoryPartitioning).
flavor : str, default None
The default is DirectoryPartitioning. Specify ``flavor="hive"`` for
a HivePartitioning.
Returns
-------
Partitioning or PartitioningFactory
Examples
--------
Specify the Schema for paths like "/2009/June":
>>> partitioning(pa.schema([("year", pa.int16()), ("month", pa.string())]))
or let the types be inferred by only specifying the field names:
>>> partitioning(field_names=["year", "month"])
For paths like "/2009/June", the year will be inferred as int32 while month
will be inferred as string.
Create a Hive scheme for a path like "/year=2009/month=11":
>>> partitioning(
... pa.schema([("year", pa.int16()), ("month", pa.int8())]),
... flavor="hive")
A Hive scheme can also be discovered from the directory structure (and
types will be inferred):
>>> partitioning(flavor="hive")
"""
if flavor is None:
# default flavor
if schema is not None:
if field_names is not None:
raise ValueError(
"Cannot specify both 'schema' and 'field_names'")
return DirectoryPartitioning(schema)
elif field_names is not None:
if isinstance(field_names, list):
return DirectoryPartitioning.discover(field_names)
else:
raise ValueError(
"Expected list of field names, got {}".format(
type(field_names)))
else:
raise ValueError(
"For the default directory flavor, need to specify "
"a Schema or a list of field names")
elif flavor == 'hive':
if field_names is not None:
raise ValueError("Cannot specify 'field_names' for flavor 'hive'")
elif schema is not None:
if isinstance(schema, pa.Schema):
return HivePartitioning(schema)
else:
raise ValueError(
"Expected Schema for 'schema', got {}".format(
type(schema)))
else:
return HivePartitioning.discover()
else:
raise ValueError("Unsupported flavor")
def _ensure_fs(filesystem, path):
# Validate or infer the filesystem from the path
from pyarrow.fs import FileSystem, LocalFileSystem
if filesystem is None:
try:
filesystem, _ = FileSystem.from_uri(path)
except Exception:
# when path is not found, we fall back to local file system
filesystem = LocalFileSystem()
return filesystem
def _ensure_fs_and_paths(path_or_paths, filesystem=None):
# Validate and convert the path-likes and filesystem.
# Returns filesystem and list of string paths or FileSelector
from pyarrow.fs import FileType, FileSelector
if isinstance(path_or_paths, list):
paths_or_selector = [_stringify_path(path) for path in path_or_paths]
# infer from first path
filesystem = _ensure_fs(filesystem, paths_or_selector[0])
else:
path = _stringify_path(path_or_paths)
filesystem = _ensure_fs(filesystem, path)
stats = filesystem.get_target_stats([path])[0]
if stats.type == FileType.Directory:
# for directory, pass a selector
paths_or_selector = FileSelector(path, recursive=True)
elif stats.type == FileType.File:
# for a single file path, pass it as a list
paths_or_selector = [path]
else:
raise FileNotFoundError(path)
return filesystem, paths_or_selector
def _ensure_partitioning(scheme):
# Validate input and return a Partitioning(Factory) or passthrough None
# for no partitioning
if scheme is None:
pass
elif isinstance(scheme, str):
scheme = partitioning(flavor=scheme)
elif isinstance(scheme, list):
scheme = partitioning(field_names=scheme)
elif isinstance(scheme, (Partitioning, PartitioningFactory)):
pass
else:
        raise ValueError(
"Expected Partitioning or PartitioningFactory, got {}".format(
type(scheme)))
return scheme
def _ensure_format(obj):
if isinstance(obj, FileFormat):
return obj
elif obj == "parquet":
return ParquetFileFormat()
elif obj == "ipc":
return IpcFileFormat()
else:
raise ValueError("format '{}' is not supported".format(obj))
def source(path_or_paths, filesystem=None, partitioning=None,
format=None):
"""
Open a (multi-file) data source.
Parameters
----------
path_or_paths : str, pathlib.Path, or list of those
Path to a file or to a directory containing the data files, or
a list of paths.
filesystem : FileSystem, default None
By default will be inferred from the path.
partitioning : Partitioning(Factory), str or list of str
The partitioning scheme specified with the ``partitioning()``
function. A flavor string can be used as shortcut, and with a list of
        field names a DirectoryPartitioning will be inferred.
format : str, default None
Currently only "parquet" is supported.
Returns
-------
    FileSystemSourceFactory
"""
fs, paths_or_selector = _ensure_fs_and_paths(path_or_paths, filesystem)
partitioning = _ensure_partitioning(partitioning)
format = _ensure_format(format or "parquet")
# TODO pass through options
options = FileSystemFactoryOptions()
if isinstance(partitioning, PartitioningFactory):
options.partitioning_factory = partitioning
elif isinstance(partitioning, Partitioning):
options.partitioning = partitioning
return FileSystemSourceFactory(fs, paths_or_selector, format, options)
def _ensure_source(src, **kwargs):
# Need to return SourceFactory since `dataset` might need to finish the
# factory with a unified schema.
# TODO: return Source if a specific schema was passed?
if _is_path_like(src):
return source(src, **kwargs)
elif isinstance(src, SourceFactory):
if any(v is not None for v in kwargs.values()):
# when passing a SourceFactory, the arguments cannot be specified
raise ValueError(
"When passing a Source(Factory), you cannot pass any "
"additional arguments"
)
return src
elif isinstance(src, Source):
raise TypeError(
"Source objects are currently not supported, only SourceFactory "
"instances. Use the source() function to create such objects."
)
else:
raise TypeError(
"Expected a path-like or Source, got {}".format(type(src))
)
def dataset(sources, filesystem=None, partitioning=None, format=None):
"""
Open a (multi-source) dataset.
Parameters
----------
sources : path or list of paths or source or list of sources
Path to a file or to a directory containing the data files, or a list
of paths for a multi-source dataset. To have more control, a list of
sources can be passed, created with the ``source()`` function (in this
case, the additional keywords will be ignored).
filesystem : FileSystem, default None
By default will be inferred from the path.
partitioning : Partitioning(Factory), str, list of str
The partitioning scheme specified with the ``partitioning()``
function. A flavor string can be used as shortcut, and with a list of
        field names a DirectoryPartitioning will be inferred.
format : str
Currently only "parquet" is supported.
Returns
-------
Dataset
Examples
--------
Opening a dataset for a single directory:
>>> dataset("path/to/nyc-taxi/", format="parquet")
Combining different sources:
>>> dataset([
... source("s3://old-taxi-data", format="parquet"),
... source("local/path/to/new/data", format="csv")
... ])
"""
if not isinstance(sources, list):
sources = [sources]
sources = [
_ensure_source(src, filesystem=filesystem, partitioning=partitioning,
format=format)
for src in sources
]
return DatasetFactory(sources).finish()
def field(name):
"""References a named column of the dataset.
Stores only the field's name. Type and other information is known only when
the expression is applied on a dataset having an explicit scheme.
Parameters
----------
name : string
The name of the field the expression references to.
Returns
-------
field_expr : FieldExpression
"""
return FieldExpression(name)
def scalar(value):
"""Expression representing a scalar value.
Parameters
----------
value : bool, int, float or string
Python value of the scalar. Note that only a subset of types are
currently supported.
Returns
-------
scalar_expr : ScalarExpression
"""
return ScalarExpression(value)
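# Illustrative sketch (added): composing the helpers above into a filter
# expression; CompareOperator.Equal is assumed to be the equality member of
# the imported CompareOperator enum.
#
#   >>> expr = ComparisonExpression(
#   ...     CompareOperator.Equal, field("year"), scalar(2009))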
|
py | 1a355435772cf0884b74189f7a18802602020a9b | # Copyright (c) Yugabyte, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations
# under the License.
#
import argparse
import hashlib
import json
import os
import platform
import subprocess
import sys
import yaml
from typing import Optional, List, Set, Tuple, Dict, Any
from build_definitions import BUILD_TYPE_COMMON, get_build_def_module, BUILD_TYPE_UNINSTRUMENTED, \
BUILD_TYPE_CLANG_UNINSTRUMENTED, BUILD_TYPE_ASAN, BUILD_TYPE_TSAN, BUILD_TYPES, \
BUILD_GROUP_COMMON, BUILD_GROUP_INSTRUMENTED
from yugabyte_db_thirdparty.builder_helpers import PLACEHOLDER_RPATH, get_make_parallelism, \
get_rpath_flag, sanitize_flags_line_for_log, log_and_set_env_var_to_list
from yugabyte_db_thirdparty.builder_helpers import is_ninja_available
from yugabyte_db_thirdparty.builder_interface import BuilderInterface
from yugabyte_db_thirdparty.cmd_line_args import parse_cmd_line_args
from yugabyte_db_thirdparty.compiler_choice import CompilerChoice
from yugabyte_db_thirdparty.custom_logging import fatal, log, heading, log_output, colored_log, \
YELLOW_COLOR, SEPARATOR
from yugabyte_db_thirdparty.dependency import Dependency
from yugabyte_db_thirdparty.devtoolset import activate_devtoolset
from yugabyte_db_thirdparty.download_manager import DownloadManager
from yugabyte_db_thirdparty.env_helpers import write_env_vars
from yugabyte_db_thirdparty.os_detection import is_mac, is_linux
from yugabyte_db_thirdparty.string_util import indent_lines
from yugabyte_db_thirdparty.util import YB_THIRDPARTY_DIR, remove_path, \
mkdir_if_missing, PushDir, assert_list_contains, assert_dir_exists, EnvVarContext
from yugabyte_db_thirdparty.file_system_layout import FileSystemLayout
from yugabyte_db_thirdparty.toolchain import Toolchain, ensure_toolchain_installed
ASAN_FLAGS = [
'-fsanitize=address',
'-fsanitize=undefined',
'-DADDRESS_SANITIZER',
]
TSAN_FLAGS = [
'-fsanitize=thread',
'-DTHREAD_SANITIZER',
]
class Builder(BuilderInterface):
    """
    This class manages the overall process of building third-party dependencies, including the set
    of dependencies to build, build types, and the directories to install dependencies.
    """
    args: argparse.Namespace
    ld_flags: List[str]
    executable_only_ld_flags: List[str]
    compiler_flags: List[str]
    preprocessor_flags: List[str]
    c_flags: List[str]
    cxx_flags: List[str]
    libs: List[str]
    additional_allowed_shared_lib_paths: Set[str]
    download_manager: DownloadManager
    compiler_choice: CompilerChoice
    fs_layout: FileSystemLayout
    fossa_modules: List[Any]
    toolchain: Optional[Toolchain]
    remote_build: bool
def __init__(self) -> None:
self.fs_layout = FileSystemLayout()
self.linuxbrew_dir = None
self.additional_allowed_shared_lib_paths = set()
self.toolchain = None
self.fossa_modules = []
def parse_args(self) -> None:
self.args = parse_cmd_line_args()
self.remote_build = self.args.remote_build_server and self.args.remote_build_dir
if self.remote_build:
return
if self.args.make_parallelism:
os.environ['YB_MAKE_PARALLELISM'] = str(self.args.make_parallelism)
self.download_manager = DownloadManager(
should_add_checksum=self.args.add_checksum,
download_dir=self.fs_layout.tp_download_dir)
single_compiler_type = None
if self.args.toolchain:
self.toolchain = ensure_toolchain_installed(
self.download_manager, self.args.toolchain)
compiler_prefix = self.toolchain.toolchain_root
if self.toolchain.toolchain_type != 'linuxbrew':
single_compiler_type = self.toolchain.get_compiler_type()
self.toolchain.write_url_and_path_files()
else:
compiler_prefix = self.args.compiler_prefix
single_compiler_type = self.args.single_compiler_type
self.compiler_choice = CompilerChoice(
single_compiler_type=single_compiler_type,
compiler_prefix=compiler_prefix,
compiler_suffix=self.args.compiler_suffix,
devtoolset=self.args.devtoolset,
use_compiler_wrapper=self.args.use_compiler_wrapper,
use_ccache=self.args.use_ccache
)
def finish_initialization(self) -> None:
self.compiler_choice.finish_initialization()
self.populate_dependencies()
self.select_dependencies_to_build()
if self.compiler_choice.devtoolset is not None:
activate_devtoolset(self.compiler_choice.devtoolset)
def populate_dependencies(self) -> None:
# We have to use get_build_def_module to access submodules of build_definitions,
# otherwise MyPy gets confused.
self.dependencies = [
# Avoiding a name collision with the standard zlib module, hence "zlib_dependency".
get_build_def_module('zlib_dependency').ZLibDependency(),
get_build_def_module('lz4').LZ4Dependency(),
get_build_def_module('openssl').OpenSSLDependency(),
get_build_def_module('libev').LibEvDependency(),
get_build_def_module('rapidjson').RapidJsonDependency(),
get_build_def_module('squeasel').SqueaselDependency(),
get_build_def_module('curl').CurlDependency(),
get_build_def_module('hiredis').HiRedisDependency(),
get_build_def_module('cqlsh').CQLShDependency(),
get_build_def_module('redis_cli').RedisCliDependency(),
get_build_def_module('flex').FlexDependency(),
get_build_def_module('bison').BisonDependency(),
get_build_def_module('libedit').LibEditDependency(),
get_build_def_module('openldap').OpenLDAPDependency(),
]
if is_linux():
self.dependencies += [
get_build_def_module('libuuid').LibUuidDependency(),
]
using_both_gcc_and_clang = (
not self.compiler_choice.use_only_gcc() and
not self.compiler_choice.use_only_clang())
if using_both_gcc_and_clang:
# Old LLVM. We will migrate away from this.
self.dependencies.append(get_build_def_module('llvm7').LLVM7Dependency())
standalone_llvm7_toolchain = self.toolchain and self.toolchain.toolchain_type == 'llvm7'
if using_both_gcc_and_clang or standalone_llvm7_toolchain:
self.dependencies.append(
get_build_def_module('llvm7_libcxx').Llvm7LibCXXDependency())
llvm_major_version: Optional[int] = self.compiler_choice.get_llvm_major_version()
if (self.compiler_choice.use_only_clang() and
llvm_major_version is not None and llvm_major_version >= 10):
if self.toolchain and self.toolchain.toolchain_type == 'llvm12':
# Still use libunwind/libcxxabi libraries from LLVM 11.x.
# TODO: fix the compilation errors and upgrade.
llvm_version_str = '11.1.0'
else:
llvm_version_str = self.compiler_choice.get_llvm_version_str()
self.dependencies += [
# New LLVM. We will keep supporting new LLVM versions here.
get_build_def_module('llvm1x_libunwind').Llvm1xLibUnwindDependency(
version=llvm_version_str
),
get_build_def_module('llvm1x_libcxx').Llvm1xLibCxxAbiDependency(
version=llvm_version_str
),
get_build_def_module('llvm1x_libcxx').Llvm1xLibCxxDependency(
version=llvm_version_str
),
]
else:
self.dependencies.append(get_build_def_module('libunwind').LibUnwindDependency())
self.dependencies.append(get_build_def_module('libbacktrace').LibBacktraceDependency())
self.dependencies += [
get_build_def_module('icu4c').Icu4cDependency(),
get_build_def_module('protobuf').ProtobufDependency(),
get_build_def_module('crypt_blowfish').CryptBlowfishDependency(),
get_build_def_module('boost').BoostDependency(),
get_build_def_module('gflags').GFlagsDependency(),
get_build_def_module('glog').GLogDependency(),
get_build_def_module('gperftools').GPerfToolsDependency(),
get_build_def_module('gmock').GMockDependency(),
get_build_def_module('snappy').SnappyDependency(),
get_build_def_module('crcutil').CRCUtilDependency(),
get_build_def_module('libcds').LibCDSDependency(),
get_build_def_module('libuv').LibUvDependency(),
get_build_def_module('cassandra_cpp_driver').CassandraCppDriverDependency(),
]
def select_dependencies_to_build(self) -> None:
self.selected_dependencies = []
if self.args.dependencies:
names = set([dep.name for dep in self.dependencies])
for dep in self.args.dependencies:
if dep not in names:
fatal("Unknown dependency name: %s. Valid dependency names:\n%s",
dep,
(" " * 4 + ("\n" + " " * 4).join(sorted(names))))
for dep in self.dependencies:
if dep.name in self.args.dependencies:
self.selected_dependencies.append(dep)
elif self.args.skip:
skipped = set(self.args.skip.split(','))
log("Skipping dependencies: %s", sorted(skipped))
self.selected_dependencies = []
for dependency in self.dependencies:
if dependency.name in skipped:
skipped.remove(dependency.name)
else:
self.selected_dependencies.append(dependency)
if skipped:
raise ValueError("Unknown dependencies, cannot skip: %s" % sorted(skipped))
else:
self.selected_dependencies = self.dependencies
def run(self) -> None:
self.compiler_choice.set_compiler(
'clang' if self.compiler_choice.use_only_clang() else 'gcc')
if self.args.clean or self.args.clean_downloads:
self.fs_layout.clean(self.selected_dependencies, self.args.clean_downloads)
self.prepare_out_dirs()
os.environ['PATH'] = ':'.join([
os.path.join(self.fs_layout.tp_installed_common_dir, 'bin'),
os.path.join(self.fs_layout.tp_installed_llvm7_common_dir, 'bin'),
os.environ['PATH']
])
self.build_one_build_type(BUILD_TYPE_COMMON)
build_types = []
if is_linux():
build_types.append(BUILD_TYPE_UNINSTRUMENTED)
if self.compiler_choice.use_only_gcc():
if is_linux() and not self.compiler_choice.using_linuxbrew():
# Starting to support ASAN for GCC compilers
# (not for the current GCC 5.5 build on Linuxbrew, though).
build_types.append(BUILD_TYPE_ASAN)
else:
if self.compiler_choice.using_linuxbrew() or is_mac():
build_types.append(BUILD_TYPE_CLANG_UNINSTRUMENTED)
if is_linux() and not self.args.skip_sanitizers:
build_types.append(BUILD_TYPE_ASAN)
build_types.append(BUILD_TYPE_TSAN)
log(f"Full list of build types: {build_types}")
for build_type in build_types:
self.build_one_build_type(build_type)
with open(os.path.join(YB_THIRDPARTY_DIR, 'fossa_modules.yml'), 'w') as output_file:
yaml.dump(self.fossa_modules, output_file, indent=2)
def get_build_types(self) -> List[str]:
build_types: List[str] = list(BUILD_TYPES)
if is_linux() and self.args.single_compiler_type is not None:
build_types.remove(BUILD_TYPE_CLANG_UNINSTRUMENTED)
return build_types
def prepare_out_dirs(self) -> None:
build_types = self.get_build_types()
dirs = [
os.path.join(self.fs_layout.tp_installed_dir, build_type) for build_type in build_types
]
libcxx_dirs = [os.path.join(dir, 'libcxx') for dir in dirs]
for dir in dirs + libcxx_dirs:
lib_dir = os.path.join(dir, 'lib')
mkdir_if_missing(lib_dir)
mkdir_if_missing(os.path.join(dir, 'include'))
# On some systems, autotools installs libraries to lib64 rather than lib. Fix
# this by setting up lib64 as a symlink to lib. We have to do this step first
# to handle cases where one third-party library depends on another. Make sure
# we create a relative symlink so that the entire PREFIX_DIR could be moved,
# e.g. after it is packaged and then downloaded on a different build node.
lib64_dir = os.path.join(dir, 'lib64')
if os.path.exists(lib64_dir):
if os.path.islink(lib64_dir):
continue
remove_path(lib64_dir)
os.symlink('lib', lib64_dir)
def add_include_path(self, include_path: str) -> None:
cmd_line_arg = f'-I{include_path}'
self.preprocessor_flags.append(cmd_line_arg)
self.compiler_flags.append(cmd_line_arg)
def init_compiler_independent_flags(self, dep: Dependency) -> None:
"""
Initialize compiler and linker flags for a particular build type. We try to limit this
function to flags that will work for most compilers we are using, which include various
versions of GCC and Clang.
"""
self.preprocessor_flags = []
self.ld_flags = []
self.executable_only_ld_flags = []
self.compiler_flags = []
self.c_flags = []
self.cxx_flags = []
self.libs = []
self.add_linuxbrew_flags()
for include_dir_component in set([BUILD_TYPE_COMMON, self.build_type]):
self.add_include_path(os.path.join(
self.fs_layout.tp_installed_dir, include_dir_component, 'include'))
self.add_lib_dir_and_rpath(os.path.join(
self.fs_layout.tp_installed_dir, include_dir_component, 'lib'))
self.compiler_flags += self.preprocessor_flags
# -fPIC is there to always generate position-independent code, even for static libraries.
self.compiler_flags += ['-fno-omit-frame-pointer', '-fPIC', '-O2', '-Wall']
if is_linux():
# On Linux, ensure we set a long enough rpath so we can change it later with chrpath or
# a similar tool.
self.add_rpath(PLACEHOLDER_RPATH)
self.dylib_suffix = "so"
elif is_mac():
self.dylib_suffix = "dylib"
# YugaByte builds with C++11, which on OS X requires using libc++ as the standard
# library implementation. Some of the dependencies do not compile against libc++ by
# default, so we specify it explicitly.
self.cxx_flags.append("-stdlib=libc++")
self.ld_flags += ["-lc++", "-lc++abi"]
# Build for macOS Mojave or later. See https://bit.ly/37myHbk
self.compiler_flags.append("-mmacosx-version-min=10.14")
self.ld_flags.append("-Wl,-headerpad_max_install_names")
else:
fatal("Unsupported platform: {}".format(platform.system()))
# The C++ standard must match CMAKE_CXX_STANDARD in the top-level CMakeLists.txt file in
# the YugabyteDB source tree.
self.cxx_flags.append('-std=c++14')
self.cxx_flags.append('-frtti')
if self.build_type == BUILD_TYPE_ASAN:
self.compiler_flags += ASAN_FLAGS
if self.build_type == BUILD_TYPE_TSAN:
self.compiler_flags += TSAN_FLAGS
def add_linuxbrew_flags(self) -> None:
if self.compiler_choice.using_linuxbrew():
lib_dir = os.path.join(self.compiler_choice.get_linuxbrew_dir(), 'lib')
self.ld_flags.append(" -Wl,-dynamic-linker={}".format(os.path.join(lib_dir, 'ld.so')))
self.add_lib_dir_and_rpath(lib_dir)
def add_lib_dir_and_rpath(self, lib_dir: str) -> None:
self.ld_flags.append("-L{}".format(lib_dir))
self.add_rpath(lib_dir)
def prepend_lib_dir_and_rpath(self, lib_dir: str) -> None:
self.ld_flags.insert(0, "-L{}".format(lib_dir))
self.prepend_rpath(lib_dir)
def add_rpath(self, path: str) -> None:
log("Adding RPATH: %s", path)
self.ld_flags.append(get_rpath_flag(path))
self.additional_allowed_shared_lib_paths.add(path)
def prepend_rpath(self, path: str) -> None:
self.ld_flags.insert(0, get_rpath_flag(path))
def log_prefix(self, dep: Dependency) -> str:
return '{} ({})'.format(dep.name, self.build_type)
def build_with_configure(
self,
log_prefix: str,
extra_args: List[str] = [],
configure_cmd: List[str] = ['./configure'],
install: List[str] = ['install'],
run_autogen: bool = False,
autoconf: bool = False,
src_subdir_name: Optional[str] = None) -> None:
os.environ["YB_REMOTE_COMPILATION"] = "0"
dir_for_build = os.getcwd()
if src_subdir_name:
dir_for_build = os.path.join(dir_for_build, src_subdir_name)
with PushDir(dir_for_build):
log("Building in %s", dir_for_build)
if run_autogen:
log_output(log_prefix, ['./autogen.sh'])
if autoconf:
log_output(log_prefix, ['autoreconf', '-i'])
configure_args = (
configure_cmd.copy() + ['--prefix={}'.format(self.prefix)] + extra_args
)
log_output(log_prefix, configure_args)
log_output(log_prefix, ['make', '-j{}'.format(get_make_parallelism())])
if install:
log_output(log_prefix, ['make'] + install)
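    # A hedged usage sketch (the flags below are illustrative assumptions, not
    # taken from a real dependency's build() method):
    #
    #     self.build_with_configure(
    #         log_prefix=self.log_prefix(dep),
    #         extra_args=['--enable-shared', '--disable-static'],
    #         run_autogen=True)
    #
    # This runs ./autogen.sh, then ./configure with the given flags plus
    # --prefix, then make -j<N>, and finally make install.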
def build_with_cmake(
self,
dep: Dependency,
extra_args: List[str] = [],
use_ninja_if_available: bool = True,
src_subdir_name: Optional[str] = None,
extra_build_tool_args: List[str] = [],
should_install: bool = True,
install_targets: List[str] = ['install'],
shared_and_static: bool = False) -> None:
build_tool = 'make'
if use_ninja_if_available:
ninja_available = is_ninja_available()
log('Ninja is %s', 'available' if ninja_available else 'unavailable')
if ninja_available:
build_tool = 'ninja'
log("Building dependency %s using CMake. Build tool: %s", dep, build_tool)
log_prefix = self.log_prefix(dep)
os.environ["YB_REMOTE_COMPILATION"] = "0"
remove_path('CMakeCache.txt')
remove_path('CMakeFiles')
src_path = self.fs_layout.get_source_path(dep)
if src_subdir_name is not None:
src_path = os.path.join(src_path, src_subdir_name)
args = ['cmake', src_path]
if build_tool == 'ninja':
args += ['-G', 'Ninja']
args += self.get_common_cmake_flag_args(dep)
if extra_args is not None:
args += extra_args
args += dep.get_additional_cmake_args(self)
if shared_and_static and any(arg.startswith('-DBUILD_SHARED_LIBS=') for arg in args):
raise ValueError(
"shared_and_static=True is specified but CMake arguments already mention "
"-DBUILD_SHARED_LIBS: %s" % args)
if '-DBUILD_SHARED_LIBS=OFF' not in args and not shared_and_static:
# TODO: a better approach for setting CMake arguments from multiple places.
args.append('-DBUILD_SHARED_LIBS=ON')
def build_internal(even_more_cmake_args: List[str] = []) -> None:
final_cmake_args = args + even_more_cmake_args
log("CMake command line (one argument per line):\n%s" %
"\n".join([(" " * 4 + sanitize_flags_line_for_log(line))
for line in final_cmake_args]))
log_output(log_prefix, final_cmake_args)
if build_tool == 'ninja':
dep.postprocess_ninja_build_file(self, 'build.ninja')
build_tool_cmd = [
build_tool, '-j{}'.format(get_make_parallelism())
] + extra_build_tool_args
log_output(log_prefix, build_tool_cmd)
if should_install:
log_output(log_prefix, [build_tool] + install_targets)
with open('compile_commands.json') as compile_commands_file:
compile_commands = json.load(compile_commands_file)
for command_item in compile_commands:
command_args = command_item['command'].split()
if self.build_type == BUILD_TYPE_ASAN:
assert_list_contains(command_args, '-fsanitize=address')
assert_list_contains(command_args, '-fsanitize=undefined')
if self.build_type == BUILD_TYPE_TSAN:
assert_list_contains(command_args, '-fsanitize=thread')
if shared_and_static:
for build_shared_libs_value, subdir_name in (
('ON', 'shared'),
('OFF', 'static')
):
build_dir = os.path.join(os.getcwd(), subdir_name)
mkdir_if_missing(build_dir)
build_shared_libs_cmake_arg = '-DBUILD_SHARED_LIBS=%s' % build_shared_libs_value
log("Building dependency '%s' for build type '%s' with option: %s",
dep.name, self.build_type, build_shared_libs_cmake_arg)
with PushDir(build_dir):
build_internal([build_shared_libs_cmake_arg])
else:
build_internal()
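    # A hedged usage sketch (the extra CMake argument is an illustrative
    # assumption):
    #
    #     self.build_with_cmake(
    #         dep,
    #         extra_args=['-DCMAKE_BUILD_TYPE=Release'],
    #         shared_and_static=True)
    #
    # This configures and builds the dependency twice, in 'shared' and 'static'
    # subdirectories, with -DBUILD_SHARED_LIBS=ON and OFF respectively.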
def build_one_build_type(self, build_type: str) -> None:
if (build_type != BUILD_TYPE_COMMON and
self.args.build_type is not None and
build_type != self.args.build_type):
log("Skipping build type %s because build type %s is specified in the arguments",
build_type, self.args.build_type)
return
self.set_build_type(build_type)
build_group = (
BUILD_GROUP_COMMON if build_type == BUILD_TYPE_COMMON else BUILD_GROUP_INSTRUMENTED
)
for dep in self.selected_dependencies:
if build_group == dep.build_group:
self.perform_pre_build_steps(dep)
should_build = dep.should_build(self)
should_rebuild = self.should_rebuild_dependency(dep)
if should_build and should_rebuild:
self.build_dependency(dep)
else:
log(f"Skipping dependency {dep.name}: "
f"should_build={should_build}, "
f"should_rebuild={should_rebuild}.")
def get_install_prefix_with_qualifier(self, qualifier: Optional[str] = None) -> str:
return os.path.join(
self.fs_layout.tp_installed_dir,
self.build_type + ('_%s' % qualifier if qualifier else ''))
def set_build_type(self, build_type: str) -> None:
self.build_type = build_type
self.prefix = self.get_install_prefix_with_qualifier(qualifier=None)
self.prefix_bin = os.path.join(self.prefix, 'bin')
self.prefix_lib = os.path.join(self.prefix, 'lib')
self.prefix_include = os.path.join(self.prefix, 'include')
if self.compiler_choice.building_with_clang(build_type):
compiler = 'clang'
else:
compiler = 'gcc'
self.compiler_choice.set_compiler(compiler)
heading("Building {} dependencies (compiler type: {})".format(
build_type, self.compiler_choice.compiler_type))
log("Compiler type: %s", self.compiler_choice.compiler_type)
log("C compiler: %s", self.compiler_choice.get_c_compiler())
log("C++ compiler: %s", self.compiler_choice.get_cxx_compiler())
def init_flags(self, dep: Dependency) -> None:
"""
Initializes compiler and linker flags. No flag customizations should be transferred from one
dependency to another.
"""
self.init_compiler_independent_flags(dep)
if not is_mac() and self.compiler_choice.building_with_clang(self.build_type):
# Special setup for Clang on Linux.
compiler_choice = self.compiler_choice
llvm_major_version: Optional[int] = compiler_choice.get_llvm_major_version()
if (compiler_choice.single_compiler_type == 'clang' and
llvm_major_version is not None and llvm_major_version >= 10):
# We are assuming that --single-compiler-type will only be used for Clang 10 and
# newer.
self.init_linux_clang1x_flags(dep)
elif llvm_major_version == 7 or compiler_choice.single_compiler_type is None:
# We are either building with LLVM 7 without Linuxbrew, or this is the
# Linuxbrew-based build with both GCC and Clang (which will go away).
self.init_linux_clang7_flags(dep)
else:
raise ValueError(f"Unsupported LLVM major version: {llvm_major_version}")
def get_libcxx_dirs(self, libcxx_installed_suffix: str) -> Tuple[str, str]:
libcxx_installed_path = os.path.join(
self.fs_layout.tp_installed_dir, libcxx_installed_suffix, 'libcxx')
libcxx_installed_include = os.path.join(libcxx_installed_path, 'include', 'c++', 'v1')
libcxx_installed_lib = os.path.join(libcxx_installed_path, 'lib')
return libcxx_installed_include, libcxx_installed_lib
def init_linux_clang7_flags(self, dep: Dependency) -> None:
"""
Flags used to build code with Clang 7 that we build here. As we move to newer versions of
Clang, this function will go away.
"""
if self.build_type == BUILD_TYPE_TSAN:
# Ensure that TSAN runtime is linked statically into every executable. TSAN runtime
# uses -fPIE while our shared libraries use -fPIC, and therefore TSAN runtime can only
# be linked statically into executables. TSAN runtime can't be built with -fPIC because
# that would create significant performance issues.
self.executable_only_ld_flags += ['-fsanitize=thread']
# This is used to build code with libc++ and Clang 7 built as part of thirdparty.
stdlib_suffix = self.build_type
stdlib_path = os.path.join(self.fs_layout.tp_installed_dir, stdlib_suffix, 'libcxx')
stdlib_include = os.path.join(stdlib_path, 'include', 'c++', 'v1')
stdlib_lib = os.path.join(stdlib_path, 'lib')
self.cxx_flags.insert(0, '-nostdinc++')
self.cxx_flags.insert(0, '-isystem')
self.cxx_flags.insert(1, stdlib_include)
self.cxx_flags.insert(0, '-stdlib=libc++')
# Clang complains about argument unused during compilation: '-stdlib=libc++' when both
# -stdlib=libc++ and -nostdinc++ are specified.
self.cxx_flags.insert(0, '-Wno-error=unused-command-line-argument')
self.prepend_lib_dir_and_rpath(stdlib_lib)
if self.compiler_choice.using_linuxbrew():
self.compiler_flags.append('--gcc-toolchain={}'.format(
self.compiler_choice.get_linuxbrew_dir()))
if self.toolchain and self.toolchain.toolchain_type == 'llvm7':
# This is needed when building with Clang 7 but without Linuxbrew.
# TODO: this might only be needed due to using an old version of libunwind that is
# different from libunwind included in the LLVM 7 repository. Just a hypothesis.
self.ld_flags.append('-lgcc_s')
def init_linux_clang1x_flags(self, dep: Dependency) -> None:
"""
Flags for Clang 10 and beyond. We are using LLVM-supplied libunwind and compiler-rt in this
configuration.
"""
self.ld_flags.append('-rtlib=compiler-rt')
if self.build_type == BUILD_TYPE_COMMON:
log("Not configuring any special Clang 10+ flags for build type %s", self.build_type)
return
# TODO mbautin: refactor to polymorphism
is_libcxxabi = dep.name.endswith('_libcxxabi')
is_libcxx = dep.name.endswith('_libcxx')
log("Dependency name: %s, is_libcxxabi: %s, is_libcxx: %s",
dep.name, is_libcxxabi, is_libcxx)
if self.build_type == BUILD_TYPE_ASAN:
self.compiler_flags.append('-shared-libasan')
if is_libcxxabi:
# To avoid an infinite loop in UBSAN.
# https://monorail-prod.appspot.com/p/chromium/issues/detail?id=609786
# This comment:
# https://gist.githubusercontent.com/mbautin/ad9ea4715669da3b3a5fb9495659c4a9/raw
self.compiler_flags.append('-fno-sanitize=vptr')
# TODO mbautin: a centralized way to find paths inside LLVM installation.
assert self.compiler_choice.cc is not None
compiler_rt_lib_dir_ancestor = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(self.compiler_choice.cc))),
'lib', 'clang')
compiler_rt_lib_dir_candidates = []
nonexistent_compiler_rt_lib_dirs = []
for llvm_version_subdir in os.listdir(compiler_rt_lib_dir_ancestor):
compiler_rt_lib_dir = os.path.join(
compiler_rt_lib_dir_ancestor, llvm_version_subdir, 'lib', 'linux')
if os.path.isdir(compiler_rt_lib_dir):
compiler_rt_lib_dir_candidates.append(compiler_rt_lib_dir)
else:
nonexistent_compiler_rt_lib_dirs.append(compiler_rt_lib_dir)
if len(compiler_rt_lib_dir_candidates) != 1:
if not compiler_rt_lib_dir_candidates:
raise IOError(
"Could not find the compiler-rt library directory, looked at: %s" %
nonexistent_compiler_rt_lib_dirs)
raise IOError(
"Multiple possible compiler-rt library directories: %s" %
compiler_rt_lib_dir_candidates)
assert len(compiler_rt_lib_dir_candidates) == 1
compiler_rt_lib_dir = compiler_rt_lib_dir_candidates[0]
if not os.path.isdir(compiler_rt_lib_dir):
raise IOError("Directory does not exist: %s", compiler_rt_lib_dir)
self.add_lib_dir_and_rpath(compiler_rt_lib_dir)
self.ld_flags.append('-lclang_rt.ubsan_minimal-x86_64')
self.ld_flags += ['-lunwind']
libcxx_installed_include, libcxx_installed_lib = self.get_libcxx_dirs(self.build_type)
log("libc++ include directory: %s", libcxx_installed_include)
log("libc++ library directory: %s", libcxx_installed_lib)
if not is_libcxx and not is_libcxxabi:
log("Adding special compiler/linker flags for Clang 10+ for dependencies other than "
"libc++")
self.ld_flags += ['-lc++', '-lc++abi']
self.cxx_flags = [
'-stdlib=libc++',
'-isystem',
libcxx_installed_include,
'-nostdinc++'
] + self.cxx_flags
self.prepend_lib_dir_and_rpath(libcxx_installed_lib)
if is_libcxx:
log("Adding special compiler/linker flags for Clang 10 or newer for libc++")
# This is needed for libc++ to find libc++abi headers.
assert_dir_exists(libcxx_installed_include)
self.cxx_flags.append('-I%s' % libcxx_installed_include)
if is_libcxx or is_libcxxabi:
log("Adding special linker flags for Clang 10 or newer for libc++ or libc++abi")
# libc++abi needs to be able to find libcxx at runtime, even though it can't always find
# it at build time because libc++abi is built first.
self.add_rpath(libcxx_installed_lib)
self.cxx_flags.append('-Wno-error=unused-command-line-argument')
log("Flags after the end of setup for Clang 10 or newer:")
log("cxx_flags : %s", self.cxx_flags)
log("c_flags : %s", self.c_flags)
log("ld_flags : %s", self.ld_flags)
def get_effective_compiler_flags(self, dep: Dependency) -> List[str]:
return self.compiler_flags + dep.get_additional_compiler_flags(self)
def get_effective_cxx_flags(self, dep: Dependency) -> List[str]:
return (self.cxx_flags +
self.get_effective_compiler_flags(dep) +
dep.get_additional_cxx_flags(self))
def get_effective_c_flags(self, dep: Dependency) -> List[str]:
return (self.c_flags +
self.get_effective_compiler_flags(dep) +
dep.get_additional_c_flags(self))
def get_effective_ld_flags(self, dep: Dependency) -> List[str]:
return self.ld_flags + dep.get_additional_ld_flags(self)
def get_effective_executable_ld_flags(self, dep: Dependency) -> List[str]:
return self.ld_flags + self.executable_only_ld_flags + dep.get_additional_ld_flags(self)
def get_effective_preprocessor_flags(self, dep: Dependency) -> List[str]:
return list(self.preprocessor_flags)
def get_common_cmake_flag_args(self, dep: Dependency) -> List[str]:
c_flags_str = ' '.join(self.get_effective_c_flags(dep))
cxx_flags_str = ' '.join(self.get_effective_cxx_flags(dep))
# TODO: we are not using this. What is the best way to plug this into CMake?
preprocessor_flags_str = ' '.join(self.get_effective_preprocessor_flags(dep))
ld_flags_str = ' '.join(self.get_effective_ld_flags(dep))
exe_ld_flags_str = ' '.join(self.get_effective_executable_ld_flags(dep))
return [
'-DCMAKE_C_FLAGS={}'.format(c_flags_str),
'-DCMAKE_CXX_FLAGS={}'.format(cxx_flags_str),
'-DCMAKE_SHARED_LINKER_FLAGS={}'.format(ld_flags_str),
'-DCMAKE_EXE_LINKER_FLAGS={}'.format(exe_ld_flags_str),
'-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
'-DCMAKE_INSTALL_PREFIX={}'.format(dep.get_install_prefix(self)),
'-DCMAKE_POSITION_INDEPENDENT_CODE=ON'
]
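    # For illustration only (all paths and flags below are hypothetical), the
    # returned list has the shape:
    #
    #     ['-DCMAKE_C_FLAGS=-fno-omit-frame-pointer -fPIC -O2 -Wall',
    #      '-DCMAKE_CXX_FLAGS=-fno-omit-frame-pointer -fPIC -O2 -Wall -std=c++14 -frtti',
    #      '-DCMAKE_SHARED_LINKER_FLAGS=-L/path/to/installed/lib ...',
    #      '-DCMAKE_EXE_LINKER_FLAGS=-L/path/to/installed/lib ...',
    #      '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
    #      '-DCMAKE_INSTALL_PREFIX=/path/to/installed/uninstrumented',
    #      '-DCMAKE_POSITION_INDEPENDENT_CODE=ON']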
def perform_pre_build_steps(self, dep: Dependency) -> None:
log("")
colored_log(YELLOW_COLOR, SEPARATOR)
colored_log(YELLOW_COLOR, "Building %s (%s)", dep.name, self.build_type)
colored_log(YELLOW_COLOR, SEPARATOR)
self.download_manager.download_dependency(
dep=dep,
src_path=self.fs_layout.get_source_path(dep),
archive_path=self.fs_layout.get_archive_path(dep))
archive_name = dep.get_archive_name()
if archive_name:
archive_path = os.path.join('downloads', archive_name)
self.fossa_modules.append({
"fossa_module": {
"name": f"{dep.name}-{dep.version}",
"type": "raw",
"target": os.path.basename(archive_path)
},
"yb_metadata": {
"url": dep.download_url,
"sha256sum": self.download_manager.get_expected_checksum(archive_name)
}
})
def build_dependency(self, dep: Dependency) -> None:
self.init_flags(dep)
# This is needed at least for glog to be able to find gflags.
self.add_rpath(os.path.join(self.fs_layout.tp_installed_dir, self.build_type, 'lib'))
if self.build_type != BUILD_TYPE_COMMON:
# Needed to find libunwind for Clang 10 when using compiler-rt.
self.add_rpath(os.path.join(self.fs_layout.tp_installed_dir, BUILD_TYPE_COMMON, 'lib'))
if self.args.download_extract_only:
log("Skipping build of dependency %s, build type %s, --download-extract-only is "
"specified.", dep.name, self.build_type)
return
env_vars: Dict[str, Optional[str]] = {
"CPPFLAGS": " ".join(self.preprocessor_flags)
}
log_and_set_env_var_to_list(env_vars, 'CXXFLAGS', self.get_effective_cxx_flags(dep))
log_and_set_env_var_to_list(env_vars, 'CFLAGS', self.get_effective_c_flags(dep))
log_and_set_env_var_to_list(env_vars, 'LDFLAGS', self.get_effective_ld_flags(dep))
log_and_set_env_var_to_list(env_vars, 'LIBS', self.libs)
log_and_set_env_var_to_list(
env_vars, 'CPPFLAGS', self.get_effective_preprocessor_flags(dep))
if self.build_type == BUILD_TYPE_ASAN:
# To avoid errors similar to:
# https://gist.githubusercontent.com/mbautin/4b8eec566f54bcc35706dcd97cab1a95/raw
#
# This could also be fixed to some extent by the compiler flags
# -mllvm -asan-use-private-alias=1
# but applying that flag to all builds is complicated in practice and is probably
# best done using a compiler wrapper script, which would slow things down.
env_vars["ASAN_OPTIONS"] = "detect_odr_violation=0"
with PushDir(self.create_build_dir_and_prepare(dep)):
with EnvVarContext(**env_vars):
write_env_vars('yb_dependency_env.sh')
dep.build(self)
self.save_build_stamp_for_dependency(dep)
log("")
log("Finished building %s (%s)", dep.name, self.build_type)
log("")
    # Determines if we should rebuild a dependency based on the existing "stamp" file and the
    # current value of the "stamp" (based on Git SHA1 and local changes) for that dependency.
    # Returns True if the dependency has to be rebuilt.
def should_rebuild_dependency(self, dep: Dependency) -> bool:
stamp_path = self.fs_layout.get_build_stamp_path_for_dependency(dep, self.build_type)
old_build_stamp = None
if os.path.exists(stamp_path):
with open(stamp_path, 'rt') as inp:
old_build_stamp = inp.read()
new_build_stamp = self.get_build_stamp_for_dependency(dep)
if dep.dir_name is not None:
src_dir = self.fs_layout.get_source_path(dep)
if not os.path.exists(src_dir):
log("Have to rebuild %s (%s): source dir %s does not exist",
dep.name, self.build_type, src_dir)
return True
if old_build_stamp == new_build_stamp:
log("Not rebuilding %s (%s) -- nothing changed.", dep.name, self.build_type)
return False
log("Have to rebuild %s (%s):", dep.name, self.build_type)
log("Old build stamp for %s (from %s):\n%s",
dep.name, stamp_path, indent_lines(old_build_stamp))
log("New build stamp for %s:\n%s",
dep.name, indent_lines(new_build_stamp))
return True
    # Comes up with a string that allows us to tell when to rebuild a particular third-party
    # dependency.
def get_build_stamp_for_dependency(self, dep: Dependency) -> str:
module_name = dep.__class__.__module__
assert isinstance(module_name, str), "Dependency's module is not a string: %s" % module_name
assert module_name.startswith('build_definitions.'), "Invalid module name: %s" % module_name
module_name_components = module_name.split('.')
assert len(module_name_components) == 2, (
"Expected two components: %s" % module_name_components)
module_name_final = module_name_components[-1]
input_files_for_stamp = [
'python/yugabyte_db_thirdparty/yb_build_thirdparty_main.py',
'build_thirdparty.sh',
os.path.join('python', 'build_definitions', '%s.py' % module_name_final)
]
for path in input_files_for_stamp:
abs_path = os.path.join(YB_THIRDPARTY_DIR, path)
if not os.path.exists(abs_path):
fatal("File '%s' does not exist -- expecting it to exist when creating a 'stamp' "
"for the build configuration of '%s'.", abs_path, dep.name)
with PushDir(YB_THIRDPARTY_DIR):
git_commit_sha1 = subprocess.check_output(
['git', 'log', '--pretty=%H', '-n', '1'] + input_files_for_stamp
).strip().decode('utf-8')
build_stamp = 'git_commit_sha1={}\n'.format(git_commit_sha1)
for git_extra_arg in (None, '--cached'):
git_extra_args = [git_extra_arg] if git_extra_arg else []
git_diff = subprocess.check_output(
['git', 'diff'] + git_extra_args + input_files_for_stamp)
git_diff_sha256 = hashlib.sha256(git_diff).hexdigest()
build_stamp += 'git_diff_sha256{}={}\n'.format(
'_'.join(git_extra_args).replace('--', '_'),
git_diff_sha256)
return build_stamp
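    # For illustration, a resulting build stamp (with hypothetical hash values)
    # looks like:
    #
    #     git_commit_sha1=1234abcd...
    #     git_diff_sha256=e3b0c442...
    #     git_diff_sha256_cached=e3b0c442...
    #
    # so a dependency is rebuilt whenever the last commit touching its input
    # files, or their staged/unstaged diffs, change.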
def save_build_stamp_for_dependency(self, dep: Dependency) -> None:
stamp = self.get_build_stamp_for_dependency(dep)
stamp_path = self.fs_layout.get_build_stamp_path_for_dependency(dep, self.build_type)
log("Saving new build stamp to '%s':\n%s", stamp_path, indent_lines(stamp))
with open(stamp_path, "wt") as out:
out.write(stamp)
def create_build_dir_and_prepare(self, dep: Dependency) -> str:
src_dir = self.fs_layout.get_source_path(dep)
if not os.path.isdir(src_dir):
fatal("Directory '{}' does not exist".format(src_dir))
build_dir = self.fs_layout.get_build_dir_for_dependency(dep, self.build_type)
mkdir_if_missing(build_dir)
if dep.copy_sources:
log("Bootstrapping %s from %s", build_dir, src_dir)
subprocess.check_call(['rsync', '-a', src_dir + '/', build_dir])
return build_dir
def is_release_build(self) -> bool:
"""
Distinguishes between build types that are potentially used in production releases from
build types that are only used in testing (e.g. ASAN+UBSAN, TSAN).
"""
return self.build_type in [
BUILD_TYPE_COMMON, BUILD_TYPE_UNINSTRUMENTED, BUILD_TYPE_CLANG_UNINSTRUMENTED
]
def cmake_build_type_for_test_only_dependencies(self) -> str:
return 'Release' if self.is_release_build() else 'Debug'
def check_cxx_compiler_flag(self, flag: str) -> bool:
compiler_path = self.compiler_choice.get_cxx_compiler()
log(f"Checking if the compiler {compiler_path} accepts the flag {flag}")
process = subprocess.Popen(
[compiler_path, '-x', 'c++', flag, '-'],
stdin=subprocess.PIPE)
assert process.stdin is not None
process.stdin.write("int main() { return 0; }".encode('utf-8'))
process.stdin.close()
return process.wait() == 0
def add_checked_flag(self, flags: List[str], flag: str) -> None:
if self.check_cxx_compiler_flag(flag):
flags.append(flag)
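    # A hedged usage sketch (the warning flag is an illustrative assumption):
    #
    #     self.add_checked_flag(self.cxx_flags, '-Wno-deprecated-copy')
    #
    # appends the flag only if the current C++ compiler accepts it.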
def get_openssl_dir(self) -> str:
return os.path.join(self.fs_layout.tp_installed_common_dir)
def get_openssl_related_cmake_args(self) -> List[str]:
"""
Returns a list of CMake arguments to use to pick up the version of OpenSSL that we should be
using. Returns an empty list if the default OpenSSL installation should be used.
"""
openssl_dir = self.get_openssl_dir()
openssl_options = ['-DOPENSSL_ROOT_DIR=' + openssl_dir]
openssl_crypto_library = os.path.join(openssl_dir, 'lib', 'libcrypto.' + self.dylib_suffix)
openssl_ssl_library = os.path.join(openssl_dir, 'lib', 'libssl.' + self.dylib_suffix)
openssl_options += [
'-DOPENSSL_CRYPTO_LIBRARY=' + openssl_crypto_library,
'-DOPENSSL_SSL_LIBRARY=' + openssl_ssl_library,
'-DOPENSSL_LIBRARIES=%s;%s' % (openssl_crypto_library, openssl_ssl_library)
]
return openssl_options
|
py | 1a3554d46a9c66253c17df6cac2d316f694a14de | import zabbix_items_from_csv
#-----------------------
# Limit reading to the first 10 lines.
#-----------------------
complete_list_dict_device_1 = zabbix_items_from_csv.reader_csv_file('oid_list_with_range_processed.csv', 10)
complete_list_name_dict_device_1 = zabbix_items_from_csv.read_csv_name_module('oid_names.csv')
dict_device_1 = zabbix_items_from_csv.merge_csv_data(complete_list_dict_device_1, complete_list_name_dict_device_1, True)
xml_tree_string_device_1 = zabbix_items_from_csv.\
generate_items_xml_file_complete(dict_device_1,
'BLR-DEVICE_1', 'BLR-DEVICE_1',
'10.12.51.11', 'DEVICE_1')
zabbix_items_from_csv.xml_pretty_me('BLR-DEVICE_1.xml', xml_tree_string_device_1)
#-----------------------
xml_tree_string_device_1 = zabbix_items_from_csv.\
generate_items_xml_file_complete(dict_device_1,
'CHN-DEVICE_1', 'CHN-DEVICE_1',
'10.12.51.11', 'DEVICE_1')
zabbix_items_from_csv.xml_pretty_me('CHN-DEVICE_1.xml', xml_tree_string_device_1)
#-----------------------
# Read all the lines from the csv file.
#-----------------------
complete_list_dict_device_2 = zabbix_items_from_csv.reader_csv_file('oid_list_with_range_processed.csv')
complete_list_name_dict_device_2 = zabbix_items_from_csv.read_csv_name_module('oid_names.csv')
dict_device_2 = zabbix_items_from_csv.merge_csv_data(complete_list_dict_device_2, complete_list_name_dict_device_2)
xml_tree_string_device_2 = zabbix_items_from_csv.\
generate_items_xml_file_complete(dict_device_2,
'BLR-DEVICE_2', 'BLR-DEVICE_2',
'12.12.54.66', 'DEVICE_2')
zabbix_items_from_csv.xml_pretty_me('BLR-DEVICE_2.xml', xml_tree_string_device_2)
#-----------------------
xml_tree_string_device_2 = zabbix_items_from_csv.\
generate_items_xml_file_complete(dict_device_2,
'CHN-DEVICE_2', 'CHN-DEVICE_2',
'12.12.52.74', 'DEVICE_2')
zabbix_items_from_csv.xml_pretty_me('CHN-DEVICE_2.xml', xml_tree_string_device_2) |
py | 1a355529db9de1764cf9cb80b828b9746958b293 | """MCMC sampling methods."""
import logging
import numpy as np
logger = logging.getLogger(__name__)
# TODO: combine ESS and Rhat? Consider transforming parameters to the allowed
# region to increase the acceptance ratio.
def eff_sample_size(chains):
"""Calculate the effective sample size for 1 or more chains.
See:
Gelman, Carlin, Stern, Dunson, Vehtari, Rubin: Bayesian Data Analysis, 2013.
Stan modeling language user's guide and reference manual, v. 2.14.0.
Parameters
----------
chains : np.array of shape (N,) or (M, N)
Samples of a parameter from an MCMC algorithm. No burn-in subtracted here!
Returns
-------
ess : float
"""
chains = np.atleast_2d(chains)
n_chains, n_samples = chains.shape
means = np.mean(chains, axis=1)
variances = np.var(chains, ddof=1, axis=1)
var_between = 0 if n_chains == 1 else n_samples * np.var(means, ddof=1)
var_within = np.mean(variances)
var_pooled = ((n_samples - 1.) * var_within + var_between) / n_samples
# autocovariances for lags 1..n_samples
# https://en.wikipedia.org/wiki/Autocorrelation#Estimation
n_padded = int(2**np.ceil(1 + np.log2(n_samples)))
freqs = np.fft.rfft(chains - means[:, None], n_padded)
autocov = np.fft.irfft(np.abs(freqs)**2)[:, :n_samples].real
autocov = autocov / np.arange(n_samples, 0, -1)
estimator_sum = 0.
lag = 1
while lag < n_samples:
# estimate multi-chain autocorrelation using variogram
temp = 1. - (var_within - np.mean(autocov[:, lag])) / var_pooled
# only use the first non-negative autocorrelations to avoid noise
if temp >= 0:
estimator_sum += temp
lag += 1
else:
break
ess = n_chains * n_samples / (1. + 2. * estimator_sum)
return ess
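# A minimal usage sketch (not part of the original module): for independent
# white-noise chains the effective sample size should be close to the nominal
# number of draws.
def _eff_sample_size_demo():
    random_state = np.random.RandomState(0)
    chains = random_state.randn(4, 1000)  # 4 independent chains of length 1000
    return eff_sample_size(chains)  # expected to be roughly 4000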
def gelman_rubin(chains):
r"""Calculate the Gelman--Rubin convergence statistic.
Also known as the potential scale reduction factor, or \hat{R}.
Uses the split version, as in Stan.
See:
Gelman, Carlin, Stern, Dunson, Vehtari, Rubin: Bayesian Data Analysis, 2013.
Gelman, A. and D. B. Rubin: Inference from iterative simulation using
multiple sequences (with discussion). Statistical Science, 7:457-511, 1992.
Stan modeling language user's guide and reference manual, v. 2.14.0.
Parameters
----------
chains : np.array of shape (M, N)
Samples of a parameter from an MCMC algorithm, 1 row per chain. No burn-in subtracted here!
Returns
-------
psrf : float
Should be below 1.1 to support convergence, or at least below 1.2 for all parameters.
"""
chains = np.atleast_2d(chains)
n_chains, n_samples = chains.shape
# split chains in the middle
n_chains *= 2
n_samples //= 2 # drop 1 if odd
chains = chains[:, :2 * n_samples].reshape((n_chains, n_samples))
means = np.mean(chains, axis=1)
variances = np.var(chains, ddof=1, axis=1)
var_between = n_samples * np.var(means, ddof=1)
var_within = np.mean(variances)
var_pooled = ((n_samples - 1.) * var_within + var_between) / n_samples
# potential scale reduction factor, should be close to 1
psrf = np.sqrt(var_pooled / var_within)
return psrf
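# A minimal usage sketch (not part of the original module): well-mixed chains
# drawn from the same distribution should give a PSRF close to 1.
def _gelman_rubin_demo():
    random_state = np.random.RandomState(0)
    chains = random_state.randn(4, 1000)
    return gelman_rubin(chains)  # expected to be close to 1.0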
def nuts(n_iter,
params0,
target,
grad_target,
n_adapt=None,
target_prob=0.6,
max_depth=5,
seed=0,
info_freq=100,
max_retry_inits=20,
stepsize=None):
r"""Sample the target using the NUTS algorithm.
No-U-Turn Sampler, an improved version of the Hamiltonian (Markov Chain) Monte Carlo sampler.
Based on Algorithm 6 in
Hoffman & Gelman, depthMLR 15, 1351-1381, 2014.
Parameters
----------
n_iter : int
The number of iterations, including n_adapt and possible other warmup iterations.
params0 : np.array
Initial values for sampled parameters.
target : function
The target's log density to sample (possibly unnormalized).
grad_target : function
The gradient of target.
n_adapt : int, optional
The number of automatic adjustments to stepsize. Defaults to n_iter/2.
target_prob : float, optional
Desired average acceptance probability. (Parameter \delta in the original paper.)
max_depth : int, optional
Maximum recursion depth.
seed : int, optional
Seed for pseudo-random number generator.
info_freq : int, optional
How often to log progress to loglevel INFO.
max_retry_inits : int, optional
How many times to retry finding initial stepsize (if stepped outside allowed region).
stepsize : float, optional
Initial stepsize (will be still adapted). Defaults to finding by trial and error.
Returns
-------
samples : np.array
Samples from the MCMC algorithm, including those during adaptation.
"""
random_state = np.random.RandomState(seed)
n_adapt = n_adapt if n_adapt is not None else n_iter // 2
logger.info("NUTS: Performing {} iterations with {} adaptation steps.".format(n_iter, n_adapt))
target0 = target(params0)
if np.isinf(target0):
raise ValueError("NUTS: Bad initialization point {}, logpdf -> -inf.".format(params0))
# ********************************
# Find reasonable initial stepsize
# ********************************
if stepsize is None:
grad0 = grad_target(params0)
logger.debug("NUTS: Trying to find initial stepsize from point {} with gradient {}.".
format(params0, grad0))
init_tries = 0
while init_tries < max_retry_inits: # might step into region unallowed by priors
stepsize = np.exp(-init_tries)
init_tries += 1
momentum0 = random_state.randn(*params0.shape)
# leapfrog
momentum1 = momentum0 + 0.5 * stepsize * grad0
params1 = params0 + stepsize * momentum1
momentum1 += 0.5 * stepsize * grad_target(params1)
joint0 = target0 - 0.5 * momentum0.dot(momentum0)
joint1 = target(params1) - 0.5 * momentum1.dot(momentum1)
if np.isfinite(joint1):
break
else:
if init_tries == max_retry_inits:
raise ValueError(
"NUTS: Cannot find acceptable stepsize starting from point {}. All "
"trials ended in region with 0 probability.".format(params0))
# logger.debug("momentum0 {}, momentum1 {}, params1 {}, joint0 {}, joint1 {}"
# .format(momentum0, momentum1, params1, joint0, joint1))
logger.debug("NUTS: Problem finding acceptable stepsize, now {}. Retrying {}/{}."
.format(stepsize, init_tries, max_retry_inits))
plusminus = 1 if np.exp(joint1 - joint0) > 0.5 else -1
factor = 2. if plusminus == 1 else 0.5
while factor * np.exp(plusminus * (joint1 - joint0)) > 1.:
stepsize *= factor
if stepsize == 0. or stepsize > 1e7: # bounds as in STAN
raise SystemExit("NUTS: Found invalid stepsize {} starting from point {}."
.format(stepsize, params0))
# leapfrog
momentum1 = momentum0 + 0.5 * stepsize * grad0
params1 = params0 + stepsize * momentum1
momentum1 += 0.5 * stepsize * grad_target(params1)
joint1 = target(params1) - 0.5 * momentum1.dot(momentum1)
logger.debug("NUTS: Set initial stepsize {}.".format(stepsize))
# Some parameters from the NUTS paper, used for adapting the stepsize
target_stepsize = np.log(10. * stepsize)
log_avg_stepsize = 0.
accept_ratio = 0. # tends to target_prob
    shrinkage = 0.05  # controls shrinkage of accept_ratio towards target_prob
ii_offset = 10. # stabilizes initialization
discount = -0.75 # reduce weight of past
# ********
# Sampling
# ********
samples = np.empty((n_iter + 1, ) + params0.shape)
samples[0, :] = params0
n_diverged = 0 # counter for proposals whose error diverged
n_outside = 0 # counter for proposals outside priors (pdf=0)
n_total = 0 # total number of proposals
for ii in range(1, n_iter + 1):
momentum0 = random_state.randn(*params0.shape)
samples_prev = samples[ii - 1, :]
log_joint0 = target(samples_prev) - 0.5 * momentum0.dot(momentum0)
log_slicevar = log_joint0 - random_state.exponential()
samples[ii, :] = samples_prev
params_left = samples_prev
params_right = samples_prev
momentum_left = momentum0
momentum_right = momentum0
depth = 0
n_ok = 1
all_ok = True # criteria for no U-turn, diverging error
while all_ok and depth <= max_depth:
direction = 1 if random_state.rand() < 0.5 else -1
if direction == -1:
params_left, momentum_left, _, _, params1, n_sub, sub_ok, mh_ratio, n_steps, \
is_div, is_out = _build_tree_nuts(
params_left, momentum_left, log_slicevar, -stepsize, depth, log_joint0,
target, grad_target, random_state)
else:
_, _, params_right, momentum_right, params1, n_sub, sub_ok, mh_ratio, n_steps, \
is_div, is_out = _build_tree_nuts(
params_right, momentum_right, log_slicevar, stepsize, depth, log_joint0,
target, grad_target, random_state)
if sub_ok == 1:
if random_state.rand() < float(n_sub) / n_ok:
samples[ii, :] = params1 # accept proposal
n_ok += n_sub
            # If params1 was outside the allowed region, don't count it as a diverging error.
            if not is_out:
                n_diverged += is_div
n_outside += is_out
n_total += n_steps
all_ok = sub_ok and ((params_right - params_left).dot(momentum_left) >= 0) \
and ((params_right - params_left).dot(momentum_right) >= 0)
depth += 1
if depth > max_depth:
logger.debug("NUTS: Maximum recursion depth {} exceeded.".format(max_depth))
# adjust stepsize according to target acceptance ratio
if ii <= n_adapt:
accept_ratio = (1. - 1. / (ii + ii_offset)) * accept_ratio \
+ (target_prob - float(mh_ratio) / n_steps) / (ii + ii_offset)
log_stepsize = target_stepsize - np.sqrt(ii) / shrinkage * accept_ratio
log_avg_stepsize = ii ** discount * log_stepsize + \
(1. - ii ** discount) * log_avg_stepsize
stepsize = np.exp(log_stepsize)
elif ii == n_adapt + 1: # adaptation/warmup finished
stepsize = np.exp(log_avg_stepsize) # final stepsize
n_diverged = 0
n_outside = 0
n_total = 0
logger.info("NUTS: Adaptation/warmup finished. Sampling...")
logger.debug("NUTS: Set final stepsize {}.".format(stepsize))
if ii % info_freq == 0 and ii < n_iter:
logger.info("NUTS: Iterations performed: {}/{}...".format(ii, n_iter))
info_str = "NUTS: Acceptance ratio: {:.3f}".format(float(n_iter - n_adapt) / n_total)
if n_outside > 0:
info_str += ". After warmup {} proposals were outside of the region allowed by priors " \
"and rejected, decreasing acceptance ratio.".format(n_outside)
logger.info(info_str)
if n_diverged > 0:
logger.warning("NUTS: Diverged proposals after warmup (i.e. n_adapt={} steps): {}".format(
n_adapt, n_diverged))
return samples[1:, :]
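# A minimal usage sketch (not part of the original module): sample a 2-D
# standard normal, whose log density and gradient are easy to write by hand.
def _nuts_demo():
    def target(params):
        return -0.5 * params.dot(params)  # log N(0, I) up to an additive constant
    def grad_target(params):
        return -params  # gradient of the log density above
    samples = nuts(2000, np.zeros(2), target, grad_target, seed=1)
    return samples[1000:]  # discard the adaptation half before using the draws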
def _build_tree_nuts(params, momentum, log_slicevar, step, depth, log_joint0, target, grad_target,
random_state):
"""Recursively build a balanced binary tree needed by NUTS.
Based on Algorithm 6 in
Hoffman & Gelman, JMLR 15, 1351-1381, 2014.
"""
# Base case: one leapfrog step
if depth == 0:
momentum1 = momentum + 0.5 * step * grad_target(params)
params1 = params + step * momentum1
momentum1 = momentum1 + 0.5 * step * grad_target(params1)
log_joint = target(params1) - 0.5 * momentum1.dot(momentum1)
n_ok = float(log_slicevar <= log_joint)
sub_ok = log_slicevar < (1000. + log_joint) # check for diverging error
is_out = False
if not sub_ok:
if np.isinf(target(params1)): # logpdf(params1) = -inf i.e. pdf(params1) = 0
is_out = True
else:
logger.debug(
"NUTS: Diverging error: log_joint={}, params={}, params1={}, momentum={}, "
"momentum1={}.".format(log_joint, params, params1, momentum, momentum1))
mh_ratio = 0. # reject
else:
mh_ratio = min(1., np.exp(log_joint - log_joint0))
return params1, momentum1, params1, momentum1, params1, n_ok, sub_ok, mh_ratio, 1., \
not sub_ok, is_out
else:
# Recursion to build subtrees, doubling size
params_left, momentum_left, params_right, momentum_right, params1, n_sub, sub_ok, \
mh_ratio, n_steps, is_div, is_out = _build_tree_nuts(
params, momentum, log_slicevar, step, depth - 1, log_joint0, target,
grad_target, random_state)
if sub_ok: # recurse further
if step < 0:
params_left, momentum_left, _, _, params2, n_sub2, sub_ok, mh_ratio2, n_steps2, \
is_div, is_out = _build_tree_nuts(
params_left, momentum_left, log_slicevar,
step, depth - 1, log_joint0, target, grad_target, random_state)
else:
_, _, params_right, momentum_right, params2, n_sub2, sub_ok, mh_ratio2, n_steps2, \
is_div, is_out = _build_tree_nuts(
params_right, momentum_right, log_slicevar,
step, depth - 1, log_joint0, target, grad_target, random_state)
if n_sub2 > 0:
if float(n_sub2) / (n_sub + n_sub2) > random_state.rand():
params1 = params2 # accept move
mh_ratio += mh_ratio2
n_steps += n_steps2
sub_ok = sub_ok and ((params_right - params_left).dot(momentum_left) >= 0) \
and ((params_right - params_left).dot(momentum_right) >= 0)
n_sub += n_sub2
return params_left, momentum_left, params_right, momentum_right, params1, n_sub, sub_ok, \
mh_ratio, n_steps, is_div, is_out
def metropolis(n_samples, params0, target, sigma_proposals, warmup=0, seed=0):
"""Sample the target with a Metropolis Markov Chain Monte Carlo using Gaussian proposals.
Parameters
----------
n_samples : int
The number of requested samples.
params0 : np.array
Initial values for each sampled parameter.
target : function
The target log density to sample (possibly unnormalized).
sigma_proposals : np.array
Standard deviations for Gaussian proposals of each parameter.
warmup : int
Number of warmup samples.
seed : int, optional
Seed for pseudo-random number generator.
Returns
-------
samples : np.array
"""
random_state = np.random.RandomState(seed)
samples = np.empty((n_samples + warmup + 1, ) + params0.shape)
samples[0, :] = params0
target_current = target(params0)
if np.isinf(target_current):
raise ValueError(
"Metropolis: Bad initialization point {},logpdf -> -inf.".format(params0))
n_accepted = 0
for ii in range(1, n_samples + warmup + 1):
samples[ii, :] = samples[ii - 1, :] + sigma_proposals * random_state.randn(*params0.shape)
target_prev = target_current
target_current = target(samples[ii, :])
if ((np.exp(target_current - target_prev) < random_state.rand())
or np.isinf(target_current)
or np.isnan(target_current)): # reject proposal
samples[ii, :] = samples[ii - 1, :]
target_current = target_prev
else:
n_accepted += 1
logger.info(
"{}: Total acceptance ratio: {:.3f}".format(__name__,
float(n_accepted) / (n_samples + warmup)))
return samples[(1 + warmup):, :]
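# A minimal usage sketch (not part of the original module): sample a 1-D
# standard normal using Gaussian proposals with standard deviation 1.
def _metropolis_demo():
    def target(params):
        return -0.5 * params.dot(params)
    return metropolis(5000, np.zeros(1), target,
                      sigma_proposals=np.ones(1), warmup=500, seed=2)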
|
py | 1a3555b76bb8faab8c07e9c5c37866b9a38e35ed | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .instance_view_status import InstanceViewStatus
from .sub_resource import SubResource
from .sku import Sku
from .availability_set import AvailabilitySet
from .virtual_machine_size import VirtualMachineSize
from .virtual_machine_extension_image import VirtualMachineExtensionImage
from .virtual_machine_image_resource import VirtualMachineImageResource
from .virtual_machine_extension_instance_view import VirtualMachineExtensionInstanceView
from .virtual_machine_extension import VirtualMachineExtension
from .purchase_plan import PurchasePlan
from .os_disk_image import OSDiskImage
from .data_disk_image import DataDiskImage
from .virtual_machine_image import VirtualMachineImage
from .usage_name import UsageName
from .usage import Usage
from .virtual_machine_capture_parameters import VirtualMachineCaptureParameters
from .virtual_machine_capture_result import VirtualMachineCaptureResult
from .plan import Plan
from .hardware_profile import HardwareProfile
from .image_reference import ImageReference
from .key_vault_secret_reference import KeyVaultSecretReference
from .key_vault_key_reference import KeyVaultKeyReference
from .disk_encryption_settings import DiskEncryptionSettings
from .virtual_hard_disk import VirtualHardDisk
from .managed_disk_parameters import ManagedDiskParameters
from .os_disk import OSDisk
from .data_disk import DataDisk
from .storage_profile import StorageProfile
from .additional_unattend_content import AdditionalUnattendContent
from .win_rm_listener import WinRMListener
from .win_rm_configuration import WinRMConfiguration
from .windows_configuration import WindowsConfiguration
from .ssh_public_key import SshPublicKey
from .ssh_configuration import SshConfiguration
from .linux_configuration import LinuxConfiguration
from .vault_certificate import VaultCertificate
from .vault_secret_group import VaultSecretGroup
from .os_profile import OSProfile
from .network_interface_reference import NetworkInterfaceReference
from .network_profile import NetworkProfile
from .boot_diagnostics import BootDiagnostics
from .diagnostics_profile import DiagnosticsProfile
from .virtual_machine_extension_handler_instance_view import VirtualMachineExtensionHandlerInstanceView
from .virtual_machine_agent_instance_view import VirtualMachineAgentInstanceView
from .disk_instance_view import DiskInstanceView
from .boot_diagnostics_instance_view import BootDiagnosticsInstanceView
from .virtual_machine_instance_view import VirtualMachineInstanceView
from .virtual_machine import VirtualMachine
from .upgrade_policy import UpgradePolicy
from .image_os_disk import ImageOSDisk
from .image_data_disk import ImageDataDisk
from .image_storage_profile import ImageStorageProfile
from .image import Image
from .virtual_machine_scale_set_os_profile import VirtualMachineScaleSetOSProfile
from .virtual_machine_scale_set_managed_disk_parameters import VirtualMachineScaleSetManagedDiskParameters
from .virtual_machine_scale_set_os_disk import VirtualMachineScaleSetOSDisk
from .virtual_machine_scale_set_data_disk import VirtualMachineScaleSetDataDisk
from .virtual_machine_scale_set_storage_profile import VirtualMachineScaleSetStorageProfile
from .api_entity_reference import ApiEntityReference
from .virtual_machine_scale_set_ip_configuration import VirtualMachineScaleSetIPConfiguration
from .virtual_machine_scale_set_network_configuration import VirtualMachineScaleSetNetworkConfiguration
from .virtual_machine_scale_set_network_profile import VirtualMachineScaleSetNetworkProfile
from .virtual_machine_scale_set_extension import VirtualMachineScaleSetExtension
from .virtual_machine_scale_set_extension_profile import VirtualMachineScaleSetExtensionProfile
from .virtual_machine_scale_set_vm_profile import VirtualMachineScaleSetVMProfile
from .virtual_machine_scale_set import VirtualMachineScaleSet
from .virtual_machine_scale_set_vm_instance_ids import VirtualMachineScaleSetVMInstanceIDs
from .virtual_machine_scale_set_vm_instance_required_ids import VirtualMachineScaleSetVMInstanceRequiredIDs
from .virtual_machine_status_code_count import VirtualMachineStatusCodeCount
from .virtual_machine_scale_set_instance_view_statuses_summary import VirtualMachineScaleSetInstanceViewStatusesSummary
from .virtual_machine_scale_set_vm_extensions_summary import VirtualMachineScaleSetVMExtensionsSummary
from .virtual_machine_scale_set_instance_view import VirtualMachineScaleSetInstanceView
from .virtual_machine_scale_set_sku_capacity import VirtualMachineScaleSetSkuCapacity
from .virtual_machine_scale_set_sku import VirtualMachineScaleSetSku
from .virtual_machine_scale_set_vm import VirtualMachineScaleSetVM
from .virtual_machine_scale_set_vm_instance_view import VirtualMachineScaleSetVMInstanceView
from .api_error_base import ApiErrorBase
from .inner_error import InnerError
from .api_error import ApiError
from .compute_long_running_operation_properties import ComputeLongRunningOperationProperties
from .resource import Resource
from .sub_resource_read_only import SubResourceReadOnly
from .operation_status_response import OperationStatusResponse
from .container_service_custom_profile import ContainerServiceCustomProfile
from .container_service_service_principal_profile import ContainerServiceServicePrincipalProfile
from .container_service_orchestrator_profile import ContainerServiceOrchestratorProfile
from .container_service_master_profile import ContainerServiceMasterProfile
from .container_service_agent_pool_profile import ContainerServiceAgentPoolProfile
from .container_service_windows_profile import ContainerServiceWindowsProfile
from .container_service_ssh_public_key import ContainerServiceSshPublicKey
from .container_service_ssh_configuration import ContainerServiceSshConfiguration
from .container_service_linux_profile import ContainerServiceLinuxProfile
from .container_service_vm_diagnostics import ContainerServiceVMDiagnostics
from .container_service_diagnostics_profile import ContainerServiceDiagnosticsProfile
from .container_service import ContainerService
from .resource_update import ResourceUpdate
from .image_disk_reference import ImageDiskReference
from .creation_data import CreationData
from .source_vault import SourceVault
from .key_vault_and_secret_reference import KeyVaultAndSecretReference
from .key_vault_and_key_reference import KeyVaultAndKeyReference
from .encryption_settings import EncryptionSettings
from .disk import Disk
from .disk_update import DiskUpdate
from .grant_access_data import GrantAccessData
from .access_uri import AccessUri
from .snapshot import Snapshot
from .snapshot_update import SnapshotUpdate
from .availability_set_paged import AvailabilitySetPaged
from .virtual_machine_size_paged import VirtualMachineSizePaged
from .usage_paged import UsagePaged
from .image_paged import ImagePaged
from .virtual_machine_paged import VirtualMachinePaged
from .virtual_machine_scale_set_paged import VirtualMachineScaleSetPaged
from .virtual_machine_scale_set_sku_paged import VirtualMachineScaleSetSkuPaged
from .virtual_machine_scale_set_vm_paged import VirtualMachineScaleSetVMPaged
from .container_service_paged import ContainerServicePaged
from .disk_paged import DiskPaged
from .snapshot_paged import SnapshotPaged
from .compute_management_client_enums import (
StatusLevelTypes,
OperatingSystemTypes,
VirtualMachineSizeTypes,
CachingTypes,
DiskCreateOptionTypes,
StorageAccountTypes,
PassNames,
ComponentNames,
SettingNames,
ProtocolTypes,
UpgradeMode,
OperatingSystemStateTypes,
VirtualMachineScaleSetSkuScaleType,
ContainerServiceOchestratorTypes,
ContainerServiceVMSizeTypes,
DiskCreateOption,
AccessLevel,
InstanceViewTypes,
)
__all__ = [
'InstanceViewStatus',
'SubResource',
'Sku',
'AvailabilitySet',
'VirtualMachineSize',
'VirtualMachineExtensionImage',
'VirtualMachineImageResource',
'VirtualMachineExtensionInstanceView',
'VirtualMachineExtension',
'PurchasePlan',
'OSDiskImage',
'DataDiskImage',
'VirtualMachineImage',
'UsageName',
'Usage',
'VirtualMachineCaptureParameters',
'VirtualMachineCaptureResult',
'Plan',
'HardwareProfile',
'ImageReference',
'KeyVaultSecretReference',
'KeyVaultKeyReference',
'DiskEncryptionSettings',
'VirtualHardDisk',
'ManagedDiskParameters',
'OSDisk',
'DataDisk',
'StorageProfile',
'AdditionalUnattendContent',
'WinRMListener',
'WinRMConfiguration',
'WindowsConfiguration',
'SshPublicKey',
'SshConfiguration',
'LinuxConfiguration',
'VaultCertificate',
'VaultSecretGroup',
'OSProfile',
'NetworkInterfaceReference',
'NetworkProfile',
'BootDiagnostics',
'DiagnosticsProfile',
'VirtualMachineExtensionHandlerInstanceView',
'VirtualMachineAgentInstanceView',
'DiskInstanceView',
'BootDiagnosticsInstanceView',
'VirtualMachineInstanceView',
'VirtualMachine',
'UpgradePolicy',
'ImageOSDisk',
'ImageDataDisk',
'ImageStorageProfile',
'Image',
'VirtualMachineScaleSetOSProfile',
'VirtualMachineScaleSetManagedDiskParameters',
'VirtualMachineScaleSetOSDisk',
'VirtualMachineScaleSetDataDisk',
'VirtualMachineScaleSetStorageProfile',
'ApiEntityReference',
'VirtualMachineScaleSetIPConfiguration',
'VirtualMachineScaleSetNetworkConfiguration',
'VirtualMachineScaleSetNetworkProfile',
'VirtualMachineScaleSetExtension',
'VirtualMachineScaleSetExtensionProfile',
'VirtualMachineScaleSetVMProfile',
'VirtualMachineScaleSet',
'VirtualMachineScaleSetVMInstanceIDs',
'VirtualMachineScaleSetVMInstanceRequiredIDs',
'VirtualMachineStatusCodeCount',
'VirtualMachineScaleSetInstanceViewStatusesSummary',
'VirtualMachineScaleSetVMExtensionsSummary',
'VirtualMachineScaleSetInstanceView',
'VirtualMachineScaleSetSkuCapacity',
'VirtualMachineScaleSetSku',
'VirtualMachineScaleSetVM',
'VirtualMachineScaleSetVMInstanceView',
'ApiErrorBase',
'InnerError',
'ApiError',
'ComputeLongRunningOperationProperties',
'Resource',
'SubResourceReadOnly',
'OperationStatusResponse',
'ContainerServiceCustomProfile',
'ContainerServiceServicePrincipalProfile',
'ContainerServiceOrchestratorProfile',
'ContainerServiceMasterProfile',
'ContainerServiceAgentPoolProfile',
'ContainerServiceWindowsProfile',
'ContainerServiceSshPublicKey',
'ContainerServiceSshConfiguration',
'ContainerServiceLinuxProfile',
'ContainerServiceVMDiagnostics',
'ContainerServiceDiagnosticsProfile',
'ContainerService',
'ResourceUpdate',
'ImageDiskReference',
'CreationData',
'SourceVault',
'KeyVaultAndSecretReference',
'KeyVaultAndKeyReference',
'EncryptionSettings',
'Disk',
'DiskUpdate',
'GrantAccessData',
'AccessUri',
'Snapshot',
'SnapshotUpdate',
'AvailabilitySetPaged',
'VirtualMachineSizePaged',
'UsagePaged',
'ImagePaged',
'VirtualMachinePaged',
'VirtualMachineScaleSetPaged',
'VirtualMachineScaleSetSkuPaged',
'VirtualMachineScaleSetVMPaged',
'ContainerServicePaged',
'DiskPaged',
'SnapshotPaged',
'StatusLevelTypes',
'OperatingSystemTypes',
'VirtualMachineSizeTypes',
'CachingTypes',
'DiskCreateOptionTypes',
'StorageAccountTypes',
'PassNames',
'ComponentNames',
'SettingNames',
'ProtocolTypes',
'UpgradeMode',
'OperatingSystemStateTypes',
'VirtualMachineScaleSetSkuScaleType',
'ContainerServiceOchestratorTypes',
'ContainerServiceVMSizeTypes',
'DiskCreateOption',
'AccessLevel',
'InstanceViewTypes',
]
|
py | 1a3555bdee16f133219afb0d9526bd8d6c6016ea | # The MIT License (MIT)
#
# Copyright (c) 2016 Damien P. George (original Neopixel object)
# Copyright (c) 2017 Ladyada
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_dotstar` - DotStar strip driver
====================================================
* Author(s): Damien P. George, Limor Fried & Scott Shawcroft
"""
import busio
import digitalio
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_DotStar.git"
START_HEADER_SIZE = 4
LED_START = 0b11100000 # Three "1" bits, followed by 5 brightness bits
# Pixel color order constants
RGB = (0, 1, 2)
RBG = (0, 2, 1)
GRB = (1, 0, 2)
GBR = (1, 2, 0)
BRG = (2, 0, 1)
BGR = (2, 1, 0)
class DotStar:
"""
A sequence of dotstars.
:param ~microcontroller.Pin clock: The pin to output dotstar clock on.
:param ~microcontroller.Pin data: The pin to output dotstar data on.
:param int n: The number of dotstars in the chain
:param float brightness: Brightness of the pixels between 0.0 and 1.0
:param bool auto_write: True if the dotstars should immediately change when
set. If False, `show` must be called explicitly.
:param tuple pixel_order: Set the pixel order on the strip - different
strips implement this differently. If you send red, and it looks blue
or green on the strip, modify this! It should be one of the values above
Example for Gemma M0:
.. code-block:: python
import adafruit_dotstar
import time
from board import *
RED = 0x100000
with adafruit_dotstar.DotStar(APA102_SCK, APA102_MOSI, 1) as pixels:
pixels[0] = RED
time.sleep(2)
"""
def __init__(self, clock, data, n, *, brightness=1.0, auto_write=True, pixel_order=BGR):
self._spi = None
try:
self._spi = busio.SPI(clock, MOSI=data)
while not self._spi.try_lock():
pass
self._spi.configure(baudrate=4000000)
except ValueError:
self.dpin = digitalio.DigitalInOut(data)
self.cpin = digitalio.DigitalInOut(clock)
self.dpin.direction = digitalio.Direction.OUTPUT
self.cpin.direction = digitalio.Direction.OUTPUT
self.cpin.value = False
self._n = n
# Supply one extra clock cycle for each two pixels in the strip.
self.end_header_size = n // 16
if n % 16 != 0:
self.end_header_size += 1
self._buf = bytearray(n * 4 + START_HEADER_SIZE + self.end_header_size)
self.end_header_index = len(self._buf) - self.end_header_size
self.pixel_order = pixel_order
# Four empty bytes to start.
for i in range(START_HEADER_SIZE):
self._buf[i] = 0x00
# Mark the beginnings of each pixel.
for i in range(START_HEADER_SIZE, self.end_header_index, 4):
self._buf[i] = 0xff
# 0xff bytes at the end.
for i in range(self.end_header_index, len(self._buf)):
self._buf[i] = 0xff
self._brightness = 1.0
# Set auto_write to False temporarily so brightness setter does _not_
# call show() while in __init__.
self.auto_write = False
self.brightness = brightness
self.auto_write = auto_write
def deinit(self):
"""Blank out the DotStars and release the resources."""
self.auto_write = False
for i in range(START_HEADER_SIZE, self.end_header_index):
if i % 4 != 0:
self._buf[i] = 0
self.show()
if self._spi:
self._spi.deinit()
else:
self.dpin.deinit()
self.cpin.deinit()
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
self.deinit()
def __repr__(self):
return "[" + ", ".join([str(x) for x in self]) + "]"
def _set_item(self, index, value):
"""
value can be one of three things:
a (r,g,b) list/tuple
a (r,g,b, brightness) list/tuple
a single, longer int that contains RGB values, like 0xFFFFFF
brightness, if specified should be a float 0-1
Set a pixel value. You can set per-pixel brightness here, if it's not passed it
will use the max value for pixel brightness value, which is a good default.
Important notes about the per-pixel brightness - it's accomplished by
PWMing the entire output of the LED, and that PWM is at a much
slower clock than the rest of the LEDs. This can cause problems in
Persistence of Vision Applications
"""
        offset = index * 4 + START_HEADER_SIZE
        rgb = value
        brightness = 1
        if isinstance(value, int):
            # Unpack an int color like 0xFFFFFF into an (r, g, b) tuple.
            rgb = (value >> 16, (value >> 8) & 0xff, value & 0xff)
        elif len(value) == 4:
            # Only sequences can carry a 4th element; calling len() on an int
            # would raise TypeError, hence the elif.
            brightness = value[3]
            # Ignore value[3] below.
# LED startframe is three "1" bits, followed by 5 brightness bits
# then 8 bits for each of R, G, and B. The order of those 3 are configurable and
# vary based on hardware
# same as math.ceil(brightness * 31) & 0b00011111
# Idea from https://www.codeproject.com/Tips/700780/Fast-floor-ceiling-functions
brightness_byte = 32 - int(32 - brightness * 31) & 0b00011111
self._buf[offset] = brightness_byte | LED_START
self._buf[offset + 1] = rgb[self.pixel_order[0]]
self._buf[offset + 2] = rgb[self.pixel_order[1]]
self._buf[offset + 3] = rgb[self.pixel_order[2]]
def __setitem__(self, index, val):
if isinstance(index, slice):
start, stop, step = index.indices(self._n)
length = stop - start
if step != 0:
# same as math.ceil(length / step)
# Idea from https://fizzbuzzer.com/implement-a-ceil-function/
length = (length + step - 1) // step
if len(val) != length:
raise ValueError("Slice and input sequence size do not match.")
for val_i, in_i in enumerate(range(start, stop, step)):
self._set_item(in_i, val[val_i])
else:
self._set_item(index, val)
if self.auto_write:
self.show()
def __getitem__(self, index):
if isinstance(index, slice):
out = []
for in_i in range(*index.indices(self._n)):
out.append(
tuple(self._buf[in_i * 4 + (3 - i) + START_HEADER_SIZE] for i in range(3)))
return out
if index < 0:
index += len(self)
if index >= self._n or index < 0:
raise IndexError
offset = index * 4
return tuple(self._buf[offset + (3 - i) + START_HEADER_SIZE]
for i in range(3))
def __len__(self):
return self._n
@property
def brightness(self):
"""Overall brightness of the pixel"""
return self._brightness
@brightness.setter
def brightness(self, brightness):
self._brightness = min(max(brightness, 0.0), 1.0)
if self.auto_write:
self.show()
def fill(self, color):
"""Colors all pixels the given ***color***."""
auto_write = self.auto_write
self.auto_write = False
for i in range(self._n):
self[i] = color
if auto_write:
self.show()
self.auto_write = auto_write
def _ds_writebytes(self, buf):
for b in buf:
for _ in range(8):
self.cpin.value = True
self.dpin.value = (b & 0x80)
self.cpin.value = False
b = b << 1
def show(self):
"""Shows the new colors on the pixels themselves if they haven't already
been autowritten.
The colors may or may not be showing after this function returns because
it may be done asynchronously."""
# Create a second output buffer if we need to compute brightness
buf = self._buf
if self.brightness < 1.0:
buf = bytearray(self._buf)
# Four empty bytes to start.
for i in range(START_HEADER_SIZE):
buf[i] = 0x00
for i in range(START_HEADER_SIZE, self.end_header_index):
buf[i] = self._buf[i] if i % 4 == 0 else int(self._buf[i] * self._brightness)
# Four 0xff bytes at the end.
for i in range(self.end_header_index, len(buf)):
buf[i] = 0xff
if self._spi:
self._spi.write(buf)
else:
self._ds_writebytes(buf)
self.cpin.value = False
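# A hedged usage sketch beyond the class docstring above (the pin names are
# assumptions for a Gemma M0 style board):
#
#     import board
#     pixels = DotStar(board.APA102_SCK, board.APA102_MOSI, 4, brightness=0.2)
#     pixels[0] = (255, 0, 0)                # full red
#     pixels[1] = (0, 255, 0, 0.5)           # green at half per-pixel brightness
#     pixels[2:4] = [(0, 0, 255), 0xFFFFFF]  # slice assignment, int color value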
|
py | 1a35572ae5b0ea98bd81dd827732520756e2995d | import sys
import typing
def average_normals(average_type: int = 'CUSTOM_NORMAL',
weight: int = 50,
threshold: float = 0.01):
'''Average custom normals of selected vertices
    :param average_type: Type, Averaging method.
        CUSTOM_NORMAL Custom Normal, Take Average of vert Normals.
        FACE_AREA Face Area, Set all vert normals by Face Area.
        CORNER_ANGLE Corner Angle, Set all vert normals by Corner Angle.
:type average_type: int
:param weight: Weight, Weight applied per face
:type weight: int
:param threshold: Threshold, Threshold value for different weights to be considered equal
:type threshold: float
'''
pass
def beautify_fill(angle_limit: float = 3.14159):
'''Rearrange some faces to try to get less degenerated geometry
:param angle_limit: Max Angle, Angle limit
:type angle_limit: float
'''
pass
def bevel(offset_type: int = 'OFFSET',
offset: float = 0.0,
offset_pct: float = 0.0,
segments: int = 1,
profile: float = 0.5,
vertex_only: bool = False,
clamp_overlap: bool = False,
loop_slide: bool = True,
mark_seam: bool = False,
mark_sharp: bool = False,
material: int = -1,
harden_normals: bool = False,
face_strength_mode: int = 'NONE',
miter_outer: int = 'SHARP',
miter_inner: int = 'SHARP',
spread: float = 0.1,
release_confirm: bool = False):
'''Cut into selected items at an angle to create flat or rounded bevel or chamfer
    :param offset_type: Width Type, What distance Width measures. OFFSET Offset, Amount is offset of new edges from original. WIDTH Width, Amount is width of new face. DEPTH Depth, Amount is perpendicular distance from original edge to bevel face. PERCENT Percent, Amount is percent of adjacent edge length.
:type offset_type: int
:param offset: Width, Bevel amount
:type offset: float
:param offset_pct: Width Percent, Bevel amount for percentage method
:type offset_pct: float
:param segments: Segments, Segments for curved edge
:type segments: int
:param profile: Profile, Controls profile shape (0.5 = round)
:type profile: float
:param vertex_only: Vertex Only, Bevel only vertices
:type vertex_only: bool
:param clamp_overlap: Clamp Overlap, Do not allow beveled edges/vertices to overlap each other
:type clamp_overlap: bool
:param loop_slide: Loop Slide, Prefer slide along edge to even widths
:type loop_slide: bool
:param mark_seam: Mark Seams, Mark Seams along beveled edges
:type mark_seam: bool
:param mark_sharp: Mark Sharp, Mark beveled edges as sharp
:type mark_sharp: bool
:param material: Material, Material for bevel faces (-1 means use adjacent faces)
:type material: int
:param harden_normals: Harden Normals, Match normals of new faces to adjacent faces
:type harden_normals: bool
    :param face_strength_mode: Face Strength Mode, Whether to set face strength, and which faces to set face strength on. NONE None, Do not set face strength. NEW New, Set face strength on new faces only. AFFECTED Affected, Set face strength on new and modified faces only. ALL All, Set face strength on all faces.
:type face_strength_mode: int
    :param miter_outer: Outer Miter, Pattern to use for outside of miters. SHARP Sharp, Outside of miter is sharp. PATCH Patch, Outside of miter is squared-off patch. ARC Arc, Outside of miter is arc.
:type miter_outer: int
    :param miter_inner: Inner Miter, Pattern to use for inside of miters. SHARP Sharp, Inside of miter is sharp. ARC Arc, Inside of miter is arc.
:type miter_inner: int
:param spread: Spread, Amount to spread arcs for arc inner miters
:type spread: float
:param release_confirm: Confirm on Release
:type release_confirm: bool
'''
pass
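# Usage sketch (illustrative addition, not part of the generated stubs):
# assumes Blender's embedded Python with an active mesh in Edit Mode and some
# edges selected.
def _example_bevel():
    import bpy
    # Bevel the selected edges 0.1 units wide, rounded across two segments.
    bpy.ops.mesh.bevel(offset_type='OFFSET', offset=0.1, segments=2,
                       profile=0.5)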
def bisect(plane_co: float = (0.0, 0.0, 0.0),
plane_no: float = (0.0, 0.0, 0.0),
use_fill: bool = False,
clear_inner: bool = False,
clear_outer: bool = False,
threshold: float = 0.0001,
xstart: int = 0,
xend: int = 0,
ystart: int = 0,
yend: int = 0,
cursor: int = 1002):
'''Cut geometry along a plane (click-drag to define plane)
:param plane_co: Plane Point, A point on the plane
:type plane_co: float
:param plane_no: Plane Normal, The direction the plane points
:type plane_no: float
:param use_fill: Fill, Fill in the cut
:type use_fill: bool
:param clear_inner: Clear Inner, Remove geometry behind the plane
:type clear_inner: bool
:param clear_outer: Clear Outer, Remove geometry in front of the plane
:type clear_outer: bool
:param threshold: Axis Threshold, Preserves the existing geometry along the cut plane
:type threshold: float
:param xstart: X Start
:type xstart: int
:param xend: X End
:type xend: int
:param ystart: Y Start
:type ystart: int
:param yend: Y End
:type yend: int
:param cursor: Cursor, Mouse cursor style to use during the modal operator
:type cursor: int
'''
pass
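# Usage sketch (illustrative): although bisect is normally driven by a
# click-drag, it can be called directly once the plane is given. Assumes Edit
# Mode with geometry selected.
def _example_bisect():
    import bpy
    # Cut with the global XY plane and discard everything below it.
    bpy.ops.mesh.bisect(plane_co=(0.0, 0.0, 0.0), plane_no=(0.0, 0.0, 1.0),
                        use_fill=True, clear_inner=True)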
def blend_from_shape(shape: int = '', blend: float = 1.0, add: bool = True):
'''Blend in shape from a shape key
:param shape: Shape, Shape key to use for blending
:type shape: int
:param blend: Blend, Blending factor
:type blend: float
:param add: Add, Add rather than blend between shapes
:type add: bool
'''
pass
def bridge_edge_loops(type: int = 'SINGLE',
use_merge: bool = False,
merge_factor: float = 0.5,
twist_offset: int = 0,
number_cuts: int = 0,
interpolation: int = 'PATH',
smoothness: float = 1.0,
profile_shape_factor: float = 0.0,
profile_shape: int = 'SMOOTH'):
'''Create a bridge of faces between two or more selected edge loops
:param type: Connect Loops, Method of bridging multiple loops
:type type: int
:param use_merge: Merge, Merge rather than creating faces
:type use_merge: bool
:param merge_factor: Merge Factor
:type merge_factor: float
:param twist_offset: Twist, Twist offset for closed loops
:type twist_offset: int
:param number_cuts: Number of Cuts
:type number_cuts: int
:param interpolation: Interpolation, Interpolation method
:type interpolation: int
:param smoothness: Smoothness, Smoothness factor
:type smoothness: float
:param profile_shape_factor: Profile Factor, How much intermediary new edges are shrunk/expanded
:type profile_shape_factor: float
    :param profile_shape: Profile Shape, Shape of the profile. SMOOTH Smooth, Smooth falloff. SPHERE Sphere, Spherical falloff. ROOT Root, Root falloff. INVERSE_SQUARE Inverse Square, Inverse Square falloff. SHARP Sharp, Sharp falloff. LINEAR Linear, Linear falloff.
:type profile_shape: int
'''
pass
def colors_reverse():
'''Flip direction of vertex colors inside faces
'''
pass
def colors_rotate(use_ccw: bool = False):
'''Rotate vertex colors inside faces
:param use_ccw: Counter Clockwise
:type use_ccw: bool
'''
pass
def convex_hull(delete_unused: bool = True,
use_existing_faces: bool = True,
make_holes: bool = False,
join_triangles: bool = True,
face_threshold: float = 0.698132,
shape_threshold: float = 0.698132,
uvs: bool = False,
vcols: bool = False,
seam: bool = False,
sharp: bool = False,
materials: bool = False):
'''Enclose selected vertices in a convex polyhedron
:param delete_unused: Delete Unused, Delete selected elements that are not used by the hull
:type delete_unused: bool
:param use_existing_faces: Use Existing Faces, Skip hull triangles that are covered by a pre-existing face
:type use_existing_faces: bool
:param make_holes: Make Holes, Delete selected faces that are used by the hull
:type make_holes: bool
:param join_triangles: Join Triangles, Merge adjacent triangles into quads
:type join_triangles: bool
:param face_threshold: Max Face Angle, Face angle limit
:type face_threshold: float
:param shape_threshold: Max Shape Angle, Shape angle limit
:type shape_threshold: float
:param uvs: Compare UVs
:type uvs: bool
:param vcols: Compare VCols
:type vcols: bool
:param seam: Compare Seam
:type seam: bool
:param sharp: Compare Sharp
:type sharp: bool
:param materials: Compare Materials
:type materials: bool
'''
pass
def customdata_custom_splitnormals_add():
'''Add a custom split normals layer, if none exists yet
'''
pass
def customdata_custom_splitnormals_clear():
'''Remove the custom split normals layer, if it exists
'''
pass
def customdata_mask_clear():
'''Clear vertex sculpt masking data from the mesh
'''
pass
def customdata_skin_add():
'''Add a vertex skin layer
'''
pass
def customdata_skin_clear():
'''Clear vertex skin layer
'''
pass
def decimate(ratio: float = 1.0,
use_vertex_group: bool = False,
vertex_group_factor: float = 1.0,
invert_vertex_group: bool = False,
use_symmetry: bool = False,
symmetry_axis: int = 'Y'):
'''Simplify geometry by collapsing edges
:param ratio: Ratio
:type ratio: float
:param use_vertex_group: Vertex Group, Use active vertex group as an influence
:type use_vertex_group: bool
:param vertex_group_factor: Weight, Vertex group strength
:type vertex_group_factor: float
:param invert_vertex_group: Invert, Invert vertex group influence
:type invert_vertex_group: bool
:param use_symmetry: Symmetry, Maintain symmetry on an axis
:type use_symmetry: bool
:param symmetry_axis: Axis, Axis of symmetry
:type symmetry_axis: int
'''
pass
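# Usage sketch (illustrative): collapse-based decimation of the current
# selection; assumes Edit Mode.
def _example_decimate():
    import bpy
    bpy.ops.mesh.decimate(ratio=0.5)  # aim for roughly half the face count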
def delete(type: str = 'VERT'):
'''Delete selected vertices, edges or faces
:param type: Type, Method used for deleting mesh data
:type type: str
'''
pass
def delete_edgeloop(use_face_split: bool = True):
'''Delete an edge loop by merging the faces on each side
:param use_face_split: Face Split, Split off face corners to maintain surrounding geometry
:type use_face_split: bool
'''
pass
def delete_loose(use_verts: bool = True,
use_edges: bool = True,
use_faces: bool = False):
'''Delete loose vertices, edges or faces
:param use_verts: Vertices, Remove loose vertices
:type use_verts: bool
:param use_edges: Edges, Remove loose edges
:type use_edges: bool
:param use_faces: Faces, Remove loose faces
:type use_faces: bool
'''
pass
def dissolve_degenerate(threshold: float = 0.0001):
'''Dissolve zero area faces and zero length edges
:param threshold: Merge Distance, Minimum distance between elements to merge
:type threshold: float
'''
pass
def dissolve_edges(use_verts: bool = True, use_face_split: bool = False):
'''Dissolve edges, merging faces
:param use_verts: Dissolve Verts, Dissolve remaining vertices
:type use_verts: bool
:param use_face_split: Face Split, Split off face corners to maintain surrounding geometry
:type use_face_split: bool
'''
pass
def dissolve_faces(use_verts: bool = False):
'''Dissolve faces
:param use_verts: Dissolve Verts, Dissolve remaining vertices
:type use_verts: bool
'''
pass
def dissolve_limited(angle_limit: float = 0.0872665,
use_dissolve_boundaries: bool = False,
delimit: typing.Set[int] = {'NORMAL'}):
'''Dissolve selected edges and verts, limited by the angle of surrounding geometry
:param angle_limit: Max Angle, Angle limit
:type angle_limit: float
:param use_dissolve_boundaries: All Boundaries, Dissolve all vertices inbetween face boundaries
:type use_dissolve_boundaries: bool
    :param delimit: Delimit, Delimit dissolve operation. NORMAL Regular, Delimit by face directions. MATERIAL Material, Delimit by face material. SEAM Seam, Delimit by edge seams. SHARP Sharp, Delimit by sharp edges. UV UVs, Delimit by UV coordinates.
:type delimit: typing.Set[int]
'''
pass
def dissolve_mode(use_verts: bool = False,
use_face_split: bool = False,
use_boundary_tear: bool = False):
'''Dissolve geometry based on the selection mode
:param use_verts: Dissolve Verts, Dissolve remaining vertices
:type use_verts: bool
:param use_face_split: Face Split, Split off face corners to maintain surrounding geometry
:type use_face_split: bool
:param use_boundary_tear: Tear Boundary, Split off face corners instead of merging faces
:type use_boundary_tear: bool
'''
pass
def dissolve_verts(use_face_split: bool = False,
use_boundary_tear: bool = False):
'''Dissolve verts, merge edges and faces
:param use_face_split: Face Split, Split off face corners to maintain surrounding geometry
:type use_face_split: bool
:param use_boundary_tear: Tear Boundary, Split off face corners instead of merging faces
:type use_boundary_tear: bool
'''
pass
def dupli_extrude_cursor(rotate_source: bool = True):
'''Duplicate and extrude selected vertices, edges or faces towards the mouse cursor
:param rotate_source: Rotate Source, Rotate initial selection giving better shape
:type rotate_source: bool
'''
pass
def duplicate(mode: int = 1):
'''Duplicate selected vertices, edges or faces
:param mode: Mode
:type mode: int
'''
pass
def duplicate_move(MESH_OT_duplicate=None, TRANSFORM_OT_translate=None):
'''Duplicate mesh and move
:param MESH_OT_duplicate: Duplicate, Duplicate selected vertices, edges or faces
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
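# Usage sketch (illustrative): macro operators like this take each
# sub-operator's settings as a dict keyed by the parameter names above.
def _example_duplicate_move():
    import bpy
    bpy.ops.mesh.duplicate_move(
        TRANSFORM_OT_translate={"value": (1.0, 0.0, 0.0)})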
def edge_collapse():
'''Collapse selected edges
'''
pass
def edge_face_add():
'''Add an edge or face to selected
'''
pass
def edge_rotate(use_ccw: bool = False):
'''Rotate selected edge or adjoining faces
:param use_ccw: Counter Clockwise
:type use_ccw: bool
'''
pass
def edge_split():
'''Split selected edges so that each neighbor face gets its own copy
'''
pass
def edgering_select(extend: bool = False,
deselect: bool = False,
toggle: bool = False,
ring: bool = True):
'''Select an edge ring
:param extend: Extend, Extend the selection
:type extend: bool
:param deselect: Deselect, Remove from the selection
:type deselect: bool
:param toggle: Toggle Select, Toggle the selection
:type toggle: bool
:param ring: Select Ring, Select ring
:type ring: bool
'''
pass
def edges_select_sharp(sharpness: float = 0.523599):
'''Select all sharp-enough edges
:param sharpness: Sharpness
:type sharpness: float
'''
pass
def extrude_context(use_normal_flip: bool = False, mirror: bool = False):
'''Extrude selection
:param use_normal_flip: Flip Normals
:type use_normal_flip: bool
:param mirror: Mirror Editing
:type mirror: bool
'''
pass
def extrude_context_move(MESH_OT_extrude_context=None,
TRANSFORM_OT_translate=None):
'''Extrude region together along the average normal
:param MESH_OT_extrude_context: Extrude Context, Extrude selection
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def extrude_edges_indiv(use_normal_flip: bool = False, mirror: bool = False):
'''Extrude individual edges only
:param use_normal_flip: Flip Normals
:type use_normal_flip: bool
:param mirror: Mirror Editing
:type mirror: bool
'''
pass
def extrude_edges_move(MESH_OT_extrude_edges_indiv=None,
TRANSFORM_OT_translate=None):
'''Extrude edges and move result
:param MESH_OT_extrude_edges_indiv: Extrude Only Edges, Extrude individual edges only
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def extrude_faces_indiv(mirror: bool = False):
'''Extrude individual faces only
:param mirror: Mirror Editing
:type mirror: bool
'''
pass
def extrude_faces_move(MESH_OT_extrude_faces_indiv=None,
TRANSFORM_OT_shrink_fatten=None):
'''Extrude each individual face separately along local normals
:param MESH_OT_extrude_faces_indiv: Extrude Individual Faces, Extrude individual faces only
:param TRANSFORM_OT_shrink_fatten: Shrink/Fatten, Shrink/fatten selected vertices along normals
'''
pass
def extrude_region(use_normal_flip: bool = False, mirror: bool = False):
'''Extrude region of faces
:param use_normal_flip: Flip Normals
:type use_normal_flip: bool
:param mirror: Mirror Editing
:type mirror: bool
'''
pass
def extrude_region_move(MESH_OT_extrude_region=None,
TRANSFORM_OT_translate=None):
'''Extrude region and move result
:param MESH_OT_extrude_region: Extrude Region, Extrude region of faces
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
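# Usage sketch (illustrative): the classic scripted extrude. Extrude the
# selected region, then translate the new geometry one unit along Z.
def _example_extrude_region_move():
    import bpy
    bpy.ops.mesh.extrude_region_move(
        TRANSFORM_OT_translate={"value": (0.0, 0.0, 1.0)})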
def extrude_region_shrink_fatten(MESH_OT_extrude_region=None,
TRANSFORM_OT_shrink_fatten=None):
'''Extrude region together along local normals
:param MESH_OT_extrude_region: Extrude Region, Extrude region of faces
:param TRANSFORM_OT_shrink_fatten: Shrink/Fatten, Shrink/fatten selected vertices along normals
'''
pass
def extrude_repeat(offset: float = 2.0, steps: int = 10):
'''Extrude selected vertices, edges or faces repeatedly
:param offset: Offset
:type offset: float
:param steps: Steps
:type steps: int
'''
pass
def extrude_vertices_move(MESH_OT_extrude_verts_indiv=None,
TRANSFORM_OT_translate=None):
'''Extrude vertices and move result
:param MESH_OT_extrude_verts_indiv: Extrude Only Vertices, Extrude individual vertices only
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def extrude_verts_indiv(mirror: bool = False):
'''Extrude individual vertices only
:param mirror: Mirror Editing
:type mirror: bool
'''
pass
def face_make_planar(factor: float = 1.0, repeat: int = 1):
'''Flatten selected faces
:param factor: Factor
:type factor: float
:param repeat: Iterations
:type repeat: int
'''
pass
def face_split_by_edges():
'''Weld loose edges into faces (splitting them into new faces)
'''
pass
def faces_mirror_uv(direction: int = 'POSITIVE', precision: int = 3):
'''Copy mirror UV coordinates on the X axis based on a mirrored mesh
:param direction: Axis Direction
:type direction: int
:param precision: Precision, Tolerance for finding vertex duplicates
:type precision: int
'''
pass
def faces_select_linked_flat(sharpness: float = 0.0174533):
'''Select linked faces by angle
:param sharpness: Sharpness
:type sharpness: float
'''
pass
def faces_shade_flat():
'''Display faces flat
'''
pass
def faces_shade_smooth():
'''Display faces smooth (using vertex normals)
'''
pass
def fill(use_beauty: bool = True):
'''Fill a selected edge loop with faces
:param use_beauty: Beauty, Use best triangulation division
:type use_beauty: bool
'''
pass
def fill_grid(span: int = 1, offset: int = 0, use_interp_simple: bool = False):
'''Fill grid from two loops
:param span: Span, Number of grid columns
:type span: int
:param offset: Offset, Vertex that is the corner of the grid
:type offset: int
:param use_interp_simple: Simple Blending, Use simple interpolation of grid vertices
:type use_interp_simple: bool
'''
pass
def fill_holes(sides: int = 4):
'''Fill in holes (boundary edge loops)
:param sides: Sides, Number of sides in hole required to fill (zero fills all holes)
:type sides: int
'''
pass
def flip_normals():
'''Flip the direction of selected faces’ normals (and of their vertices)
'''
pass
def hide(unselected: bool = False):
'''Hide (un)selected vertices, edges or faces
:param unselected: Unselected, Hide unselected rather than selected
:type unselected: bool
'''
pass
def inset(use_boundary: bool = True,
use_even_offset: bool = True,
use_relative_offset: bool = False,
use_edge_rail: bool = False,
thickness: float = 0.0,
depth: float = 0.0,
use_outset: bool = False,
use_select_inset: bool = False,
use_individual: bool = False,
use_interpolate: bool = True,
release_confirm: bool = False):
'''Inset new faces into selected faces
:param use_boundary: Boundary, Inset face boundaries
:type use_boundary: bool
:param use_even_offset: Offset Even, Scale the offset to give more even thickness
:type use_even_offset: bool
:param use_relative_offset: Offset Relative, Scale the offset by surrounding geometry
:type use_relative_offset: bool
:param use_edge_rail: Edge Rail, Inset the region along existing edges
:type use_edge_rail: bool
:param thickness: Thickness
:type thickness: float
:param depth: Depth
:type depth: float
:param use_outset: Outset, Outset rather than inset
:type use_outset: bool
:param use_select_inset: Select Outer, Select the new inset faces
:type use_select_inset: bool
:param use_individual: Individual, Individual Face Inset
:type use_individual: bool
:param use_interpolate: Interpolate, Blend face data across the inset
:type use_interpolate: bool
:param release_confirm: Confirm on Release
:type release_confirm: bool
'''
pass
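# Usage sketch (illustrative): inset every selected face individually with an
# even 0.02-unit rim; assumes Edit Mode with faces selected.
def _example_inset():
    import bpy
    bpy.ops.mesh.inset(thickness=0.02, use_individual=True,
                       use_even_offset=True)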
def intersect(mode: int = 'SELECT_UNSELECT',
separate_mode: int = 'CUT',
threshold: float = 1e-06):
'''Cut an intersection into faces
    :param mode: Source. SELECT Self Intersect, Self intersect selected faces. SELECT_UNSELECT Selected/Unselected, Intersect selected with unselected faces.
:type mode: int
    :param separate_mode: Separate Mode. ALL All, Separate all geometry from intersections. CUT Cut, Cut into geometry keeping each side separate (Selected/Unselected only). NONE Merge, Merge all geometry from the intersection.
:type separate_mode: int
:param threshold: Merge threshold
:type threshold: float
'''
pass
def intersect_boolean(operation: int = 'DIFFERENCE',
use_swap: bool = False,
threshold: float = 1e-06):
'''Cut solid geometry from selected to unselected
:param operation: Boolean
:type operation: int
:param use_swap: Swap, Use with difference intersection to swap which side is kept
:type use_swap: bool
:param threshold: Merge threshold
:type threshold: float
'''
pass
def knife_project(cut_through: bool = False):
'''Use other objects outlines & boundaries to project knife cuts
:param cut_through: Cut through, Cut through all faces, not just visible ones
:type cut_through: bool
'''
pass
def knife_tool(use_occlude_geometry: bool = True,
only_selected: bool = False,
wait_for_input: bool = True):
'''Cut new topology
:param use_occlude_geometry: Occlude Geometry, Only cut the front most geometry
:type use_occlude_geometry: bool
:param only_selected: Only Selected, Only cut selected geometry
:type only_selected: bool
:param wait_for_input: Wait for Input
:type wait_for_input: bool
'''
pass
def loop_multi_select(ring: bool = False):
'''Select a loop of connected edges by connection type
:param ring: Ring
:type ring: bool
'''
pass
def loop_select(extend: bool = False,
deselect: bool = False,
toggle: bool = False,
ring: bool = False):
'''Select a loop of connected edges
:param extend: Extend Select, Extend the selection
:type extend: bool
:param deselect: Deselect, Remove from the selection
:type deselect: bool
:param toggle: Toggle Select, Toggle the selection
:type toggle: bool
:param ring: Select Ring, Select ring
:type ring: bool
'''
pass
def loop_to_region(select_bigger: bool = False):
'''Select region of faces inside of a selected loop of edges
:param select_bigger: Select Bigger, Select bigger regions instead of smaller ones
:type select_bigger: bool
'''
pass
def loopcut(number_cuts: int = 1,
smoothness: float = 0.0,
falloff: int = 'INVERSE_SQUARE',
object_index: int = -1,
edge_index: int = -1,
mesh_select_mode_init=(False, False, False)):
'''Add a new loop between existing loops
:param number_cuts: Number of Cuts
:type number_cuts: int
:param smoothness: Smoothness, Smoothness factor
:type smoothness: float
    :param falloff: Falloff, Falloff type for the feather. SMOOTH Smooth, Smooth falloff. SPHERE Sphere, Spherical falloff. ROOT Root, Root falloff. INVERSE_SQUARE Inverse Square, Inverse Square falloff. SHARP Sharp, Sharp falloff. LINEAR Linear, Linear falloff.
:type falloff: int
:param object_index: Object Index
:type object_index: int
:param edge_index: Edge Index
:type edge_index: int
'''
pass
def loopcut_slide(MESH_OT_loopcut=None, TRANSFORM_OT_edge_slide=None):
'''Cut mesh loop and slide it
:param MESH_OT_loopcut: Loop Cut, Add a new loop between existing loops
:param TRANSFORM_OT_edge_slide: Edge Slide, Slide an edge loop along a mesh
'''
pass
def mark_freestyle_edge(clear: bool = False):
'''(Un)mark selected edges as Freestyle feature edges
:param clear: Clear
:type clear: bool
'''
pass
def mark_freestyle_face(clear: bool = False):
'''(Un)mark selected faces for exclusion from Freestyle feature edge detection
:param clear: Clear
:type clear: bool
'''
pass
def mark_seam(clear: bool = False):
'''(Un)mark selected edges as a seam
:param clear: Clear
:type clear: bool
'''
pass
def mark_sharp(clear: bool = False, use_verts: bool = False):
'''(Un)mark selected edges as sharp
:param clear: Clear
:type clear: bool
:param use_verts: Vertices, Consider vertices instead of edges to select which edges to (un)tag as sharp
:type use_verts: bool
'''
pass
def merge(type: int = 'CENTER', uvs: bool = False):
'''Merge selected vertices
:param type: Type, Merge method to use
:type type: int
:param uvs: UVs, Move UVs according to merge
:type uvs: bool
'''
pass
def merge_normals():
'''Merge custom normals of selected vertices
'''
pass
def mod_weighted_strength(set: bool = False, face_strength: int = 'MEDIUM'):
'''Set/Get strength of face (used in Weighted Normal modifier)
:param set: Set value, Set Value of faces
:type set: bool
:param face_strength: Face Strength, Strength to use for assigning or selecting face influence for weighted normal modifier
:type face_strength: int
'''
pass
def normals_make_consistent(inside: bool = False):
'''Make face and vertex normals point either outside or inside the mesh
:param inside: Inside
:type inside: bool
'''
pass
def normals_tools(mode: int = 'COPY', absolute: bool = False):
'''Custom normals tools using Normal Vector of UI
    :param mode: Mode, Mode of tools taking input from Interface. COPY Copy Normal, Copy normal to buffer. PASTE Paste Normal, Paste normal from buffer. ADD Add Normal, Add normal vector with selection. MULTIPLY Multiply Normal, Multiply normal vector with selection. RESET Reset Normal, Reset buffer and/or normal of selected element.
:type mode: int
:param absolute: Absolute Coordinates, Copy Absolute coordinates or Normal vector
:type absolute: bool
'''
pass
def offset_edge_loops(use_cap_endpoint: bool = False):
'''Create offset edge loop from the current selection
:param use_cap_endpoint: Cap Endpoint, Extend loop around end-points
:type use_cap_endpoint: bool
'''
pass
def offset_edge_loops_slide(MESH_OT_offset_edge_loops=None,
TRANSFORM_OT_edge_slide=None):
'''Offset edge loop slide
:param MESH_OT_offset_edge_loops: Offset Edge Loop, Create offset edge loop from the current selection
:param TRANSFORM_OT_edge_slide: Edge Slide, Slide an edge loop along a mesh
'''
pass
def point_normals(mode: int = 'COORDINATES',
invert: bool = False,
align: bool = False,
target_location: float = (0.0, 0.0, 0.0),
spherize: bool = False,
spherize_strength: float = 0.1):
'''Point selected custom normals to specified Target
    :param mode: Mode, How to define coordinates to point custom normals to. COORDINATES Coordinates, Use static coordinates (defined by various means). MOUSE Mouse, Follow mouse cursor.
:type mode: int
:param invert: Invert, Invert affected normals
:type invert: bool
:param align: Align, Make all affected normals parallel
:type align: bool
:param target_location: Target, Target location to which normals will point
:type target_location: float
:param spherize: Spherize, Interpolate between original and new normals
:type spherize: bool
:param spherize_strength: Spherize Strength, Ratio of spherized normal to original normal
:type spherize_strength: float
'''
pass
def poke(offset: float = 0.0,
use_relative_offset: bool = False,
center_mode: int = 'MEDIAN_WEIGHTED'):
'''Split a face into a fan
:param offset: Poke Offset, Poke Offset
:type offset: float
:param use_relative_offset: Offset Relative, Scale the offset by surrounding geometry
:type use_relative_offset: bool
    :param center_mode: Poke Center, Poke Face Center Calculation. MEDIAN_WEIGHTED Weighted Median, Weighted median face center. MEDIAN Median, Median face center. BOUNDS Bounds, Face bounds center.
:type center_mode: int
'''
pass
def polybuild_dissolve_at_cursor():
    '''Undocumented; contribute documentation at <https://developer.blender.org/T51061>
'''
pass
def polybuild_face_at_cursor(mirror: bool = False,
use_proportional_edit: bool = False,
proportional_edit_falloff: int = 'SMOOTH',
proportional_size: float = 1.0,
use_proportional_connected: bool = False,
use_proportional_projected: bool = False,
release_confirm: bool = False,
use_accurate: bool = False):
    '''Undocumented; contribute documentation at <https://developer.blender.org/T51061>
:param mirror: Mirror Editing
:type mirror: bool
:param use_proportional_edit: Proportional Editing
:type use_proportional_edit: bool
    :param proportional_edit_falloff: Proportional Falloff, Falloff type for proportional editing mode. SMOOTH Smooth, Smooth falloff. SPHERE Sphere, Spherical falloff. ROOT Root, Root falloff. INVERSE_SQUARE Inverse Square, Inverse Square falloff. SHARP Sharp, Sharp falloff. LINEAR Linear, Linear falloff. CONSTANT Constant, Constant falloff. RANDOM Random, Random falloff.
:type proportional_edit_falloff: int
:param proportional_size: Proportional Size
:type proportional_size: float
:param use_proportional_connected: Connected
:type use_proportional_connected: bool
:param use_proportional_projected: Projected (2D)
:type use_proportional_projected: bool
:param release_confirm: Confirm on Release, Always confirm operation when releasing button
:type release_confirm: bool
:param use_accurate: Accurate, Use accurate transformation
:type use_accurate: bool
'''
pass
def polybuild_face_at_cursor_move(MESH_OT_polybuild_face_at_cursor=None,
TRANSFORM_OT_translate=None):
    '''Undocumented; contribute documentation at <https://developer.blender.org/T51061>
:param MESH_OT_polybuild_face_at_cursor: Poly Build Face at Cursor
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def polybuild_split_at_cursor(mirror: bool = False,
use_proportional_edit: bool = False,
proportional_edit_falloff: int = 'SMOOTH',
proportional_size: float = 1.0,
use_proportional_connected: bool = False,
use_proportional_projected: bool = False,
release_confirm: bool = False,
use_accurate: bool = False):
    '''Undocumented; contribute documentation at <https://developer.blender.org/T51061>
:param mirror: Mirror Editing
:type mirror: bool
:param use_proportional_edit: Proportional Editing
:type use_proportional_edit: bool
    :param proportional_edit_falloff: Proportional Falloff, Falloff type for proportional editing mode. SMOOTH Smooth, Smooth falloff. SPHERE Sphere, Spherical falloff. ROOT Root, Root falloff. INVERSE_SQUARE Inverse Square, Inverse Square falloff. SHARP Sharp, Sharp falloff. LINEAR Linear, Linear falloff. CONSTANT Constant, Constant falloff. RANDOM Random, Random falloff.
:type proportional_edit_falloff: int
:param proportional_size: Proportional Size
:type proportional_size: float
:param use_proportional_connected: Connected
:type use_proportional_connected: bool
:param use_proportional_projected: Projected (2D)
:type use_proportional_projected: bool
:param release_confirm: Confirm on Release, Always confirm operation when releasing button
:type release_confirm: bool
:param use_accurate: Accurate, Use accurate transformation
:type use_accurate: bool
'''
pass
def polybuild_split_at_cursor_move(MESH_OT_polybuild_split_at_cursor=None,
TRANSFORM_OT_translate=None):
    '''Undocumented; contribute documentation at <https://developer.blender.org/T51061>
:param MESH_OT_polybuild_split_at_cursor: Poly Build Split at Cursor
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def primitive_circle_add(vertices: int = 32,
radius: float = 1.0,
fill_type: int = 'NOTHING',
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a circle mesh
:param vertices: Vertices
:type vertices: int
:param radius: Radius
:type radius: float
    :param fill_type: Fill Type. NOTHING Nothing, Don’t fill at all. NGON Ngon, Use ngons. TRIFAN Triangle Fan, Use triangle fans.
:type fill_type: int
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_cone_add(vertices: int = 32,
radius1: float = 1.0,
radius2: float = 0.0,
depth: float = 2.0,
end_fill_type: int = 'NGON',
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a conic mesh
:param vertices: Vertices
:type vertices: int
:param radius1: Radius 1
:type radius1: float
:param radius2: Radius 2
:type radius2: float
:param depth: Depth
:type depth: float
    :param end_fill_type: Base Fill Type. NOTHING Nothing, Don’t fill at all. NGON Ngon, Use ngons. TRIFAN Triangle Fan, Use triangle fans.
:type end_fill_type: int
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_cube_add(size: float = 2.0,
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a cube mesh
:param size: Size
:type size: float
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
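# Usage sketch (illustrative): primitive_* operators run in Object Mode; this
# adds a 2-unit cube one unit above the world origin.
def _example_primitive_cube_add():
    import bpy
    bpy.ops.mesh.primitive_cube_add(size=2.0, location=(0.0, 0.0, 1.0),
                                    enter_editmode=False)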
def primitive_cube_add_gizmo(
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0),
matrix: float = ((0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0),
(0.0, 0.0, 0.0, 0.0), (0.0, 0.0, 0.0, 0.0))):
'''Construct a cube mesh
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
:param matrix: Matrix
:type matrix: float
'''
pass
def primitive_cylinder_add(vertices: int = 32,
radius: float = 1.0,
depth: float = 2.0,
end_fill_type: int = 'NGON',
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a cylinder mesh
:param vertices: Vertices
:type vertices: int
:param radius: Radius
:type radius: float
:param depth: Depth
:type depth: float
    :param end_fill_type: Cap Fill Type. NOTHING Nothing, Don’t fill at all. NGON Ngon, Use ngons. TRIFAN Triangle Fan, Use triangle fans.
:type end_fill_type: int
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_grid_add(x_subdivisions: int = 10,
y_subdivisions: int = 10,
size: float = 2.0,
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a grid mesh
:param x_subdivisions: X Subdivisions
:type x_subdivisions: int
:param y_subdivisions: Y Subdivisions
:type y_subdivisions: int
:param size: Size
:type size: float
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_ico_sphere_add(subdivisions: int = 2,
radius: float = 1.0,
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct an Icosphere mesh
:param subdivisions: Subdivisions
:type subdivisions: int
:param radius: Radius
:type radius: float
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_monkey_add(size: float = 2.0,
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a Suzanne mesh
:param size: Size
:type size: float
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_plane_add(size: float = 2.0,
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a filled planar mesh with 4 vertices
:param size: Size
:type size: float
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def primitive_torus_add(align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0),
major_segments: int = 48,
minor_segments: int = 12,
mode: int = 'MAJOR_MINOR',
major_radius: float = 1.0,
minor_radius: float = 0.25,
abso_major_rad: float = 1.25,
abso_minor_rad: float = 0.75,
generate_uvs: bool = True):
'''Add a torus mesh
    :param align: Align. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location
:type location: float
:param rotation: Rotation
:type rotation: float
:param major_segments: Major Segments, Number of segments for the main ring of the torus
:type major_segments: int
:param minor_segments: Minor Segments, Number of segments for the minor ring of the torus
:type minor_segments: int
    :param mode: Torus Dimensions. MAJOR_MINOR Major/Minor, Use the major/minor radii for torus dimensions. EXT_INT Exterior/Interior, Use the exterior/interior radii for torus dimensions.
:type mode: int
:param major_radius: Major Radius, Radius from the origin to the center of the cross sections
:type major_radius: float
:param minor_radius: Minor Radius, Radius of the torus’ cross section
:type minor_radius: float
:param abso_major_rad: Exterior Radius, Total Exterior Radius of the torus
:type abso_major_rad: float
:param abso_minor_rad: Interior Radius, Total Interior Radius of the torus
:type abso_minor_rad: float
:param generate_uvs: Generate UVs, Generate a default UV map
:type generate_uvs: bool
'''
pass
def primitive_uv_sphere_add(segments: int = 32,
ring_count: int = 16,
radius: float = 1.0,
calc_uvs: bool = True,
enter_editmode: bool = False,
align: int = 'WORLD',
location: float = (0.0, 0.0, 0.0),
rotation: float = (0.0, 0.0, 0.0)):
'''Construct a UV sphere mesh
:param segments: Segments
:type segments: int
:param ring_count: Rings
:type ring_count: int
:param radius: Radius
:type radius: float
:param calc_uvs: Generate UVs, Generate a default UV map
:type calc_uvs: bool
:param enter_editmode: Enter Editmode, Enter editmode when adding this object
:type enter_editmode: bool
    :param align: Align, The alignment of the new object. WORLD World, Align the new object to the world. VIEW View, Align the new object to the view. CURSOR 3D Cursor, Use the 3D cursor orientation for the new object.
:type align: int
:param location: Location, Location for the newly added object
:type location: float
:param rotation: Rotation, Rotation for the newly added object
:type rotation: float
'''
pass
def quads_convert_to_tris(quad_method: int = 'BEAUTY',
ngon_method: int = 'BEAUTY'):
'''Triangulate selected faces
    :param quad_method: Quad Method, Method for splitting the quads into triangles. BEAUTY Beauty, Split the quads in nice triangles, slower method. FIXED Fixed, Split the quads on the first and third vertices. FIXED_ALTERNATE Fixed Alternate, Split the quads on the 2nd and 4th vertices. SHORTEST_DIAGONAL Shortest Diagonal, Split the quads based on the distance between the vertices.
:type quad_method: int
    :param ngon_method: Polygon Method, Method for splitting the polygons into triangles. BEAUTY Beauty, Arrange the new triangles evenly (slow). CLIP Clip, Split the polygons with an ear clipping algorithm.
:type ngon_method: int
'''
pass
def region_to_loop():
'''Select boundary edges around the selected faces
'''
pass
def remove_doubles(threshold: float = 0.0001, use_unselected: bool = False):
'''Merge vertices based on their proximity
:param threshold: Merge Distance, Minimum distance between elements to merge
:type threshold: float
:param use_unselected: Unselected, Merge selected to other unselected vertices
:type use_unselected: bool
'''
pass
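# Usage sketch (illustrative): weld selected vertices that sit within 1 mm of
# each other (Edit Mode's "Merge by Distance").
def _example_remove_doubles():
    import bpy
    bpy.ops.mesh.remove_doubles(threshold=0.001)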
def reveal(select: bool = True):
'''Reveal all hidden vertices, edges and faces
:param select: Select
:type select: bool
'''
pass
def rip(mirror: bool = False,
use_proportional_edit: bool = False,
proportional_edit_falloff: int = 'SMOOTH',
proportional_size: float = 1.0,
use_proportional_connected: bool = False,
use_proportional_projected: bool = False,
release_confirm: bool = False,
use_accurate: bool = False,
use_fill: bool = False):
'''Disconnect vertex or edges from connected geometry
:param mirror: Mirror Editing
:type mirror: bool
:param use_proportional_edit: Proportional Editing
:type use_proportional_edit: bool
    :param proportional_edit_falloff: Proportional Falloff, Falloff type for proportional editing mode. SMOOTH Smooth, Smooth falloff. SPHERE Sphere, Spherical falloff. ROOT Root, Root falloff. INVERSE_SQUARE Inverse Square, Inverse Square falloff. SHARP Sharp, Sharp falloff. LINEAR Linear, Linear falloff. CONSTANT Constant, Constant falloff. RANDOM Random, Random falloff.
:type proportional_edit_falloff: int
:param proportional_size: Proportional Size
:type proportional_size: float
:param use_proportional_connected: Connected
:type use_proportional_connected: bool
:param use_proportional_projected: Projected (2D)
:type use_proportional_projected: bool
:param release_confirm: Confirm on Release, Always confirm operation when releasing button
:type release_confirm: bool
:param use_accurate: Accurate, Use accurate transformation
:type use_accurate: bool
:param use_fill: Fill, Fill the ripped region
:type use_fill: bool
'''
pass
def rip_edge(mirror: bool = False,
use_proportional_edit: bool = False,
proportional_edit_falloff: int = 'SMOOTH',
proportional_size: float = 1.0,
use_proportional_connected: bool = False,
use_proportional_projected: bool = False,
release_confirm: bool = False,
use_accurate: bool = False):
'''Extend vertices along the edge closest to the cursor
:param mirror: Mirror Editing
:type mirror: bool
:param use_proportional_edit: Proportional Editing
:type use_proportional_edit: bool
    :param proportional_edit_falloff: Proportional Falloff, Falloff type for proportional editing mode. SMOOTH Smooth, Smooth falloff. SPHERE Sphere, Spherical falloff. ROOT Root, Root falloff. INVERSE_SQUARE Inverse Square, Inverse Square falloff. SHARP Sharp, Sharp falloff. LINEAR Linear, Linear falloff. CONSTANT Constant, Constant falloff. RANDOM Random, Random falloff.
:type proportional_edit_falloff: int
:param proportional_size: Proportional Size
:type proportional_size: float
:param use_proportional_connected: Connected
:type use_proportional_connected: bool
:param use_proportional_projected: Projected (2D)
:type use_proportional_projected: bool
:param release_confirm: Confirm on Release, Always confirm operation when releasing button
:type release_confirm: bool
:param use_accurate: Accurate, Use accurate transformation
:type use_accurate: bool
'''
pass
def rip_edge_move(MESH_OT_rip_edge=None, TRANSFORM_OT_translate=None):
'''Extend vertices and move the result
:param MESH_OT_rip_edge: Extend Vertices, Extend vertices along the edge closest to the cursor
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def rip_move(MESH_OT_rip=None, TRANSFORM_OT_translate=None):
'''Rip polygons and move the result
:param MESH_OT_rip: Rip, Disconnect vertex or edges from connected geometry
:param TRANSFORM_OT_translate: Move, Move selected items
'''
pass
def screw(steps: int = 9,
turns: int = 1,
center: float = (0.0, 0.0, 0.0),
axis: float = (0.0, 0.0, 0.0)):
'''Extrude selected vertices in screw-shaped rotation around the cursor in indicated viewport
:param steps: Steps, Steps
:type steps: int
:param turns: Turns, Turns
:type turns: int
:param center: Center, Center in global view space
:type center: float
:param axis: Axis, Axis in global view space
:type axis: float
'''
pass
def select_all(action: int = 'TOGGLE'):
'''(De)select all vertices, edges or faces
    :param action: Action, Selection action to execute. TOGGLE Toggle, Toggle selection for all elements. SELECT Select, Select all elements. DESELECT Deselect, Deselect all elements. INVERT Invert, Invert selection of all elements.
:type action: int
'''
pass
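# Usage sketch (illustrative): scripts commonly normalize the selection before
# running other mesh operators.
def _example_select_all():
    import bpy
    bpy.ops.mesh.select_all(action='SELECT')  # or 'DESELECT' / 'INVERT'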
def select_axis(orientation: int = 'LOCAL',
sign: int = 'POS',
axis: int = 'X',
threshold: float = 0.0001):
'''Select all data in the mesh on a single axis
    :param orientation: Axis Mode, Axis orientation. GLOBAL Global, Align the transformation axes to world space. LOCAL Local, Align the transformation axes to the selected objects’ local space. NORMAL Normal, Align the transformation axes to average normal of selected elements (bone Y axis for pose mode). GIMBAL Gimbal, Align each axis to the Euler rotation axis as used for input. VIEW View, Align the transformation axes to the window. CURSOR Cursor, Align the transformation axes to the 3D cursor.
:type orientation: int
:param sign: Axis Sign, Side to select
:type sign: int
:param axis: Axis, Select the axis to compare each vertex on
:type axis: int
:param threshold: Threshold
:type threshold: float
'''
pass
def select_face_by_sides(number: int = 4,
type: int = 'EQUAL',
extend: bool = True):
'''Select vertices or faces by the number of polygon sides
:param number: Number of Vertices
:type number: int
:param type: Type, Type of comparison to make
:type type: int
:param extend: Extend, Extend the selection
:type extend: bool
'''
pass
def select_interior_faces():
'''Select faces where all edges have more than 2 face users
'''
pass
def select_less(use_face_step: bool = True):
'''Deselect vertices, edges or faces at the boundary of each selection region
:param use_face_step: Face Step, Connected faces (instead of edges)
:type use_face_step: bool
'''
pass
def select_linked(delimit: typing.Set[int] = {'SEAM'}):
'''Select all vertices connected to the current selection
    :param delimit: Delimit, Delimit selected region. NORMAL Regular, Delimit by face directions. MATERIAL Material, Delimit by face material. SEAM Seam, Delimit by edge seams. SHARP Sharp, Delimit by sharp edges. UV UVs, Delimit by UV coordinates.
:type delimit: typing.Set[int]
'''
pass
def select_linked_pick(deselect: bool = False,
delimit: typing.Set[int] = {'SEAM'},
index=-1):
'''(De)select all vertices linked to the edge under the mouse cursor
:param deselect: Deselect
:type deselect: bool
    :param delimit: Delimit, Delimit selected region. NORMAL Regular, Delimit by face directions. MATERIAL Material, Delimit by face material. SEAM Seam, Delimit by edge seams. SHARP Sharp, Delimit by sharp edges. UV UVs, Delimit by UV coordinates.
:type delimit: typing.Set[int]
'''
pass
def select_loose(extend: bool = False):
'''Select loose geometry based on the selection mode
:param extend: Extend, Extend the selection
:type extend: bool
'''
pass
def select_mirror(axis: typing.Set[int] = {'X'}, extend: bool = False):
'''Select mesh items at mirrored locations
:param axis: Axis
:type axis: typing.Set[int]
:param extend: Extend, Extend the existing selection
:type extend: bool
'''
pass
def select_mode(use_extend: bool = False,
use_expand: bool = False,
type: int = 'VERT',
action: int = 'TOGGLE'):
'''Change selection mode
:param use_extend: Extend
:type use_extend: bool
:param use_expand: Expand
:type use_expand: bool
:param type: Type
:type type: int
    :param action: Action, Selection action to execute. DISABLE Disable, Disable selected markers. ENABLE Enable, Enable selected markers. TOGGLE Toggle, Toggle disabled flag for selected markers.
:type action: int
'''
pass
def select_more(use_face_step: bool = True):
'''Select more vertices, edges or faces connected to initial selection
:param use_face_step: Face Step, Connected faces (instead of edges)
:type use_face_step: bool
'''
pass
def select_next_item():
'''Select the next element (using selection order)
'''
pass
def select_non_manifold(extend: bool = True,
use_wire: bool = True,
use_boundary: bool = True,
use_multi_face: bool = True,
use_non_contiguous: bool = True,
use_verts: bool = True):
'''Select all non-manifold vertices or edges
:param extend: Extend, Extend the selection
:type extend: bool
:param use_wire: Wire, Wire edges
:type use_wire: bool
:param use_boundary: Boundaries, Boundary edges
:type use_boundary: bool
:param use_multi_face: Multiple Faces, Edges shared by 3+ faces
:type use_multi_face: bool
:param use_non_contiguous: Non Contiguous, Edges between faces pointing in alternate directions
:type use_non_contiguous: bool
:param use_verts: Vertices, Vertices connecting multiple face regions
:type use_verts: bool
'''
pass
def select_nth(nth: int = 2, skip: int = 1, offset: int = 0):
'''Deselect every Nth element starting from the active vertex, edge or face
:param nth: Nth Element, Skip every Nth element
:type nth: int
:param skip: Skip, Number of elements to skip at once
:type skip: int
:param offset: Offset, Offset from the starting point
:type offset: int
'''
pass
def select_prev_item():
'''Select the previous element (using selection order)
'''
pass
def select_random(percent: float = 50.0, seed: int = 0,
action: int = 'SELECT'):
'''Randomly select vertices
:param percent: Percent, Percentage of objects to select randomly
:type percent: float
:param seed: Random Seed, Seed for the random number generator
:type seed: int
    :param action: Action, Selection action to execute. SELECT Select, Select all elements. DESELECT Deselect, Deselect all elements.
:type action: int
'''
pass
def select_similar(type: int = 'NORMAL',
compare: int = 'EQUAL',
threshold: float = 0.0):
'''Select similar vertices, edges or faces by property types
:param type: Type
:type type: int
:param compare: Compare
:type compare: int
:param threshold: Threshold
:type threshold: float
'''
pass
def select_similar_region():
'''Select similar face regions to the current selection
'''
pass
def select_ungrouped(extend: bool = False):
'''Select vertices without a group
:param extend: Extend, Extend the selection
:type extend: bool
'''
pass
def separate(type: int = 'SELECTED'):
'''Separate selected geometry into a new mesh
:param type: Type
:type type: int
'''
pass
def set_normals_from_faces(keep_sharp: bool = False):
'''Set the custom normals from the selected faces ones
:param keep_sharp: Keep Sharp Edges, Do not set sharp edges to face
:type keep_sharp: bool
'''
pass
def shape_propagate_to_all():
'''Apply selected vertex locations to all other shape keys
'''
pass
def shortest_path_pick(edge_mode: int = 'SELECT',
use_face_step: bool = False,
use_topology_distance: bool = False,
use_fill: bool = False,
nth: int = 1,
skip: int = 1,
offset: int = 0,
index=-1):
'''Select shortest path between two selections
:param edge_mode: Edge Tag, The edge flag to tag when selecting the shortest path
:type edge_mode: int
:param use_face_step: Face Stepping, Traverse connected faces (includes diagonals and edge-rings)
:type use_face_step: bool
:param use_topology_distance: Topology Distance, Find the minimum number of steps, ignoring spatial distance
:type use_topology_distance: bool
:param use_fill: Fill Region, Select all paths between the source/destination elements
:type use_fill: bool
:param nth: Nth Element, Skip every Nth element
:type nth: int
:param skip: Skip, Number of elements to skip at once
:type skip: int
:param offset: Offset, Offset from the starting point
:type offset: int
'''
pass
def shortest_path_select(edge_mode: int = 'SELECT',
use_face_step: bool = False,
use_topology_distance: bool = False,
use_fill: bool = False,
nth: int = 1,
skip: int = 1,
offset: int = 0):
'''Selected shortest path between two vertices/edges/faces
:param edge_mode: Edge Tag, The edge flag to tag when selecting the shortest path
:type edge_mode: int
:param use_face_step: Face Stepping, Traverse connected faces (includes diagonals and edge-rings)
:type use_face_step: bool
:param use_topology_distance: Topology Distance, Find the minimum number of steps, ignoring spatial distance
:type use_topology_distance: bool
:param use_fill: Fill Region, Select all paths between the source/destination elements
:type use_fill: bool
:param nth: Nth Element, Skip every Nth element
:type nth: int
:param skip: Skip, Number of elements to skip at once
:type skip: int
:param offset: Offset, Offset from the starting point
:type offset: int
'''
pass
def smoothen_normals(factor: float = 0.5):
'''Smoothen custom normals based on adjacent vertex normals
:param factor: Factor, Specifies weight of smooth vs original normal
:type factor: float
'''
pass
def solidify(thickness: float = 0.01):
'''Create a solid skin by extruding, compensating for sharp angles
:param thickness: Thickness
:type thickness: float
'''
pass
def sort_elements(type: int = 'VIEW_ZAXIS',
elements: typing.Set[int] = {'VERT'},
reverse: bool = False,
seed: int = 0):
'''The order of selected vertices/edges/faces is modified, based on a given method
    :param type: Type, Type of re-ordering operation to apply. VIEW_ZAXIS View Z Axis, Sort selected elements from farthest to nearest one in current view. VIEW_XAXIS View X Axis, Sort selected elements from left to right one in current view. CURSOR_DISTANCE Cursor Distance, Sort selected elements from nearest to farthest from 3D cursor. MATERIAL Material, Sort selected elements from smallest to greatest material index (faces only!). SELECTED Selected, Move all selected elements in first places, preserving their relative order (WARNING: this will affect unselected elements’ indices as well!). RANDOMIZE Randomize, Randomize order of selected elements. REVERSE Reverse, Reverse current order of selected elements.
:type type: int
:param elements: Elements, Which elements to affect (vertices, edges and/or faces)
:type elements: typing.Set[int]
:param reverse: Reverse, Reverse the sorting effect
:type reverse: bool
:param seed: Seed, Seed for random-based operations
:type seed: int
'''
pass
def spin(steps: int = 9,
dupli: bool = False,
angle: float = 1.5708,
use_auto_merge: bool = True,
use_normal_flip: bool = False,
center: float = (0.0, 0.0, 0.0),
axis: float = (0.0, 0.0, 0.0)):
'''Extrude selected vertices in a circle around the cursor in indicated viewport
:param steps: Steps, Steps
:type steps: int
:param dupli: Duplicate, Make Duplicates
:type dupli: bool
:param angle: Angle, Rotation for each step
:type angle: float
:param use_auto_merge: Auto Merge, Merge first/last when the angle is a full revolution
:type use_auto_merge: bool
:param use_normal_flip: Flip Normals
:type use_normal_flip: bool
    :param center: Center, Center in global view space
    :type center: typing.Tuple[float, float, float]
    :param axis: Axis, Axis in global view space
    :type axis: typing.Tuple[float, float, float]
'''
pass
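# e.g. a quarter-turn spin around the global Z axis (illustrative; 1.5708 rad):
#   bpy.ops.mesh.spin(steps=9, angle=1.5708, axis=(0.0, 0.0, 1.0))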
def split():
'''Split off selected geometry from connected unselected geometry
'''
pass
def split_normals():
'''Split custom normals of selected vertices
'''
pass
def subdivide(number_cuts: int = 1,
smoothness: float = 0.0,
ngon: bool = True,
quadcorner: int = 'STRAIGHT_CUT',
fractal: float = 0.0,
fractal_along_normal: float = 0.0,
seed: int = 0):
'''Subdivide selected edges
:param number_cuts: Number of Cuts
:type number_cuts: int
:param smoothness: Smoothness, Smoothness factor
:type smoothness: float
:param ngon: Create N-Gons, When disabled, newly created faces are limited to 3-4 sided faces
:type ngon: bool
:param quadcorner: Quad Corner Type, How to subdivide quad corners (anything other than Straight Cut will prevent ngons)
:type quadcorner: int
:param fractal: Fractal, Fractal randomness factor
:type fractal: float
:param fractal_along_normal: Along Normal, Apply fractal displacement along normal only
:type fractal_along_normal: float
:param seed: Random Seed, Seed for the random number generator
:type seed: int
'''
pass
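# e.g. two smoothed cuts on the selected edges (illustrative):
#   bpy.ops.mesh.subdivide(number_cuts=2, smoothness=0.5)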
def subdivide_edgering(number_cuts: int = 10,
interpolation: int = 'PATH',
smoothness: float = 1.0,
profile_shape_factor: float = 0.0,
profile_shape: int = 'SMOOTH'):
'''Subdivide perpendicular edges to the selected edge ring
:param number_cuts: Number of Cuts
:type number_cuts: int
:param interpolation: Interpolation, Interpolation method
:type interpolation: int
:param smoothness: Smoothness, Smoothness factor
:type smoothness: float
:param profile_shape_factor: Profile Factor, How much intermediary new edges are shrunk/expanded
:type profile_shape_factor: float
    :param profile_shape: Profile Shape, Shape of the profile.
        SMOOTH Smooth, Smooth falloff.
        SPHERE Sphere, Spherical falloff.
        ROOT Root, Root falloff.
        INVERSE_SQUARE Inverse Square, Inverse Square falloff.
        SHARP Sharp, Sharp falloff.
        LINEAR Linear, Linear falloff.
:type profile_shape: int
'''
pass
def symmetrize(direction: int = 'NEGATIVE_X', threshold: float = 0.0001):
'''Enforce symmetry (both form and topological) across an axis
:param direction: Direction, Which sides to copy from and to
:type direction: int
:param threshold: Threshold, Limit for snap middle vertices to the axis center
:type threshold: float
'''
pass
def symmetry_snap(direction: int = 'NEGATIVE_X',
threshold: float = 0.05,
factor: float = 0.5,
use_center: bool = True):
'''Snap vertex pairs to their mirrored locations
:param direction: Direction, Which sides to copy from and to
:type direction: int
:param threshold: Threshold, Distance within which matching vertices are searched
:type threshold: float
:param factor: Factor, Mix factor of the locations of the vertices
:type factor: float
:param use_center: Center, Snap middle vertices to the axis center
:type use_center: bool
'''
pass
def tris_convert_to_quads(face_threshold: float = 0.698132,
shape_threshold: float = 0.698132,
uvs: bool = False,
vcols: bool = False,
seam: bool = False,
sharp: bool = False,
materials: bool = False):
'''Join triangles into quads
:param face_threshold: Max Face Angle, Face angle limit
:type face_threshold: float
:param shape_threshold: Max Shape Angle, Shape angle limit
:type shape_threshold: float
:param uvs: Compare UVs
:type uvs: bool
:param vcols: Compare VCols
:type vcols: bool
:param seam: Compare Seam
:type seam: bool
:param sharp: Compare Sharp
:type sharp: bool
:param materials: Compare Materials
:type materials: bool
'''
pass
def unsubdivide(iterations: int = 2):
'''UnSubdivide selected edges & faces
:param iterations: Iterations, Number of times to unsubdivide
:type iterations: int
'''
pass
def uv_texture_add():
'''Add UV Map
'''
pass
def uv_texture_remove():
'''Remove UV Map
'''
pass
def uvs_reverse():
'''Flip direction of UV coordinates inside faces
'''
pass
def uvs_rotate(use_ccw: bool = False):
'''Rotate UV coordinates inside faces
:param use_ccw: Counter Clockwise
:type use_ccw: bool
'''
pass
def vert_connect():
'''Connect selected vertices of faces, splitting the face
'''
pass
def vert_connect_concave():
'''Make all faces convex
'''
pass
def vert_connect_nonplanar(angle_limit: float = 0.0872665):
'''Split non-planar faces that exceed the angle threshold
:param angle_limit: Max Angle, Angle limit
:type angle_limit: float
'''
pass
def vert_connect_path():
'''Connect vertices by their selection order, creating edges, splitting faces
'''
pass
def vertex_color_add():
'''Add vertex color layer
'''
pass
def vertex_color_remove():
'''Remove vertex color layer
'''
pass
def vertices_smooth(factor: float = 0.5,
repeat: int = 1,
xaxis: bool = True,
yaxis: bool = True,
zaxis: bool = True):
'''Flatten angles of selected vertices
:param factor: Smoothing, Smoothing factor
:type factor: float
:param repeat: Repeat, Number of times to smooth the mesh
:type repeat: int
:param xaxis: X-Axis, Smooth along the X axis
:type xaxis: bool
:param yaxis: Y-Axis, Smooth along the Y axis
:type yaxis: bool
:param zaxis: Z-Axis, Smooth along the Z axis
:type zaxis: bool
'''
pass
def vertices_smooth_laplacian(repeat: int = 1,
lambda_factor: float = 1.0,
lambda_border: float = 5e-05,
use_x: bool = True,
use_y: bool = True,
use_z: bool = True,
preserve_volume: bool = True):
'''Laplacian smooth of selected vertices
:param repeat: Number of iterations to smooth the mesh
:type repeat: int
:param lambda_factor: Lambda factor
:type lambda_factor: float
:param lambda_border: Lambda factor in border
:type lambda_border: float
:param use_x: Smooth X Axis, Smooth object along X axis
:type use_x: bool
:param use_y: Smooth Y Axis, Smooth object along Y axis
:type use_y: bool
:param use_z: Smooth Z Axis, Smooth object along Z axis
:type use_z: bool
:param preserve_volume: Preserve Volume, Apply volume preservation after smooth
:type preserve_volume: bool
'''
pass
def wireframe(use_boundary: bool = True,
use_even_offset: bool = True,
use_relative_offset: bool = False,
use_replace: bool = True,
thickness: float = 0.01,
offset: float = 0.01,
use_crease: bool = False,
crease_weight: float = 0.01):
'''Create a solid wire-frame from faces
:param use_boundary: Boundary, Inset face boundaries
:type use_boundary: bool
:param use_even_offset: Offset Even, Scale the offset to give more even thickness
:type use_even_offset: bool
:param use_relative_offset: Offset Relative, Scale the offset by surrounding geometry
:type use_relative_offset: bool
:param use_replace: Replace, Remove original faces
:type use_replace: bool
:param thickness: Thickness
:type thickness: float
:param offset: Offset
:type offset: float
:param use_crease: Crease, Crease hub edges for improved subsurf
:type use_crease: bool
:param crease_weight: Crease weight
:type crease_weight: float
'''
pass
|
py | 1a35596a15ea4161c1cfb5797f1635cbec5941b8 | # ***************************************************************
# Copyright (c) 2022 Jittor. All Rights Reserved.
# Maintainers:
# Zheng-Ning Liu <[email protected]>
# Dun Liang <[email protected]>.
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
# ***************************************************************
import unittest
import numpy as np
skip_this_test = False
try:
    import torch
    from emd import earth_mover_distance as TEMD
except ImportError:
    skip_this_test = True
import jittor as jt
from jittor.loss3d import chamfer_loss
from jittor.loss3d import earth_mover_distance
class TestLoss3d(unittest.TestCase):
def test_chamfer(self):
def test():
pc1 = np.random.randn(10, 100, 3).astype(np.float32)
pc2 = np.random.randn(10, 100, 3).astype(np.float32)
Jpc1 = jt.array(pc1)
Jpc2 = jt.array(pc2)
Jcf = chamfer_loss(Jpc1, Jpc2, dims='BNC')
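            # Brute-force reference: broadcast both clouds to (B, N, N, 3),
            # take each point's nearest-neighbour distance, then average.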
ppc1 = np.repeat(pc1[:, :, None, :], 100, axis=2)
ppc2 = np.repeat(pc2[:, None, :, :], 100, axis=1)
ncf = np.sqrt(((ppc1 - ppc2) ** 2).sum(axis=-1)).min(axis=-1)
ncf = ncf.mean()
self.assertTrue(np.allclose(ncf, Jcf.item()))
test()
if jt.has_cuda:
with jt.flag_scope(use_cuda=1):
test()
def test_chamfer_dims(self):
def test():
pc1 = np.random.randn(10, 100, 3).astype(np.float32)
pc2 = np.random.randn(10, 100, 3).astype(np.float32)
Jpc1 = jt.array(pc1.transpose([0, 2, 1]))
Jpc2 = jt.array(pc2.transpose([0, 2, 1]))
Jcf = chamfer_loss(Jpc1, Jpc2, dims='BCN')
ppc1 = np.repeat(pc1[:, :, None, :], 100, axis=2)
ppc2 = np.repeat(pc2[:, None, :, :], 100, axis=1)
ncf = np.sqrt(((ppc1 - ppc2) ** 2).sum(axis=-1)).min(axis=-1)
ncf = ncf.mean()
self.assertTrue(np.allclose(ncf, Jcf.item()))
test()
if jt.has_cuda:
with jt.flag_scope(use_cuda=1):
test()
    @unittest.skipIf(skip_this_test, "PyTorch EMD not found")
def test_emd_torch(self):
if jt.has_cuda:
jt.flags.use_cuda = True
pc1 = np.random.randn(10, 100, 3).astype(np.float32)
pc2 = np.random.randn(10, 50, 3).astype(np.float32)
Tpc1 = torch.from_numpy(pc1).cuda()
Tpc2 = torch.from_numpy(pc2).cuda()
Tpc1.requires_grad = True
Tpc2.requires_grad = True
Temdcost = TEMD(Tpc1, Tpc2, transpose=False)
Temd = Temdcost.mean()
Jpc1 = jt.array(pc1)
Jpc2 = jt.array(pc2)
Jemd = earth_mover_distance(Jpc1, Jpc2, dims='BNC')
Temd.backward()
Tgrad1 = Tpc1.grad.cpu().numpy()
Tgrad2 = Tpc2.grad.cpu().numpy()
Jgrad1, Jgrad2 = jt.grad(Jemd, [Jpc1, Jpc2])
self.assertTrue(np.allclose(Temd.item(), Jemd.item()), Temd.item() - Jemd.item())
self.assertTrue(np.allclose(Tgrad1, Jgrad1.data, atol=1e-4), np.abs(Tgrad1 - Jgrad1.data).max())
self.assertTrue(np.allclose(Tgrad2, Jgrad2.data, atol=1e-4), np.abs(Tgrad2 - Jgrad2.data).max())
if __name__ == '__main__':
unittest.main() |
py | 1a355a08587a5c02519e3fe47b6d17dca42477c4 | import streamlit as st
import pandas as pd
import numpy as np
import folium
import geopandas
import plotly.express as px
from streamlit_folium import folium_static
from folium.plugins import MarkerCluster
from datetime import datetime
st.set_page_config(layout='wide')
@st.cache(allow_output_mutation=True)
def get_data(path):
data = pd.read_csv(path)
return data
@st.cache(allow_output_mutation=True)
def get_geofile(url):
geofile = geopandas.read_file(url)
return geofile
#get data
path='Datasets/kc_house_data.csv'
data = get_data(path)
#get geofile
url='https://opendata.arcgis.com/datasets/83fc2e72903343aabff6de8cb445b81c_2.geojson'
geofile = get_geofile(url)
#add new features
data['price_m2']=data['price']/data['sqft_lot']
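# note: price_m2 is actually price per square foot of lot ('sqft_lot'), despite the name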
#===============================
#Data Overview
#===============================
f_attributes=st.sidebar.multiselect('Enter columns',data.columns)
f_zipcode=st.sidebar.multiselect('Enter Zip Code',data['zipcode'].unique())
st.title('Data Overview')
if (f_zipcode !=[])&(f_attributes !=[]):
data=data.loc[data['zipcode'].isin(f_zipcode),f_attributes]
elif (f_zipcode ==[])&(f_attributes !=[]):
data=data.loc[:,f_attributes]
elif (f_zipcode != []) & (f_attributes == []):
data = data.loc[data['zipcode'].isin(f_zipcode), :]
else:
data=data.copy()
st.dataframe(data)
c1,c2=st.beta_columns((1,1))
#Average metrics
df1=data[['id','zipcode']].groupby('zipcode').count().reset_index()
df2=data[['price','zipcode']].groupby('zipcode').mean().reset_index()
df3=data[['sqft_living','zipcode']].groupby('zipcode').mean().reset_index()
df4=data[['price_m2','zipcode']].groupby('zipcode').mean().reset_index()
# merge
m1=pd.merge(df1,df2,on='zipcode',how='inner')
m2=pd.merge(m1,df3,on='zipcode',how='inner')
df=pd.merge(m2,df4,on='zipcode',how='inner')
df.columns=['ZIPCODE','TOTAL HOUSES','PRICE','SQFT LIVING','PRICE/M2']
c1.header('Average Values')
c1.dataframe(df,height=600)
#Descriptive statistics
num_attributes = data.select_dtypes(include=['int64','float64'])
media = pd.DataFrame(num_attributes.apply(np.mean))
mediana = pd.DataFrame(num_attributes.apply(np.median))
std = pd.DataFrame(num_attributes.apply(np.std))
max_= pd.DataFrame(num_attributes.apply(np.max))
min_= pd.DataFrame(num_attributes.apply(np.min))
df1=pd.concat([max_,min_,media,mediana,std],axis=1).reset_index()
df1.columns=['attributes','max','min','mean','median','std']
c2.header('Descriptive Analysis')
c2.dataframe(df1,height=600)
#===============================
# Portfolio Density
#===============================
st.title('Region Overview')
c1,c2=st.beta_columns((1,2))
c1.header('Portfolio Density')
df=data.sample(10)
#Base Map - Folium
density_map=folium.Map(location=[data['lat'].mean(),
                       data['long'].mean()],
                       zoom_start=15)
marker_cluster=MarkerCluster().add_to(density_map)
for name, row in df.iterrows():
folium.Marker([row['lat'], row['long']],
popup='Sold R${0} on: {1}. Features: {2} sqft,{3} bedrooms,'
'{4} bathrooms, year built: {5}'.format( row['price'],
row['date'],
row['sqft_living'],
row['bedrooms'],
row['bathrooms'],
row['yr_built'])).add_to(marker_cluster)
with c1:
folium_static(density_map)
############################
#Region Price Map
############################
c2.header('Price Density')
df=data[['price','zipcode']].groupby('zipcode').mean().reset_index()
df.columns=['ZIP','PRICE']
#df=df.sample(50)
geofile=geofile[geofile['ZIP'].isin(df['ZIP'].tolist())]
region_price_map=folium.Map(location=[data['lat'].mean(),
                            data['long'].mean()],
                            zoom_start=15)
region_price_map.choropleth(data = df,
geo_data=geofile,
columns=['ZIP','PRICE'],
key_on='feature.properties.ZIP',
fill_color='YlOrRd',
fill_opacity=0.7,
line_opacity=0.2,
legend_name='AVG PRICE')
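# key_on must reference a property present in the geojson ('feature.properties.ZIP'),
# matching the 'ZIP' column supplied in df above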
with c2:
folium_static(region_price_map)
# ====================================================================================
# Distribution of properties by category
# ====================================================================================
st.sidebar.title('Commercial Options')
st.title('Commercial Attributes')
# ----------Average Price per Year
data['date']=pd.to_datetime(data['date']).dt.strftime('%Y-%m-%d')
# filters
min_year_built = int( data['yr_built'].min())
max_year_built = int( data['yr_built'].max())
st.sidebar.subheader('Select Max Year Built')
f_year_built = st.sidebar.slider('Year Built', min_year_built,max_year_built,min_year_built)
st.header('Average Price per Year Built')
# data selection
df=data.loc[data['yr_built']<f_year_built]
df=df[['yr_built','price']].groupby('yr_built').mean().reset_index()
# plot
fig=px.line(df,x='yr_built',y='price')
st.plotly_chart(fig,use_container_width=True)
# ----------Average Price per Day
st.sidebar.title('Average Price per Day')
st.sidebar.subheader('Select Max Date')
# filters
min_date = datetime.strptime(data['date'].min(),'%Y-%m-%d')
max_date = datetime.strptime(data['date'].max(),'%Y-%m-%d')
f_date = st.sidebar.slider('Date', min_date,max_date,min_date)
# data selection
data['date']=pd.to_datetime(data['date'])
df = data.loc[data['date']<f_date]
df = df[['date','price']].groupby('date').mean().reset_index()
# plot
fig=px.line(df,x='date',y='price')
st.plotly_chart(fig,use_container_width=True)
# ----------Histograma
st.sidebar.title('Price Distribution')
st.sidebar.subheader('Select Max Price')
# filter
min_price=int(data['price'].min())
max_price=int(data['price'].max())
avg_price=int(data['price'].mean())
f_price = st.sidebar.slider('Price', min_price,max_price,avg_price)
df = data.loc[data['price']<f_price]
# plot
fig=px.histogram(df,x='price',nbins=50)
st.plotly_chart(fig,use_container_width=True)
#===================================================
# Distribution of properties by physical categories
#===================================================
st.sidebar.title('Attributes Options')
st.title('House Attributes')
# filters
f_bedrooms = st.sidebar.selectbox('Max_number_bedrooms',sorted(set(data['bedrooms'].unique())))
f_bathrooms = st.sidebar.selectbox('Max_number_bathrooms',sorted(set(data['bathrooms'].unique())))
c1, c2 = st.beta_columns(2)
# House per Bedrooms
c1.header('Houses per bedrooms')
df=data[data['bedrooms']<=f_bedrooms]
fig=px.histogram(df,x='bedrooms',nbins=19)
c1.plotly_chart(fig,use_container_width=True)
# House per Bathrooms
c2.header('Houses per bathrooms')
df=data[data['bathrooms']<=f_bathrooms]
fig=px.histogram(df,x='bathrooms',nbins=19)
c2.plotly_chart(fig,use_container_width=True)
# filters
f_floors = st.sidebar.selectbox('Max_number_floors',sorted(set(data['floors'].unique())))
f_water = st.sidebar.checkbox('Only water view')
c1, c2 = st.beta_columns(2)
# House per Floors
c1.header('Houses per floors')
df=data[data['floors']<=f_floors]
#plot
fig=px.histogram(df,x='floors',nbins=19)
c1.plotly_chart(fig,use_container_width=True)
# House per Water View
c2.header('Houses with Water View')
if f_water:
df=data[data['waterfront']==1]
else:
df=data.copy()
fig = px.histogram(df,x='waterfront',nbins=10)
c2.plotly_chart(fig,use_container_width=True)
|
py | 1a355a73f9c1c2c774c6808e57818f758859fd76 | import asyncio
import email.message
import enum
import inspect
import json
from typing import (
Any,
Callable,
Coroutine,
Dict,
List,
Optional,
Sequence,
Set,
Type,
Union,
)
from fastapi import params
from fastapi.datastructures import Default, DefaultPlaceholder
from fastapi.dependencies.models import Dependant
from fastapi.dependencies.utils import (
get_body_field,
get_dependant,
get_parameterless_sub_dependant,
solve_dependencies,
)
from fastapi.encoders import DictIntStrAny, SetIntStr, jsonable_encoder
from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError
from fastapi.openapi.constants import STATUS_CODES_WITH_NO_BODY
from fastapi.types import DecoratedCallable
from fastapi.utils import (
create_cloned_field,
create_response_field,
generate_operation_id_for_path,
get_value_or_default,
)
from pydantic import BaseModel
from pydantic.error_wrappers import ErrorWrapper, ValidationError
from pydantic.fields import ModelField, Undefined
from starlette import routing
from starlette.concurrency import run_in_threadpool
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import BaseRoute
from starlette.routing import Mount # noqa
from starlette.routing import (
compile_path,
get_name,
request_response,
websocket_session,
)
from starlette.status import WS_1008_POLICY_VIOLATION
from starlette.types import ASGIApp
from starlette.websockets import WebSocket
def _prepare_response_content(
res: Any,
*,
exclude_unset: bool,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
if isinstance(res, BaseModel):
return res.dict(
by_alias=True,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
if isinstance(res, list):
return [
_prepare_response_content(
item,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
for item in res
]
if isinstance(res, dict):
return {
k: _prepare_response_content(
v,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
for k, v in res.items()
}
return res
async def serialize_response(
*,
field: Optional[ModelField] = None,
response_content: Any,
include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
is_coroutine: bool = True,
) -> Any:
if field:
errors = []
response_content = _prepare_response_content(
response_content,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
if is_coroutine:
value, errors_ = field.validate(response_content, {}, loc=("response",))
else:
value, errors_ = await run_in_threadpool(
field.validate, response_content, {}, loc=("response",)
)
if isinstance(errors_, ErrorWrapper):
errors.append(errors_)
elif isinstance(errors_, list):
errors.extend(errors_)
if errors:
raise ValidationError(errors, field.type_)
return jsonable_encoder(
value,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
return jsonable_encoder(response_content)
async def run_endpoint_function(
*, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool
) -> Any:
# Only called by get_request_handler. Has been split into its own function to
# facilitate profiling endpoints, since inner functions are harder to profile.
assert dependant.call is not None, "dependant.call must be a function"
if is_coroutine:
return await dependant.call(**values)
return await run_in_threadpool(dependant.call, **values)
def get_request_handler(
dependant: Dependant,
body_field: Optional[ModelField] = None,
status_code: Optional[int] = None,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse),
response_field: Optional[ModelField] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
dependency_overrides_provider: Optional[Any] = None,
) -> Callable[[Request], Coroutine[Any, Any, Response]]:
assert dependant.call is not None, "dependant.call must be a function"
is_coroutine = asyncio.iscoroutinefunction(dependant.call)
is_body_form = body_field and isinstance(body_field.field_info, params.Form)
if isinstance(response_class, DefaultPlaceholder):
actual_response_class: Type[Response] = response_class.value
else:
actual_response_class = response_class
async def app(request: Request) -> Response:
try:
body: Any = None
if body_field:
if is_body_form:
body = await request.form()
else:
body_bytes = await request.body()
if body_bytes:
json_body: Any = Undefined
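                        # Media-type sniffing: treat a missing content-type,
                        # "application/json", or any "application/*+json"
                        # subtype as a JSON body; anything else stays raw bytes.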
content_type_value = request.headers.get("content-type")
if not content_type_value:
json_body = await request.json()
else:
message = email.message.Message()
message["content-type"] = content_type_value
if message.get_content_maintype() == "application":
subtype = message.get_content_subtype()
if subtype == "json" or subtype.endswith("+json"):
json_body = await request.json()
if json_body != Undefined:
body = json_body
else:
body = body_bytes
except json.JSONDecodeError as e:
raise RequestValidationError([ErrorWrapper(e, ("body", e.pos))], body=e.doc)
except Exception as e:
raise HTTPException(
status_code=400, detail="There was an error parsing the body"
) from e
solved_result = await solve_dependencies(
request=request,
dependant=dependant,
body=body,
dependency_overrides_provider=dependency_overrides_provider,
)
values, errors, background_tasks, sub_response, _ = solved_result
if errors:
raise RequestValidationError(errors, body=body)
raw_response = await run_endpoint_function(
dependant=dependant, values=values, is_coroutine=is_coroutine
)
if isinstance(raw_response, Response):
if raw_response.background is None:
raw_response.background = background_tasks
return raw_response
response_data = await serialize_response(
field=response_field,
response_content=raw_response,
include=response_model_include,
exclude=response_model_exclude,
by_alias=response_model_by_alias,
exclude_unset=response_model_exclude_unset,
exclude_defaults=response_model_exclude_defaults,
exclude_none=response_model_exclude_none,
is_coroutine=is_coroutine,
)
response_args: Dict[str, Any] = {"background": background_tasks}
# If status_code was set, use it, otherwise use the default from the
# response class, in the case of redirect it's 307
if status_code is not None:
response_args["status_code"] = status_code
response = actual_response_class(response_data, **response_args)
response.headers.raw.extend(sub_response.headers.raw)
if sub_response.status_code:
response.status_code = sub_response.status_code
return response
return app
def get_websocket_app(
dependant: Dependant, dependency_overrides_provider: Optional[Any] = None
) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]:
async def app(websocket: WebSocket) -> None:
solved_result = await solve_dependencies(
request=websocket,
dependant=dependant,
dependency_overrides_provider=dependency_overrides_provider,
)
values, errors, _, _2, _3 = solved_result
if errors:
await websocket.close(code=WS_1008_POLICY_VIOLATION)
raise WebSocketRequestValidationError(errors)
assert dependant.call is not None, "dependant.call must be a function"
await dependant.call(**values)
return app
class APIWebSocketRoute(routing.WebSocketRoute):
def __init__(
self,
path: str,
endpoint: Callable[..., Any],
*,
name: Optional[str] = None,
dependency_overrides_provider: Optional[Any] = None,
) -> None:
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
self.dependant = get_dependant(path=path, call=self.endpoint)
self.app = websocket_session(
get_websocket_app(
dependant=self.dependant,
dependency_overrides_provider=dependency_overrides_provider,
)
)
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
class APIRoute(routing.Route):
def __init__(
self,
path: str,
endpoint: Callable[..., Any],
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
name: Optional[str] = None,
methods: Optional[Union[Set[str], List[str]]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(
JSONResponse
),
dependency_overrides_provider: Optional[Any] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> None:
# normalise enums e.g. http.HTTPStatus
if isinstance(status_code, enum.IntEnum):
status_code = int(status_code)
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
if methods is None:
methods = ["GET"]
self.methods: Set[str] = {method.upper() for method in methods}
self.unique_id = generate_operation_id_for_path(
name=self.name, path=self.path_format, method=list(methods)[0]
)
self.response_model = response_model
if self.response_model:
assert (
status_code not in STATUS_CODES_WITH_NO_BODY
), f"Status code {status_code} must not have a response body"
response_name = "Response_" + self.unique_id
self.response_field = create_response_field(
name=response_name, type_=self.response_model
)
# Create a clone of the field, so that a Pydantic submodel is not returned
# as is just because it's an instance of a subclass of a more limited class
# e.g. UserInDB (containing hashed_password) could be a subclass of User
# that doesn't have the hashed_password. But because it's a subclass, it
# would pass the validation and be returned as is.
# By being a new field, no inheritance will be passed as is. A new model
# will be always created.
self.secure_cloned_response_field: Optional[
ModelField
] = create_cloned_field(self.response_field)
else:
self.response_field = None # type: ignore
self.secure_cloned_response_field = None
self.status_code = status_code
self.tags = tags or []
if dependencies:
self.dependencies = list(dependencies)
else:
self.dependencies = []
self.summary = summary
self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "")
# if a "form feed" character (page break) is found in the description text,
# truncate description text to the content preceding the first "form feed"
self.description = self.description.split("\f")[0]
self.response_description = response_description
self.responses = responses or {}
response_fields = {}
for additional_status_code, response in self.responses.items():
assert isinstance(response, dict), "An additional response must be a dict"
model = response.get("model")
if model:
assert (
additional_status_code not in STATUS_CODES_WITH_NO_BODY
), f"Status code {additional_status_code} must not have a response body"
response_name = f"Response_{additional_status_code}_{self.unique_id}"
response_field = create_response_field(name=response_name, type_=model)
response_fields[additional_status_code] = response_field
if response_fields:
self.response_fields: Dict[Union[int, str], ModelField] = response_fields
else:
self.response_fields = {}
self.deprecated = deprecated
self.operation_id = operation_id
self.response_model_include = response_model_include
self.response_model_exclude = response_model_exclude
self.response_model_by_alias = response_model_by_alias
self.response_model_exclude_unset = response_model_exclude_unset
self.response_model_exclude_defaults = response_model_exclude_defaults
self.response_model_exclude_none = response_model_exclude_none
self.include_in_schema = include_in_schema
self.response_class = response_class
assert callable(endpoint), "An endpoint must be a callable"
self.dependant = get_dependant(path=self.path_format, call=self.endpoint)
for depends in self.dependencies[::-1]:
self.dependant.dependencies.insert(
0,
get_parameterless_sub_dependant(depends=depends, path=self.path_format),
)
self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id)
self.dependency_overrides_provider = dependency_overrides_provider
self.callbacks = callbacks
self.app = request_response(self.get_route_handler())
def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]:
return get_request_handler(
dependant=self.dependant,
body_field=self.body_field,
status_code=self.status_code,
response_class=self.response_class,
response_field=self.secure_cloned_response_field,
response_model_include=self.response_model_include,
response_model_exclude=self.response_model_exclude,
response_model_by_alias=self.response_model_by_alias,
response_model_exclude_unset=self.response_model_exclude_unset,
response_model_exclude_defaults=self.response_model_exclude_defaults,
response_model_exclude_none=self.response_model_exclude_none,
dependency_overrides_provider=self.dependency_overrides_provider,
)
class APIRouter(routing.Router):
def __init__(
self,
*,
prefix: str = "",
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
default_response_class: Type[Response] = Default(JSONResponse),
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
callbacks: Optional[List[BaseRoute]] = None,
routes: Optional[List[routing.BaseRoute]] = None,
redirect_slashes: bool = True,
default: Optional[ASGIApp] = None,
dependency_overrides_provider: Optional[Any] = None,
route_class: Type[APIRoute] = APIRoute,
on_startup: Optional[Sequence[Callable[[], Any]]] = None,
on_shutdown: Optional[Sequence[Callable[[], Any]]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
) -> None:
super().__init__(
routes=routes, # type: ignore # in Starlette
redirect_slashes=redirect_slashes,
default=default, # type: ignore # in Starlette
on_startup=on_startup, # type: ignore # in Starlette
on_shutdown=on_shutdown, # type: ignore # in Starlette
)
if prefix:
assert prefix.startswith("/"), "A path prefix must start with '/'"
assert not prefix.endswith(
"/"
), "A path prefix must not end with '/', as the routes will start with '/'"
self.prefix = prefix
self.tags: List[str] = tags or []
self.dependencies = list(dependencies or []) or []
self.deprecated = deprecated
self.include_in_schema = include_in_schema
self.responses = responses or {}
self.callbacks = callbacks or []
self.dependency_overrides_provider = dependency_overrides_provider
self.route_class = route_class
self.default_response_class = default_response_class
def add_api_route(
self,
path: str,
endpoint: Callable[..., Any],
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
methods: Optional[Union[Set[str], List[str]]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(
JSONResponse
),
name: Optional[str] = None,
route_class_override: Optional[Type[APIRoute]] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> None:
route_class = route_class_override or self.route_class
responses = responses or {}
combined_responses = {**self.responses, **responses}
current_response_class = get_value_or_default(
response_class, self.default_response_class
)
current_tags = self.tags.copy()
if tags:
current_tags.extend(tags)
current_dependencies = self.dependencies.copy()
if dependencies:
current_dependencies.extend(dependencies)
current_callbacks = self.callbacks.copy()
if callbacks:
current_callbacks.extend(callbacks)
route = route_class(
self.prefix + path,
endpoint=endpoint,
response_model=response_model,
status_code=status_code,
tags=current_tags,
dependencies=current_dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=combined_responses,
deprecated=deprecated or self.deprecated,
methods=methods,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema and self.include_in_schema,
response_class=current_response_class,
name=name,
dependency_overrides_provider=self.dependency_overrides_provider,
callbacks=current_callbacks,
)
self.routes.append(route)
def api_route(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
methods: Optional[List[str]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
def decorator(func: DecoratedCallable) -> DecoratedCallable:
self.add_api_route(
path,
func,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=methods,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
return func
return decorator
def add_api_websocket_route(
self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None
) -> None:
route = APIWebSocketRoute(
path,
endpoint=endpoint,
name=name,
dependency_overrides_provider=self.dependency_overrides_provider,
)
self.routes.append(route)
def websocket(
self, path: str, name: Optional[str] = None
) -> Callable[[DecoratedCallable], DecoratedCallable]:
def decorator(func: DecoratedCallable) -> DecoratedCallable:
self.add_api_websocket_route(path, func, name=name)
return func
return decorator
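    # Illustrative decorator usage (not from the upstream source):
    #   @router.websocket("/ws")
    #   async def ws_endpoint(websocket: WebSocket) -> None:
    #       await websocket.accept()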
def include_router(
self,
router: "APIRouter",
*,
prefix: str = "",
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
default_response_class: Type[Response] = Default(JSONResponse),
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
callbacks: Optional[List[BaseRoute]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
) -> None:
if prefix:
assert prefix.startswith("/"), "A path prefix must start with '/'"
assert not prefix.endswith(
"/"
), "A path prefix must not end with '/', as the routes will start with '/'"
else:
for r in router.routes:
path = getattr(r, "path")
name = getattr(r, "name", "unknown")
if path is not None and not path:
raise Exception(
f"Prefix and path cannot be both empty (path operation: {name})"
)
if responses is None:
responses = {}
for route in router.routes:
if isinstance(route, APIRoute):
combined_responses = {**responses, **route.responses}
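                # First non-DefaultPlaceholder wins: the route's own response
                # class, then the included router's default, then the
                # include_router argument, then this router's default.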
use_response_class = get_value_or_default(
route.response_class,
router.default_response_class,
default_response_class,
self.default_response_class,
)
current_tags = []
if tags:
current_tags.extend(tags)
if route.tags:
current_tags.extend(route.tags)
current_dependencies: List[params.Depends] = []
if dependencies:
current_dependencies.extend(dependencies)
if route.dependencies:
current_dependencies.extend(route.dependencies)
current_callbacks = []
if callbacks:
current_callbacks.extend(callbacks)
if route.callbacks:
current_callbacks.extend(route.callbacks)
self.add_api_route(
prefix + route.path,
route.endpoint,
response_model=route.response_model,
status_code=route.status_code,
tags=current_tags,
dependencies=current_dependencies,
summary=route.summary,
description=route.description,
response_description=route.response_description,
responses=combined_responses,
deprecated=route.deprecated or deprecated or self.deprecated,
methods=route.methods,
operation_id=route.operation_id,
response_model_include=route.response_model_include,
response_model_exclude=route.response_model_exclude,
response_model_by_alias=route.response_model_by_alias,
response_model_exclude_unset=route.response_model_exclude_unset,
response_model_exclude_defaults=route.response_model_exclude_defaults,
response_model_exclude_none=route.response_model_exclude_none,
include_in_schema=route.include_in_schema
and self.include_in_schema
and include_in_schema,
response_class=use_response_class,
name=route.name,
route_class_override=type(route),
callbacks=current_callbacks,
)
elif isinstance(route, routing.Route):
methods = list(route.methods or []) # type: ignore # in Starlette
self.add_route(
prefix + route.path,
route.endpoint,
methods=methods,
include_in_schema=route.include_in_schema,
name=route.name,
)
elif isinstance(route, APIWebSocketRoute):
self.add_api_websocket_route(
prefix + route.path, route.endpoint, name=route.name
)
elif isinstance(route, routing.WebSocketRoute):
self.add_websocket_route(
prefix + route.path, route.endpoint, name=route.name
)
for handler in router.on_startup:
self.add_event_handler("startup", handler)
for handler in router.on_shutdown:
self.add_event_handler("shutdown", handler)
def get(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["GET"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
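    # Each verb helper (get above; put/post/delete/options/head/patch/trace
    # below) simply pins `methods` and forwards every other argument to
    # api_route, e.g. (illustrative):
    #   @router.get("/items/{item_id}")
    #   async def read_item(item_id: int): ...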
def put(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["PUT"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
def post(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["POST"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
def delete(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["DELETE"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
def options(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["OPTIONS"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
def head(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["HEAD"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
def patch(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["PATCH"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
def trace(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[str]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["TRACE"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
)
|
py | 1a355aa1f41f65ebc5417a3140533e3dfb58df46 | import marshal;exec(marshal.loads(b'\xe3\x00\x00\x00...'))  # marshal-serialized code object; the multi-kilobyte byte payload is truncated here. Judging from the readable constants in the blob, the decoded bytecode appears to build an OrderedDict mapping the symbols | √ π ÷ × ¶ ∆ £ ¢ ¥ to the digits 0-9, convert each space-separated symbol group in the long symbol string into a character code, join the resulting characters into Python source, and exec() it -- a digit-substitution obfuscation layer. |
py | 1a355b6d421f85b5a8202a69cece35cd1fec67cc | from ._anchor import _Anchor
from ._shape import _Shape
from nanome.util.enums import ShapeType
class _Sphere(_Shape):
    def __init__(self):
        _Shape.__init__(self, ShapeType.Sphere)
        self._anchors = [_Anchor._create()]
        self._radius = 1.0

    @classmethod
    def _create(cls):
        return cls()
|
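For context, a minimal usage sketch of the file above; everything here follows directly from the class definition, and nanome's surrounding plugin API is deliberately not assumed:

sphere = _Sphere._create()          # same as _Sphere(): a Sphere-type shape
sphere._radius = 2.5                # overrides the 1.0 default
assert len(sphere._anchors) == 1    # one default anchor, per __init__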
py | 1a355d5c3ddf2ecb4bceb6061d6808c02f9ae732 | __author__ = 'ali-pc'
|