from export import ShpResponder
from upload import upload |
"""
Get/Watch all information of a single key in the Configuration Database.
Usage:
ska-sdp (get|watch) [options] <key>
ska-sdp (get|watch) [options] pb <pb_id>
ska-sdp (get|watch) (-h|--help)
Arguments:
<key> Key within the Config DB.
To get the list of all keys:
ska-sdp list -a
<pb_id> Processing block ID to list all entries and their values for.
Alternatively, use <key> to get the value of a specific pb entry.
Options:
-h, --help Show this screen
-q, --quiet Cut back on unnecessary output
"""
import logging
from docopt import docopt
LOG = logging.getLogger("ska-sdp")
def cmd_get(txn, key, quiet=False):
"""
Get raw value from database.
:param txn: Config object transaction
:param key: Key within the Config DB to get the values of
:param quiet: quiet logging
"""
val = txn.raw.get(key)
if quiet:
LOG.info(val)
else:
LOG.info("%s = %s", key, val)
def main(argv, config):
"""Run ska-sdp get."""
args = docopt(__doc__, argv=argv)
try:
if args["<key>"]:
key = args["<key>"]
if key == "pb":
LOG.error(
"Cannot 'get' processing block without its ID. Run 'ska-sdp get pb <pb_id>'"
)
return
for txn in config.txn():
try:
cmd_get(txn, key, args["--quiet"])
except ValueError:
# Config raises a ValueError when an incomplete key/path is given
LOG.error(
"'%s' is not a valid key in the Config DB. "
"Run 'ska-sdp list -a' to list all valid keys.",
key,
)
return
if args["watch"]:
txn.loop(wait=True)
elif args["pb"]:
for txn in config.txn():
keys = txn.raw.list_keys("/pb", recurse=8)
for k in keys:
if args["<pb_id>"] in k:
cmd_get(txn, k, args["--quiet"])
if args["watch"]:
txn.loop(wait=True)
except KeyboardInterrupt:
if not args["watch"]:
raise
|
# -*- coding: utf-8 -*-
import sys
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need fine tuning.
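# To produce the frozen build, run cx_Freeze's standard entry point:
#   python setup.py build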
build_exe_options = {"packages": ["os"], "excludes": ["tkinter"]}  # optionally: "include_files": ['player.png']
setup(name="CSCI-413-Project-Version-1.0",
version="1.0",
description="A maze-based game written in Python using Pygame.",
options={"build_exe": build_exe_options},
executables=[Executable("Main.py", base=None)]) |
from radical.entk.utils.init_transition import transition
import pika
from radical.entk import Task, Stage, Pipeline
import radical.utils as ru
import os
from threading import Thread
import json
from radical.entk import states
MLAB = 'mongodb://entk:[email protected]:43511/entk_0_7_4_release'
def func(obj, obj_type, new_state, queue1, logger, profiler):
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port = int(os.environ.get('RMQ_PORT', 5672))
mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=hostname, port=port))
mq_channel = mq_connection.channel()
transition( obj,
obj_type,
new_state,
mq_channel,
queue1,
profiler,
logger)
mq_connection.close()
def master(obj, obj_type, new_state):
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port = int(os.environ.get('RMQ_PORT', 5672))
mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=hostname, port=port))
mq_channel = mq_connection.channel()
queue1 = 'test-1-2-3' # Expected queue name structure 'X-A-B-C'
queue2 = 'test-3-2-1' # Expected queue name structure 'X-C-B-A'
mq_channel.queue_declare(queue=queue1)
mq_channel.queue_declare(queue=queue2)
logger = ru.Logger('radical.entk.test')
profiler = ru.Profiler('radical.entk.test')
thread1 = Thread(target=func, args=(obj, obj_type, new_state, queue1, logger, profiler))
thread1.start()
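# Handshake under test: the worker thread publishes the state-transition
# message on queue1; this loop consumes it, verifies the new state, and
# replies with an 'ack' on queue2 (same correlation_id), which presumably
# lets transition() complete on the worker side.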
while True:
method_frame, props, body = mq_channel.basic_get(queue=queue1)
if body:
msg = json.loads(body)
assert msg['object']['state'] == new_state
mq_channel.basic_publish(exchange='',
routing_key=queue2,
properties=pika.BasicProperties(correlation_id=props.correlation_id),
body='ack')
mq_channel.basic_ack(delivery_tag=method_frame.delivery_tag)
break
mq_channel.queue_delete(queue=queue1)
mq_channel.queue_delete(queue=queue2)
mq_connection.close()
thread1.join()
def test_utils_sync_with_master():
obj = Task()
obj_type = 'Task'
master(obj, obj_type, states.DONE)
obj = Stage()
obj_type = 'Stage'
master(obj, obj_type, states.DONE)
obj = Pipeline()
obj_type = 'Pipeline'
master(obj, obj_type, states.DONE)
|
import uvicorn
from src.web.app.factory import create_app
main_app = create_app()
if __name__ == "__main__":
uvicorn.run(main_app, host="0.0.0.0", port=8080)
|
import argparse
import sys
import pycopier
def coerceArgsToArgparseCompatible(args):
'''
Turns /MT:<num> into /MT <num> ... without the user knowing.
This is to keep compatibility with robocopy
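Illustrative example (argument values hypothetical):
['/MT:4', '--create'] becomes ['/MT', '4', '//create']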
'''
args = list(args)
for idx, arg in enumerate(args):
if arg.startswith('/MT:') and arg.count(':') == 1:
args[idx] = '/MT'
args.insert(idx + 1, arg.split(':')[-1])
elif arg.startswith('--'):
# coerce to // prefix
args[idx]= arg.replace('-', '/', 2)
elif arg.startswith('-'):
# coerce to / prefix
args[idx]= arg.replace('-', '/', 1)
return args
def main():
parser = argparse.ArgumentParser(prefix_chars='/', usage="\n" + pycopier.ASCII_ART + "\n ... a Python 3 replacement for Robocopy, including multithreaded copy.")
arg_group_robocopy = parser.add_argument_group("Robocopy Arguments", "Arguments that more/less match Robocopy")
arg_group_robocopy.add_argument('Source', type=str, nargs=1, help='Specifies the path to the source directory.')
arg_group_robocopy.add_argument('Destination', type=str, nargs=1, help='Specifies the path to the destination directory.')
arg_group_robocopy.add_argument('/MT', type=int, help='Creates multi-threaded copies with N threads. The default value for N is 8', default=8)
arg_group_robocopy.add_argument('/create', action='store_true', help='Creates a directory tree and zero-length files only.')
arg_group_robocopy.add_argument('/quit', action='store_true', help='Quits after processing command line (to view parameters).')
arg_group_robocopy.add_argument('/purge', action='store_true', help='Deletes destination files and directories that no longer exist in the source.')
arg_group_robocopy.add_argument('/move', action='store_true', help='Moves files and directories, and deletes them from the source after they are copied.')
arg_group_robocopy.add_argument('/copyall', action='store_true', help='Copies all file information.')
arg_group_robocopy.add_argument('/s', action='store_true', help='Copies subdirectories. Note that this option excludes empty directories. (robocopy\'s /e option for subdirectories including empties is default for pycopier)')
# options specific to pycopier (and not in robocopy)
arg_group_robocopy = parser.add_argument_group("PyCopier Arguments", "Arguments that are specific to PyCopier")
arg_group_robocopy.add_argument('/quiet', action='store_true', help='If set, be completely quiet during execution.')
argv = coerceArgsToArgparseCompatible(sys.argv)
args = parser.parse_args(argv[1:])
p = pycopier.PyCopier(source=args.Source[0],
destination=args.Destination[0],
numWorkers=args.MT,
zeroLengthFiles=args.create,
purgeDestination=args.purge,
move=args.move,
copyPermissions=args.copyall,
ignoreEmptyDirectories=args.s,
quiet=args.quiet,
skipSameLookingFiles=True, # not sure if this matches robocopy or not
)
if args.quit:
print(p)
sys.exit(0)
p.execute()
# Assume success at this point. Robocopy uses exit code 1 to mean
# "one or more files were copied successfully", which is mirrored here.
# todo: check for errors tracked in the PyCopier object and, based on
# that, change the exit code to 8 (robocopy's failure code)
sys.exit(1)
if __name__ == '__main__':
main() |
import psutil
import re
# User inputs the name of a process they want to monitor.
# That string is checked against the list of running processes.
# If there's a match, it is added to the list.
PROCESS_LIST = []
PPID_LIST = []
PROCESSES_REMAINING_STR = input("Enter how many processes you want to monitor? (1-4): ")
while not 1 <= int(PROCESSES_REMAINING_STR) <= 4:
print("Pick a number from 1 to 4.")
PROCESSES_REMAINING_STR = input("Enter how many processes you want to monitor? (1-4): ")
PROCESSES_REMAINING = int(PROCESSES_REMAINING_STR)
while PROCESSES_REMAINING != 0:
PROCESS_NAME = input("Enter a program name: ")
for ACTIVE_PROCESS in psutil.process_iter():
try:
SET_NAME = ACTIVE_PROCESS.name()
SET_PPID = ACTIVE_PROCESS.pid
except psutil.NoSuchProcess:
print("PROCESS DOES NOT EXIST")
continue
if re.search(PROCESS_NAME, SET_NAME, re.IGNORECASE):
if SET_NAME not in PROCESS_LIST:
PROCESS_LIST.append(SET_NAME)
PPID_LIST.append(SET_PPID)
if PROCESS_NAME.lower() in str(PROCESS_LIST).lower():
PROCESSES_REMAINING -= 1
print("ADDED TO THE LIST! REMAINING: ", PROCESSES_REMAINING, "\n")
else:
print("COULD NOT ADD. TRY AGAIN. ", PROCESSES_REMAINING, " PROCESSES LEFT. \n")
for PROCESS_PPID_PAIRS in range(len(PROCESS_LIST)):
print("Process: ", PROCESS_LIST[PROCESS_PPID_PAIRS], " | PID: ", PPID_LIST[PROCESS_PPID_PAIRS])
|
# Copyright 2019 the ProGraML authors.
#
# Contact Chris Cummins <[email protected]>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run the device mapping models.
Usage:
$ bazel run //deeplearning/ml4pl/experiments/devmap:run_models -- \
--db_stem='sqlite:////tmp/programl/db/' \
--dataset=amd,nvidia \
--model=zero_r,lstm_opencl,lstm_ir,lstm_inst2vec,ggnn \
--tag_suffix=v1
"""
import time
from deeplearning.ml4pl.graphs.labelled import graph_tuple_database
from deeplearning.ml4pl.ir import ir_database
from deeplearning.ml4pl.models import log_database
from deeplearning.ml4pl.models import run
from deeplearning.ml4pl.models.ggnn import ggnn
from deeplearning.ml4pl.models.lstm import lstm
from deeplearning.ml4pl.models.zero_r import zero_r
from labm8.py import app
from labm8.py.internal import flags_parsers
FLAGS = app.FLAGS
app.DEFINE_list(
"dataset",
["amd", "nvidia"],
"The name of the dataset to evaluate. One of {amd,nvidia}.",
)
app.DEFINE_list(
"model",
["zero_r", "lstm_opencl", "lstm_ir", "lstm_inst2vec", "ggnn"],
"The names of the models to evaluate.",
)
app.DEFINE_string(
"tag_suffix",
f"v{time.strftime('%y-%m-%dT%H:%M:%S')}",
"The tag suffix to use for runs.",
)
app.DEFINE_string(
"db_stem",
"file:///var/phd/db/cc1.mysql?programl",
"The stem for database names.",
)
def Main():
"""Main entry point."""
db_stem = FLAGS.db_stem
models = FLAGS.model
tag_suffix = FLAGS.tag_suffix
datasets = FLAGS.dataset
# Set model and dataset-invariant flags.
FLAGS.log_db = flags_parsers.DatabaseFlag(
log_database.Database, f"{db_stem}_devmap_logs", must_exist=True
)
FLAGS.ir_db = flags_parsers.DatabaseFlag(
ir_database.Database, f"{db_stem}_ir", must_exist=True
)
FLAGS.k_fold = True
FLAGS.test_on = "improvement_and_last"
for dataset in datasets:
# Set model-invariant flags.
FLAGS.graph_db = flags_parsers.DatabaseFlag(
graph_tuple_database.Database,
f"{db_stem}_devmap_{dataset}",
must_exist=True,
)
for model in models:
FLAGS.tag = f"devmap_{dataset}_{model}_{tag_suffix}"
if model == "zero_r":
FLAGS.epoch_count = 1
run.Run(zero_r.ZeroR)
elif model == "lstm_opencl":
FLAGS.epoch_count = 50
FLAGS.ir2seq = flags_parsers.EnumFlag(
lstm.Ir2SeqType, lstm.Ir2SeqType.OPENCL
)
FLAGS.padded_sequence_length = 1024
FLAGS.batch_size = 64
run.Run(lstm.GraphLstm)
elif model == "lstm_ir":
FLAGS.epoch_count = 50
FLAGS.ir2seq = flags_parsers.EnumFlag(
lstm.Ir2SeqType, lstm.Ir2SeqType.LLVM
)
FLAGS.padded_sequence_length = 15000
FLAGS.batch_size = 64
run.Run(lstm.GraphLstm)
elif model == "lstm_inst2vec":
FLAGS.epoch_count = 50
FLAGS.ir2seq = flags_parsers.EnumFlag(
lstm.Ir2SeqType, lstm.Ir2SeqType.INST2VEC
)
FLAGS.padded_sequence_length = 15000
FLAGS.batch_size = 64
run.Run(lstm.GraphLstm)
elif model == "ggnn":
# Reduced batch size because OOM errors with larger batches on my
# NVIDIA GTX 1080 GPU.
FLAGS.graph_batch_size = 32
FLAGS.epoch_count = 100
run.Run(ggnn.Ggnn)
else:
raise app.UsageError(f"Unknown model: {model}")
if __name__ == "__main__":
app.Run(Main)
|
from pathlib import Path
import pytest
from pipeline.recon import tool_paths, defaults, web_ports, top_tcp_ports, top_udp_ports
def test_tool_paths_absolute():
for path in tool_paths.values():
assert Path(path).is_absolute()
@pytest.mark.parametrize("test_input", ["database-dir", "tools-dir", "gobuster-wordlist"])
def test_defaults_dirs_absolute(test_input):
assert Path(defaults.get(test_input)).is_absolute()
@pytest.mark.parametrize("test_input", ["threads", "masscan-rate", "aquatone-scan-timeout"])
def test_defaults_are_numeric(test_input):
assert defaults.get(test_input).isnumeric()
def test_webports_exist():
assert web_ports is not None
def test_webports_numeric():
for port in web_ports:
assert port.isnumeric()
def test_top_tcp_ports_exist():
assert top_tcp_ports is not None
assert len(top_tcp_ports) >= 1
def test_top_udp_ports_exist():
assert top_udp_ports is not None
assert len(top_udp_ports) >= 1
|
"""Throttling serializers
Serializers convert data models to Python datatypes. By default, Django REST Framework represents model relations by primary key;
'SlugRelatedField' objects are used here to serialize those relations through a human-readable field instead.
The UWKGM project
:copyright: (c) 2020 Ichise Laboratory at NII & AIST
:author: Rungsiman Nararatwong
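Illustrative example (field values hypothetical): with a SlugRelatedField on 'user', a serialized
permit renders {"user": "alice"} rather than the default primary-key form {"user": 42}.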
"""
from rest_framework import serializers
from accounts.models import CustomUser, ThrottleBurstPermit, ThrottleBurstRequest
class BurstRequestSerializer(serializers.ModelSerializer):
user = serializers.SlugRelatedField(queryset=CustomUser.objects.all(), slug_field='username')
class Meta:
model = ThrottleBurstRequest
fields = '__all__'
class BurstPermitSerializer(serializers.ModelSerializer):
granter = serializers.SlugRelatedField(queryset=CustomUser.objects.all(), slug_field='username')
request = serializers.SlugRelatedField(queryset=ThrottleBurstRequest.objects.all(), slug_field='id', required=False)
user = serializers.SlugRelatedField(queryset=CustomUser.objects.all(), slug_field='username')
class Meta:
model = ThrottleBurstPermit
fields = '__all__'
class BurstPermitLimitedSerializer(BurstPermitSerializer):
"""An additional burst permit serializer that removes confidential data when users with no administrative or burst-granting privilege
request the detail of any permits they were granted
"""
granter = None
user = None
class Meta(BurstPermitSerializer.Meta):
fields = None
exclude = ['granter', 'user']
|
# price-saving-block
# Out of stock
# class="product-sub-title-block product-out-of-stock"
# href="/ip/LEGO-Star-Wars-Imperial-Trooper-Battle-Pack-75165/55126217"
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
file_name = "scrapedata\legofromwalmart.csv"
f = open(file_name, "w")
headers = "Product_name,Link,Sale price"
f.write(headers + '\n')
#get pages from page1 to page i
for i in range(1, 5):
print("*************************************************************")
print("This is page {}.".format(i))
my_url = "https://www.walmart.com/search/?cat_id=0&facet=retailer%3AWalmart.com&grid=true&page="+str(i)+"&query=star+wars+lego+sets+clearance&typeahead=star+wars+lego+sets&vertical_whitelist=home%2C#searchProductResult"
u_client = urlopen(my_url)
page_html = u_client.read()
u_client.close()
page_soup = BeautifulSoup(page_html, "html.parser")
match = page_soup.find_all("ul", {"class": "search-result-gridview-items"})
for grid in match:
for l in grid.find_all("li"):
save_price = l.find("span",
{"class": "display-inline-block arrange-fit Price u-textColor price-saving"})
final_price = l.find("div", {"class": "price-main-block"})
# print("final price ",final_price)
get_link = l.find("a")
print("*************************************************************")
#only get data if price is onsale
if save_price is not None:
#get name
name = l.div.div.span.text.strip()
print("Name :", name)
#get price
price_list = str(final_price)
get_price = re.findall(r"(?s)title=\"(\$\d+\.\d+)\s?", price_list)
price = get_price[0].strip()
print("get price:", price)
#get link
link_list = str(get_link)
match_link = re.findall(r"(?s)href=\"(.*?)\"", link_list)
final_link = "https://www.walmart.com" + match_link[0]
link=final_link.replace("amp;", "")
print("link: ", link)
f.write(name + ',' + link + ',' + price + '\n')
f.close()
|
"""
Unrolled Compressed Sensing (3D)
by Christopher M. Sandino ([email protected]), 2020.
"""
import os, sys
import torch
from torch import nn
import sigpy.plot as pl
import utils.complex_utils as cplx
from utils.transforms import SenseModel
from utils.layers3D import ResNet
from unet.unet_model import UNet
from utils.flare_utils import ConjGrad
import matplotlib
# matplotlib.use('TkAgg')
class Operator(torch.nn.Module):
def __init__(self, A):
super(Operator, self).__init__()
self.operator = A
def forward(self, x):
return self.operator(x)
def adjoint(self, x):
return self.operator(x, adjoint=True)
def normal(self, x):
out = self.adjoint(self.forward(x))
return out
class CG_module(nn.Module):
def __init__(self, A=None, adjoint=None, verbose=False, lam_l2=0, cg_max=10):
super(CG_module, self).__init__()
self.A = A
self.adj = adjoint
self.lam = lam_l2
self.cg = cg_max
self.verbose = verbose
def initiate(self, A, adjoint):
self.A = A
self.adj = adjoint
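# Sketch of what forward() computes (assuming ConjGrad solves
# (Aop + l2lam*I) x = b, as in the flare utilities): a MoDL-style
# data-consistency step solving (A^H A + lam*I) x = A^H y + lam*x_in,
# where self.adj holds A^H y (the zero-filled reconstruction).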
def forward(self, x):
rhs = self.adj + self.lam * x
out = ConjGrad(
Aop_fun=self.A.normal,
b=rhs,
verbose=self.verbose,
l2lam=self.lam,
max_iter=self.cg,
).forward(rhs)
return out
def reverse(self, x):
out = (1 / self.lam) * ((self.A.normal(x) + self.lam * x) - self.adj)
return out
class UnrolledModel(nn.Module):
"""
PyTorch implementation of Unrolled Compressed Sensing.
Implementation is based on:
CM Sandino, et al. "DL-ESPIRiT: Accelerating 2D cardiac cine
beyond compressed sensing" arXiv:1911.05845 [eess.SP]
"""
def __init__(self, params):
"""
Args:
params (dict): Dictionary containing network parameters
"""
super().__init__()
# Extract network parameters
self.num_grad_steps = params.num_grad_steps
# num_resblocks = params.num_resblocks
# num_features = params.num_features
# kernel_size = params.kernel_size
# drop_prob = params.drop_prob
# circular_pad = params.circular_pad
# fix_step_size = params.fix_step_size
share_weights = params.share_weights
self.num_cg_steps = params.num_cg_steps
self.modl_lamda = params.modl_lamda
# print(self.modl_lamda)
# sys.exit()
self.cp = params.meld_cp
self.device = params.device
# Data dimensions
# self.num_emaps = params.num_emaps
# convtype = params.conv_type
# # ResNet parameters
# resnet_params = dict(num_resblocks=num_resblocks,
# in_chans=2 * self.num_emaps,
# chans=num_features,
# kernel_size=kernel_size,
# drop_prob=drop_prob,
# circular_pad=circular_pad,
# conv_type=convtype
# )
self.CGM = CG_module(A=None, adjoint=None, verbose=False, lam_l2=self.modl_lamda, cg_max=self.num_cg_steps)
# Declare ResNets and RNNs for each unrolled iteration
if share_weights:
print("shared weights")
self.unets = nn.ModuleList(
[nn.ModuleList([UNet(2, 2), self.CGM])] * self.num_grad_steps
)
else:
print("No shared weights")
self.unets = nn.ModuleList(
[
nn.ModuleList([UNet(2, 2), self.CGM])
for i in range(self.num_grad_steps)
]
)
# Declare step sizes for each iteration
# init_step_size = torch.tensor([-2.0], dtype=torch.float32).to(params.device)
# if fix_step_size:
# self.step_sizes = [init_step_size] * num_grad_steps
# else:
# self.step_sizes = [torch.nn.Parameter(init_step_size) for i in range(num_grad_steps)]
def complex2real(self, image):
"""
Convert complex torch image to two-channels image (real, imag)
Args:
image (torch.Tensor, dtype=torch.complex64): complex image of size [N, height, width]
Returns:
image (torch.Tensor, dtype=torch.float32): real image of size [N, 2, height, width]
"""
return torch.cat((image.real[:, None, ...], image.imag[:, None, ...]), 1)
def real2complex(self, image):
"""
Convert real torch image to complex image.
Args:
image (torch.Tensor, dtype=torch.float32): real image of size [N, 2, height, width]
Returns:
image (torch.Tensor, dtype=torch.complex64): complex image of size [N, height, width]
"""
return image[:, 0, ...] + 1j * image[:, 1, ...]
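# Note: complex2real and real2complex are mutual inverses, e.g.
# real2complex(complex2real(x)) reproduces a complex x of shape [N, H, W].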
def initiate(self, kspace, maps, mask=None):
# if self.num_emaps != maps.size()[-2]:
# raise ValueError('Incorrect number of ESPIRiT maps! Re-prep data...')
"""
From PyTorch 1.8 onward, native complex tensors are supported; this branch uses torch.fft instead of the older representation with two separate channels.
"""
# print(kspace.shape)
# sys.exit()
if mask is None:
mask = abs(kspace) > 0
kspace *= mask
# Get data dimensions
self.dims = tuple(kspace.size())
# Declare signal model
A = SenseModel(maps, weights=mask)
self.Sense = Operator(A)
# Compute zero-filled image reconstruction
self.zf_image = self.Sense.adjoint(kspace)
# pl.ImagePlot(self.zf_image.detach().cpu().numpy())
self.CGM.initiate(self.Sense, self.zf_image)
def evaluate(self):
with torch.no_grad():
if self.cp:
size = [len(self.unets)] + [a for a in self.zf_image.shape]
self.Xcp = torch.zeros(size, device=self.device)
else:
self.Xcp = None
self.dims = None
self.num_emaps = None
image = self.zf_image.clone()
# Begin unrolled proximal gradient descent
cpp = 0
for resnet in self.unets:
if self.cp:
self.Xcp[cpp, ...] = image
cpp += 1
# dc update
# pl.ImagePlot(image.detach().cpu())
image = image.reshape(self.dims[0:4] + (self.num_emaps * 2,)).permute(
0, 4, 3, 2, 1
)
image = resnet[0](image)
image = image.permute(0, 4, 3, 2, 1).reshape(
self.dims[0:4] + (self.num_emaps, 2)
)
image = resnet[1](image)
# print("I Love you")
return image, self.Xcp, self.dims, self.num_emaps
def forward(self):
"""
Args:
kspace (torch.Tensor): Input tensor of shape [batch_size, height, width, time, num_coils, 2]
maps (torch.Tensor): Input tensor of shape [batch_size, height, width, 1, num_coils, num_emaps, 2]
mask (torch.Tensor): Input tensor of shape [batch_size, height, width, time, 1, 1]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, height, width, time, num_emaps, 2]
"""
# if self.num_emaps != maps.size()[-2]:
# raise ValueError('Incorrect number of ESPIRiT maps! Re-prep data...')
# CG_alg = ConjGrad(Aop_fun=Sense.normal,b=zf_image,verbose=False,l2lam=0.05,max_iter=self.c)
# cg_image = CG_alg.forward(zf_image)
# pl.ImagePlot(zf_image.detach().cpu())
# sys.exit()
image = self.zf_image.clone()
# Begin unrolled proximal gradient descent
for unet in self.unets:
# dc update
# pl.ImagePlot(image.detach().cpu())
image = self.complex2real(image)
image = unet[0](image)
image = self.real2complex(image)
image = unet[1](image)
# pl.ImagePlot(image.detach().cpu().numpy())
image = self.complex2real(image)
# print("I Love you")
return image
|
import FWCore.ParameterSet.Config as cms
pfClustersFromL1EGClusters = cms.EDProducer("PFClusterProducerFromL1EGClusters",
src = cms.InputTag("L1EGammaClusterEmuProducer","L1EGXtalClusterEmulator"),
etMin = cms.double(0.5),
corrector = cms.string("L1Trigger/Phase2L1ParticleFlow/data/emcorr_barrel.root"),
resol = cms.PSet(
etaBins = cms.vdouble( 0.700, 1.200, 1.600),
offset = cms.vdouble( 0.873, 1.081, 1.563),
scale = cms.vdouble( 0.011, 0.015, 0.012),
kind = cms.string('calo'),
)
)
# use phase2_hgcalV10 to customize for 106X L1TDR MC even in the barrel, since there's no other modifier for it
from Configuration.Eras.Modifier_phase2_hgcalV10_cff import phase2_hgcalV10
phase2_hgcalV10.toModify(pfClustersFromL1EGClusters,
corrector = "L1Trigger/Phase2L1ParticleFlow/data/emcorr_barrel_106X.root",
resol = cms.PSet(
etaBins = cms.vdouble( 0.700, 1.200, 1.600),
offset = cms.vdouble( 1.047, 1.096, 1.633),
scale = cms.vdouble( 0.014, 0.031, 0.019),
kind = cms.string('calo')
)
)
|
from ThesisAnalysis.plotting.setup import ThesisPlotter
from ThesisAnalysis import get_data, get_plot, ThesisHDF5Reader
import numpy as np
import os
class TFPlotter(ThesisPlotter):
def plot(self, x, y):
ymax = np.max(y, 0)
ymin = np.min(y, 0)
color = next(self.ax._get_lines.prop_cycler)['color']
self.ax.fill_between(x, ymin, ymax,
facecolor='black', edgecolor='black',
label="Range Across Cells")
self.ax.plot(x, y[0], color=color, lw=1,
label="Single Cell")
self.ax.set_xlabel("Sample (ADC)")
self.ax.set_ylabel("Calibrated Sample (mV)")
self.add_legend(2)
class TFComparison(ThesisPlotter):
def plot(self, x, y, label):
color = next(self.ax._get_lines.prop_cycler)['color']
self.ax.plot(x, y[0], color=color, lw=1, label=label)
def process(input_path, output_path):
with ThesisHDF5Reader(input_path) as reader:
x = reader.read("x")['x'].values
y_flat = reader.read("y")['y'].values
metadata = reader.read_metadata()
n_cells = metadata['n_cells']
n_pnts = metadata['n_pnts']
y = y_flat.reshape((n_cells, n_pnts))
p_tf = TFPlotter(sidebyside=True)
p_tf.plot(x, y)
p_tf.save(output_path)
def process_comparison(input_path1, input_path2, output_path):
with ThesisHDF5Reader(input_path1) as reader:
x1 = reader.read("x")['x'].values
y_flat = reader.read("y")['y'].values
metadata = reader.read_metadata()
n_cells = metadata['n_cells']
n_pnts = metadata['n_pnts']
y1 = y_flat.reshape((n_cells, n_pnts))
with ThesisHDF5Reader(input_path2) as reader:
x2 = reader.read("x")['x'].values
y_flat = reader.read("y")['y'].values
metadata = reader.read_metadata()
n_cells = metadata['n_cells']
n_pnts = metadata['n_pnts']
y2 = y_flat.reshape((n_cells, n_pnts))
base = os.path.splitext(output_path)[0]
p_tf = TFComparison()
p_tf.plot(x1, y1, "Direct")
p_tf.plot(x2, y2, "Poly")
p_tf.ax.set_xlabel("Sample (ADC)")
p_tf.ax.set_ylabel("Calibrated Sample (mV)")
p_tf.add_legend(2)
p_tf.save(base + ".pdf")
p_tf = TFComparison(sidebyside=True)
p_tf.plot(x1, y1, "Direct")
p_tf.plot(x2, y2, "Poly")
p_tf.ax.set_xlim(-50, 50)
p_tf.ax.set_ylim(-50, 50)
p_tf.save(base + "_zoom.pdf")
def main():
input_path = get_data("tf/t5_lookup.h5")
output_path = get_plot("tf/lookup_t5.pdf")
process(input_path, output_path)
input_path = get_data("tf/tc_lookup.h5")
output_path = get_plot("tf/lookup_tc.pdf")
process(input_path, output_path)
input_path1 = get_data("tf/tc_direct_lookup.h5")
input_path2 = get_data("tf/tc_lookup.h5")
output_path = get_plot("tf/lookup_comparison.pdf")
process_comparison(input_path1, input_path2, output_path)
if __name__ == '__main__':
main()
|
from ...strategies.coordinator.scorer import TableScorer, PossibilityScorer, OpeningScorer, WinLoseScorer, NumberScorer, EdgeScorer, CornerScorer, BlankScorer, EdgeCornerScorer # noqa: E501
from ...strategies.coordinator.selector import Selector, Selector_W
from ...strategies.coordinator.orderer import Orderer, Orderer_B, Orderer_C, Orderer_P, Orderer_BC, Orderer_CB, Orderer_PCB
from ...strategies.coordinator.evaluator import Evaluator, Evaluator_T, Evaluator_P, Evaluator_O, Evaluator_W, Evaluator_N, Evaluator_N_Fast, Evaluator_E, Evaluator_C, Evaluator_B, Evaluator_Ec, Evaluator_TP, Evaluator_TPO, Evaluator_NW, Evaluator_PW, Evaluator_TPW, Evaluator_TPW_Fast, Evaluator_TPOW, Evaluator_TPWE, Evaluator_TPWE_Fast, Evaluator_TPWEC, Evaluator_PWE, Evaluator_BW, Evaluator_EcW, Evaluator_BWEc, Evaluator_PBWEc, Evaluator_TPWEB # noqa: E501
__all__ = [
'TableScorer',
'PossibilityScorer',
'OpeningScorer',
'WinLoseScorer',
'NumberScorer',
'EdgeScorer',
'CornerScorer',
'BlankScorer',
'EdgeCornerScorer',
'Selector',
'Selector_W',
'Orderer',
'Orderer_B',
'Orderer_C',
'Orderer_P',
'Orderer_BC',
'Orderer_CB',
'Orderer_PCB',
'Evaluator',
'Evaluator_T',
'Evaluator_P',
'Evaluator_O',
'Evaluator_W',
'Evaluator_N',
'Evaluator_N_Fast',
'Evaluator_E',
'Evaluator_C',
'Evaluator_B',
'Evaluator_Ec',
'Evaluator_TP',
'Evaluator_TPO',
'Evaluator_NW',
'Evaluator_PW',
'Evaluator_TPW',
'Evaluator_TPW_Fast',
'Evaluator_TPOW',
'Evaluator_TPWE',
'Evaluator_TPWE_Fast',
'Evaluator_TPWEC',
'Evaluator_PWE',
'Evaluator_BW',
'Evaluator_EcW',
'Evaluator_BWEc',
'Evaluator_PBWEc',
'Evaluator_TPWEB',
]
|
#! python2.7
## -*- coding: utf-8 -*-
## kun for Apk View Tracking
## ViewTree.py
import copy
from TreeType import CRect,CTreeNode,CPoint
from ParseElement import ParseElement
from ViewState import ViewState
class ViewTree():
'''
View Tree
'''
def __init__(self, logger):
self.m_logger = logger
def getStructure(self, dump_data):
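## The hierarchy dump encodes tree depth as the number of leading spaces on
## each line; this method returns the raw element strings together with the
## per-line blank counts for buildTree() to consume.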
list_data = dump_data.split("\n")
print "length of list: %s" %len(list_data)
# remove the terminating "DONE" marker
list_data.remove("DONE")
print "length of list: %s" %len(list_data)
elements_list=[]
blanks_list=[]
for element in list_data:
index = 0
count = 0
while " " == element[index]:
index = index + 1
count = count + 1
#===================================================================
# # another method which can get blanks count in head of element
# tag_list = element.split(" ")
# head_tag = tag_list[0]
# while (0 == len(head_tag)):
# count += 1
#===================================================================
blanks_list.append(count)
elements_list.append(element)
return elements_list,blanks_list
def buildTree(self, elements_list, blanks_list):
tree_nodes_list=[]
root_node= CTreeNode()
root_node.mParentNode=None
total_count = len(blanks_list)
depth = 0
pre_depth = depth-1
for x in range(total_count):
index = x
blanks_count = blanks_list[index]
depth = blanks_count
node = CTreeNode()
## set node depth in this tree
node.mTreeDepth = blanks_count
if 0 == blanks_count:
root_node.mElement = elements_list[index]
root_node.mDepth = 0
tree_nodes_list.append(root_node)
else:
pre_index = x-1
pre_depth = tree_nodes_list[pre_index].mDepth
node.mElement = elements_list[index]
node.mDepth = blanks_count
delta_depth = (depth - pre_depth)
if (1 == delta_depth):
## current node is a child node of the previous node
node.mParentNode = tree_nodes_list[pre_index]
tree_nodes_list.append(node)
elif (0 == delta_depth):
## these two nodes have the same depth, so they share the same parent node
node.mParentNode = tree_nodes_list[pre_index].mParentNode
tree_nodes_list.append(node)
elif (0 > delta_depth):
## recurse upward to find the node that has the same depth as this one
new_delta_depth = delta_depth
new_pre_depth = pre_depth
new_pre_index = pre_index
while True:
if 0==new_delta_depth:
node.mParentNode = tree_nodes_list[new_pre_index].mParentNode
tree_nodes_list.append(node)
break
else:
new_pre_index -= 1
new_pre_depth = tree_nodes_list[new_pre_index].mDepth
new_delta_depth = depth - new_pre_depth
else:
raise Exception, "Failed to build the elements tree: unexpected depth jump!"
return tree_nodes_list
## Left: newLeft = (Root Node)->mLeft + (ParentNode)->mLeft + ... + self->mLeft
## Right: newRight = newLeft + (self->mRight - self->mLeft)
## Top : newTop = (Root Node)->mTop + (ParentNode)->mTop + ... + self->mTop
## Bottom: newBottom = newTop + (self->mBottom - self->mTop)
def getAbsoluteRect(self, node):
absoluteRect = CRect()
current_node = copy.deepcopy(node)
temp_rect = current_node.mRect
## print "/////////////////begin trace ////////////////////////////////"
## print node.mRect.mTop, node.mRect.mBottom, node.mRect.mLeft, node.mRect.mRight
while True:
if None == current_node.mParentNode:
break
else:
## print "before [Top] %s [Left] %s" %(str(temp_rect.mTop),str(temp_rect.mLeft))
parent_node = current_node.mParentNode
temp_rect.mLeft+=parent_node.mRect.mLeft
temp_rect.mTop+=parent_node.mRect.mTop
current_node = parent_node
## print "after [Top] %s [Left] %s" %(str(temp_rect.mTop),str(temp_rect.mLeft))
temp_rect.mRight=temp_rect.mLeft+(node.mRect.mRight-node.mRect.mLeft)
temp_rect.mBottom=temp_rect.mTop+(node.mRect.mBottom-node.mRect.mTop)
absoluteRect=temp_rect
## print node.mRect.mTop, node.mRect.mBottom, node.mRect.mLeft, node.mRect.mRight
## print "///////////////// end trace ///////////////////////////////"
return absoluteRect
def getViewCenterPoint(self, node):
width = node.mAbsoluteRect.mRight - node.mAbsoluteRect.mLeft
height = node.mAbsoluteRect.mBottom - node.mAbsoluteRect.mTop
location = CPoint()
location.x = node.mAbsoluteRect.mLeft + width/2
location.y = node.mAbsoluteRect.mTop + height/2
return location
def getChildNodesList(self, tree_nodes_list, tree_node):
child_nodes_list = []
start_flag = False
end_flag = False
for node in tree_nodes_list:
if end_flag:
break
if node.mHashCode == tree_node.mHashCode:
start_flag = True
if (node.mDepth == (tree_node.mDepth+1)) and start_flag:
child_nodes_list.append(node)
if (node.mDepth == tree_node.mDepth) and start_flag and (node.mHashCode!=tree_node.mHashCode):
end_flag = True
# print tree_node.mClassName
# print len(child_nodes_list)
return child_nodes_list
def setNodeValue(self, node):
element = node.mElement
if None == element:
print "Failed to set Node Value because Error in Node!"
return False
element_parser = ParseElement(node.mElement)
element_parser.parseElmentData()
node.mClassName = element_parser.getClassName()
node.mHashCode = element_parser.getHashCode()
node.mId = element_parser.getID()
node.mText = element_parser.getText()
node.mRect = element_parser.getRectArea()
active_state = ViewState(node)
node.mActive = active_state.getActiveState()
node.mAbsoluteRect = self.getAbsoluteRect(node)
node.mLocation = self.getViewCenterPoint(node)
node.mVisible = element_parser.getVisible()
def build(self, data):
elements_list, blanks_list = self.getStructure(data)
tree_nodes_list = self.buildTree(elements_list, blanks_list)
for node in tree_nodes_list:
## set node value from root node to child node
self.setNodeValue(node)
node.mChildNodes = self.getChildNodesList(tree_nodes_list, node)
self.m_logger.info("*************************************************************************")
self.m_logger.info("mClassName: %s" %node.mClassName)
self.m_logger.info("mTreeDepth: %s" %node.mTreeDepth)
self.m_logger.info("mId: %s " %node.mId)
self.m_logger.info("mText: %s" %node.mText)
self.m_logger.info("mActive: %s" %node.mActive)
self.m_logger.info("mRect.(mTop, mBottom, mLeft, mRight): %s %s %s %s" %(node.mRect.mTop, node.mRect.mBottom, node.mRect.mLeft, node.mRect.mRight))
self.m_logger.info("mAbsoluteRect: %s %s %s %s" %(node.mAbsoluteRect.mTop, node.mAbsoluteRect.mBottom, node.mAbsoluteRect.mLeft, node.mAbsoluteRect.mRight))
self.m_logger.info("*************************************************************************")
return tree_nodes_list
if __name__=="__main__":
import logging
vt = ViewTree(logging.getLogger("ViewTree"))
|
from sumpy.annotators._annotator_base import _AnnotatorBase
from sumpy.annotators import SentenceTokenizerMixin, WordTokenizerMixin
from sumpy.document import Summary
from abc import ABCMeta, abstractmethod
import pandas as pd
import numpy as np
import networkx as nx
class _SystemBase(object):
"""Abstract base class for summarizer systems."""
__metaclass__ = ABCMeta
def __init__(self, verbose=False):
self.verbose = verbose
self._dependency_graph = None
self._annotators = None
self._pipeline = None
@abstractmethod
def build_summary(self, input_df, ndarray_data):
pass
def summarize(self, inputs):
if not hasattr(self, "_pipeline") or self._pipeline is None:
self.build_pipeline()
input_df, ndarray_data = self.prepare_inputs(inputs)
processed_df, processed_ndarray_data = self.process_input(
input_df, ndarray_data)
return self.build_summary(processed_df, processed_ndarray_data)
def build_pipeline(self):
self.build_dependency_graph()
self._pipeline = []
for node in nx.topological_sort(self._dependency_graph):
if node in self._annotators:
self._pipeline.append(self._annotators[node])
if self.verbose:
print("{} ({}) build".format(self.__class__.__name__,
self._annotators[node].name(self)))
self._annotators[node].build(self)
def prepare_inputs(self, inputs, ndarray_data=None):
requires = set()
returns = set()
ndarray_requires = set()
ndarray_returns = set()
for ann in self._pipeline:
requires.update(ann.requires(self))
returns.update(ann.returns(self))
ndarray_requires.update(ann.ndarray_requires(self))
ndarray_returns.update(ann.ndarray_returns(self))
# Allocate keys for ndarray dependencies.
if ndarray_data is None:
ndarray_data = {}
for key in ndarray_requires.union(ndarray_returns):
if key not in ndarray_data:
ndarray_data[key] = None
# Allocate columns for dataframe data dependencies.
all_cols = list(requires.union(returns))
if isinstance(inputs, list) or isinstance(inputs, tuple):
df = pd.DataFrame([{"doc id": doc_id, "doc text": doc_text}
for doc_id, doc_text in enumerate(inputs)],
columns=["doc id"] + all_cols)
return df, ndarray_data
elif isinstance(inputs, pd.DataFrame):
if "doc id" not in inputs:
raise Exception("input DataFrame must have column 'doc id'")
cols = list(set(inputs.columns.tolist() + all_cols))
df = pd.DataFrame(inputs.to_dict(), columns=cols)
df.reset_index(inplace=True)
return df, ndarray_data
else:
raise Exception("Bad input: list of strings or dataframe only.")
def process_input(self, input_df, ndarray_data):
cols = set(input_df.columns.tolist())
for ann in self._pipeline:
for rtype in ann.returns(self):
assert rtype in cols
for req in ann.requires(self):
assert req in cols
run_stage = input_df[ann.returns(self)].isnull().any().any() \
or np.any([ndarray_data[rtype] is None
for rtype in ann.ndarray_returns(self)])
if run_stage:
if self.verbose:
print("{} ({}) process".format(
self.__class__.__name__, ann.name(self)))
input_df, ndarray_data = ann.process(
self, input_df, ndarray_data)
return input_df, ndarray_data
def build_dependency_graph(self):
G = nx.DiGraph()
self._annotators = {}
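# Edges run from each required column to the annotator that needs it, and
# from each annotator to the columns it returns, so a topological sort of
# this graph yields a valid execution order for build_pipeline().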
def check_mixins(clazz, visited=set()):
if not issubclass(clazz, _SystemBase):
if issubclass(clazz, _AnnotatorBase):
name = clazz.name(self)
self._annotators[name] = clazz
for req in clazz.requires(self):
G.add_edge(req, name)
for req in clazz.ndarray_requires(self):
G.add_edge(req, name)
for rtype in clazz.returns(self):
G.add_edge(name, rtype)
for rtype in clazz.ndarray_returns(self):
G.add_edge(name, rtype)
visited.add(clazz)
for base in clazz.__bases__:
if base in visited:
continue
if not issubclass(base, _AnnotatorBase):
continue
if base == _AnnotatorBase:
continue
check_mixins(base, visited)
check_mixins(self.__class__)
self._dependency_graph = G
def print_dependency_graph(self, filename=None, to_iPython=True):
import pygraphviz as pgv
if not hasattr(self, "_dependency_graph") or \
self._dependency_graph is None:
self.build_dependency_graph()
if filename is None:
filename = "sumpy.tmp.png"
G = pgv.AGraph(strict=False, directed=True)
for node in self._dependency_graph:
if node in self._annotators:
G.add_node(node)
G.get_node(node).attr["shape"] ="rectangle"
elif node.startswith("f:"):
G.add_node(node)
G.get_node(node).attr["shape"] ="parallelogram"
for edge in self._dependency_graph.in_edges(node):
G.add_edge(edge[0], edge[1], color="green")
else:
for in_edge in self._dependency_graph.in_edges(node):
for out_edge in self._dependency_graph.out_edges(node):
G.add_edge(in_edge[0], out_edge[1],
label=node, key=node)
G.layout("dot")
G.draw(filename)
if to_iPython is True:
from IPython.display import Image
return Image(filename=filename)
class AverageFeatureRankerBase(
WordTokenizerMixin, _SystemBase):
def build_summary(self, input_df, ndarray_data):
cols = [f for f in input_df.columns.tolist() if f.startswith("f:")]
X = input_df[cols].values
input_df["rank"] = (X / X.max(axis=0)).mean(axis=1)
output_df = input_df.sort_values(["rank"], ascending=False)
return Summary(output_df)
|
import time
import sys
from typing import Iterable, Any, Mapping, Union, Iterator, Sequence
from elasticsearch import ElasticsearchException, NotFoundError
from elasticsearch.helpers import streaming_bulk, bulk
from . import connections
from .search import Search
class Exporter:
"""
Base class helper to export stuff to elasticsearch.
Derive from class and define class attributes:
- ``INDEX_NAME``: ``str``
Name of index, might contain a wildcard `*`
- ``MAPPINGS``: ``dict``
The `mapping <https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html>`__
definition for the index.
And optionally override methods:
- :meth:`.transform_document`
Convert a document to elasticsearch.
- :meth:`.get_document_id`
Return a unique id for the elasticsearch document.
- :meth:`.get_document_index`
Return an alternative index name for the document.
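Minimal illustrative sketch (index name and mapping are assumptions):
.. CODE::
class LogExporter(Exporter):
INDEX_NAME = "my-logs"
MAPPINGS = {"properties": {"message": {"type": "text"}}}
LogExporter().export_list([{"message": "hello"}])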
"""
# Name of the elasticsearch index where things are exported
INDEX_NAME: str = None
# dict with mapping parameters
MAPPINGS: dict = None
def __init__(
self,
client=None,
index_prefix: str = None,
index_postfix: str = None,
update_index: bool = True,
):
"""
Create a new instance of the exporter.
:param client:
An optional instance of an elasticsearch.Elasticsearch compatible object
If omitted elastipy.connections.get("default") will be used
:param index_prefix: ``str``
Optional string that is put before the class-attribute ``INDEX_NAME``
:param index_postfix: ``str``
Optional string that is put after the class-attribute ``INDEX_NAME``
:param update_index: ``bool``
If ``True``, the elasticsearch index will be created or updated with
the current ``MAPPINGS`` before the first export of a document.
"""
for required_attribute in ("INDEX_NAME", "MAPPINGS"):
if not getattr(self, required_attribute, None):
raise ValueError(f"Need to define class attribute {self.__class__.__name__}.{required_attribute}")
self._client = client
self.index_prefix = index_prefix
self.index_postfix = index_postfix
self._do_update_index = update_index
self._index_updated = dict()
@property
def client(self):
"""
Access to the elasticsearch client.
If none was defined in constructor
then ``elastipy.connections.get("default")`` is returned.
"""
if self._client is None:
self._client = connections.get()
return self._client
def index_name(self) -> str:
"""
Returns the configured ``index_prefix - INDEX_NAME - index_suffix``
:return: str
"""
name = self.INDEX_NAME
if self.index_prefix:
name = f"{self.index_prefix}-{name}"
if self.index_postfix:
name = f"{name}-{self.index_postfix}"
return name
def search(self, **kwargs) -> Search:
"""
Return a new ``Search`` object for this index and client.
:return: Search instance
"""
from .search import Search
return Search(index=self.index_name(), client=self._client, **kwargs)
def get_document_id(self, es_data: Mapping):
"""
Override this to return a single elasticsearch object's id.
:param es_data: ``dict``
Single object as returned by transform_document()
:return: str, int etc..
"""
return None
def get_document_index(self, es_data: Mapping) -> str:
"""
Override to define an index per document.
The default function returns the result from ``index_name()``
but it's possible to put objects into separate indices.
For example you might define ``INDEX_NAME = "documents-*"``
and ``get_document_index`` might return
.. CODE::
self.index_name().replace("*", es_data["type"])
:param es_data: ``dict``
Single document as returned by transform_document()
:return: str
"""
return self.index_name()
def transform_document(self, data: Mapping) -> Union[Mapping, Iterator[Mapping]]:
"""
Override this to transform each documents's data into
an elasticsearch document.
It's possible to return a **list** or **yield** multiple
elasticsearch documents.
:param data: dict
:return: dict or iterable of dict
"""
return data
def update_index(self) -> None:
"""
Create the index or update changes to the mapping.
Can only be called if ``INDEX_NAME`` does not contain a ``'*'``
:return: None
"""
if "*" in self.index_name():
raise ValueError(f"update_index() can not be called for wildcard indices like '{self.index_name()}'")
self._update_index(self.index_name())
def delete_index(self) -> bool:
"""
Try to delete the index. Ignore if not found.
:return: ``bool``
True if deleted, False otherwise.
If the index name contains a wildcard ``*``,
True is always returned.
"""
from .aggregation.helper import wildcard_match
name = self.index_name()
try:
self.client.indices.delete(index=name)
self._index_updated.pop(self.index_name(), None)
if "*" in name:
for key in list(self._index_updated):
if wildcard_match(key, name):
self._index_updated.pop(key)
return True
except NotFoundError:
return False
def export_list(
self,
object_list: Iterable[Any],
chunk_size: int = 500,
refresh: bool = False,
verbose: bool = False,
verbose_total: int = None,
file=None,
**kwargs
):
"""
Export a list of objects.
:param object_list: ``sequence of dict``
This can be a list or generator of dictionaries, containing the
objects that should be exported.
:param chunk_size: ``int``
Number of objects per bulk request.
:param refresh: ``bool``
if ``True`` require the immediate refresh of the index
when finished exporting.
:param verbose: ``bool``
If True print some progress to stderr
(using `tqdm <https://pypi.org/project/tqdm/>`__ if present)
:param verbose_total: ``int``
Provide the number of objects for the **verbosity** if
``object_list`` is a generator.
:param file:
Optional string stream to output verbose info, default is ``stderr``.
All other parameters are passed to
`elasticsearch.helpers.bulk <https://elasticsearch-py.readthedocs.io/en/v7.10.1/helpers.html#elasticsearch.helpers.bulk>`__
:return: ``dict``
Response of elasticsearch bulk call.
"""
def bulk_actions():
for object_data in self._verbose_iter(object_list, verbose, verbose_total, file):
es_data_iter = self.transform_document(object_data)
if isinstance(es_data_iter, Mapping):
es_data_iter = [es_data_iter]
for es_data in es_data_iter:
object_id = self.get_document_id(es_data)
index_name = self.get_document_index(es_data)
if index_name not in self._index_updated:
self._update_index(index_name)
action = {
"_index": self.get_document_index(es_data),
"_source": es_data,
}
if object_id is not None:
action["_id"] = object_id
yield action
response = bulk(
client=self.client,
actions=bulk_actions(),
chunk_size=chunk_size,
refresh=refresh,
**kwargs,
)
if verbose:
# TODO: print error status
print(f"{self.__class__.__name__}: exported {response[0]} objects", file=file)
return response
def get_index_params(self) -> dict:
"""
Returns the complete index parameters.
Override if you need to specialize things.
:return: dict
"""
return {
"mappings": self.MAPPINGS
}
def _update_index(self, name):
try:
self.client.indices.get_mapping(index=name)
self.client.indices.put_mapping(index=name, body=self.MAPPINGS)
self._index_updated[name] = True
return
except NotFoundError:
pass
self.client.indices.create(index=name, body=self.get_index_params())
@classmethod
def _verbose_iter(cls, iter, verbose: bool, count=None, file=None):
if not verbose:
yield from iter
return
if file is None:
file = sys.stderr
# this is just a unittest switch
if verbose != "simple":
try:
import tqdm
yield from tqdm.tqdm(iter, total=count, file=file)
return
except ImportError:
pass
if count is None:
try:
count = len(iter)
except (TypeError, ):
pass
last_time = None
for i, item in enumerate(iter):
ti = time.time()
if last_time is None or ti - last_time >= 1.:
last_time = ti
if count:
print(f"{cls.__name__} {i}/{count}", file=file)
else:
print(f"{cls.__name__} {i}", file=file)
yield item
|
from ._venv import is_venv
from ._check import install_packages, check_module, require_module, check_fun, require_fun, check_attr, require_attr, check_class, require_class
|
n, q = map(int, input().split())
G = [[] for _ in range(n)]
for _ in range(n - 1):
a, b = map(lambda x: int(x) - 1, input().split())
G[a].append(b)
G[b].append(a)
from collections import deque
P = [-1] * n
P[0] = 0
dq = deque([0])
while dq:
v = dq.popleft()
for u in G[v]:
if P[u] != -1: continue
P[u] = P[v] + 1
dq.append(u)
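# Why parity suffices: on a tree, dist(c, d) = depth(c) + depth(d) - 2 * depth(lca(c, d)),
# so dist(c, d) is odd exactly when depth(c) + depth(d) is odd.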
for _ in range(q):
c, d = map(lambda x: int(x) - 1, input().split())
if (P[c] + P[d]) % 2: print('Road')
else: print('Town')
|
import os
from warnings import warn
from flask import current_app
from .core import Config, lazy
from . import default_config
from flex.utils.local import LocalProxy
from flex.utils.lazy import LazyObject, empty
from flex.core.exc import ImproperlyConfigured
ENVIRONMENT_VARIABLE = 'FLEX_CONFIG_PATH'
ROOT_PATH_ENVAR = 'FLEX_ROOT_DIR'
class LazyConfig(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
__slots__ = ()
def _setup(self, name=None):
"""Load the config path pointed to by the environment variable.
"""
config_path = os.environ.get(ENVIRONMENT_VARIABLE)
if not config_path:
desc = ("config %s" % name) if name else "config"
raise ImproperlyConfigured(
"Requested %s, but configuration has not been initialized. "
"You must either define the environment variable %s "
"or call config.initialize() before accessing configurations."
% (desc, ENVIRONMENT_VARIABLE))
root_path = os.environ.get(ROOT_PATH_ENVAR)
if not root_path:
root_path = os.getcwd()
warn(
'Environment variable %s for config root path not defined. '
'The current working directory %s will be used instead.'
% (ROOT_PATH_ENVAR, root_path), RuntimeWarning
)
self._wrapped = Config(root_path)
self._wrapped.from_object(default_config)
self._wrapped.from_envvar(ENVIRONMENT_VARIABLE)
def __repr__(self):
if self._wrapped is empty:
return '<LazyConfig [Unevaluated]>'
return '<LazyConfig %s>' % str(self._wrapped)
@property
def top(self):
"""Returns configuration for the current_app if any."""
if current_app:
return current_app.config
return self
@property
def _config(self):
if self._wrapped is empty:
self._setup()
return self._wrapped
@property
def has_init(self):
"""Returns True if the configuration has already been initialized."""
return self._wrapped is not empty
config = LazyConfig()
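# `config` resolves lazily: the first attribute access triggers _setup(), which
# loads the defaults and then the file named by the FLEX_CONFIG_PATH environment
# variable, rooted at FLEX_ROOT_DIR (or the current working directory).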
|
import sys
import io
from twisted.logger import (
eventsFromJSONLogFile, textFileLogObserver
)
output = textFileLogObserver(sys.stdout)
for event in eventsFromJSONLogFile(io.open("log.json")):
output(event)
|
CELEBA_PATH = "/home/aixile/Workspace/dataset/celeba/"
GAME_FACE_PATH = "/home/aixile/Workspace/dataset/game_face_170701/"
|
#!/usr/bin/env python3
import unittest
from util import connect, load_data
LISTINGS_DATA = "YVR_Airbnb_listings_summary.csv"
REVIEWS_DATA = "YVR_Airbnb_reviews.csv"
CREATE_LISTINGS_TABLE = '''
CREATE TABLE listings (
id INTEGER,
-- ID of the listing
name TEXT,
-- Title of the listing
host_id INTEGER,
-- ID of the host for the listing
host_name TEXT,
-- Name of the host
neighbourhood TEXT,
-- Location of the listing
room_type TEXT,
-- The type of the room offered
price INTEGER,
-- The price of the listing
minimum_nights INTEGER,
-- The minimum nights the listing can be booked
availability_365 INTEGER,
-- The availability of the listing in a year
PRIMARY KEY(id)
);
'''
CREATE_REVIEWS_TABLE = '''
CREATE TABLE reviews (
listing_id INTEGER,
id INTEGER,
date TEXT,
reviewer_id INTEGER,
reviewer_name TEXT,
comments TEXT,
PRIMARY KEY(id)
);
'''
def main() -> None:
listings_data = load_data(LISTINGS_DATA)
review_data = load_data(REVIEWS_DATA)
make_main()
populate_listings_table(listings_data)
populate_review_table(review_data)
def make_main() -> None:
connection = connect()
delete_listings_table = '''
DROP TABLE IF EXISTS listings;
'''
delete_reviews_table = '''
DROP TABLE IF EXISTS reviews;
'''
connection.cursor().execute(delete_listings_table)
connection.cursor().execute(delete_reviews_table)
connection.cursor().execute(CREATE_LISTINGS_TABLE)
connection.cursor().execute(CREATE_REVIEWS_TABLE)
connection.close()
def populate_review_table(review_data) -> None:
connection = connect()
query = '''
INSERT INTO
reviews
VALUES(
:listing_id,
:id,
:date,
:reviewer_id,
:reviewer_name,
:comments
);
'''
insertions = []
for i in range(0, len(review_data)):
insertions.append({
"listing_id": review_data[i][0],
"id": review_data[i][1],
"date": review_data[i][2],
"reviewer_id": review_data[i][3],
"reviewer_name": review_data[i][4],
"comments": review_data[i][5]
})
connection.executemany(query, insertions)
connection.commit()
connection.close()
def populate_listings_table(listings_data) -> None:
connection = connect()
query = '''
INSERT INTO
listings
VALUES(
:id,
:name,
:host_id,
:host_name,
:neighbourhood,
:room_type,
:price,
:minimum_nights,
:availability_365
);
'''
insertions = []
for i in range(0, len(listings_data)):
# Skip rows that contain any NULL value.
if any(value is None for value in listings_data[i]):
continue
insertions.append({
"id": listings_data[i][0],
"name": listings_data[i][1],
"host_id": listings_data[i][2],
"host_name": listings_data[i][3],
"neighbourhood": listings_data[i][4],
"room_type": listings_data[i][5],
"price": listings_data[i][6],
"minimum_nights": listings_data[i][7],
"availability_365": listings_data[i][8]
})
connection.executemany(query, insertions)
connection.commit()
connection.close()
class DatabaseTest(unittest.TestCase):
# Checksum verification
def test_listings_table(self):
connection = connect()
test_query = '''
SELECT
MIN(host_id),
MAX(host_id),
AVG(host_id),
COUNT(host_id)
FROM
listings;
'''
data = connection.cursor().execute(test_query).fetchone()
print(data)
connection.close()
self.assertTupleEqual(
data, (6033, 387534175, 115176061.85829493, 4340))
def test_reviews_table(self):
connection = connect()
test_query = '''
SELECT
MIN(id),
MAX(id),
AVG(id),
COUNT(id)
FROM
reviews;
'''
data = connection.cursor().execute(test_query).fetchone()
print(data)
connection.close()
self.assertTupleEqual(
data, (26444, 730124064, 370354766.84915775, 147936))
if __name__ == "__main__":
main()
unittest.main()
|
import os
import logging
import pathlib
import random
import numbers
from tqdm import tqdm
import numpy as np
import torch
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms
import torchvision.datasets as datasets
import torchvision.transforms.functional as TF
import torch.nn.functional as F
from PIL import Image
try:
import accimage
except ImportError:
accimage = None
from os.path import splitext
from os import listdir
from glob import glob
def _is_pil_image(img):
if accimage is not None:
return isinstance(img, (Image.Image, accimage.Image))
else:
return isinstance(img, Image.Image)
def _is_numpy(img):
return isinstance(img, np.ndarray)
def _is_numpy_image(img):
return img.ndim in {2, 3}
class BasicDataset(Dataset):
def __init__(self, imgs_dir, masks_dir, scale=1):
self.imgs_dir = imgs_dir
self.imgs_path = pathlib.Path(imgs_dir)
self.masks_dir = masks_dir
self.masks_path = pathlib.Path(masks_dir)
self.scale = scale
assert 0 < scale <= 1, 'Scale must be between 0 and 1'
self.ids = [p.stem for p in self.imgs_path.iterdir()
if not p.name.startswith('.')]
logging.info(f'Creating dataset with {len(self.ids)} examples')
def __len__(self):
return len(self.ids)
@classmethod
def preprocess(cls, pil_img, scale):
w, h = pil_img.size
newW, newH = int(scale * w), int(scale * h)
assert newW > 0 and newH > 0, 'Scale is too small'
pil_img = pil_img.resize((newW, newH))
img_nd = np.array(pil_img)
if len(img_nd.shape) == 2:
img_nd = np.expand_dims(img_nd, axis=2)
# HWC to CHW
img_trans = img_nd.transpose((2, 0, 1))
if img_trans.max() > 1:
img_trans = img_trans / 255
return img_trans
def __getitem__(self, i):
idx = self.ids[i]
mask_file = glob(self.masks_dir + idx + '*')
img_file = glob(self.imgs_dir + idx + '*')
assert len(mask_file) == 1, \
f'Either no mask or multiple masks found for the ID {idx}: {mask_file}'
assert len(img_file) == 1, \
f'Either no image or multiple images found for the ID {idx}: {img_file}'
mask = Image.open(mask_file[0])
img = Image.open(img_file[0])
assert img.size == mask.size, \
f'Image and mask {idx} should be the same size, but are {img.size} and {mask.size}'
img = self.preprocess(img, self.scale)
mask = self.preprocess(mask, self.scale)
return {'image': torch.from_numpy(img), 'mask': torch.from_numpy(mask)}
####################################################################################################################
########### C E L E B A #############-------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------
# CelebA face image dataset, only returns images and not metadata
# ------------------------------------------------------------------------------------------------------------------
class CelebA(Dataset):
def __init__(self, path='/root/data/CelebA/img_align_celeba/', part='train'):
if part=='train':
self.data = [os.path.join(path, file) for file in os.listdir(path)][:182637]
else:
self.data = [os.path.join(path, file) for file in os.listdir(path)][182637:]
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
return self.transform(Image.open(self.data[idx]))
def make_celeba_dataloader(dataset, batch_size, image_size=4):
dataset.transform = transforms.Compose([
transforms.Resize((image_size, image_size)),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True)])
return DataLoader(dataset, shuffle=True, batch_size=batch_size, num_workers=4, drop_last=True)
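# Illustrative usage (dataset path inside CelebA is an assumption):
#   loader = make_celeba_dataloader(CelebA(part='train'), batch_size=64, image_size=64)
#   batch = next(iter(loader))  # float tensor of shape [64, 3, 64, 64], normalized to [-1, 1]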
####################################################################################################################
########### F R A C T A L #############-------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------
# Custom Fractal Images dataset with high resolution images. Must apply crop to use.
# ------------------------------------------------------------------------------------------------------------------
class Fractal(Dataset):
def __init__(self, path='/content/all/', part='all', cache='memory'):
        self.all_data = sorted(str(p.absolute()) for p in pathlib.Path(path).glob("*"))
self.total = len(self.all_data)
if part == 'all':
self.data = self.all_data
elif part=='train':
self.data = self.all_data[:int(self.total*0.9)]
else:
self.data = self.all_data[int(self.total*0.9):]
self.cache = cache
if self.cache == 'memory':
logging.info(f"Using in memory cache for {self.total} images")
cache_temp = []
for p in tqdm(self.data):
try:
cache_temp.append(Image.open(p).convert('RGB'))
except Exception as e:
logging.error(f"Failed loading image in dataset:\n{e}")
self.data = cache_temp
del cache_temp
def __len__(self):
return len(self.data)
    def __getitem__(self, idx):
        item = self.data[idx]
        # the in-memory cache already holds decoded PIL images
        return self.transform(item if isinstance(item, Image.Image) else Image.open(item).convert('RGB'))
# ------------------------------------------------------------------------------------------------------------------
# Prepares a set of transformations that crops a certain scale square area randomly from each images
# in a batch, effectively making a much larger dataset than individual image count suggests.
# ------------------------------------------------------------------------------------------------------------------
def make_fractal_alae_dataloader(dataset, batch_size,
image_size=4,
crop_size=512,
num_workers=3,
crop_mode='random',
mean=(0.5, 0.5, 0.5),
std=(0.5, 0.5, 0.5),
jitter_settings={"brightness": 0.1,
"contrast": 0.3,
"saturation": 0.3,
"hue": 0.3}):
transform_list = []
if isinstance(crop_mode, str):
if crop_mode == 'random':
transform_list.append(transforms.RandomCrop(crop_size,
pad_if_needed=True,
padding_mode='symmetric'))
elif crop_mode == 'center':
transform_list.append(transforms.CenterCrop(crop_size))
transform_list.append(transforms.Resize((image_size, image_size)))
transform_list.append(transforms.RandomHorizontalFlip(p=0.5))
transform_list.append(transforms.RandomVerticalFlip(p=0.5))
transform_list.append(transforms.ColorJitter(**jitter_settings))
#transform_list.append(transforms.RandomGrayscale(p=0.1))
transform_list.append(transforms.ToTensor())
transform_list.append(transforms.Normalize(mean, std, inplace=True))
dataset.transform = transforms.Compose(transform_list)
return DataLoader(dataset, shuffle=True, batch_size=batch_size, num_workers=num_workers, drop_last=True)
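def _example_fractal_alae_loader():
    """Usage sketch (not in the original code): random 512px crops resized to
    64px; the dataset path is a placeholder and in-memory caching is skipped."""
    dataset = Fractal(path='/content/all/', part='train', cache='none')
    loader = make_fractal_alae_dataloader(dataset, batch_size=16, image_size=64,
                                          crop_size=512)
    batch = next(iter(loader))  # shape [16, 3, 64, 64]
    return batch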
# ------------------------------------------------------------------------------------------------------------------
# Custom Fractal Images dataset with high resolution images. Must apply crop and also return the coordinates of
# of the crop in the form of the upper left and lower right points (bounding box - [x1,y1,x2,y2]). Supports the concept
# of multiple crops from each image so that Contrastive Learning can be used with each crop from the same image has a label
# applied in this class based on the index
# ------------------------------------------------------------------------------------------------------------------
class FractalLabel(Dataset):
def __init__(self, path='/content/all/', part='train'):
        self.all_data = sorted(str(p.absolute()) for p in pathlib.Path(path).glob("*"))
self.total = len(self.all_data)
if part=='train':
self.data = self.all_data[:int(self.total*0.9)]
else:
self.data = self.all_data[int(self.total*0.9):]
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
result, coords = self.transform(Image.open(self.data[idx]).convert('RGB'))
label = torch.full((result.shape[0],), fill_value=idx, dtype=torch.int)
return (result, label, coords)
# ------------------------------------------------------------------------------------------------------------------
# Prepares a set of transformations that makes many crops of a certain scale square area randomly from each image
# in a batch, effectively making a much larger dataset than individual image count suggests. Also returns the coordinates
# of each crop. Results in a 4-d tensor [N, C, H, W] with N being number of crops
# ------------------------------------------------------------------------------------------------------------------
def make_fractal_clr_dataloader(dataset, batch_size, image_size=4, crop_size=512, num_workers=3, crop_mode=5, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)):
transform_list = []
transform_list.append(transforms.RandomHorizontalFlip(p=0.5))
transform_list.append(transforms.RandomVerticalFlip(p=0.5))
transform_list.append(transforms.ColorJitter(brightness=0.1, contrast=0.3, saturation=0.3, hue=0.2))
#transform_list.append(transforms.RandomGrayscale(p=0.1))
transform_list.append(MultiCropCoordV2(crop_size, image_size, count=crop_mode))
transform_list.append(BuildOutput(mean, std))
dataset.transform = transforms.Compose(transform_list)
return DataLoader(dataset, shuffle=True, batch_size=batch_size, num_workers=num_workers, drop_last=True)
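def _example_fractal_clr_loader():
    """Usage sketch (not in the original code): five 512px crops per image,
    resized to 64px, returned with per-image labels and crop coordinates."""
    dataset = FractalLabel(path='/content/all/', part='train')
    loader = make_fractal_clr_dataloader(dataset, batch_size=4, image_size=64,
                                         crop_size=512, crop_mode=5)
    crops, labels, coords = next(iter(loader))  # crops: [4, 5, 3, 64, 64]
    return crops, labels, coords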
def _get_image_size(img):
    # returns (width, height) for both PIL images and CHW tensors
    if _is_pil_image(img):
return img.size
elif isinstance(img, torch.Tensor) and img.dim() > 2:
return img.shape[-2:][::-1]
else:
raise TypeError("Unexpected type {}".format(type(img)))
class MultiCropCoord:
def __init__(self, crop_size, resize_size,
count=5,
crop_pad=0.,
use_pad=False,
seed=42):
self.crop_size = crop_size
self.resize_size = resize_size
self.count = count
self.crop_pad = crop_pad
self.use_pad = use_pad
self.seed = seed
def __call__(self, x):
x = self._check_size(x)
results = []
coords = []
for i in range(self.count):
data, coord = self._random_crop(x)
results.append(data)
coords.append(coord)
return (self._resize_img(results), self._resize_coords(coords))
    def _check_size(self, x):
        """ Ensures the image is big enough to crop from, upscaling it if not.
        """
        self.w, self.h = _get_image_size(x)
# if not using padding boundary for valid crop area, then total size is just crop size
# if use pad is enforced, there is an extra amount of padding that is not valid, so the resulting image is larger
total_h = self.crop_size + (self.h * self.crop_pad) if self.use_pad else self.crop_size
total_w = self.crop_size + (self.w * self.crop_pad) if self.use_pad else self.crop_size
if self.h < total_h or self.w < total_w:
pad_amount = int(self.crop_size * self.crop_pad)
            # calculate image size ratio to preserve proportions after resize
if self.h < self.w:
# smaller side will be equal to crop size + pad amount
ratio_h = 1
# larger side will be scaled up so that it stays larger
ratio_w = self.w / self.h
# unified ratio to increase size by based on smaller side
ratio_r = self.crop_size / self.h
else:
ratio_h = self.h / self.w
ratio_w = 1
ratio_r = self.crop_size / self.w
            # do resize based on whether the input is PIL or Tensor
            if _is_pil_image(x):
                # PIL resize takes a single (width, height) tuple
                x = x.resize((int(int(self.w * ratio_r) + pad_amount * ratio_w),
                              int(int(self.h * ratio_r) + pad_amount * ratio_h)))
                # get new size
                self.w, self.h = _get_image_size(x)
                return x
            elif isinstance(x, torch.Tensor) and x.dim() > 2:
                x = F.interpolate(x.unsqueeze(0),
                                  size=(int(int(self.h * ratio_r) + pad_amount * ratio_h),
                                        int(int(self.w * ratio_r) + pad_amount * ratio_w)),
                                  mode='bilinear', align_corners=False).squeeze(0)
                # get new size
                self.w, self.h = _get_image_size(x)
                return x
            else:
                # Numpy? shouldn't happen...
                return x
else:
# image is large enough already
return x
def _random_crop(self, x):
# get total height and width of crop
if isinstance(self.crop_size, int):
th, tw = self.crop_size, self.crop_size
elif isinstance(self.crop_size, float):
th, tw = int(self.crop_size), int(self.crop_size)
else:
th, tw = int(self.crop_size[0]), int(self.crop_size[1])
if self.use_pad:
# calculate ratio to modify padding by to make it balanced on rectangles
if self.h < self.w:
ratio_h = self.h / self.w
ratio_w = 1.
else:
ratio_w = self.w / self.h
ratio_h = 1.
# calculate padding to ensure no overlap with corners
ph = int(self.h * self.crop_pad * ratio_h)
pw = int(self.w * self.crop_pad * ratio_w)
else:
ph = pw = 0
# calculate available space left over after crop and padding (max x/y)
available_h = self.h - th - ph
available_w = self.w - tw - pw
padding_h = padding_w = 0
if available_h < 0:
# this much extra room needed in height
padding_h = abs(available_h)
if available_w < 0:
# this many extra pixels needed in width
padding_w = abs(available_w)
available_h += padding_h
available_w += padding_w
if available_h > 0 and available_h > pw:
mod_h = random.randint(pw, available_h)
else:
diff = pw - available_h
mod_h = random.randint(available_h-diff, available_h)
if available_w > 0 and available_w > ph:
mod_w = random.randint(ph, available_w)
else:
diff = ph - available_w
mod_w = random.randint(available_w-diff, available_w)
x1, y1, x2, y2 = mod_h, mod_w, mod_h + th - padding_h, mod_w + tw - padding_w
        # torchvision.transforms.functional.crop(img, top, left, height, width)
        # restore the coordinate-returning crop: the caller expects (image, coords)
        return TF.crop(x, y1, x1, abs(y2 - y1), abs(x2 - x1)), (x1, y1, x2, y2, self.h, self.w)
def _resize_img(self, results):
resized = []
for result in results:
resized.append(result.resize((self.resize_size, self.resize_size)))
return resized
def _resize_coords(self, coords):
""" Scale the coordinates by the amount the crop was resized
"""
resized = []
for coord in coords:
ratio = self.resize_size / self.crop_size
x1 = int(coord[0] * ratio)
y1 = int(coord[1] * ratio)
x2 = int(coord[2] * ratio)
y2 = int(coord[3] * ratio)
h = int(coord[4] * ratio)
w = int(coord[5] * ratio)
resized.append((x1, y1, x2, y2, h, w))
return resized
class MultiCropCoordV2(object):
def __init__(self, crop_size, resize_size,
count=5,
padding=None,
pad_if_needed=False,
fill=0,
padding_mode='constant',
interpolation=Image.BILINEAR):
if isinstance(crop_size, numbers.Number):
self.crop_size = (int(crop_size), int(crop_size))
else:
self.crop_size = crop_size
self.count = count
self.padding = padding
self.pad_if_needed = pad_if_needed
self.fill = fill
self.padding_mode = padding_mode
if isinstance(resize_size, numbers.Number):
self.resize_size = (int(resize_size), int(resize_size))
else:
self.resize_size = resize_size
self.interp = interpolation
self.resizecrop = transforms.Resize(self.resize_size, interpolation=self.interp)
@staticmethod
def get_params(img, output_size):
"""Get parameters for ``crop`` for a random crop.
Args:
img (PIL Image): Image to be cropped.
output_size (tuple): Expected output size of the crop.
Returns:
tuple: params (i, j, h, w) to be passed to ``crop`` for random crop.
"""
w, h = _get_image_size(img)
th, tw = output_size
if w == tw and h == th:
return 0, 0, h, w
i = random.randint(0, h - th)
j = random.randint(0, w - tw)
return i, j, th, tw
def __call__(self, img):
img = self._check_size(img)
results = []
coords = []
for i in range(self.count):
data, coord = self._random_crop(img)
data = self.resizecrop(data)
results.append(data)
coords.append(self._resize_coord(coord))
return (results, coords)
    def _check_size(self, x):
        """ Ensures the image is big enough to crop from, upscaling it if not.
        """
        self.w, self.h = _get_image_size(x)
        total_h = self.crop_size[0]
        total_w = self.crop_size[1]
        if self.h < total_h or self.w < total_w:
            # scale up so that both sides reach the required crop size while
            # preserving the aspect ratio
            ratio_r = max(total_h / self.h, total_w / self.w)
            new_w = max(total_w, int(round(self.w * ratio_r)))
            new_h = max(total_h, int(round(self.h * ratio_r)))
            # do resize based on whether the input is PIL or Tensor
            if _is_pil_image(x):
                x = x.resize((new_w, new_h), self.interp)
            elif isinstance(x, torch.Tensor) and x.dim() > 2:
                x = F.interpolate(x.unsqueeze(0), size=(new_h, new_w),
                                  mode='bilinear', align_corners=False).squeeze(0)
            # get new size
            self.w, self.h = _get_image_size(x)
        return x
def _random_crop(self, img):
"""
Args:
img (PIL Image): Image to be cropped.
Returns:
PIL Image: Cropped image.
"""
        if self.padding is not None:
            img = TF.pad(img, self.padding, self.fill, self.padding_mode)
        # pad the width if needed
        if self.pad_if_needed and img.size[0] < self.crop_size[1]:
            img = TF.pad(img, (self.crop_size[1] - img.size[0], 0), self.fill, self.padding_mode)
        # pad the height if needed
        if self.pad_if_needed and img.size[1] < self.crop_size[0]:
            img = TF.pad(img, (0, self.crop_size[0] - img.size[1]), self.fill, self.padding_mode)
i, j, h, w = self.get_params(img, self.crop_size)
x1 = i
y1 = j
x2 = x1 + h
y2 = y1 + w
return TF.crop(img, i, j, h, w), (x1, y1, x2, y2, h, w)
def _resize_coord(self, coord):
""" Scale the coordinates by the amount the crop was resized
"""
        ratio_x = self.resize_size[0] / self.crop_size[0]
        ratio_y = self.resize_size[1] / self.crop_size[1]
x1 = int(coord[0] * ratio_x)
y1 = int(coord[1] * ratio_y)
x2 = int(coord[2] * ratio_x)
y2 = int(coord[3] * ratio_y)
h = int(coord[4] * ratio_x)
w = int(coord[5] * ratio_y)
return (x1, y1, x2, y2, h, w)
class BuildOutput:
def __init__(self, mean, std):
self.mean = mean
self.std = std
    def __call__(self, x):
        crops, coords = x
        data = torch.stack([transforms.Normalize(self.mean, self.std, inplace=True)(
            torch.from_numpy(np.array(crop, np.float32, copy=False).transpose((2, 0, 1))).contiguous()) for crop in crops])
        coords = torch.Tensor(coords)
        return data, coords
# ------------------------------------------------------------------------------------------------------------------
# MultiCropDataset from SWAV that makes multiple crops of various sizes - close, but we want all the same size
# ------------------------------------------------------------------------------------------------------------------
class MultiCropDataset(datasets.ImageFolder):
def __init__(
self,
data_path,
size_crops,
nmb_crops,
min_scale_crops,
max_scale_crops,
size_dataset=-1,
return_index=False,
mean=[0.485, 0.456, 0.406],
std=[0.228, 0.224, 0.225]
):
super().__init__(data_path)
assert len(size_crops) == len(nmb_crops)
assert len(min_scale_crops) == len(nmb_crops)
assert len(max_scale_crops) == len(nmb_crops)
if size_dataset >= 0:
self.samples = self.samples[:size_dataset]
self.return_index = return_index
trans = []
color_transform = transforms.Compose([get_color_distortion(), RandomGaussianBlur()])
for i in range(len(size_crops)):
randomresizedcrop = transforms.RandomResizedCrop(
size_crops[i],
scale=(min_scale_crops[i], max_scale_crops[i]),
)
trans.extend([transforms.Compose([
randomresizedcrop,
transforms.RandomHorizontalFlip(p=0.5),
color_transform,
transforms.ToTensor(),
transforms.Normalize(mean=mean, std=std)])
] * nmb_crops[i])
self.trans = trans
def __getitem__(self, index):
path, _ = self.samples[index]
image = self.loader(path)
multi_crops = list(map(lambda trans: trans(image), self.trans))
if self.return_index:
return index, multi_crops
return multi_crops
class RandomGaussianBlur(object):
def __call__(self, img):
do_it = np.random.rand() > 0.5
if not do_it:
return img
sigma = np.random.rand() * 1.9 + 0.1
return cv2.GaussianBlur(np.asarray(img), (23, 23), sigma)
def get_color_distortion(s=1.0):
# s is the strength of color distortion.
color_jitter = transforms.ColorJitter(0.8*s, 0.8*s, 0.8*s, 0.2*s)
rnd_color_jitter = transforms.RandomApply([color_jitter], p=0.8)
rnd_gray = transforms.RandomGrayscale(p=0.2)
color_distort = transforms.Compose([rnd_color_jitter, rnd_gray])
return color_distort
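def _example_multicrop_dataset():
    """Usage sketch (not in the original code): SwAV-style multi-crop over an
    ImageFolder tree; the path below is a placeholder."""
    dataset = MultiCropDataset('/content/imagefolder/',
                               size_crops=[224, 96],
                               nmb_crops=[2, 6],
                               min_scale_crops=[0.14, 0.05],
                               max_scale_crops=[1.0, 0.14])
    crops = dataset[0]  # list of 8 tensors: two 224px and six 96px crops
    return crops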
|
'''https://leetcode.com/problems/same-tree/'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def isSameTree(self, p: 'TreeNode', q: 'TreeNode') -> bool:
def check(p, q):
if p is None and q is None:
return True
if p is None or q is None:
return False
            return p.val == q.val
        queue = [[p, q]]
        while queue:
            p, q = queue.pop(0)
            if not check(p, q):
                return False
            if p:
                queue.append([p.left, q.left])
                queue.append([p.right, q.right])
return True
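# Quick self-check sketch (not in the original file): two identical two-node
# trees compare equal under the BFS above.
def _test_is_same_tree():
    class TreeNode:
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None
    a, b = TreeNode(1), TreeNode(1)
    a.left, b.left = TreeNode(2), TreeNode(2)
    assert Solution().isSameTree(a, b)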
|
_C='ALTER TABLE '
_B='SELECT {} FROM {}'
_A=','
import mysql.connector
def table(x):global table_name;table_name=str(x) # renamed global so this function is not shadowed by the string
def connect(*A):
if len(A)==3:B=mysql.connector.connect(host=A[0],user=A[1],password=A[2])
else:B=mysql.connector.connect(host=A[0],user=A[1],password=A[2],database=A[3])
return B
def query(x,y):A=x.cursor();A.execute(y);x.commit()
def createDb(x,y):A=x.cursor();A.execute('CREATE DATABASE '+y+'');x.commit()
def select(x,y):
B=[]
for D in y:B.append(D)
E=_A.join(B);C=x[0].cursor();A=_B;A=A.format(E,x[1]);C.execute(A);return C.fetchall()
def selectAll(x):B=x[0].cursor();A=_B;A=A.format('*',x[1]);B.execute(A);return B.fetchall()
def selectWhere(x,y,z):
B=[]
for D in y:B.append(D)
E=_A.join(B);C=x[0].cursor()
if type(z[1])==str:A="SELECT {} FROM {} WHERE {}='{}'"
else:A='SELECT {} FROM {} WHERE {}={}'
A=A.format(E,x[1],z[0],z[1]);C.execute(A);return C.fetchall()
def dropTable(x):A=x[0].cursor();B='DROP TABLE {}';A.execute(B.format(x[1]));x[0].commit()
def dropDb(x):A=x[0].cursor();B='DROP DATABASE {}';A.execute(B.format(x[1]));x[0].commit()
def createTable(db,data):
A=[]
for B in data:A.append(B+' '+data[B])
C=_A.join(A);D='CREATE TABLE '+db[1]+' ({})';E=D.format(C);F=db[0].cursor();F.execute(E);db[0].commit()
def addColumn(db,data):
A=[]
for B in data:A.append(B);A.append(data[B])
C=_C+db[1]+' ADD {} {}';D=C.format(A[0],A[1]);E=db[0].cursor();E.execute(D);db[0].commit()
def modifyColumn(db,data):
A=[]
for B in data:A.append(B);A.append(data[B])
C=_C+db[1]+' MODIFY {} {}';D=C.format(A[0],A[1]);E=db[0].cursor();E.execute(D);db[0].commit()
def dropColumn(db,data):A='ALTER TABLE {} DROP COLUMN {}';A=A.format(db[1],data);B=db[0].cursor();B.execute(A);db[0].commit()
def insert(db,data):
J='"';A=[];B=[];C=[]
for D in data:A.append(D);B.append(str(J+data[D]+J));C.append(str('%s'))
E=_A.join(A);F=_A.join(B);K=_A.join(C);G='INSERT INTO '+db[1]+' ({}) VALUES ({})';H=G.format(E,F);I=db[0].cursor();I.execute(H);db[0].commit()
def updateAll(x,d):
B=x[0].cursor()
if type(d[1])==str:A="UPDATE {} SET {}='{}'"
else:A='UPDATE {} SET {}={}'
B.execute(A.format(x[1],d[0],d[1]));x[0].commit()
def update(x,d,c):
B=x[0].cursor()
if type(d[1])==str:
if type(c[1])==str:A="UPDATE {} SET {}='{}' WHERE {}='{}'"
else:A="UPDATE {} SET {}='{}' WHERE {}={}"
elif type(c[1])==str:A="UPDATE {} SET {}={} WHERE {}='{}'"
else:A='UPDATE {} SET {}={} WHERE {}={}'
B.execute(A.format(x[1],d[0],d[1],c[0],c[1]));x[0].commit()
def delete(x,d):
B=x[0].cursor()
if type(d[1])==str:A="DELETE FROM {} WHERE {}='{}'"
else:A='DELETE FROM {} WHERE {}={}'
B.execute(A.format(x[1],d[0],d[1]));x[0].commit()
def deleteAll(x):A=x[0].cursor();B='DELETE FROM {}';A.execute(B.format(x[1]));x[0].commit()
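# Usage sketch (not in the original module): connection values are placeholders;
# most helpers above take a (connection, table_name) pair.
def _example_usage():
    conn = connect('localhost', 'user', 'secret', 'shop')
    db = (conn, 'customers')
    insert(db, {'name': 'Alice', 'city': 'Oslo'})
    return selectWhere(db, ['name', 'city'], ('city', 'Oslo'))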
|
import time
def isholiday(date):
singeday = ["01-01","01-02","01-03","01-04","01-05","01-06","01-07","01-08","01-11","01-12","01-13","01-14","01-15","01-16","01-17","06-18","07-08","07-09","07-10","07-11","07-12","08-01","08-02","08-03","08-04","08-05","08-06","08-07","08-08","08-09","08-10","10-03","10-04","10-05","10-27","10-28","10-29","11-11","12-12"]
# allday = ["02","03","04","05","06","09"]
# bb = now.split("-")
# if bb[0] in allday:
# return True
# elif now in singeday:
# return True
# else:
# return False
if date in singeday:
return 1
else:
        return 0
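# Usage sketch (not in the original file): dates are "MM-DD" strings, so
# today's date can be checked with the time module imported above.
if __name__ == '__main__':
    print(isholiday(time.strftime('%m-%d')))  # 1 if the date is listed, else 0
|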
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
Created on Fri Feb 17 12:38:44 2017
@author: ahefny, zmarinho
"""
from collections import defaultdict
import numpy as np
import theano
import theano.printing
import theano.tensor as T
import theano.tensor.slinalg
from theano.tensor.nlinalg import matrix_inverse
import rpsp.rpspnets.psr_lite.psr_base as psr_base
import rpsp.rpspnets.psr_lite.rnn_filter as rnn_filter
import rpsp.rpspnets.psr_lite.utils.nn as nn
from rpsp.rpspnets.psr_lite.utils.nn import cg_solve, cg_solve_batch, neumann_inv, neumann_inv_batch, \
batched_matrix_inverse
from rpsp.rpspnets.psr_lite.utils.nn import reshape_mat_f
class RFFPSR_RNN(rnn_filter.BaseRNNFilter):
'''
Theano wrapper of RFFPSR.
'''
def __init__(self, psr, optimizer='sgd', optimizer_step=1.0,
optimizer_iterations=0, val_trajs=0,
optimizer_min_step=1e-5, rng=None, opt_h0=False,
psr_norm='I', psr_cond='kbr', psr_iter=0, psr_smooth='I'):
rnn_filter.BaseRNNFilter.__init__(self, psr.state_dimension, psr.horizon_length,
optimizer, optimizer_step, optimizer_iterations,
optimizer_min_step, val_trajs, rng=rng, opt_h0=opt_h0)
self._psr_iter = psr_iter
self._psr_cond = psr_cond
self._state_norm = psr_norm
smooth_toks = psr_smooth.split('_')
self._state_smooth = smooth_toks[0]
if len(smooth_toks)>1:
self._state_smooth_coeff = float(smooth_toks[1])
self._f_obs = None
self._f_act = None
self._f_fut_act = None
self._reset_psr(psr)
self._obs_dim = 0
solve_dict = defaultdict(lambda: self._tf_solve_inverse, {'kbrcg': self._tf_solve_cg, 'kbrMIA': self._tf_solve_mia, 'I': self._tf_solve_ignore})
solve_dict_batch = defaultdict(lambda: self._tf_solve_inverse_batch, {'kbrcg': self._tf_solve_cg_batch, 'kbrMIA': self._tf_solve_mia_batch, 'I': self._tf_solve_ignore})
self._solve = solve_dict[self._psr_cond]
self._solve_batch = solve_dict_batch[self._psr_cond]
self._norm_method = defaultdict(lambda: self._t_state_noop , {'l2': self._t_state_l2norm,
'l2clamp': self._t_clamp_state_l2norm,
'coord':self._t_clamp_state_coord})[self._state_norm]
self._smooth = defaultdict(lambda: self._t_state_noop, {'interp': self._t_state_interpolate})[self._state_smooth]
self._max_state_norm2 = 100.0
self._max_state_norm = 10.0
self._max_state_coord = 10.0
self._min_state_coord = 1e-6
def _t_rff(self, x, V):
y = T.dot(x, V)
return T.concatenate([T.sin(y), T.cos(y)], axis=y.ndim-1) / T.sqrt(V.shape[1].astype(theano.config.floatX))
def _t_rffpca(self, fext, name):
        '''
        Given an RFFPCA feature extractor return:
            - A handle to an equivalent symbolic function for vectors.
            - A shared variable storing the projection matrix.
            - A shared variable storing the RFF matrix.
        '''
U = theano.shared(name='U_%s' % name, value=fext._U.astype(theano.config.floatX))
V = theano.shared(name='V_%s' % name, value=fext._base_extractor._V.astype(theano.config.floatX))
f = lambda x: T.dot(self._t_rff(x, V), U)
return f, U, V
def set_psr(self, rff_psr):
self._rffpsr = rff_psr
self._fut = self._rffpsr._fut
self._feat_dim = self._rffpsr._feat_dim
self._state_dim = self._rffpsr.state_dimension
self._fext_fut_act = self._rffpsr._fext_fut_act
self._fext_act = self._rffpsr._fext_act
self._fext_obs = self._rffpsr._fext_obs
self._feat_dim = self._rffpsr._feat_dim
return
#overrides
def _load(self, params):
print('load rffpsr rnn')
self._rffpsr._load(params['rffpsr'])
self._reset_psr(self._rffpsr)
return
#overrides
def _save(self):
params={}
params['rffpsr'] = self._rffpsr._save()
return params
def _reset_psr(self, psr):
self.set_psr(psr)
self._f_obs = lambda x: x
self._f_act = lambda x: x
self._f_fut_act = lambda x: x
return
def train(self, traj_obs, traj_act, traj_act_probs=None, on_unused_input='raise'):
self._reset_psr(self._rffpsr)
return rnn_filter.BaseRNNFilter.train(self, traj_obs, traj_act, traj_act_probs=traj_act_probs, on_unused_input=on_unused_input)
def _process_traj(self, traj_obs, traj_act):
if traj_obs.shape[0] <= self._fut + 3:
return None
else:
data = psr_base.extract_timewins([traj_obs], [traj_act], self._fut, 1)[0]
return self._process_obs(data.obs), \
self._process_act(data.act), \
self._process_fut_act(data.fut_act), \
data.fut_obs
def _process_obs(self, obs):
ofeat = self._fext_obs.process(obs)
        assert not np.isnan(ofeat).any(), 'obsfeat contains NaN'
        assert not np.isinf(ofeat).any(), 'obsfeat contains Inf'
return ofeat
def _process_act(self, act):
afeat = self._fext_act.process(act)
        assert not np.isnan(afeat).any(), 'actfeat contains NaN'
        assert not np.isinf(afeat).any(), 'actfeat contains Inf'
return afeat
def _process_fut_act(self, fut_act):
futafeat = self._fext_fut_act.process(fut_act)
        assert not np.isnan(futafeat).any(), 'futafeat contains NaN'
        assert not np.isinf(futafeat).any(), 'futafeat contains Inf'
return futafeat
def _init_params(self, traj_obs, traj_act):
psr = self._rffpsr
self._lambda = psr._lambda
self._feat_dim = psr._feat_dim
self._t_W_s2ex = theano.shared(name='W_s2ex', value=psr._W_s2ex.astype(theano.config.floatX))
self._t_W_s2oo = theano.shared(name='W_s2oo', value=psr._W_s2oo.astype(theano.config.floatX))
self._t_W_h = theano.shared(name='W_h', value=psr._W_h.astype(theano.config.floatX))
self._t_W_1s = theano.shared(name='W_1s', value=psr._W_1s.astype(theano.config.floatX))
K = self._feat_dim
self._t_UU_efa = theano.shared(name='UU_efa', value=psr._U_efa.T.reshape((-1, K.act), order='F').astype(theano.config.floatX))
self._t_UU_efo = theano.shared(name='UU_efo', value=psr._U_efo.reshape((K.obs,-1), order='F').astype(theano.config.floatX))
self._t_U_oo = theano.shared(name='U_oo', value=psr._U_oo.astype(theano.config.floatX))
self._t_UT_st = theano.shared(name='U_st', value=psr._U_st.T.astype(theano.config.floatX))
s0 = psr.initial_state
self._t_state0 = theano.shared(name='state0',value=s0.astype(theano.config.floatX))
self._params_state = [self._t_W_s2ex,self._t_W_s2oo]
self._params_obs = [self._t_W_1s]
self._params_guide = [self._t_W_h]
t_prestates_mat = T.matrix()
t_fa_mat = T.matrix()
self._pred_horizon = theano.function(inputs=[t_prestates_mat,t_fa_mat],
outputs=self.tf_predict_guide(t_prestates_mat,t_fa_mat))
return
def get_projs(self):
projs = self._rffpsr.get_projs()
return projs
def predict_horizon(self, state, fut_act):
fafeat = self._process_fut_act(fut_act.reshape(-1)).reshape(1,-1)
o = self._pred_horizon(state.reshape((1,-1)), fafeat)
        assert not np.isnan(o).any(), 'predicted horizon contains NaN'
        assert not np.isinf(o).any(), 'predicted horizon contains Inf'
return o.reshape((self._fut, -1))
def get_params(self):
return np.hstack([p.get_value().ravel() for p in self.params])
def set_params(self, param_vec, check_before_update=False):
i = 0
if np.isnan(param_vec).any() or np.isinf(param_vec).any():
            print('param vector contains NaN/Inf; RFFPSR policy not updated')
return
if check_before_update:
params_before = np.copy(self.get_params())
for p in self.params:
x = p.get_value(borrow=True)
s = x.shape
n = np.size(x)
p.set_value(param_vec[i:i+n].reshape(s))
i += n
return
    def _tf_solve_inverse(self, A, b, reg):
        ''' regularized least-squares solve of x A = b: x = b A (A'A + reg*I)^-1 '''
A2 = T.dot(A.T, A)
A2reg = A2 + T.eye(A.shape[1]) * reg
vv = T.dot(b, A)
v = T.dot(vv, matrix_inverse(A2reg))
return v
def _tf_solve_ignore(self, A, b, reg):
return b
def _tf_solve_cg(self, A, b, reg):
A2 = T.dot(A.T, A)
vv = T.dot(b, A)
v = cg_solve(A2, vv, iter=self._psr_iter, reg=reg)
return v
def _tf_solve_mia(self, A, b, reg):
A2 = T.dot(A.T, A)
vv = T.dot(b, A)
B = neumann_inv(A2, it=self._psr_iter, reg=reg)
return T.dot(B, vv)
def _tf_solve_batch_invalid(self, AA, B, reg):
raise NotImplementedError
    def _tf_solve_inverse_batch(self, AA, B, reg):
        ''' batched regularized least-squares solve: V[i] = B[i] A[i] (A[i]'A[i] + reg*I)^-1 '''
N,d = B.shape
AA2 = T.batched_dot(AA.transpose(0,2,1), AA)
R = T.repeat(T.reshape(T.eye(d) * reg, (1,d,d)), N, axis=0)
AA2reg = AA2 + R
VV = T.batched_dot(B, AA)
AAi = batched_matrix_inverse(AA2reg)
V = T.batched_dot(VV, AAi)
return V
def _tf_solve_cg_batch(self, AA, B, reg):
A2 = T.batched_dot(AA.transpose(0,2,1), AA)
VV = T.batched_dot(B, AA)
V = cg_solve_batch(A2, VV, iter=self._psr_iter, reg=reg)
return V
def _tf_solve_mia_batch(self, AA, B, reg):
A2 = T.batched_dot(AA.transpose(0,2,1), AA)
V = T.batched_dot(B, AA)
B = neumann_inv_batch(A2, iter=self._psr_iter, reg=reg)
return T.batched_dot(B, V)
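    @staticmethod
    def _np_reference_solve(A, b, reg):
        """Reference sketch (not in the original class): the plain-numpy
        counterpart of the regularized solves above, x = b A (A'A + reg*I)^-1."""
        return b.dot(A).dot(np.linalg.inv(A.T.dot(A) + reg * np.eye(A.shape[1])))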
def tf_update_state(self, t_state, t_obs, t_act):
t_ofeat = self._f_obs(t_obs)
t_afeat = self._f_act(t_act)
K = self._feat_dim
# Obtain extended state
UU_efa = self._t_UU_efa
dot1 = T.dot(t_state, self._t_W_s2ex)
dot1.name='tf_update_state::dot1'
C_ex = T.reshape(dot1,(K.exfut_obs, K.exfut_act))
C_ex.name='tf_update_state::C_ex'
# Condition on action
B = reshape_mat_f(T.dot(UU_efa, t_afeat), (K.exfut_act, K.fut_act))
B.name='tf_update_state::B'
C_efo_fa = T.dot(C_ex, B)
C_efo_fa.name='tf_update_state::C_efo_fa'
# Obtain v = C_oo \ o_feat
C_oo_prj = T.dot(T.reshape(T.dot(t_state,self._t_W_s2oo), (K.oo, K.act)), t_afeat)
C_oo_prj.name = 'tf_update_state::Cooprj'
C_oo = reshape_mat_f(T.dot(self._t_U_oo, C_oo_prj), (K.obs, K.obs))
C_oo.name='tf_update_state::C_oo'
v = self._solve(C_oo,t_ofeat, self._lambda['filter'])
v.name = 'tf_update_state::v'
        # Multiply by v to condition on observation
UU = self._t_UU_efo
A = reshape_mat_f(T.dot(v, UU), (K.fut_obs, K.exfut_obs))
A.name = 'tf_update_state::A'
ss = T.reshape(T.dot(A, C_efo_fa), [-1])
ss.name = 'tf_update_state::ss_Cefodot'
ss = T.dot(self._t_UT_st, ss)
ss.name = 'tf_update_state::Uss_dot'
ss = self._norm_method(ss)
ss = self._smooth(ss, t_state)
self._dbg = lambda : None
self._dbg.out = C_ex, C_oo, B, A, ss
# Adding the sum of parameters fixes a Theano bug.
return ss + sum(T.sum(p)*1e-30 for p in self.params)
def _t_state_noop(self, state, *args):
return state
def _t_state_l2norm(self, state):
ss_norm2 = T.sum(state**2)
state = T.switch(T.lt(ss_norm2 ,self._max_state_norm2),
state*(self._max_state_norm / T.sqrt(ss_norm2)),
state / T.sqrt(ss_norm2))
return state
def _t_clamp_state_l2norm(self, state):
ss_norm2 = T.sum(state**2)
state = T.switch(T.lt(ss_norm2 ,self._max_state_norm2),
state*(self._max_state_norm / T.sqrt(ss_norm2)),
state)
return state
def _t_clamp_state_coord(self, state):
return T.minimum(self._max_state_coord, T.maximum(self._min_state_coord, state))
def _t_state_interpolate(self, state, prev_state):
''' convex interpolation with previous state to ensure smoothness'''
interp = self._state_smooth_coeff
#TODO: implement search direction and normalize
state = (1.0-interp)*state + interp* prev_state
return state
def tf_update_state_batch(self, t_state_mat, t_obs_mat, t_act_mat):
t_ofeat_mat = self._f_obs(t_obs_mat)
t_afeat_mat = self._f_act(t_act_mat)
K = self._feat_dim
N = t_state_mat.shape[0]
# Obtain extended state
UU_efa = self._t_UU_efa
C_ex = T.reshape(T.dot(t_state_mat, self._t_W_s2ex),(N, K.exfut_obs, K.exfut_act))
C_ex.name='tf_update_state::C_ex'
# Condition on action
B = T.reshape(T.dot(t_afeat_mat, UU_efa.T), (N, K.fut_act, K.exfut_act)).transpose(0,2,1)
B.name = 'tf_update_state::B'
#import pdb; pdb.set_trace()
C_efo_fa = T.batched_dot(C_ex, B)
C_efo_fa.name='tf_update_state::C_efo_fa'
# Obtain v = C_oo\o_feat
C_oo_prj = T.batched_dot(T.reshape(T.dot(t_state_mat,self._t_W_s2oo), (N, K.oo, K.act)), t_afeat_mat)
C_oo_prj.name = 'tf_update_state::Cooprj'
C_oo = T.reshape(T.dot(C_oo_prj, self._t_U_oo.T), (N, K.obs, K.obs))
C_oo.name='tf_update_state::C_oo'
v = self._solve_batch(C_oo, t_ofeat_mat, self._lambda['filter'])
v.name = 'tf_update_state::v'
        # Multiply by v to condition on observation
UU = self._t_UU_efo
vproj = T.dot(v, UU)
vproj.name ='tf_update_state::vproj'
A = T.reshape(vproj,(N, K.exfut_obs, K.fut_obs)).transpose(0,2,1)
A.name = 'tf_update_state::A'
ss = T.batched_dot(A, C_efo_fa).reshape([N,-1])
ss.name = 'tf_update_state::ss_Cefodot'
ss = T.dot(ss, self._t_UT_st.T)
ss.name = 'tf_update_state::Uss_dot'
ss = self._norm_method(ss)
ss = self._smooth(ss, t_state_mat)
self._dbg_batch = lambda : None
self._dbg_batch.out = C_ex, C_oo, B, A, ss
# Adding the sum of parameters fixes a Theano bug.
return ss + sum(T.sum(p)*1e-30 for p in self.params)
def tf_predict_obs(self, t_state, t_act):
is_vec = False
if t_state.ndim == 1:
is_vec = True
t_state = t_state.reshape((1,-1))
t_act = t_act.reshape((1,-1))
t_obs = self._tf_predict_obs(t_state, t_act)
        if is_vec:
            t_obs = t_obs.reshape((-1,))
return t_obs
def _tf_predict_obs(self, t_prestates_mat, t_act_mat):
t_afeat_mat = self._f_act(t_act_mat)
t_in = nn.row_kr_product(t_prestates_mat, t_afeat_mat,
name='_tf_predict_obs::t_in')
t_out = T.dot(t_in, self._t_W_1s)
t_out.name = '_tf_predict_obs::t_out'
return t_out
def tf_predict_guide(self, t_prestates_mat, t_fa_mat):
t_fafeat_mat = self._f_fut_act(t_fa_mat)
t_in = nn.row_kr_product(t_prestates_mat, t_fafeat_mat)
t_out = T.dot(t_in, self._t_W_h)
return t_out
class Extended_RFFPSR_RNN(RFFPSR_RNN):
def __init__(self, *args, **kwargs):
obs_dim = kwargs.pop('x_dim')
win = kwargs.pop('win')
super(Extended_RFFPSR_RNN, self).__init__(*args, **kwargs)
self._obs_dim = obs_dim
self._win = win
self._win_dim = self._obs_dim * self._win
def _process_obs(self, obs):
if obs.ndim == 1:
obs = obs.reshape(1, -1)
last_obs = obs[:, -self._obs_dim:]
ofeat = self._fext_obs.process(last_obs)
assert not np.isnan(ofeat).any(), 'obsfeat is not nan'
assert not np.isinf(ofeat).any(), 'obsfeat is not inf'
new_obs = np.concatenate([ofeat.T, obs.T], axis=0).T
return new_obs
def tf_extract_obs(self, obs):
if obs.ndim == 2:
last_obs = obs[:, -self._obs_dim:]
else:
last_obs = obs[-self._obs_dim:]
return last_obs
def _process_traj(self, traj_obs, traj_act):
if traj_obs.shape[0] <= self._fut + 3:
return None
else:
data = psr_base.extract_timewins([traj_obs], [traj_act], self._fut, 1)[0]
return self._fext_obs.process(data.obs), \
self._process_act(data.act), \
self._process_fut_act(data.fut_act), \
data.fut_obs
@property
def state_dimension(self):
return self._state_dim + self._win_dim
@property
def extended_dimension(self):
return self._win_dim
@property
def initial_state(self):
# return np.concatenate([self._t_state0.get_value(), np.zeros(self._win_dim)])
return self.t_initial_state.eval()
@property
def t_initial_state(self):
# return theano.shared(name='initstate0',value=self.initial_state.astype(theano.config.floatX))
return T.concatenate([self._t_state0, T.zeros(self._win_dim)], axis=0)
def tf_update_state(self, t_state, t_ofeat, t_afeat):
t_obswin = t_ofeat[-self._win_dim:]
t_state = super(Extended_RFFPSR_RNN, self).tf_update_state(t_state[:-self._win_dim], t_ofeat[:-self._win_dim],
t_afeat)
es = T.concatenate([t_state, t_obswin], axis=0)
return es + sum(T.sum(p) * 1e-30 for p in (self.params + self._params_proj))
def tf_update_state_batch(self, t_state_mat, t_ofeat_mat, t_afeat_mat):
t_obswin_mat = t_ofeat_mat[:, -self._win_dim:]
t_state_mat = super(Extended_RFFPSR_RNN, self).tf_update_state_batch(t_state_mat[:, :-self._win_dim],
t_ofeat_mat[:, :-self._win_dim],
t_afeat_mat)
es = T.concatenate([t_state_mat, t_obswin_mat], axis=1)
return es
def tf_compute_post_states(self, t_ofeat_mat, t_afeat_mat):
# Use scan function
state_0 = self.t_initial_state
hs, _ = theano.scan(fn=lambda fo, fa, h: self.tf_update_state(h, fo, fa),
outputs_info=state_0,
sequences=[t_ofeat_mat, t_afeat_mat])
return hs
def tf_compute_pre_states(self, t_ofeat_mat, t_afeat_mat):
state_0 = self.t_initial_state # initial_state
hs = self.tf_compute_post_states(t_ofeat_mat[:-1], t_afeat_mat[:-1])
return T.concatenate([T.reshape(state_0, (1, -1)), hs], axis=0)
def _tf_predict_obs(self, t_extprestates_mat, t_act_mat):
t_prestates_mat = t_extprestates_mat[:, :-self._win_dim]
return super(Extended_RFFPSR_RNN, self)._tf_predict_obs(t_prestates_mat, t_act_mat)
def tf_predict_guide(self, t_extprestates_mat, t_fa_mat):
t_prestates_mat = t_extprestates_mat[:, :-self._win_dim]
return super(Extended_RFFPSR_RNN, self).tf_predict_guide(t_prestates_mat, t_fa_mat)
|
from __future__ import print_function, absolute_import, division
import KratosMultiphysics
import KratosMultiphysics.EmpireApplication
import KratosMultiphysics.KratosUnittest as KratosUnittest
import KratosMultiphysics.kratos_utilities as kratos_utils
from compare_two_files_check_process import CompareTwoFilesCheckProcess
import os
import co_simulation_test_case
try:
import scipy
import sympy
scipy_and_sympy_available = True
except ImportError:
scipy_and_sympy_available = False
try:
import numpy
numpy_available = True
except ImportError:
numpy_available = False
def compareResults(reference_file, results_file):
settings_check_process = KratosMultiphysics.Parameters("""
{
"reference_file_name" : "",
"output_file_name" : "",
"comparison_type" : "dat_file",
"remove_output_file" : true,
"tolerance" : 1e-6
}
""")
settings_check_process["reference_file_name"].SetString(reference_file)
settings_check_process["output_file_name"].SetString(results_file)
    # instantiate the comparison process and drive its full lifecycle
check_process = CompareTwoFilesCheckProcess(settings_check_process)
check_process.ExecuteInitialize()
check_process.ExecuteBeforeSolutionLoop()
check_process.ExecuteInitializeSolutionStep()
check_process.ExecuteFinalizeSolutionStep()
check_process.ExecuteBeforeOutputStep()
check_process.ExecuteAfterOutputStep()
check_process.ExecuteFinalize()
class TestKratosSolver(co_simulation_test_case.CoSimulationTestCase):
def test_KratosStructuralMechanicsSolver(self):
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
# self.createTest('test_structural_mesh_motion_2d/rectangle_2D3N_test')
# self.runTest()
kratos_utils.DeleteFileIfExisting("./test_mdpa_files/rectangle_2D3N_test.time")
def test_KratosFluidDynamicsSolver(self):
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
# self.createTest('test_structural_mesh_motion_2d/rectangle_2D3N_test')
# self.runTest()
kratos_utils.DeleteFileIfExisting("./test_mdpa_files/rectangle_2D3N_test.time")
class TestSDoFSolver(co_simulation_test_case.CoSimulationTestCase):
def test_SDoFSolver(self):
if not numpy_available:
self.skipTest("Numpy not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
folder_name = "sdof_solver"
self.createTest("sdof_solver", "cosim_sdof")
self.runTest()
reference_file = os.path.join(folder_name,"results_sdof_ref.dat")
result_file = os.path.join(folder_name,"results_sdof.dat")
compareResults(reference_file, result_file)
class TestSDoFStaticSolver(co_simulation_test_case.CoSimulationTestCase):
def test_SDoFStaticSolver(self):
if not numpy_available:
self.skipTest("Numpy not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
folder_name = "sdof_static_solver"
self.createTest("sdof_static_solver", "cosim_static_sdof")
self.runTestSteady()
reference_file = os.path.join(folder_name,"results_sdof_static_ref.dat")
result_file = os.path.join(folder_name,"results_sdof_static.dat")
compareResults(reference_file, result_file)
class TestMDoFSolver(co_simulation_test_case.CoSimulationTestCase):
def test_MDoFSDoFModel(self):
if not numpy_available:
self.skipTest("Numpy not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
folder_name = "mdof_solver"
self.createTest(folder_name, "cosim_mdof_sdof")
self.runTest()
reference_file = os.path.join(folder_name,"results_mdof_sdof_ref.dat")
result_file = os.path.join(folder_name,"results_mdof_sdof.dat")
compareResults(reference_file, result_file)
def test_MDoFGenericModel(self):
if not numpy_available:
self.skipTest("Numpy not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
folder_name = "mdof_solver"
self.createTest(folder_name, "cosim_mdof_generic")
self.runTest()
reference_file = os.path.join(folder_name,"results_mdof_generic_ref.dat")
result_file = os.path.join(folder_name,"results_mdof_generic.dat")
compareResults(reference_file, result_file)
def test_MDoFCantileverShear2DModel(self):
if not numpy_available:
self.skipTest("Numpy not available")
if not scipy_and_sympy_available:
self.skipTest("Scipy/Sympy not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
folder_name = "mdof_solver"
self.createTest(folder_name, "cosim_mdof_cantilever_shear_2d")
self.runTest()
reference_file = os.path.join(folder_name,"results_mdof_cantilever_shear_2d_ref.dat")
result_file = os.path.join(folder_name,"results_mdof_cantilever_shear_2d.dat")
compareResults(reference_file, result_file)
def test_MDoFBridge2DoFModel(self):
if not numpy_available:
self.skipTest("Numpy not available")
if not scipy_and_sympy_available:
self.skipTest("Scipy/Sympy not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
folder_name = "mdof_solver"
self.createTest(folder_name, "cosim_mdof_bridge_2dof")
self.runTest()
reference_file = os.path.join(folder_name,"results_mdof_bridge_2dof_ref.dat")
result_file = os.path.join(folder_name,"results_mdof_bridge_2dof.dat")
compareResults(reference_file, result_file)
class TestEmpireSolver(co_simulation_test_case.CoSimulationTestCase):
def test_EmpireSolverWrapper(self):
if "EMPIRE_API_LIBSO_ON_MACHINE" not in os.environ:
self.skipTest("EMPIRE is not available")
with co_simulation_test_case.ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
# self.createTest('test_structural_mesh_motion_2d/rectangle_2D3N_test')
# self.runTest()
kratos_utils.DeleteFileIfExisting("./test_mdpa_files/rectangle_2D3N_test.time")
if __name__ == '__main__':
KratosUnittest.main()
|
# -*- coding: utf-8 -*-
#$HeadURL: https://rst2pdf.googlecode.com/svn/trunk/rst2pdf/tests/test_rst2pdf.py $
#$LastChangedDate: 2008-08-29 16:09:08 +0200 (Fri, 29 Aug 2008) $
#$LastChangedRevision: 160 $
from unittest import TestCase
from os.path import join, abspath, dirname, basename
PREFIX = abspath(dirname(__file__))
from rst2pdf.createpdf import RstToPdf
def input_file_path(file):
return join(PREFIX, 'input', file)
class rst2pdfTests(TestCase):
def setUp(self):
self.converter=RstToPdf()
|
import io
import os
import sys
from abc import ABC, abstractmethod
from typing import Optional
from demisto_sdk.commands.common.constants import (DIR_TO_PREFIX,
INTEGRATIONS_DIR,
SCRIPTS_DIR)
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.handlers import YAML_Handler
from demisto_sdk.commands.common.tools import get_yml_paths_in_dir, print_error
UNSUPPORTED_INPUT_ERR_MSG = '''Unsupported input. Please provide either:
1. a directory of an integration or a script.
2. a path of a GenericModule file.'''
class YAMLUnifier(ABC):
"""Interface to YAML objects that need to be unified
Attributes:
package_path (str): The directory path to the files to unify.
dest_path (str, optional): The output dir to write the unified YAML to.
use_force(bool): Forcefully overwrites the preexisting yml if one exists.
yaml(YAML_Handler): Wrapper object to handle YAML files.
yml_path(str): The YAML file path.
        yml_data(dict): The YAML document Python object.
"""
def __init__(
self,
input: str,
output: Optional[str] = None,
force: bool = False,
):
directory_name = ''
# Changing relative path to current abspath fixed problem with default output file name.
input = os.path.abspath(input)
if not os.path.isdir(input):
print_error(UNSUPPORTED_INPUT_ERR_MSG)
sys.exit(1)
for optional_dir_name in DIR_TO_PREFIX:
if optional_dir_name in input:
directory_name = optional_dir_name
if not directory_name:
print_error(UNSUPPORTED_INPUT_ERR_MSG)
self.package_path = input
self.package_path = self.package_path.rstrip(os.sep)
self.use_force = force
self.dest_path = output
yml_paths, self.yml_path = get_yml_paths_in_dir(self.package_path, Errors.no_yml_file(self.package_path))
for path in yml_paths:
# The plugin creates a unified YML file for the package.
# In case this script runs locally and there is a unified YML file in the package we need to ignore it.
# Also,
# we don't take the unified file by default because
# there might be packages that were not created by the plugin.
if 'unified' not in path and os.path.basename(os.path.dirname(path)) not in [SCRIPTS_DIR, INTEGRATIONS_DIR]:
self.yml_path = path
break
self.yaml = YAML_Handler(width=50000) # make sure long lines will not break (relevant for code section)
if self.yml_path:
with io.open(self.yml_path, 'r', encoding='utf8') as yml_file:
self.yml_data = self.yaml.load(yml_file)
else:
self.yml_data = {}
print_error(f'No yml found in path: {self.package_path}')
@abstractmethod
def unify(self):
"""Merges the various components to create an output yml file."""
...
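class ExampleUnifier(YAMLUnifier):
    """Minimal subclass sketch (not in the original module): illustrates the
    intended extension point; the output-path handling is an assumption."""
    def unify(self):
        dest = self.dest_path or f'{self.package_path}_unified.yml'
        if os.path.exists(dest) and not self.use_force:
            print_error(f'{dest} already exists; pass force=True to overwrite.')
            sys.exit(1)
        with io.open(dest, 'w', encoding='utf8') as yml_file:
            self.yaml.dump(self.yml_data, yml_file)
        return dest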
|
# Generated by Django 2.2.10 on 2020-03-08 18:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('team_fundraising', '0020_auto_20200308_1152'),
]
operations = [
migrations.AlterField(
model_name='campaign',
name='campaign_message',
field=models.TextField(),
),
]
|
# Imports
from flask import Blueprint
# SetUp
api = Blueprint('api', __name__)
# Routes
@api.route('/')
def index():
return 'urls index route'
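# Registration sketch (not in the original file): mounts this blueprint on a
# throwaway Flask app under the assumed /api prefix.
def create_example_app():
    from flask import Flask
    app = Flask(__name__)
    app.register_blueprint(api, url_prefix='/api')
    return app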
|
import base64
import csv
import os
from zipfile import ZipFile
import openpyxl
from main import app
from core import action_hub, get_output_file_name, get_temp_file_name, get_temp_dir, send_email
from api_types import ActionDefinition, ActionList, ActionFormField, ActionRequest
slug = 'tabbed_spreadsheet'
icon_data_uri = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABAAAAAQACAYAAAB/HSuDAAAAIGNIUk0AAHolAACAgwAA+f8AAIDpAAB1MAAA6mAAADqYAAAXb5JfxUYAAAAGYktHRAD/AP8A/6C9p5MAAAAJcEhZcwAACxMAAAsTAQCanBgAAAAJdnBBZwAABAAAAAQAAOaUMcYAAIAASURBVHja7P1ZbFxZvu/5/dbeMXOeSZEURYqTRFHUPA85Z1ZWVt06p+rc2+cauPZDt+GHfjkNXMAN2C/9ZLttwA3YaKPtBtxG4957zumqU1lDDpVKpVJDap4lUgMpiRqokaJEinPE3n7YQYlScZQ4RezvB4ikRDEl8r93kLF+a63/kgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIC3ZygBAACAVPPvP530z278t99SIABAygtQAgAA4FdvDPptSTFJxZLKJFVLGpT0laQBqgUASHUEAAAAIO1MNZs/jpH3WqhIUpWkGkkNkhqTv66W1C7piAgAAABpgAAAAACktCkG+0aSlXzYkjLlze6Xyhvgr5Q32K+UlCcpI/kISwqO+3tcqgwASAcEAAAAIGVMM7MfHveI6dWsflXyUStphbxBflBSKPmYqidSXAQAAIA0QQAAAACWpCkG+7akqLwZ/SxJuZKWyxvgV8ub2V8mb7Y/K/nxY6sBaIAMAPAtAgAAALDophjshyTly1uinydvv/5KSeXJR6W8wX+peF0DAMCU+EEJAAAW1BSD/Yi87vtjg/tieYP76uSvCyQVJh8WlQQAYHYIAAAAwJyYZn++Gfc2IG/ZfrGkCnn78lfIm9kvkzfQz5W3fD8mlu0DADAnCAAAAMCsTTPYt+S9xrCTjwK9OlZvubyGfHXyAoCYvKZ8MXnL/QEAwDwhAAAAAFOaZrAfkNd1P5p85Msb7K+S15RvubxZ/ZLkxwXGPQAAwALihy8AAHhpmsF+TN6y/CxJOfKW75fLG+Sv1KsZ/mx5M/9jXfdZwg8AwBJAAAAAgE9NM9jPljdrP9Z8r0TeIH+5vCP2SuXN7GdTSQAAUgMBAAAAPjHJgD8qbzZ/rCFfpbw9+uMH+gXyZv2jVBEAgNRFAAAAQAqbZhZferUE38hbwr9M3uC+Qt5Af6W8QX+uvCAgT96sPsv2AQBIMwQAAACkkCkG/EbevvuApKC8zvrLJNXLG+TXytuvXy5vsB+RN6MfEYN9AAB8gQAAAIAlaorB/ljn/bFBfJG8gf0yeYP9Vcm3xck/H+u6P9aUDwAA+BABAAAAS8A0g/0cebP2ufKO2auWtELe0v2xPful8mb+xy/5BwAAeIkAAACABTbFYD9T3v77Inmz92OD+yp5M/wVyT/LljfYBwAAmDECAAAA5tEkg30jqVCvjtIrlTfIr5U3wC9Ivi0VnfcBAMAcIQAAAOAtTNN93+jVfvuAvMF+lbyl+zV61YG/RN5sfqa8Y/YiVBYAAMwXAgAAAKYxzWB/rOt+QN4xe2XJR7W8Gf16eYP+HL1q2heS17EfAABgwRAAAAAwzjSD/Yi84/ViyUe5vAH+cr3aq18jb1bf1qtj+WjIBwAAFh0BAADAt6YY7NvyBvg5kvLkdd4vlzejP9aBv0zePv0cecv9xzDYBwAASxIBAADAF6Y5Zm+s636hvH35Y/v0SyUtSz5KxM9NAACQwnghAwBIO1MM9qPyZu5XyDtir0zezH61vAAgL/nIFXv0AQBAmiEAAACkjBl23rfk/XzL1avu+yvlzeqvlDfoz5XXdT9TUlivL+EHAABISwQAAIAlaZrBviWvk35IXgf+PL06Yq9arwb9ZfIa90XkDfSDVBYAAPgVAQAAYEmYYsAflDeAz5Q3a18gbwl/vaS65K+Lk4+IvHDAFrP6AAAAryEAAAAsuCkG+xnylufnypvVL5c3m18qb2a/Wl5zvhx5g3w67gMAAMwQAQAAYN5Ms4w/V97Aviz5KNar7vslejWrn00lAQAA3h0BAADgrUwzuB9j5HXez5d31F6lvEF+jaTl8gb4BcnHWEM+AAAAzAMCAADAtKYZ7I/tt7fl7cGvkDe4r5S3fL9W3tL9bHlL/MfeskcfAABgAREAAABeM8Vgf+x4vbHu+5ny9ujXymvGVytv0D92zN74Lv0M9gEAABYZAQAA+NgUg31b3ix9hqSYvOX7yyUtk9d1v0ZeF/4yecv2x1YBGNGYDwAAYEkiAAAAn5hmsF8w7pGvV4P8cnmD/HJ5g/8QlQQAAEhNBAAAkIamGOxn6VX3/WXy9utXy5vdL00+CpIfF6SSAAAA6YMAAABSyAw6748tv7fkzeRXyhvklyV/3SCv836upLzkx2RQWQAAgPRHAAAAS9Q0g/2xrvu2vO/lOZJWymvGt1KvBv7l8gb40eQjQmUBAAD8iQAAAJaAaQb7QXmN9sLyGvKNLd+vkdd5vzb563y96rofkBcOAAAAAJIIAABgwU0z2I/Im83PlrcPv0het/1qebP6lZKq5C3ft+Ut+R/rvg8AAABMigAAAObZJAP+sWP28uTtyS+St1x/hV415SuTt1c/R8zmAwAA4B0RAADAHJliZj8ob5BfKm9QX6pXM/pFyd+XSCoUnfcBAAAwTwgAAGAGplm2b8Y9Ino1wB9rxrdM3sx+obyl/WPL+/keDAAAgAXDi08AeMMUg/03O+/nyZu5XyZvn369vEF/mbxB/lj3/WDy/wUAAAAWDQEAAN+awTF7Eb06Pi9bXqf96uRjRfJRKa87f0Cvuu8DAAAASw4vVAH4xhQD/pC84/Wy5c3qF8lrwlcjb0a/Jvm+guTHjXXdp/M+AAAAUgYBAIC0NMVgP1PeQD5f3p78sWP1SuV14V8uqUJe930G+AAAAEgbBAAAUt4kg30j7/i8cnkD+gp5e/Wr5A368+UFAYXyGvIBAAAAaY0AAMCSNYvO+zF5g/lieTP41fIa8q2QN8DPST4y5S33BwAAAHyHAADAkjDNYD+gV032QvJm9avlzeZXSaqVt1c/W68a943t1QcAAAAgAgAAi2CKwb6RN8Af67yfJe9IvTp5g/xaecv3S+U16wvq1bF8DPYBAACAKRAAAJhXUwz2LXnL8rPlDfTz5c3mV8gb5K+Q132/Qt6s/viu+zTnAwAAAGaJAADAnJlisG/L24tfKqlE3n79Snkz+mXy
9u6XJP88TCUBAACAuUcAAOCtTTDgt+TN5ufJ67hfqVdN+ZZLKko+cuU15AtSRQAAAGBhEAAAeM0MOu9b497myFu2v1yvBvt18mb1s8c9YlQWAAAAWFwEAIDPTbNHPyCvKV9Y3kC+SlKDvCP2quTN8pfJm/Uf+ziO2QMAAACWIAIAwCemmdkPyZulH3uUyTtqb7m8wX6NvGX8+cmPpfM+AAAAkGIIAIA0NM1gPypvj36BvL34hfKa8a2Qt4x/mbzO+4V6NcCn6z4AAACQ4ggAgDQwyYA/oFcN+cqSj0p5S/cr5Q3yS+SFANlikA8AAACkNQIAIIVMMbMfkjeYXyZv6X65vGX7K+QFAIXJR77ovA8AAAD4EgEAsERMMbg3etV135bXaK9E0kp5g/w6vb5sP1NSRvLBcxwAAACAJAYHwKKYpvN+MPkIyZu9L5U3o18taZW8pnxl8gb64eQjKJbwAwAAAJgCAQAwz6aZ2c8c98iSt2S/Vt4e/RXJR7m8xn1jKwBsqgoAAABgtggAgDk0xWA/LG+QnytvmX6JvCP2apJvl8ub1c+Vt3Sf2XwAAAAAc4oAAHhLUwz2syQVSSpOvq2QN6tfknyUyduzn0cVAQAAACwUAgDgDVMM7Mcba8qXrVcz+FXylutXyRv050rKkTfQz6KyAAAAABYTAQB8bZrB/tiee0veEv5CeQ35quTN6I89CuUt28+Ut1c/RGUBAAAALDUEAPCNaQb7Qb3qqD92zF6dvCZ8VXo16C+QN8Af69JPQz4AAAAAKYEAAGlpms77Yw35suQt4S+RN7ivk9eUr0Le/v2C5HPE0qsl/wAAAACQkggAkPKmGeznyhvI50nKlzeTv1xeE77l8o7bK5e3hB8AAAAA0hYBAFLKFIN9W95e/Irko0TeIL86+bZIXhBQKClCJQEAAAD4DQEAloxp9uib5Nuxzvv58gb2NfJm9VfKm8kvkjfbnyMpxj0OAAAAAB4GR1hwMxjo28l705a3V79KXjO+ar0a7FfKW7afIW+gH6WyAAAAADA5AgDMu2mW7QflDd4j8vbrV0qql9Qgb3a/NPnISt6vYw9DZQEAAABg5ggAMKemGOyPdd7PlLeEv1TeYH+ZvBn9lfJm+PPlHa831nWfgT4AAAAAzAECALy1KQb7UXnN9orkHadXJG8J/wpJZfL26pcmP4aj9QAAAABgARAAYEYmGewH5c3mF8gb2C+XN8hfLm+QXyxvoJ8jb68+s/kAAAAAsEgIAHxuBg35xh5Bvb5sf7leX7afo1fd94NUFgAAAACWFgIAH5lisD+2336swV5E3ux9rbxBfr1eDfyLkn8+1riPewgAAAAAUgCDtzQ1zWA/knyE5c3YV8pbwl8tqU5eB/4KeQ37AvJm9G2xhB8AAAAAUhYBQBqYZhl/TvKRK2+//nJ5g/wKefv0K5Pvy0h+/Phl/wAAAACANEEAkGKmOWYvW15n/bHme1XyZvXHBvvF8vbpx6gkAAAAAPgLAcASNsVgP1PefvzS5KNc3qz+Mnkd+YuSj3yqCAAAAACQCAAWxQw771vJR4a8Jfq1kmrkzeZXJN+XIykr+TFZVBYAAAAAMBkCgHk2zWDf1uud9wvlzeivkDfgr5U3s18kb9n+WOM+jtkDAAAAAMwKAcAcmmawH5Q3Ux9LPgrkHbFXK282f7m8/frFetV1PyBvFQAAAAAAAO+EAOAdTDHgj8pbkp8vr+ne+Fn9anl79ouSfxbSq477dN4HAAAAAMwLAoAZmGKgb+QN8gvlDeiL9Wo2v1TeXv1lkspE530AAAAAwCIiAJjAJAP+gLyB/nJ5x+tVyhvkVyffjs3258nbqw8AAAAAwJLhqwBgmpn88Z33s+TN6JfoVSO+GnmD/jxJ2cmPifithgAAAACA1JSWg9cZHLMXlLf3PiRvv/4KeTP54x9V8pr2heUN9Mfv1QcAAAAAIKWkXQDwxuDfljeAz5A3Y58rb1/++CP2yuTN9mfLm/23k28Z7AMAAAAA0kY6rgAw8vbkb5K3bL9E3tL9Gnkz/QViNh8AAAAA4DPpGADYkt6X9H+TFwQAAAAAAOB76drALltex34AAIB3Nk1/IQBAGrnx336btl9bugYAjiSXWxcAALyjsVOCbEoBAGnPleTW/PtP3XQNATjCDgAAYHIFkn4l6ZnoHwQA6cyW9FzSGUn3JSXS8YskAAAAAJhcrby+QqwsBID0Zku6Kun/KOk7SYPp+EUSAAAAAEzOkhSlDADgC1FJwXT+Ai2uMQAAAAAAcpKPtEUAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+ECAEgAAAEzMGKOAZcvIyKUcAPAaI8mVq4STkOPyXTIVEAAAAABMoqGsWv/1z/9z5WXkSLy4BYDXGaOBoUH9X77+H3XyxiXZtk1NljgCAAAAgElkBCNaVVajgsxcigEAExgYGVJ2NFOOXDH8X/roAQAAADAFl5l/AJhU3Eko4TgUIkUQAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAA
AAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMBSgAAADCxuJPQ88E+2bbt70K4royxlBmJKWClVi2GRoc1MDIkYww3NOb4eeG9iYUjCgdC1AMpgQAAAABgEtce3NL/7n/6bxSw/P2SaSg+rJKcQv0ffvG/1ZryupT5vONOQn8494P+H/v+ozJCUW5ozKnRRFzGSP/wyf9an7fspiBICQQAAAAAkxgcHdaNx3d9X4eE4yhoBzQwMpRSn7fruuobGtCdp/dlGXa+Yu7vr3AwpGeDvRQDKYPvhAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMBSgAAWIqMjGTG//6NX5nX32ckuXKVcByKBwAAMAECAADAlNxx/x17M/HHTPw7jzdAN8Z7K2NkGSPbsmVblmzLVsC2ZZvkW8tW0LIVCUUUCYQUCgS9t8GQIsGwIsGQwoGQosGwwsGQwsGQMsMZuvXkrv6Xk99pcHSIC4c5YRlLoUBwfBblS3HHUSgQlGVSb/GobdmKBMOyU/Bzx9LmuK7CwaAClk0xkDIIAAAgBb0clLuT/dkbv3In/L8ljc2ij5tLN96gxyQH6a9+bXkPa9yvjZFlWRN8nHn5sUZGwUBAITukcCDoDdgDyV+HwgrbQYUCIYUDAQXtoIKBoEJ2UMFAQLFQROFASEE7oHAgqFDA+9hQIKSQHVA4mAwHgt77vzyzX+5kKQXwFpblFevvNn+qzHDU13WIJ+LKCMdUmVeaUp+3bVnaULVK//DJv1MoEOSGxpxKOI6McbVueSPFQMogAACARea4rlzXW7b+cujqvvqdK2/+fGxAboxRwLLeeN+rQbc3g27JMuNm15NvX5txt2zZtq2gFVDADiiYnHkP2UFFgslBdjCYHKAHkwNwb8AdtIPJwbc3aA/ZQYUCAe/PrLFfe39uGeuv/s1A8nMZe2vMmzHEJMzrv3n1W1c3H9/T1xcPa2h0mJsKc6YkK1//ZvOnKszKoxjSy+dqqrCMpeaKejWX13HxwPMCEAEAACy6kuwCLcstlDGWgnbg5SA9YAdeDpDHZsAjwbCCgYA3QLe9pe8h2xvA28b7f0J2UAHbTg7qvYG99zaYfBtQMBBU0LIVDAQUSL4/kPx3AlZAAdt6OaOfCoZGR/W70/t0/vZVbijMOZMM3pCi18+7iBQCAEQAAACLKhqK6L/Y82vtqFsnMzZ7P27m3h43cx+Y4H0WL2olSW1dHfrzhYNyXBoAAgAATIYAAAAWieu6aiyr1mfNu1WeX0xB3tLQ6LD+fOFH3et5RDEAAACmQAAALHGvt2vTDLqwT9UA7dU+a68bu5ExkjGWxPFpC862bO2sW6/8zByK8Q5O3ryk7y8f02h8lGXaAAAAUyAAAN7BRJ3YJxqwT3ZE2vj3j2/oJulls7ex94/9enzXdWMs2cZK/t5KHqtmybLsl+8PWK83XAvYttdlPfCquVssFNFwfERHrp/V4xc9r7VWw/xwXEcrCsu1q26DoqEwBXlLzwf79fsz+3W35yGDfwAAgGkQAMBXXHf88Dz539fbro9rvj7ZYWKujMafXx54Oeh+rdO6ebVf27Ksl/u63zz7fGyQ7nVeD7881iw8rst6wPY6qY81aXvZxM32uq6H7LHj0YIKJ7u0j7192aV97Ii1ZIM4a1y4YBtLAyND+m/+8P/S7898r4ST4GaZZ0ZGW1c2q7ZkOcV4h+fzT9fP6HjHBcUdh34IAAAA0yAAgG8YY5QRjikaDMmyrNe7or88yizwcoAdsAPJ49FeHV326pi1gHeOeTCoSCCsSCiksB3yzjRPHpE2NtsesLyu7i//Hst6+f6xt8Fkx/bxvx77txdqVjMzEtPHTdv1w5UT6u7rYTZ1HjmOq5KcfO2u36S8WDYFeUsPe7v1p/M/quvZIwb/AAAAM0AAAF9w5SpoBfRx03Z9sW7vy4H22NFndvLty4H5+K7r9qvl9CYZAIwFAWO/ty0rLeq0paZZzRV1OnT1lBzX5caZt/vR0eplK7W2op5ivKV4Iq7D18/qWMcFuRKbVgAAAGaAAAC+YGQ0HB9V3+ALrSqtVmluEUWZQHYkQ1+07NWJGxc1ODJEQeaB67rKiWZqZ916leYUUpC39Kjvqb6+cFA9/c+Z/QcAAJih9Ji2BGbASDrVeVmHrp/lrPDJamSMdtVvUENpFcWYJ67rqjyvVFtqmtNm5chCizsJHbhyUkc7zrNVBQAAYBZ49QnfMMboaX+vvrlwSF3PHlOQSeTGsvTpml2yLZtizINwMKStNc1aWUzzv7d1/9kj/e70Pg2PjlAMAACAWSAAgM9ueKMTNy/p5I2LiifodD+RkB3UnoaNqi5cNuk5CHg7ruuqKCtPH67eplgoQkHeQtyJ6w9nf9DFu9coBgAAwCwRAMBXjDF6MdyvL8/+oJ7+Xgoyicr8Mu1p2CzHIQCYS7Zlq6WyUS3LGynGW2p/eEd/PHeAAA8AAOAtEADAfze9sXT6VquOtJ/lvPtJxMIR7W3cpIr8ErmcBjBnoqGIPlmzQ5nhKMV4C/FEXL899RfdetJFMQAAAN4CAQB8aWBkUP904hs9ZRXAhIyMVpXVaHN1kxI0TJyzmq5eVqPttetoXPeWTt68rP1txzUcZ+8/AADA2yAAgG9dvHddP7SdYIZ7EvkZOdpRu14FGTnUaA7Ylq29DZuUG82iGG/hxdCA/nzhoO48fSjL8KMLAADgbfAqCr41ODKkP184qO7+5xRjAsYYbaharfrSFRyb+I5cuaosKNX22n
UKBAIUZLb1c10dv3lRh6+d1mhilIIAAAC8JQIA+JbjOLp495oOtJ2gGJNYUViunXXrlRGOcR7AO9pdv1EriyvF4v/Z6+5/rq/PH9Tdpw/ZPgEAAPAOCADgW8YYPR/o0zeXjuhJXw8FmYBtWdpdv1HleUVyWQXwVlzXVUl2gfY0bFJmOEZBZslxHZ28cVEHr55SwnVkiFAAAADeGgEAfM1xHZ2/c0WHr5+hGJNoLKvRjroNCgdCrAJ4y3tsQ1WTVi9byez1W3jS16Mvz/6gxy+eyaJ+AAAA74QAAP5+AhhLj/t6tK/1GKsAJhEJhvTR6m3KjWVJNAOcFdd1lRvL1nuNm1SUlUtBZinuJHTw6mkdunaawT8AAMAcIACA7xkZnbhxUac7W+l2P4m1lY1av3yVLItvGbPhuI7qS1do3fJGOte/hSe9PfrHE99oaHSYYgAAAMwBXpGCJ4ExetjbrW8vHtbTAU4EmEhmOKLPmncpGopQjBlyJUVDEW2qblJV4TIKMkuO6+irCwd1+d51igEAADBHCAAASbaxdPDaaZ2/fU0OqwD+imUsbV25VqvLaijGDLmOo9KcQu2u36CQHaQgs9TZfV+/P7tfw3GO/QMAAJgrBACAvBMBul8805/OH1DfUD8FmUB+Rq4+atquoM059jMRsANat7xRayvqKcYsjSRG9dtT3+n6w06KAQAAMIcIAICxJ4OxdODKCZ3tbKMXwASCgYB21m1QbXEV9ZmG67rKjmbog1VblcHRf7PW1nVD+1uPaWh0hGIAAADMIQIAYJzngy/021N/0cDIIMV4g5FUVVCmXfXrOc5uGpax1FBWo5116ynGLL0YGtC/nN6n9kd36PwPAAAwxwgAgPFc6af2czp58xK1mEBGOKo9DZtUkVdCr4QphIJBfdK0XTnRLIoxu6efznS26cerpzTC3n8AAIA5RwAAvKFvaEBfnv1BL4YGKMYEGkurtal6jUQAMKnKvFLtadgkm2MTZ6V3sE9fXzykW0/uUTsAAIB5wCss4A2jibiOdVzQiRsXKcYE8jJztLNuvQqz8lgFMIkPV29TeW4xhZgFx3V06marDl49JW/DCQAAAOYaAQDw5pPCGHW/6NE3l46od/AFBXmDkbRxxWo1lK5gFcAbXNdVUVa+3l+1RaEAR//NRk9/n/5wdr/uP3vM3n8AAIB5QgAATGA0EdeJGxd0prONYkygPK9Euxs2KiuawYkA47iuqy01zaopqqBR4izEnYQOXjulQ9dOUzcAAIB5RAAATPTEMJbuPH2gfZePqm+wn4K8IWDZeq9xiyrySuWKAECSXLnKjMT04aqtysvIpiCz8KSvR7879Z16ea4BAADMKwIAYBIJx9FP7ed06d51ijGBlcUV2lHbolAgRDEkOY6j5sp6NVfWyzJ8a51x3VxXf7n0k852thEmAQAAzDNepQKTsC1Lnd339H3rMU4EmEDACujjNTtUkJHj+4GbK1eRUFi76tarMr+Um2MW7j97rD+eO6DB0WGKAQAAMM8IAIApuK707aWf1PbgJsWYwKqyGm1Ysdr3M96O46i6sFw7atcraAe4MWb6/JKrP577QW33b9BLAgAAYAEQAABTMMao69kj/fHsAb0YZhXAm7IiGfpFy3vKiWb6tgauXAXtoDauaPJORsCMXbrXrq8uHNLA8CDN/wAAABYAAQAwA99cPKQr92+yR/kNxhhtWLFazRV1vq2B67oqzinQ+41bFA1FuClmaGhkWH86e0BX79/0zpYEAADAvCMAAGagZ6BXvz/9vYZHRyjGG3Jj2fpg1TbfbgOwjaXG0hVaX7WKm2GGXEln71zR/rbjGomPypAAAAAALAgCAGAGHMfR/rbjutJFL4A3BW1bm6vXqK6kSo7r+O7rj4Wj2tu4RfkZOdwMM9Q7+ELfXDikm0/uyrL4MQQAALBQeOUFzNDT/uf68twPGo6zCuBNVYXL9P6qLT5cBWC0PL9Mexs2cRPMkOM6One7Tfvbjstx2FIDAACwkAgAgBkaiY/q0LVTunjnGsV4QywU0baVLSrPK5bj+GcVgG1Z2tu4WeW5xdwEM9Q72K8vz/ygu08f0vgPAABggXFeFTBDxhjdefpAX108pOaKeoWDIYoyTnNFnTZXN+t29wPffM3FWXn6qGmbbJaxz0jCcXSs47x+vHqKpf9InfvWdTU4OqzB0WHvbFifcuX9HAwHgim32iueiGskQb8RzJ9gIKiAZVMIpAQCAGAWhkdHdazjglrvd2j9cpq+jZcby9Lu+g368eopdb/o8cV2gF31G1VbvJyZ7BnqHXqhfzzxjZ4N9FIMpIz2R7f1D//h/6xgwN8vmUYTcZXmFOq//PDv1VhWkzKfd9xJ6KuLh/T/PfR7gnvMuUQiIcuy9F/s+Y0+bNpGQZASCACAWbAtS+0Pb2t/63GtXrZS4QAvJsbbXtuiNeUrdeDqqbT/WrOjmXq/cYsywjEu/Aw4rqN9l37SmVutFAMp5cVQv850ct/GnYTqS6vUO9SfUp+367p61PtUp29dls0MLeaY4zoKB0N60NdNMZAyCACAWRpJjOrHq6f0yZodaq6opyDjFGcXalf9Rp3pbNOL4YG0XW7puI42rVittZUNspj9n5GuZ4/1u9Pfq39kkGIg5bDKR7KMlbIru4wxsiyL64i5f17Ie15wZyG17lsAs3vSGKO2rhv6oe2EhkY5EWA8I2lPw0ZV5BWn7VZZV97Rf3saNqkwM5eLPgMJx9E3Fw/pcleHr5pEAgAALDUEAMAsGRnFnYS+uXhYt57cpSBvWFlUqW0r1ylkp+cCI8dxtLK4UhtXNPl+T/BMXX94S1+dP6S+oRfMwAEAACwiAgDgbZ44xqj90W0daDuh4TirAMazLVu/WP+eCtJwdtyVFLBsbV6xRisKyrnYMzASj+vrC4fU2tXB4B8AAGCREQAAbynhOPqXs/t19+lDivGGxrJqba9bl3YDPsdxVJ5fot31GxULR7jQM9DW1aHvW49rcHSEI7gAAAAWGQEA8A5uPenSH879oFEnQTHGiQTC+uW69xQLpdcg2baM1pTXqrmijos8Ay+GBvSHcz/ocleHAhY/bgAAABYbr8iAdxBPxPXtxSO6/aSLYoxjjNGa8jptrl4jJ026Abquq9xYjvY2bE7L7Q3zUa/LXe36y6WfKAYAAMASQQAAvANXrm5339fvz+xXnFUAr8mOZuqzNbsUDYbT4usxxqi+tErba1u4uDPQO9SvL8/8oK5njzgqEQAAYIkgAADegZHR0OiwfrhyQtcedFKQcYJ2QJuq16ipfGVaHP0WDoa0q26DKvJKuLjTcFxXZ2616vvWoxQDAABgCSEAAN6VkW48vqt9l39SglUAr1mWV6y9jZsVsANK9Y0AJdkF2tu4mU72M9D94pn++eS36n7xnGIAAAAsIQQAwDsyMhoYHtSha2fU8egOBRknHAhqa81aVRctS+lVAJZlaXfdBtUWV3JRp+HK1eHrZ3TsxgW5KR/7AAAApBcCAGAunkjGqO3+Df149RS9AN7QULpCG1Y0yUrhLvCZ4
ajeX71VoUCICzqNh8+79Ycz+9U7+IJiAAAALDEEAMAcMMboxdCAvrv0k248uktBxsmOZuq9xs0qyS6Q46beKgBXrjavWKOm8lqa2U0j4Tj69tIRnbtzNS36PgAAAKQbAgBgrp5MxujC3Ws61nFeo4k4BRlnY1WTVpVVyyj1BtCxUFQfrN6q/Fg2F3Iand1d+urCIT0b6KVXAgAAwBJEAADMEWOMhuMj+vP5H9X17BEFGacoK097GzYrK5Ih102dfeGO46ixrFqbqpsVsANcyCkMx0f0l0s/qa2rg8E/AADAEkUAAMzlE8pYunD3mg5fP8sqgDfsbdyk6qLylBocBuyAdtWtV3luMRdwGtcfdurrC4fUN9Sfkis9AAAA/IAAAJhjI/FR/e7UX/S0/xnFGKciv0wfNe1QKBBMic834TiqKlymHbXrFQ2FuYBTGBwZ1neXjupyV7ssw48VAACApYpXasA8aOu6qT+fP5RSy93nm21Z2lO/UcVZ+Snx+VpG2lDVqIayFVy8KbiSrj24qa8vHlaCxn8AAABLGgEAMA+G4yP60/kDetzXQzGSjKS60irtrt+45LcBOK6joqx87ahdr5xoFhdvCv3DA/qXs9+r49FtTkkAAABY4ggAgHnguq7aH3bqqwsHKcY44UBQH6/ZoZxo5hL/TI2qiyu0dnkDF22a+/zsrTZ9e/EIxQAAAEgBBADAPDDG6MXQoL6+cEi3u7soSJJlLDVX1Gln7Xo5S3R7hOu6yghFtLV6rSpyS7hoU+gbHtA/nfpWT3pZ6QIAAJAKCACAeeJKuvrgln68eopeAOPkxrL0UdN2ZUaicrX06uJKKsjM1Y7adSnTsHCx/HT9rI61n1diCV5HAAAA/DUCAGC+nlzG6Plgn/5y6ag6WQUwri6W1i1fpY1VTXKdpTdwDFi21lY2aE1FHRdrCt0vnul3p/epp7+XQ/8AAABSBAEAMI+MjC7cuaqj7efokD7Ostwi7a7fqGgosuTmjqOhsD5cvVWxUIQLNQnHcfTdpZ90prNVCZf7GgAAIFUQAADzyBijZ4N92td6TA+eP6YgSQHb1qbqNVpZXCnHSSyd6yWj6qIKbahqWvInFSymzqf39dWFQ3ra/5zO/wAAACmEAACYZ7Zl6+TNyzp+44LiS2iwu9jqS6q0fYntszfG6P3GzSrLKeACTWI0Map9l4/q7O02sfUfAAAgtRAAAPPMSOodfKGvLhxSd98zCpIUDYW1o26dSnMK5SyB7RGupLLcIm2vXaeAHeACTeLm43v65uJh9Q71s0oCAAAgxRAAAAvAtiwd77ig4zcuLInB7lKxtqJeLZUNsm178T8Z19Xm6jWqL6liYDuJwdFh7W87rtZ77QpYS+CaAQAAYFYIAIAFMjAypN+e2qfnQy8oRlJeRrbea9yinGjWoh+VmBGOaXtti7KjWVyYSdx4dEd/PHdAw/FRigEAAJCCCACABXT2dqsOXTuz6IPdpcLIaHtti1YWVy5qM7mEk1B9aZXWL2+UbfFtcSIDI0P68ux+XX/YyQoJAACAFMUrXWABDYwM6Xenv9PzgT6KkVSaXaj3V21WcBGbAdqWrW01a7Ust5gLMgFXUuu9dn178QjHWQIAAKQwAgBgASUcRxfuXNXBa6cpRpJlWfqgcavKcooW5d93HEflecXaUtOsaCjCBZlA//Cg/tOJb9T1jKMsAQAAUhkBALCQTzhj9Hzghb48u1+P+55SkKTlBcv0XuOmxdkGYKRVZStVX7aCCzGJY+3ndPjaaSVcjrEEAABIZQQAwAJzXEfnbl/Rwaun5XKQuiQpEgzp46YdKs4uXNCaOK6rnGiWdtatU3FWPhdiAs8G+vTb0/v0pO+ZjNj7DwAAkMoIAICFftIZS90vnum71qN60tdDQSQZY1RfukKbq9csbINE11VFfok2rFgtm2PtJiiPqwNXTurMrVYlXPb+AwAApDoCAGARGGN09larTt68xIkASbmxLL3XuFnZ0cwF+zdDgZA2rVij+pIVXIAJ3H/+WH86f0CP+p4u6ikNAAAAmBsEAMBiPPGMpQe93fr24hE97X9GQeR14l9f1ag15bWKO/O/19x1XeXGsrSjdp1Ci3gCwVKVcBLa33ZCJ25cZOE/AABAmiAAABaJbVk62nFepzvb5LC8WpJUllOknXUbFA2GpXleGWEZSyuLK7WhahWFn8Ct7vv6+vxBPR/sk2H2HwAAIC0QAACLxMjoSV+P/nTugJ4P9FEQSaFAUFtrmlVdVKHEPAcAoUBQexo2KS8jh8K/YWh0WN9d/kmnO9vojQAAAJBGCACARWSM0ZHrZ3T+zjV6ASTVl67QxqrVCgWC83oeQGFWrrauXCvb4tvgmzq77+vL099rOD5M538AAIA0witfYJH1Dvbry7Pf68XwIMWQlBXJ0I66dSrIzJ23UMQYo521G1RXUkXB3zA4Oqw/njug9sd3ZBl+RAAAAKQTXt0Bi8xxXf10/ZzO326jGEkbqlartrhS1jxNPmeEotpZt07RUJhiv+H6g059ff6gHIe+FAAAAOmGAABYAnoGevX7M/vVPzxAMSSV5hRqb+NmxULROf+7HddVXWmVWpY3ymaG+zX9Q4P63envdLfnIcUAAABIQ7z6BZaAeCKhox3nderWZYqRtKtug8pyi+TOcScAyxhtX9miwsxcivyGEzcv6vu24xpNxCkGAABAGiIAAJYAY4we9T7Vn87/qBdDrAKQpBWFy7SjtkVBKzBnf6fruirLLdKO2nWKhiIUeZxnA3363el9uv/sMcf+AQAApCkCAGCJiDtxnbrZqnN3rlAMSeFASB+u3q78OTymz3FdratapZUlyynwOK7r6vC10zp165IS7P0HAABIWwQAwFJ5MhpLnd339O3FIxoYGfJ9PYwxWlvRoHXLV83J3+fKVVYkpk1Vq+c0VEgHj/ue6o/nDujBsyeymP0HAABIWwQAwBLiuq5+aj+n87dZBSBJGeGI9jZuUjgQeue/y3EcLS8o07rlqxSwbIqbFHcSOnz9rM50trH0HwAAIM0RAABL6QlpLN14fEffXDys3sF+39cjYAe0pXqNGsuq5bjOO/9dayrqVFNUzo02zt2nD/T7M/v15MUzAgAAAIA0RwAALDlGP7Sd0PWHtyiFpLK8Yu1u2Cj7HWbtHddRXka2ttWsVXY0k6ImjSRGdfjaGZ2+dYnBPwAAgA8QAABL7UlpjLqePdY3Fw7rxfCg7+sRDYa1o3adlheUyXnrBnVGVQXl2rCiiRtsnPvPHutfznyvgZEhMfwHAABIfwQAwBLkytWfLx5U+8NOiiGppqhCzeV1ct9ylBoOhLR+eaMq8kooZtJIfFR/Pn9Ql++1yzL8KAAAAPADXvUBS9STvh59efYHDcWHfV+LgsxcbattUX4sR67rzur/dV1XObEMbVu5VrbFt7wx7Y9u67en/qLRRJxiAAAA+ASvhoElKuE42t92XFfv3/J9LSxjaUPValUXlcuZZQBgWZbqS6rVVF7LTZU0ODqkfzzxje4+fUQxAAAAfIQAAFjCHjx/ot+e
+k7D8RHf16K6sFybqpsUCc7uSMCAbWtX3XoVZ+VxQyWdudWq/a3HNJoYpRgAAAA+QgAALGEj8VEdvn5GF+9c830tQoGgdtSuV1FW3qyaARZm5mlz9RoZlv9Lkp4N9Ol3p/bpwfNuOv8DAAD4DK+IgSXMGKN7Tx/qzxcOaniUVQDNFXVaVbZyxkcCGmO0fnmjVhYvl6HPvSTpbGebTt66pLjD3n8AAAC/IQAAljAjaTg+ouMdF9Xa1e77euRlZGtn/XplhKOaSSeAoB3QrvqNyoxEuZkkdfc/05/O/6h7PY/o/A8AAOBDvAIElvqT1LJ04/Ed/eXSUQ2MDPm6FkZGO2rXqTK/ZNr5fFeulheUaUPVqhmvGEhnCcfRT9fP6fC103Jcx/f1AAAA8CMCAGCJMzIaGh3RgasndeX+Td/XY0VhuXbVb5j2SD/XdbWlplkl2QXcRJIe9j7W7898r4e9T5n9BwAA8CleBQIpwLYstT/s1I9XTmjQ56sAArat91dtVXY0a9KPceUqN5atzSvWKDMS8/39M5qI6+DVMzpx49K0wQkAAADSV4ASAKkhnkjou9Zj+rR5l1YvW+nbOhgZ1Zes0Maq1drXenTCj3EcRw0lK7R62UpmuyXd7r6vfzn9vfqHB+j8D8ySZSyFA0HJ58+d0XhclrFkp+D3VMsYBa2AQrM8RhaYTiKRUMCyFbAYUiF1cLcCKcKyLHU8vK0frpxUTXGlIgH/vpDJimRob8Mm/Xj1pEYTf93NPmAHtKayTqU5Rb6/b0YTo/q+7Zgu3rvG4B94C7Uly/VfffrvlBvLluu6vq2DI1eRQEgriytT6vMOWLY+W7NLjaXV9IPBnHPlykhaUVRBMZAyCACAFBJ3EvryzPf6ePU21Zeu8G0dAratDStWq66kSpfutcsaN7B1XFfFWXnaWLVaGZGI7++Zzu77+uPZHzhGEnhLWeGYNlStVkFmLsVIQcYYleUWqSyXQBgAJHoAACmns/u+/nDugOJOwtd1KMst0vqqVbLemNR2XUcrCpapvrRKRn5fsjuq35/Zr45Hd3niAO/AzzP/AID0QgAApJh4Iq59l3/SvZ6Hvq5DdiRDO2rXqSAzT864F+fhQEhrK+tVmV/m+3vl0r12fXvxiIbizP4DAACAAABISbe77+v3Z/b7elbKGKOm8lqtKquR43jn2juOo/zMHG2uXqNQIOjre+TF8IC+PPuD7jy97/N1EAAAABhDAACkoKHREe1rPaobPl/aXZZbpM3VaxQJheW6rowxWl5QplU+PiVhzOmbl3Xg6gkNx0d5wgAAAEASAQCQmozU8fCOvrp46LXl734TsGxtrG5SZX6JHNdRJBjW5hVrVJpT6Ovb48mLZ/rj+QO6033/tQaJAAAA8DcCACAFGRkNjAzpwJUTan/Y6etarFpWozUVdbItW1nRDG2uaVbQ9u8BJ67r6sytyzp87Ywc+pYBAABgHAIAIFWfvMao7f4NHbhyUvFE3Ld1yI1maVtNi3JjWaoprFBTub+X/z950aM/nvtRD3q7mf0HAADAawgAgBRljFH/8KD2tR7TzSf3fF2LdcsbVFNcqU3VTcqNZfu2DgknoaMd53Xo2ilZtP4DAADAG/y7ThZIA7Zl69Ldazpy/ayqCpb5tvN9dVGF/ssP/14VeaWyLf/mmt0vnuufT3yr3qF+GWb/AQAA8AZWAAApzEgaHBnWt5eOqOvZY9/WIRwIaW/DZq0srpTx6cx33EnoL5d/0tnOK76tAQAAAKZGAACk+pPYsnSus037245p1Me9APzu7tMH+k/HvtLg6BDFAAAAwIQIAIA0MJKI65uLR/Skr4di+FDcSegPZ3/QjSd3KQYAAAAmRQAApInLd6/rL5eOyHEdiuEzl+5e15/P/aihkWGKAQAAgEkRAABpYjg+oj+e/1GPerspho8MjgzpD+d+UOfT+xQDAAAAUyIAANKEI+nag1v67tJRiuEjZ2636ccrJzU0OkLnfwAAAEyJAABIE0ZS/9Cg/nzhoO50MxvsB88GevX1+UPq7L4vy8fHHwIAAGBmeMUIpBFXrq7cv6EfrpyU67oUJJ2vtevqXOcVHbx2SgnH4eA/AAAATIsAAEgjxhg9G3yh79uO696zRxQkjXX3P9OX5/br7tMHslj6DwAAgBkgAADS7kltdP72Ff10/SwnAqQpx3V0+larDl07KzH3DwAAgBkiAADSjDFGPQPPta/1GCcCpKknfc/0Tye+UU//cxr/AQAAYMYIAIA0ZFu2jnWc10/t55VwWAWQThKOo32tR3Xy5iWKAQAAgFkhAADSkJFR72C/vr54SN0veihIGrnd3aXfndqn/uFBigEAAIBZIQAA0pRtWTrecUHHOi7IYRVAWkg4jr66cFBXHtykGAAAAJg1AgAgjQ0MD+lfznyvZ4N9FCMNtHV16E/nD2pgeIBiAAAAYNYIAIA05srVyZsXdazjvFzKkdKG4yP6+uIh3XrSJTr/AwAA4G0QAABpbmhkRP988i/q6X9OMVJY670O7W87rsGRITr/AwAA4K0QAABpzpGj87ev6McrJylGiuob6tfXFw6q/dEdWRbftgEAAPB2eCUJpDkjo+eD/frm0mH19PdSkBTjSrp0r13ftR5TPB5n8T8AAADeGgEA4AOO6+hs5xUduHKCYqSYZ/29+vLsfnU+uc/sPwAAAN4JryYBPzzRjdHj3qf6vvW4ntILIGU4rqszna3ad+koff8AAADwzggAAL8wRqc7L+tox3k5LmcCpIJn/b36x+Nf69lAH+N/AAAAvDMCAMAvT3ZjdP/ZE3178YievOihIEuc67r68epJHbtxQS6HOAIAAGAOEAAAfnrCG6Mj18/q9K1WVgEscfefP9Z/PP6V+ocHKQYAAADmBAEA4CPGGD3tf659l4/q+QAnAixVruvqj+d+VGvXDeb+AQAAMGcIAAC/PemN0aFrp3XpXjuDyyXqxuO7+ur8QfUPD7D3HwAAAHOGAADwoZ7+5/ry7A/qG+ynGEvMSDyuP53/Udcfdsow/AcAAMAcIgAAfMhxXR26elpnb7dRjCXm6v0b+u7STxoYGZQxBAAAAACYOwQAgE/19D/XH88d0ODoEMVYIgZGBvXd5aO69vCWbMumIAAAAJhTBACAT8WdhI51nNeJG5coxhLR2tWhby4d1mg8TjEAAAAw5wgAAJ8yxuhRb7e+Pn9IA8OsAlhsvYMv9KfzB3Xj8R1ZFt+aAQAAMPd4lQn42Eg8ruM3zuvULVYBLCbXdXWms03fXDgsl6MZgCWHfhwAgHQRoASAf1mW0Z2eh9rXekwbVzQpIxylKIvg+cAL/fPJb/XkRQ/FAJaYhOuof3hQ0WBErs8PTzWSIsFwyq1SGk3ENRwf4WQVzAtXUiQQUsCmdw9SAwEA4GNGRvFEQoevndHnzbu1rbaFoiyCw9dP66f2c3KZ/geWnK5nj/Tf//CPigYjko8DgITjqDArV79c/76qCpal1Od96tYlfXPxiGzDwlfMLcd1ZVuWPl+7WxtXNFEQpAQCAMDnbMtSZ3eXvms9qtU
VK5UdyaQoC+hhb7f++eRf1DfUTzGApfgcfd6t/3D0Tz6f+/cG0g1lK7SlpjmlAgDHdXTpXrv+x4O/lUUAgDnmuq7CwZCqi8oJAJAyCAAAyHEd/eXyT/q4abu2rWQVwEJx5er71mO6cPeaHNdheSqwBBljFLB5uWQZRwHLTsl+CJaxFLQDBACYc67rcm8h5XC3ApBlLHX1PNL+thPqHx6kIAvkxqO7+sPZA3o+2MfgHwAAAPOOAADAS1+dP6jrD29TiAWQcBL6vvWYLt+77udtxQAAAFhABAAAXnrY163fn9mnwdFhijHPOh7d1XeXf1Lf0ABHjAEAAGBBEAAAeMlxHO27fExXum5QjHk0ODKsby8d0cW712Uz+AcAAMACIQAA8JrHfU/1x3M/ynEcijFPrty/oa8vHNTQ6IhEAAAAAIAFQgAA4DWjibgOXT+ly/faKcY8eDE0oK8uHNL1h7dlW3wLBgAAwMLh1SeA1xhjdLv7vr4894PiToKCzLFL99r11YWDSrissAAAAMDCIgAA8FeGR0d16Nppnb99hWLMob6hAf3TyW/1qPcph/4BAABgwREAAPgrljG69eSevrt8VAMjQxRkjhzrOK8f207IYfYfAAAAi4AAAMBfMcZoeHREh6+fUcejOxRkDjzs7dY/nvhavUMvKAYAAAAWBQEAgEmFAkEF7QCFmAMHr57Suc4rSnC6AgAAABYJAQCAv+K6rjLDGfpkzU7VFFVQkDkQC0fp+g8AAIBFxatRAH/FkavGZdX6pGmHQoEgBZkDe+o3atvKtbItm2IAAABgURAAAHiNK1fZ4Qz9rHmXqgvLKcgcyQzH9JvNnyovI5tiAAAAYFEQAAB4jZHUuKxaP2/ZK2M4rG7O6mqMNlat1vurtlAMAAAALAoCAACviYVj+vWmT1SaU0gx5lg0FNHfbvxIy3KL5bouBQEAAMCCIgAA8JIxRmsr6vXh6m0UY57qu6a8Tp8175Jl+PYLAACAhcUrUAAvhQJB/Zstnyk/I4dizJNYKKLPW3arprhCjsuRgAAAAFg4BAAAJHlH/+2sXa/ttetksfd/3hhj1FBarQ9Xb5dtOBEAAAAAC4cAAIBc11VWJEN/s/FDFWTmUpB5FgtF9OmaHWooW6GEwyoAAAAALAwCAAByXVd7Gzdrc/UaZv8XSGNZtfY2bFY0GJYrGgICAABg/hEAAD7nuK5Kcgr1+drdKsrKpyALJBIM6/OW3aorqRIHAgAAAGAhEAAAPmdktK22RZurmynGAmsordbHa3YoMxzlWEAAAADMOwIAwMcc11FZbqE+bdqh/Ew6/y+0oB3Qz1v2qKaoUoatFwAAAJhnBACAjwUsW5ur12h73Tox/FwcVQVl+tWGDxUNhikGAAAA5hUBAOBTruuqKKtAv1z/vnKjWRRkkdiWrU/W7FDDshqKAQAAgHlFAAD4lG3b2l67VttWrqUYi6wkp0B/u/EjRUOsAgAAAMD8IQAAfKo4K19/u/FjxUIRirHIApat3fUbtaFqtRyaAQIAAGCeEAAAfnziG0sfrNqqluUNErv/l4Ty3CL9av0Hyo5mcCIAAAAA5gUBAOAzrqTS3EL9auOHygzHKMgSYVu2ttet08669XJch4IAAABgzhEAAD5jJH28ervqS6ooxhJTml2on6/do6KsfEIAAAAAzDkCAMBHHMdRVeEy/WztbmVFMijIEmOM0ZaaZm1d2SLD1gwAAADMMQIAwEds29bHq7drFUfOLVlFWfn6vGW3irPz5TisAgAAAMDcIQAAfCLhOKotrtTHTdvZ+7/E7axdry3VzQrYAYoBAACAOUMAAPiA67oKB4J6v3GLmivrKcgSlxPN1K83fayCzBxOBAAAAMCcIQAAfMB1XVUXleuz5l0KB0IUJAVsqWnWrvoNCtg2xQAAAMCcIAAAfCAcDGtvwxY1VdRRjBQRCYb1n239XEVZ+RQDAAAAc4IAAEh7rirzS/U3Gz9UwGI2OVUYY9RUXqsvWvZyIgAAAADmBAEAkOYCdkA/b9mt+tIqipFiosGwPm/Zq8qCUnoBAAAA4J0RAABpzEiqKarUrzZ8KMvwdE9FdSXL9fO1e2RbXD8AAAC8G15RAun8BLds/WbzJ6rMK6UYKSoWiuiDVVu1sni5HFYBAAAA4B0QAABpynEdramo00ertsmmk3xKa6qo1edrdysSDMkVIQAAAADeDgEAkKaCdlC/XP++luUVU4wUFw6E9FHTdq0qq5HjOBQEAAAAb4UAAEhDCcfRhqpV2l23QUE7QEHSQENZtT5cvU2ZkRirAAAAAPBWCACANOPKVU40Qz9r3qXlBWUUJE3YxtJnzbvUUFpNMQAAAPBWCACANOM4jporG/T+qq3M/qeZ6sJyfbR6m2KhKKsAAAAAMGsEAEAacV1XeRnZ+nj1dlXkl1CQNGNZln7eskerl9VwrCMAAABmjVeQQBoxxqi5ol4fr9nOADFNleeV6FcbPlQkGKIYAAAAmBVGCECacOUqO5qpL9a9p7LcIgqSpmzL1sdN27WucpVcl20AAAAAmDkCACBdnszG0tqKBr3fuEVGhoKksbyMHP1688fKimRQDAAAAMwYAQCQJjJCUf1600cqzMylGGkuYNnasXKdttasVcJxKAgAAABmhAAASBOba5q1o3adjGH23w8Ks3L1+bo9Ks7OZysAAAAAZoQAAEhxrqScWJZ+s+ljFTD77xu2ZWt7TYu21jRzJCAAAABmhAAASHGu42hn7XptqFrN7L/PFGfn67Pm3SrJLpTDKgAAAABMgwAASGGO66gwK0+/WLdXRdn5FMRnjDHaUbtO21au5dhHAAAATItXjECK29O4SRtXNNH336fyMrL1i3XvqSQ7n1UAAAAAmBIBAJCiHNdRWW6RPl69nb3/PrdtZYu2165T0A5QDAAAAEyKAABIUbaxtG1li3bWracYPpcRjuo3mz5WYVYuxQAAAMCkCACAFOS4jkpzi/RFy15lRTJ8+fUPjY5w/N04Lcsbtadhk2yLb+sAAACYGOtFgVR84tqBl8u+/caVq0t32/Wn8wf0b7d9oRWFy7ghJMVCEf39lp/p8LUzuv/sMQUB5hBho1eDVD5yNNU/fyzh+4rvD0gxBABAinHlqji7QH+/9XNFg2Hfff2O4+pI+xn94ewBVRdVqKqgjOMPk+pKVuiX697T//Djb+W6DgUB5oBtWQoHQr7/PjOaiCtoB1JylZFt2YqGwgraQW5ozKmE4ygYCNCDBymFuxVIMbZl64NVW7W2st6XX//AyKDO37mmJy96dOT6WX26ZqfyM3K4MSTFwhH9vGWv9l85oav3b3I0IDAHluUW619v/lQZkZiv6xBPxBULR1WRV5pSn7dtWdqwfJX+4dP/jcIBAgDMrYSTkCS1VDZQDKQMAgAgxSzLKdLfbf5EAcv25dff2tWhq/dvKJ6I6+Kd67p497r2NmzixkhaWVypz9fu0Y1HdxRPJFgdAbyjkuwC/dvtXyg3lkUxUpBlLK2trPdtaA4Ab2J6CEghRkafrd2tuuIqXw7sXElnOtv0qK9Hlm
Xpbs8DHbx6Sv3DA9wcSZFgWO81blZjWY0c9iUC78x1XcUTcQoBAEgLBABAinBcVyuLK/Tzlj2KhEK+rMGj3m4daz+vodFhGRklHEenbl7Sre4ubpBxVpXV6OOmbYqGwjQnAgAAwEsEAECKsC1Ln63drdri5TLy57Lu6w86dfPJPTmO1+DOsixde9Cp4x0XNZoY5SZJCtoBfbh6hxpLq+l6DQAAgJcIAIAUkHASaihdoQ9XbVUsFPFpDRyd6WxV94tnLwMQI2lodFhH2s/ocV8PN8o4jWXV+rR5lzLDMSIAAAAASCIAAJY8V65ioag+btqhupIq39bh/rPHOnWrVf3Dg6/1PzDG6GznFZ29feVlN15IljH6Yt1eNZbVUAwAAABIIgAAljzHcVVXulyfrNmhqE9n/yXp6oObuvHojt7sfWiM0fP+Xn136ScNjg5zw4xTkVeiL9btVTQYphgAAAAgAACWMtd1lRmJ6sNV21Tv49n/vqF+Heu4oEe93ROfbW+Mjt+4oKv3b9H07g0fN+3Q2op6jgMEAAAAAQCw1K0oKNfP1u5RwA74tgYPnj/RuTtXNDJFo7+nL57rL5eOKO5wXNd4JdkF+tuNHykWilIMAAAAnyMAAJawWDiiz1t2a2VRhW9r4LiuWrs61P7w9sSz/0lxJ6GjHed19+kjbpxxbMvSnsZN2lqzhhMBAAAAfI4AAFjCakuq9EXLe7Is/z5Vewdf6Ej7OfX09065jN2Vq1tP7umHK8flsA3gNYWZufrl+g+UG8smBAAAAPAxAgBgiQpYtv5mw4cqzyv2dR1ud9/XyRsXpw1BjIz6hwd14MpJPe59yg00jm3Z2rpyrbbXrpPjOBQEAADApwgAgCXIdV21LG/Uh6u3ybZs39ZhJD6qo+3n1PXskWbSws5xXbV13dSxjgvcRG8oyszTz5p3qTgrn0aJAAAAPkUAACxB4WBIf7fpE5XlFPm6Dk/7n+vw9TOKJxIz+njLGHW/6NHh66f1YniAG2kcY4y2rVyrbXXrxHkAAAAA/kQAACwxCcfRphVN2rpyrWwf7/13XFfnbl9Ra1f77P+/O1d06e51bqY3FGTm6udr96gst1iOy1YAAAAAvyEAAJYQ13WVGYnpl+vfV2V+qa9rMRIf1aFrZ9Q3NLuZfMsyut19X8c6LmgkPspNNY6R0fbaddq6slkBH28tAQAA8CsCAGAJcVxXO1a2aFfd+ik73vtBx6M7OnHjghKzbFpnZDQ8OqpjHRfU2X2fm+oN2ZEM/av1H6g4u4BeAAAAAD5DAAAsEY7rqjAzV5+s2aGyXH93/pekI9fP6P7zx281SLUsS5fvXde5221KOIk0rM672Vy9RttqW2TbAYoBAADgIwQAwFJ5MhqjTdVN+mD1Nt/XouvZYx26dlqDI0NvtRLCSOobGtAPbSfU09/LzfWGaCiif7v15yrOyqUYAAAAPkIAACwBruuqIDNHP1u7W/kZOb6vx7nbbWp/dFvOO6xQty1Lx29c0MV71+Ww1P2vNJXX6rPm3bIMPwYAAAD8gld+wFJ4IlqW1let1t6Gjb6vRe/QC/3Ufk5PXzx/5z4IzwdfaN/loxocGeIme0MkGNKvN32k5YVlckVAAgAA4AcEAMASkBvL0t9u/Eg50Wzf1+Lag06dunlZI/HRdz6v3nVdHW0/pxuP7nKTTaC6sFK/Wv+BbE4EAAAA8AUCAGCRGUk7atdpW02L7zv/jyRGdfLGJXV2d81ZLe4/f6zD7acVpxngX4mGwnqvcbPqSpbLmeVpCwAAAEg9BADAInIl5WXk6DebPlF2NMP39bj/7LGOXD/z1s3/JjIaj2t/63F1PXvEDTeB2uLl+rx5t6KhCBsBAAAA0hwBALCYXFd76jdqbWWD72f/HdfVpbvtutzVIcuau29Njlxde3BLh6+d4X6bQDQU0cdrdmjVsmo5rJIAAABIawQAwCJxXVeluYX65Yb3lR3N9H09ng/26eDVk3o+2CejuQtDjIz6hwd18OopPR/s48abwMri5fq4aaeyIhlyOTEBAAAgbREAAIvElav3G7eopbJRls9n/11J7Y/u6PiNi/Py9ydcR5futetEx8XUKswCCVi2frZ2l1aV1bANAAAAII0RAACLwHEcVeSV6KOm7cqNZfm+HiPxER26ckr3eh7N6ez/GGOMHj7v1qFrZzQwMsgNOIHlBWX62do9yo7ECAEAAADSFAEAsAhs29bO+o3asHw1xZDXqf+71qPztvzcyGg0EdepW5d05f5NCj5JjT5q2q6m8lpZ/l6QAgAAkLYIAIAF5jiOyvNK9IuWPcqJsfffcV0dbT+nziddcudx7tmyLHU8uqND185ocHSYG3EC5bnF+lcbPlIsFKUYAAAAaYgAAFhArqRQIKhddRu0cUUTBZHU0/9c+1qPaSQxOq//jpE0FB/Rketn1NXDkYATsSyj9xs3a0PVat+fSgEAAJCOCACABeS6rkpyCvV3Wz5VJBimIJJOd7bq0t0OOY4z7/9WwLLVdv+mLt1tl+M6KVCdhVeYlad/vflTVgEAAACkIQIAYAEF7YA+XLVVTctWUgxJL4YHdaDthJ4NPF+wGef+oQF9e/mIul885wJMwDJGm2rWaFfdejkcCQgAAJBWCACABbQst1i/3vSxArZNMSRdvX9Tp25e1sjo6IL9m8YYnbxxUa332jnzfhKFmXn6xbr3VJCRQ40AAADSCAEAsFBPNmP0Rcse1ZeumJej7lJN3Enop+tndbfn4YLvN+8Z6NX3rcc0ODrEjTnJvbq5pll7GjbNa2NGAAAALCwCAGABuK6r2pIq/axlt4KBAAWRdLv7vo52nNfAyNCCBwCu6+rgtdO6dr+TGe5JFGbm6pM1O1SWW0S/BAAAgDRBAAAsgIBl6/O1e7SyaDmz/5ISTkLHb1xQa1fHonWbv//ssb6+eFhxJ8ENOomddeu1u36jbGOzDgAAACANEAAA8yzhOGooq9b7q7YoEgxREEndL57pp/azejbYJ2uRAoDRRFw/Xj2pW0/ucUEmkRXJ0M+ad2tZXrFcVgEAAACkPAIAYB65kiLBkD5Zs1N1JcspiLzl95fvdejUzcuyFnk1xP1nj3Xo6mm2AUxhe22LdtS2KGgHKQYAAECKIwAA5pHjOFq9bKU+WL1V4QCz/5LUPzyg/W3H9fB596It/5e80wBeDA3o8PUzetjbzYWZRDgQ0m82faLSnAK2AQAAAKQ4AgBgnrhylRWJ6YNVW1VXzOz/WE1uPenS4etnlsTn47iOLt9r17H283S7n0JzZZ0+XL1NAYsfGQAAAKmMV3PAPHFdVyuLK/VZ8y6FAiyflrx999+3HtO9RTj6byKWZenxix5933ZMj3qfcoEmEQ6E9ZtNn2h5fhnFAAAASGGcRwbMA1euMsNeA7XqogoKknS/57H2tR5Twlk6DeVc19WpW606f/uaPlmznYs0ibEw63/48X9RgpMT4CP9I4O6fK9DeRnZvl4p5DiuQoGAVhSWKyMcTZnP23VdPe57qns9j2Sxiglz/bxwJSNXlfmlKsjMpSBIC
QQAwDwwMlpZXKEv1r0nmxcckrwXYT91nNOtJ11L6vOyjKUHzx7r8LXT2rayWdnRTC7WBMLBsH7eslc/Xj2ly/fal8QKDmAhdDy6o3//T/9XWcbf38sTjqOa4nL973/+n2v98lUp9HkntK/tuP7v3/5PsozNDY055bquQoGg/qtP/53+ZuNHFAQpgQAAmAfRUET/2dafqTSnkGIkPRvo1VcXDmpwZGjpfXJGOnbjvG496dLaynou1sQl0oqicn3avEtXHtyUs4RWcQDzaTQR15O+Ht/XIeE4yo1lajQRT6nP25U0ODKkR71PfR/iYB7uL9dVOBjS4OgwxUDKIAAA5sH65Y36cPV2Zv/HOdJ+Tle6birhOrKW2OyxZSx1PrmvI+1n1VC2ghMbJhENhvXh6q3ad/knnbt9lfsbvsGKF68GqVyHVP/8sbTvLSCV8OoNmGOxUFR/t/kT5WdkU4ykF0MD2nf5qHr6e5fc4H/MaGJU3136SXefPuSCTWFlcaU+X7tHmeEoJycAAACkGAIAYA45rqMtNc3aVN0s22Kv4ZgLd6/pwp1rijtLd+moMUZXH9zUwaunFE+xJa4LKWQH9cGqrVpdvlKOQwAAAACQSggAgDniuq7yYjn61foPVJJdQEGShkdHdPDqad3tebjkQ5Gh0RH9+fwhPWK/75Sqi8r1cdMO5cSy5LqEAAAAAKmCAACYI67rauvKZm2qaWJv9Dg3H9/VyZsXNRIfSYnP99rDm/rx6ikGtlOwLVsfN+1QQ+kKGbH3EQAAIFUwSgHmgOM6KsrO1ydrdtL5f5y4k9CJGxfV1tWRMt2X+0eG9N2lI+oZ6OMCTqGqoEy/WLdXGZHUOQ8cAADA7wgAgDlgjKVNK5q0s3Y9M6LjPHj+RN+1HtXAyHDKdMl1HEeX73XoaPtZLuCU97zRZ827taaijnseAAAgRRAAAO/IcR0VZubq0+ZdKs7OpyBJCcfR+dtXdLazLaW2RFjG6Gn/c+1rPabng6wCmEpBZq7+fuvnioTCFAMAACAFEAAA78i2bG2sWq09DRspxjgvhvr1fesx9Y8MptznPpqI6/StVp3tbONCTsEyRjvr1mlH7TqKAQAAkAIIAIB34LqucmNZ+lcbPlBeLJuCjNPx6K4OXTuTMnv/x7MtS/d6Hmpf63H1DfVzMaeQG8vRv978qXcigGicCAAAsJQRAADvwLYsbV/Zot31zP6PN5IY1beXDutZCi+hTziOTnRc0JWuG1zQKVjGqGV5o/Y2bJY4OQEAAGBJIwAA3kFONEu/2fSpYqEIxRinq+eR9rUek+M4Kfs12JalG0/u6uC10xocGeKiTqEwK08/b9mj0pwiOW7qXnMAAIB0RwAAvCXXdfVR03ZtXNGUMh3uF8q3l47owfMnKf91JBIJHbx6WreedHFRp2Akbahapd31G1NyywcAAIBf8EoNeAuu66o0p1B/s+EjZYSZ/R/vyYtn+vHKKQ2NDqf812JZlq49vKUj7Wc0Eh/l4k4hPyNHn6zZoWW5xSm98gMAACCdEQAAb8GRN/u/uryG2f83HOs4r45Ht+WmyX7w4fiIvr14RA97u7m409i6cq121q1XMBCgHSAAAMASRAAAzJLjOqourNDnzbuVGYlRkHEGRoZ0oO2Enrx4njbBiJFRW9cN/XT9rBJOgos8hcxwTL9Y955Kc4rSJgACAABIJwQAwCwF7aA+Wr1VayrrZMTs/3iX77Xrwt1rSjjxtPq6huIj+uO5Ayl9qsFC2VzdpA9WbVHIDlAMAACAJYYAAJgFx3VUXVSuT9bsUGaY2f/xRuKjOnTttDqfdKVdIzjHdXS5q0OnbrZyoacRCoSSqwAKKQYAAMASQwAAzEI4ENLu+o1aU1FPMd5w++l9He84r+H4SNr1RTAy6h8e1L7Wo+ofGeRiT6OpvFafNe9SwLIpBgAAwBJCAADMkOu6qi6q0BctexUNhinIOAknobOdV9R2/6YsKz2/rcQTcZ26eUnnO69ywacRDoT0y/Xvq6pwGc0AAQAAlhACAGCGwsGQ9jZsUlN5LcV4Q89An45cP6Pewf607YtgjNH9Z4+1r+1YWhxxON+1qi2p0hcte2VzSgYAAMCSQQAAzFBpdqF+0fKegjQ3e40r6dqDWzrWcV6Wld6DvaHREZ25dVk3Ht/lwk8jHAjq4zU71LisRo7rUBAAAIAlgAAAmIGAHdCvNnyourIqivGG4dFhHbx6Sk/6etL+VATLGLU/vK2j7eeVcBjUTmdlcaW+aNmroB1kKwAAAMASQAAATMOVVFVQpp+37FHIDlKQN6rT9eyxDl477YsBnjFGfcMDOnj1lO49e8Tln0bIDmpvw2Y1V9Qr4SQoCAAAwCIjAACmEbBt/XrTx6rML6UYb0g4jr5vPaabPloSbxtb5+5c0ckbFxVnUDut2pLl+rR5h7IjGXJd1gEAAAAsJgIAYAqO62hNea0+atqucDBEQd7Q/eK59ree0Ggi7puv2Ripd/CFfmg7rp7+59wE0whYtj5t2qmWyoa0Ox4SAAAg1RAAAJNw5SoSCOsXLe9peX4ZBZnA6VuXdf3hLTk+m9k1xuh0Z5uuP+yUy+72aVUWlOqjpu3KYhUAAADAoiIAACbhuK7Wr1ilPQ2b6Pw/geH4iL5vPa7ewRdp3vrvrxkZPe57qu8uH9XA8BA3wzQsY+mz5l1aU1HHKgAAAIBFRAAATMBxXWWFY/po1TbVFFVQkAm03uvQ2dttGnX8s/x/PNd19f3l48n+B8xqT6cku0C/XP+esiIZFAMAAGCREAAAEzCSmspr9V7jZmYsJzCSGNWBKyf14PnjtD/6byoPe5/o60uHORJwJs8pY/TR6u3auGI1xQAAAFgkBADAG1zXVVYkQ5+s2akVReUUZAK3u7t07MYFDYwM+TogiTsJ/dB2Ql3PHnNTzEBuLEu/3vSJsqOZFAMAAGAREAAAbzDGqKm8Vp8175JleIq8yXFdnbp5WdcedEo+nv0fc/fpA+1vO86NMQOWsbSlZo121q2jGSAAAMAiYHQDvCEjHNWvNn6ostxCijGB7hfPdLT9vHr6n8tie4ReDA/ou0s/6WFvNzfHDORn5OgX695XUVY+IQAAAMACIwAAxjEyWlNRp/cbt/h6b/tkXEntj27r/J2rYuyfrIkrXX1wS0eun6UYM2AZSxuqVmtn/XpaJwIAACwwAgBgnGgorH+z+VPlZ2RTjAkMDg/p8LXTuvP0Adsjkixj9KSvR/tbj+v5QB8FmYHCrFz9ouU9VeSV0EARAABgAfEKHkhKuK62167T7oZNsi2bgkzgTs8DHWk/J4el23+ltatDl+61U4gZMDLaULVK22tbZFksJQEAAFgoBACAvKXt+RnZ+vWmj5Uby6IgExhNxHWs47yuPbjF3v83WJal291dOnj1pPqHBynIDOTEsvT52j0qzy2RwyoAAACABUEAAEhynITea9ykTdVNLG2fRO/gC/3QdkJDoyMUYwJxx9GPV0/pxuM7FGOGttQ0a2/jJoWCIbl0BAAAAJh3jHTge47rqDS3SD9r3q38WA4F
mbBGrs7fueI1/6McE7ItS+0P7+hI+zkNxwlJZiIWiujXmz7RspwiMf4HAACYfwQA8D0jaWvNWm2oapJhafuERuKj+u7yUfUPD1CMKSTchA5fPa0nvT0UY4bWlNfq85Y9CgdCFAMAAGCeEQDA1xKuo9KcIn2+drcKMpn9n8z1h506eeMSHdunYRlLZzpbdbqzlUaJMxSwbf28ZY8qCkopBgAAwDwjAICvBS1bm6ubtLVmLcWYRNxJ6PvWY7r//An7tGdgJB7XH88d0DOOBJwRI6OVxcv1+drdCtlBCgIAADCPCADgW67rqiAzV/9qw4d0/p/C3acPdOjaaQ2ODMnQAWD6+0quTt68pFO3LrEKYIZCgaB+1rxbDWUr5FIzAACAeUMAAN8K2La21KzVlupmijGF4zcu6OaTuxRiFvpHBvXNhcNKOAmKMQNGUk1RhX7eslcRTgQAAACYNwQA8K28jGz9my2fKTMSoxiT6Ol/riPXz+pZfx8NEmchkUjo9K1WXbp7nWLMUCgQ1N6GTWquqGflBAAAwDwhAIA/b3xj9GnTTq1fvopiTOHC3eu6cOcay7JnyRijR33d+vriIVYBzEJNcaU+ad6pjFCUVQAAAADzgAAAvuPKVUl2gX618UPFwhEKMomh0WEd77igez0PZSy+VczW4MiwDl07o2sPOinGDAUsW3sbNmltZT2hEwAAwDzgVT38d9MbS79c/4Eay2ooxhQ6n9zX0Y7zGk0kaP33VveZ0Z3u+zrQdkKOy/GJM1VdVK7P1+5WXiybugEAAMwxAgD4iuM6qimq0MdNOxQLMfs/mXgirtO3Lqutq0M2s/9vxRijvuF+/dRxTl09jyjIDNnG1serd2htZb0sw70HAAAwl3h1Bd9w5SpoB/Rx0w41lK2gIFPofvFM37Ue1fDoCMV4B5axdLmrQ6duXaYYs1CSU6BPmncqK5LBVgAAAIA5RAAA33AdRw1l1fq4aTuz/1PVyXXV2nVDZ261ymL2/51YxtLT/ufa33ZCj3qfUpAZMsbok6ad3ioA7kEAAIA5wysr+IIrKRIKa2/9Zq1eVktBpjA4Oqz9bcfVPzJIMeaAJaMTNy+q9X4Hs9mzkJ+Rrb/b/IkywxzTCQAAMFcIAOALruuourBCHzVtVygQoCCT1UnS7e77+vHqSQarc8QYo4fPu3Xgygn1Dw9QkBmyjKW9DZv1XuNmjgQEAACYIwQA8IVIIKTdDRvVVL6SYkzBdV395dIRPertoRhzyEj66fo53XrSxVB2FjIjGfrVhg+Vn5FDMQAAAOYAAQDSnuu6qios16/Wf6igzez/VO4/e6zvLh/VaGKUYswhY4xuPrmn/VdOaDRObWfKMkYtlQ16v3ELRwICAADMAQIApL28jGz9cv0Hqi+tohjTOHD1hDq777Pkeh44jqO/XDqih8+fUIxZyI1l6YuWvarIK2VbCgAAwDsiAEBaM8bo7zZ/ol9v+kiu6zKLOIW+wX4dvnpG/SODMjIUZB7cenJPR9rPcR/O8jncXNmgXXUbZBnuSwAAgHfBemikvTtPH+qfTnyjnFiWSrLzVZxdoJxopgJWQEE7qFAgoHAgpFAwqJAd9G2dzt5uU2tXh5xEgqPX5snw6Ii+u/yTPm3eqbxYNgWZobyMLH3eslsnbl7Qjcd3ZRnuTwAAgLdBAIC05rquvr10RN9eOiJjjCKBkDLDMcXCUcVCEeXGslSQmaPCzDwVZeerMDNXWdFMZYUzlBWNKRaKKiMcVSQYVsgOKGgHFEjDPgIj8VEdunZa958/lmGWdd4kHEfn71zTketn9UXLXgoyQ0ZG65Y3atvKFt15+kAJhxUUAAAAb4MAAGlvbN+w67oaGBlS/8iQ1Jt8X/K/rustNQ5YlsKBkLKimcqNZSk3lq28WJayohmKBr3AoDg7X/kZOcqOZioWiigaCiszHFNmJENZkVhKzk5ef9ip4zcuajg+qoBlc9PMF2P0fKBP+y4f1a66DcqNZVGTGcqKZOiLde/pp3bvNAW2AwAAAMweAQB8x0hScvDw8r/JsYTjuhoYHdLAyJAePH8s1/XCgbGmeKFAULFgRLFwRLHkyoBIMKTMUFT5Gbkqys5TTixLsVBUOdFM5WfkeI/MHOXGshQJhhWw7CU1yx534jp585I6Ht2RzdL/eb/3Rp24znZe0YU7V7WnYRNFmYUNVav0/qot+g9H/6yRRJxOFQAAALNEAAC8wSQDATMuGBjjuq5eDA+ob7hf3sKCV/3ybWMpYNuyLVtBO6BQIJgMCMLKCEeVGY4qO5KlwuxcFWXmqTi7QEVZecqNZSszHFVGJKasSIaioYgsmWRGYWSM9+v5asz38PlTHbl+Vv3DgwQAC8A2lu4/f6wj7ee0dWWLwgH/9p2YrXAgpF9v+kQH2k6qs7uLgmBBZISjWlG4TAHL3y+ZRuIjKs4pUGY4llKftzFGebFsNZbVKBoMc0NjTsWdhIwxKszMpRhIGQQAwCwZYyYMByQp7jiKJxIaHh15Y3uB9+eWsRSwbAVsWwE7oKAVkJ0MDXKjmSrMzldxVr4KklsMvB4FuSrKzFdRdp4yIlHZxpZteX+P/fLxdgN313XVer9D5+5cYUn1AhqJj+hYx3lde3BLzRV1FGQW6kuq9Mv17+m//+GfFE/EKQjm3cqiSv2f/u4flJ+Z4+s6uK4ry1jKSbGtS7Zl67PmXdpZt56fc5iH54X3NjuaQTGQMggAgDk01faCMXEnobiTkDs6LI071vzh8ye69rBTxpjkCgDvYRtbGeGosqIZyV4DUeVn5KgoGRYUZeWrKCtPWZEMZYRjiobCCgdCCgeCCgWCL0OCiV749I8M6tDVU+rue0bzvwVkWZauPbilI9fPqrFshYI+Pn1itoJ2QD9bu1t/Pn9IHY/vsA0AC3LPFWblqYAZvpRkJMVCEcVCEYoBACIAABbNZFsMXNfV6z3ORzU4OqQnL3om+DskK7nlIBaOKi+WpfzMXBVk5KgwM1fZ0UxlhKPKzchWUaa33SAa8rYlRENhdfU80tH281yMRbj2QyPDOnj1pD5ds0PVRRUUZRZWFJTrbzd+qP/uu/9Zo6wCwAIYayYLAECqIwAAUpgrKeEklHASGhwdUndfj9yHnX/VnyAWiigrkqGsSIYyIzFlhKLKisTUNzSg20/vM/u/CCzLUmtXh87eblNlQRmnL8xCOBjSJ2t26Pu24zp963JKnrwBAACwGAgAgDQxVfPC4fiohvt69Ljv6atwwHW9LQYMPBdN72C/fmg7ofcatyg/w9/7i2drecEyfdGyV21dNzQ4MkSIBQAAMANMmwA+YOQ1L7SMJduyZFteE0IG/4t8XYzR0fbzOn/nqhzXoSCzELQD2lW/QWsr6+WwPBsAAGBGCAAAYBE9H+zTV+cPanB0mGLMUnVhhT5v2aOcaCZ7tAEAAGaAAAAAFpHjujp87azaH3ZSjFmyLUvvNWxWS2XDuI4XAAAAmAwBAAAssp7BXu1rPSbHYRvAbFXml+qT5p3KiWbJEfUDAACYCgEAACyykfiofrxySje771GMt/D
Bqi1qrqiXzY80AACAKfFqCQAWnaubj+9qf+txlrK/hbKcIv1600fKimZQPQAAgCkQAADAIjMyejE8oB/ajuve04cUZLb1M0bvN27V5ppmWRwHCAAAMCkCAABYItof3dHpzjYK8Rayohn6u02fKBaKUAwAAIBJEAAAwBJgGUuPe7t18OpJ9Q32U5BZ189oc/Uavde4hWIAAABMIkAJAGCJMEbHOy7ofz76RzVX1CsjHFVmJKaMcFSxUEThQEihQFC2ZVOrCWRHs/TrTR/rxM2LetTbLSO2AwAAAIxHAAAAS4RlLHU9e6z/5/f/UbmxbGVFY8qKZCgrkuGFAeGYCrJyVZyVr5xo1ss/y45mKDPifWxmOCbj033wljFqqWzQ+41b9J+Of+XbOgAAAEyGAAAAlhBjjAZGhtQ/Mii3R96pAK53NoBlLIWDQUUCYYWDIUWCIUWCYUWCIWWEYyrIyFFeRo6yohnKi2UpL5aj/MwcFWTmKi8jR7nRTEWCYdmWJWOsV/PjJn3mynNimfpZ8y791H5Wt7sf0BQQAABgHAIAAFhijDHe8vUJxq6jibhG43H1DvVLcuW6kuRKMrItS5axFLBtBeyAQnZAQTuocCCoaCiizEhMhZm5KszKU3FWvgozc1WQlaf8jBxlhsdWG8QUCgRlGUvGMrKMJct4b1OidjJaX7Vaexo26T8e+0quy8GAAAAAYwgAACCFjAUDJvm7N0MCx3U0HHc0PDqisVaCXjzg/ccylmxjybJevbWMpWgwrLzMHBUmVwvkx7KVl5mj/Ixs5WfkqigrTwWZuYoEQwraAQXsgIJjDysgy1o6AUFWJKafNe/S0fbz6nh4e0l9bgAAAIuJAAAA0ow32Dev/z7JdV3F3YTkJF77f/qG+vWo76mumbFowST/CqOQHVBGOKZYOKLMcEz5GdleSJCZq8IMb4tBQVaecqKZyghFFQtHFA6GFQ2GFA68CgwWUktlo/bWb9Ldpw80mohzUwAAAIgAAAAwztiSea/3gPe+QSehwdFh6cXr7x9bXB+wbIUCQWVHMpQTy1JeLFs5sUyvUWE0Q9nRTBVm5Co/0+tPkBHyGhrGwhFFgxFlhKMKBYJz+nVkhKP6Yv17OnT9tDoe3hEHAgAAABAAAABmYXxvgrExteM6Ghwd1uDIkB70PpHrvt68MGDZioUiiiWPM8wIR70VBaGIFw5k5io/2bwwJ5qpnFiWcmPZyo1mKjuapaxITMFAcNZj+KZlK/V+4xbd7r7PKgAAAAARAAAA5sDYtoPJmhd6JxsMaaxx4VhAYMaaFlq2goGgoi9PNgh7jQvDMeXFslSQlaeCjBwVZOWqICNX+RnZyo1lK5Y8HjEWininG7zskWAUtAP6zeZPdaT9rK503fT+TQAAAB8jAAAAzDvz8qjBvw4IXNfVSGJUI/FRvRga0F+fbmBkW+OaDiYfAdtWRjimoqw8FWXlKS/m9SMozs5XfmaOijLzVZydp0/X7NS1B51KvNH3AAAAwG8IAAAAi2660w0STkJxJ67BkVfvGzvd4PrDToUDIUVDYWWEvaMOi7LzVJJVoLLcYt3recTsPwAAgAgAAAAp4s3tBWO/dF1XQ6PDGhodVk9/r+49fTiuT4GRK/dlc0MAAAA/IwAAAKSV108qYOAPAAAwxqIEAAAAAACkPwIAAAAAAAB8gAAAAAAAAAAfIAAAAAAAAMAHCAAAAAAAAPABAgAAAAAAAHyAAAAAAAAAAB8gAAAAAAAAwAcIAAAAAAAA8AECAAAAAAAAfIAAAAAAAAAAHyAAAAAAAADABwgAAAAAAADwgQAlAAAAmJjruoon4hpNxCmGpIBlyxiTUp+z4zpKOA4XD/PGtixZhnlVpAYCAAAAgEl0PX+s/8+hf1FGKCLXx3WIJ0aVHc3UL9a9p/K8kpT5vB3X0elbrdrfekzBQIgbGnN8fyVkJH3StFPNlfUUBCmBAAAAAGASD3u79f878qXv65BwHNWVLtfGFU0pFQAkHEcX713X//vgb5mhxZxzXVfhYEjL8koIAJAyCAAAAAAm4bquEm7C93VIOI4cx5GbgusgXNdVwknINX5ew4F5vbdc7i2kDqJQAAAAAAB8gAAAAAAAAAAfIAAAAAAAAMAHCAAAAAAAAPABAgAAAAAAAHyAAAAAAAAAAB8gAAAAAAAAwAcIAAAAAAAA8AECAAAAAAAAfIAAAAAAAAAAHyAAAAAAAADABwgAAAAAAADwAQIAAAAAAAB8gAAAAAAAAAAfIAAAAAAAAMAHCAAAAAAAAPABAgAAAAAAAHyAAAAAAAAAAB8gAAAAAAAAwAcIAAAAAAAA8AECAAAAAAAAfIAAAAAAAAAAHyAAAAAAAADABwgAAAAAAADwAQIAAAAAAAB8gAAAAAAAAAAfIAAAAAAAAMAHCAAAAAAAAPABAgAAAAAAAHyAAAAAAAAAAB8gAAAAAAAAwAcIAAAAAAAA8AECAAAAAAAAfIAAAAAAAAAAHyAAAAAAAADABwgAAAAAAADwAQIAAAAAAAB8gAAAAAAAAAAfIAAAAAAAAMAHCAAAAAAAAPABAgAAAAAAAHyAAAAAAAAAAB8gAAAAAAAAwAcIAAAAAAAA8AECAAAAAAAAfIAAAAAAAAAAHyAAAAAAAADABwgAAAAAAADwAQIAAAAAAAB8gAAAAAAAAAAfIAAAAAAAAMAHCAAAAAAAAPABAgAAAAAAAHyAAAAAAAAAAB8gAAAAAAAAwAcIAAAAAAAA8AECAAAAAAAAfIAAAAAAAAAAHyAAAAAAAADABwgAAAAAAADwAQIAAAAATMuVSxEAIMUFKAEAAMDEssIx1ZYsVzAQ9HUdhkZHVJiVp+xIRkp93sYYFWbmam1FvWLhKDc05tRoPC5jpJLsAoqBlEEAAAAAMIm6kir9d/+r/1oFWXk+r4QrI6OAnVovHQOWrS9a9uqz5l2SDDc05vx54d1nDKmQOrhbAQAAJmGMUSgQVNjnKwBSmW3Zsi2bQgCA6AEAAAAAAIAvEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAAAAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAAAAAAPkAAAAAAAACADxAAAAAAAADgAwQAAAAAAAD4AAEAAA
AAAAA+QAAAAAAAAIAPEAAAAAAAAOADBAAAAAAAAPgAAQAAAAAAAD5AAAAAAAAAgA8QAAAAAAAA4AMEAAAAAAAA+AABAAAA///27iy5rus6A/APggD7BgABoiGITpSU10yC7xlAZsFRKIPIADKAyKmKKyk35T6yEzlSWVRs2bJaymoos0ce1j4+VxRxQYkUCdzzfVWrrnDPfeERJeD8WHstAIABEAAAAADAAAgAAAAAYAAEAAAAADAAAgAAAAAYAAEAAAAADIAAAAAAAAZAAAAAMMbU1JSbALCHmemjmT4y7UYcEkfdAgCAR3uwu5vbd+/kzr272c2uGwIwYipTuf/gfo4ISg8NAQAAwB7eufFe/ulf/zknZo9ld1cAADBqamoqt+/dzevvvqUL4JAQAAAA7OHDz2/kX37+PQ//AHuYmprK9JFpXQCHhAAAAGAPU1NTmZn24xIAk8EQQAAAABgAAQAAAAAMgAAAAAAABkAAAAAAAAMgAAAAAIABEAAAAADAAAgAAAAAYAAEAAAAADAAAgAAAAAYAAEAAAAADIAAAAAAAAZAAAAAAAADIAAAAACAARAAAAAAwAAIAAAAAGAABAAAAAAwAAIAAAAAGAABAAAAAAyAAAAAAAAGQAAAAAAAAyAAAAAAgAEQAAAAAMAACAAAAABgAAQAAAAAMAACAAAAABgAAQAAAAAMgAAAAAAABkAAAAAAAAMgAAAAAIABEAAAAADAAAgAAAAAYAAEAAAAADAAAgAAAAAYAAEAAAAADIAAAAAAAAZgUgOAI0mm/esFAACAMokBwG6SD5P8b5IPknyZ5IF/1QAAAAzZ0Qn8Mz1I8u9J3k+ykeRikktJtpIsJrmQZL6VLgEAAAAGYRIDgN0knyT5QaskmUk98F9IspBkKRUIrCZZSwUE6+39WX8tAAAAmDQTFwBcf+XVr3y9fe1qktxNdQS8P3JpOsnxJGeTnEsFBCtJriR5IclmKiBYSHK+fX5qpAAAAODQmMQOgK94OBBI/hYK3E9ys9WfRy4fS3IyFQ6cSIUCL6UCgY32upkKDWbaPZyJUAAAAIADbOIDgEcZEwokye1Wf/t4kp+kOgCmk5xKzRJYTbLT6koqHJhv10+kgoRB3l8AAAAOHg+ozT6hwL1WSW0V+DDJb1PDBrv7OJdkOzVbYCPVObCZmjFwJnXU4GwqHAAAAOBgmfjj3gKAMR4VCozYbZXta1fvpJ8x8OORzywkWU5tIlhKBQM77Z8X2/vLqa4BAAAAnp+jSY5M+h+QJzSme+DjVv8zculkan7AQmorwWr6roH11EaCpdTgQRsJAOD5ei/J91IdgOb9AEyu6dT/899K3/09cXwje4ZGjhQ87GjqeEB3VGAu1SmwlQoF1lPHCZZTocBUKpma6HQKAA6Anyb5xyQfuBUAE20qNSj+yyT3rr/y6u4k/iF1ADxDYzoF7iW50arzw9Qgwdn2upiaJ3ApNWvgxdR2gqXUxoLZ1DaCbl0hAPDk7iX5pBUAHGoeFA+gMZ0C3W/+u40Ep1OhwE6SF9prFxKcTw0cPNledQsAwDf3oyT/cP2VV993KwA47HQAHEAPdwqMBAK7qbaU++3rv6Y2Erw28vGTqQBgNRUGXE4FA5dTswfmUvMHzkcABAAAMBgCgENgn20ESdtGkCTb167eTPJGq87x1GyBbujgemro4OX29UrqKMHZVIAAAADAhBEATJg9ugdutfogyesjl8+m30YwnxoyeCXVQbCWvovgbHQLAAAAHGoCgAk3ZvBgknzW6u2RyydbnU49+K+lP0pwJf1xgjOpoYPdNgIBAQAAwAEmABigvY4UtGDgy1Yftbd/3f6ezKS2EZxIdQtsJ3k5FQpcTh0juJivbiTw9wsAAOCA8Ftb9jRmG0HS/+a/20iwkGQr/WyBzdRmgsVUN8GZ9jrrzgJwiNgCAMDE8Bta9rRPp8CDVvfa218meWfkYzOprQMLqUDgcvqAYC0VDMynHzxoTSEAAMB3SADAN/YYWwmyfe3q3dQxgo/y1Y0Es6njAiupoYNLqa0EO6kjBIvt9UKEAgAAAE+NAIDvxJjhg3eS/L5VZybJXKpj4FwqANhIbSNYT3+k4GIqQHB0BQAA4BsSAPDMjAkF7qZWFH4wculIaqDgqVbnUoHAS6nZAlupNYUX27VuHoGNBAAAAI8gAOC5GhMKPEi/keDDdum1JP+WfsvA8fTDBrs5AzupcOB0amtBV0IBAABg0AQAHDhjQoGkjhDcGbn0XpKfpP/N/+nUXIHl1KrC7dSqws0k51PbCM6k1hn6+w8AAAyGByAOhTGDB3dbPUiS7WtXP0nySWrw4H+0z0ynhguuj9RqqlNgJTV/oKsT7jYAADCJBABMlD26B+6nOgXeS/KzkUsLrS6kAoLNVoupgOBSez3lzgIAAIedAICJN+ZIwcet3hy5dCJ1jKDrCOiOEmylNhNspgKCuXx1I4EZAwAAwIEmAGCQHg4FRmYM/LXVhyOXZ5KcHKmF1ODBjVZbrS62/6a6OuJOAwAAB4UAALL3jIGRNYWftkqSt5L8Iv2D/mzqGMFqkrUkLyZ5OdU5sJQKDU6kthHMuNsAAMDzIACAMcYcH7jf6naSm6nBg79LHQWYSg0ePJWaI7CVvmNgNTWEcC79RoLT7jQAAPBdEwDAN/QYGwm6wYN3UsHAb0Y+cyo1V2Al/aDBnfb1QqpjYCkVEJgrAAAAPDUCAPgOjDlScDN1hOCtkbePpboALqTmCKykgoHNVNfApVQocD42EgAAAN+SAACeoT2GD95u9XGSN0Yun0l1ApxPcjY1X2AnFQxcarXePpf0xw90DgAAAF8jAIDnaMyMgST5vNUfRi4fTz9Q8GzqGMFqkiupcOCFJJdTocBM+298OjYSAADA4AkA4IDZJxS41SpJ3kvyZuoBv9tIcCzJfCoQeDk1gHA91T2wnD48OB4bCQAAYFAEAHAI7BMKPLyR4EZqxsCr6TcSLKXmCXRhwGaqY+BC6ojBuVbH3G0AAJhMAgA4pMZsI0i+vpHgj60606kjBAupQOBS6ujAZiokWEqymAoHzsRcAQAAOPQEADDh9ugeuJ9aUfhJkt+NXDqW/uH/Qqtu8OBy+hWGS6kQAQAAOCQEADBAY44U3E7yTqvObKoL4HR7XUzNFlhNBQPbrZZScwWmUkMHdQ0AAMABIgAAkowNBe6kVhR+PHLpP1MP+8dbnUsdIXgxtYlgJxUQLKeOGhwdKcEAAAA8BwIAYE8PhwKPGDz48EaC76dfPXgy1SmwkX6+wJXUjIFT7Xr3CgAAfMcEAMBj22vwYAsGdpPca5XURoIPk/ysfX0kdYRgIdUZsJHqFNhJDSHsNhLMpUIB/38CAICnyA/YwBN7jI0E3eDBv7R6K8kP2/Xp1MP/6kitpIKB5XZtqb0ed7cBAODbEQAAz8SYGQP3k7zf6lcjl+dSHQHzqYf/jdTxgeX0RwpWU8MJAQCAfQgAgOdmnyMF3ZrCt9vbU6k1hSdTgwXPpToDdkZqo703l9pe0G0jOOJuAwAwdAIA4MDZY/jgbmro4
K0kN0Yufz8VDBxvr4upMGAr/RDCnVQXwexITbvTAAAMiQAAOPDGHB9I+sGDN9vX7yb5Teq3/tOph/3FVhvpVxVup0KBs6ljBCfaZwEAYCIJAIBDaZ9Q4EGre0luJ/k8yfUkP0l/JOB0krVUELCZGjy43mohdcTgfCogAACAQ08AAEyMfbYRdHbbRoJPW70+cu1UqlNgub2upt9GsDLyOp8KEgAA4NAQAACDM6Z74Gar/xu5NJsKBhZSD/6LqS6Bbs7A5fbefJIzEQwAAHBACQAAMjYUuNPqk4cun0498J9OBQSXk7zUXtfb60Zqa0E3j6DbSgAAAM+cAABgD/vMGfiiVee1JK8mmWl1LnVcYC3VLXCl1XpqrkC3jWAm1hQCAPAMCAAAvoExocBuauDg7fb2J0l+n37o4JHUA/9SahNBt6bwUvpjBKdGasbdBgDgaRIAADyhMcMHd1s9SJLta1fvpLoGro98ZibJxVSnwEpq8OBWqmtgvtWFVkIBAAC+NQEAwDOyR/fA3SR/bNWZTs0XmEuFAyupLoGuY6ALCuZTMwgcIQAAYF8CAIDn6OFQoB0nuJ/kL63eHrl8PPXQfz59Z8ALSTZToUB3nOBCKkRIDB0EAKARAAAcIHsdJ2jBwK0k77bqzKY2DRxPzQ7oBg+uJ9lOP3xwrn12Ov1GAgAABkQAAHAIPMaaws5bqYf7o6mZAcdSWwc2Ut0CL6bmC6ymwoLT7TOz7RUAgAklAAA4pPY4PpDU0MEuGLiZ5EZqI8EP0m8lOJs6LrDeXjdSnQKrqVDgXKuT7jQAwGQQAABMiDHbCJLaRnC/+2L72tWPknyU5Jftral8dfBgN3RwMzVbYKnVfKwpBAA4lAQAAAP0iO6B3SSftfp9kp+2S0eSLKTfRrCYmjGwlf4YwcVWx91ZAICDSwAAwLgZAw+SfNjqv9ulI6lugbOpYwJzqSMEq6nOge30AcHJ9vluG4GtBAAAz4kAAIBH2icU+LTVO+3SD1LHAk6kHvrPpI4M7CR5KTV8cC3VKbAQGwkAAJ45AQAAj21MKLCbfvDgp0n+nOTNJD9OBQNdraS6A7ZTXQObqe0Ec6kjBCdaTbvbAABPlwAAgCcyJhRIavDg/SS32tc3kryeOgowlXroX0jNFthMBQPb7Z8vJjmfOmZwKtYUAgA8EQEAAE/dY2wk2E2S7WtXb6ZWFf4hyS9GPnMmtX3gcurowHKqY+BykgupbQQLqXAAAIDHIAAA4LkZ0z3weZLftuqcEXG9vAAABdxJREFUTD38L7RaTXUKrLV/vtRqPuYKAAB8jQAAgANlTCjwZapT4A8jl2ZSxwO6jQQLqU6BK6lZA5upowSLqQBh6qECABgMAQAAB97DocDIjIG7Sf7SqjOVfpjg8dRxgu3URoLLqYBgJ8l6u3601Yw7DQBMMgEAAIfOXjMGRjYSfNmq80bqe163evB8aq7AWioMeCG1rnAtFRh06wxn4zgBADAhBAAATIx91hTebZXU4ME/Jfll+qMAx1OhQHd0YCP9IMLFVDBwLnXcwPdPAODQ8QMMABPtG2wk+DLJ9VadmdTD/3KSpdSwwZ1UMHAhNV9gub36ngoAHGh+WAGA7Nk9cDfJu60606nBg+dTIcBSKhDYSnULdKsL51PdAtPuLgBwEAgAAGAPewwfvJ/ks1ajGwmOpx74z6aOC1xMzRbYSA0c3EiFAwupuQJd2UYAADwTAgAAeExjZgwkya1WH4xcnklyLDVM8GTquMBqKhB4qdVWkrmRz3XDCgEAnioBAAA8gX1CgdHBgzeS/DH9b/6nUwHBudTQwZ0kL6Y/QrCW5HQqOOhWGgIAfGsCAAB4yvYJBR60upfkdpIvUhsJfjTy8fnUXIG19JsIrqTmDcy163OpgAAA4LEIAADgGdhnG0HSthEkyfa1qx8n+TjJayPXT6e6BZZTocBq+rkCa0lWUsHAmdRRAgCArxAAAMABs8fwwS/Sdwv8ol06knrov9BqLjVfYKe9rqaCgZXUMQIAYMAEAABwwI05UvAgyUetOkdSawq7mk+FAWupYGC71VoqFJhOv43ARgIAmGACAAA4hPYJBT5vlSRvJfl56nt+t2ngVGqewN+lNhF0gcByqpNgNjWgcDYVDgAAE0AAAAATYkwosJuvbyR4J8l/pd9KcCx1VGA7NVfgcvpugfOp0OB0K2sKAeAQEgAAwATbZyPB/VZJbST4LMkbIx89kZorcDEVCKynAoHNVLfAfPrBg9YUAsABJwAAgIF5jI0ESZLta1f/muSvSd5N8quRS2fTDxe8mDpO8EIqJFhMHSO4mOocAAAOCAEAAPBIY7oHPsvXuwVOpR74z7W6lOoUWEkFAxtJttpnuqGDAMAzJAAAAB7bmFDgZqs/jVyaSR0N6OYHLKSOELyQ2kiwlQoILiQ5mX4egRkDAPAdEAAAAE/k4VBgZMZAN3iw20jwuyQ/S20X6DYNnE8FAVvpBw9eSbKaGkzY1aw7DQBPRgAAADxVe80YGFlTeKtVknyc5Hr6YwFHU4MHF1OrCbeSvJjqGlhJHS840+pYrCkEgMcmAAAAnokxwwd307YRbF+7ei/Jn1v9euQzx1MBwHqqU+BS+oBgIf1GgvPx8w0APJJvkADAgTFmxsCtJG+36syk5gcspDoGllOBwFpqC8GlVGCw2D4LAIMmAAAADrQxRwrupu8W6EwnOZFaVTifCghWUrMFNtvrpdQxg/Pt891GApsJAJhoAgAA4FDaY/jg/SRftHp35PKx1DaCU6mAYCU1bHDzoTqX+vloeuRVMADARBAAAAATYczxgSS53epG+/rNJD9M/Sw0kwoGllJHB7aTvJzk71NrDe+5uwBMAok2ADAoI6HAo0y16lYUHk3y3vVXXr3vzgFw2AkAAAAe8nBIMGaDAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABxk/w/f2avCtI4t7wAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAxMy0wNC0xMVQwNTowOToyOS0wNzowMP6upGwAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMTMtMDQtMTFUMDU6MDk6MjktMDc6MDCP8xzQAAAAAElFTkSuQmCC'
definition = ActionDefinition(
    name=slug,
    url=f'{action_hub}/actions/{slug}/action',
    label='Excel (tabbed)',
    icon_data_uri=icon_data_uri,
    form_url=f'{action_hub}/actions/{slug}/form',
    supported_action_types=['dashboard'],
    description='This action will generate an Excel spreadsheet containing all the datasets in the dashboard',
    params=[],
    supported_formats=['csv_zip'],
    supported_formattings=['unformatted'],
    supported_visualization_formattings=['apply', 'noapply'],
)
@app.post(f'/actions/{slug}/form')
def form():
"""Form for the Tabbed Spreadsheet action: email details and file name for the spreadsheet."""
return [
ActionFormField(
name='email_address',
label='Email Address',
description='Email address to send spreadsheet',
required=True,
),
ActionFormField(
name='email_subject',
label='Subject',
description='Email subject line',
required=True,
),
ActionFormField(
name='email_body',
label='Body',
description='Email body text',
required=True,
type='textarea'
),
ActionFormField(
name='file_name',
label='Filename',
description='Filename for the generated spreadsheet',
required=True,
),
]
@app.post(f'/actions/{slug}/action')
def action(payload: ActionRequest):
"""Endpoint for the Tabbed Spreadsheet action: converts the zipped csv file of data into a multi-tabbed Excel spreadsheet."""
    # The attachment arrives base64-encoded; decode it to raw bytes.
    # base64.decodebytes is the current name for the removed decodestring.
    attachment_binary = payload.attachment.data.encode('utf-8')
    attachment_binary = base64.decodebytes(attachment_binary)
attachment_file = get_temp_file_name(slug, 'csv_files.zip')
with open(attachment_file, 'wb') as file:
file.write(attachment_binary)
csv_files = []
temp_dir = get_temp_dir('tabbed_spreadsheet')
with ZipFile(attachment_file, 'r') as zip_file:
# Extract all the contents of zip file in different directory
zip_file.extractall(temp_dir)
csv_files = zip_file.namelist()
    workbook = openpyxl.Workbook()
    # Drop the default empty sheet so only the dataset tabs remain.
    workbook.remove(workbook.active)
    for csv_file_name in csv_files:
        # Name each tab after its CSV file; Excel caps sheet titles at 31 characters.
        worksheet = workbook.create_sheet(title=os.path.splitext(csv_file_name)[0][:31])
with open(os.path.join(temp_dir, csv_file_name)) as file:
reader = csv.reader(file)
for row in reader:
worksheet.append(row)
    if payload.form_params.get('file_name'):
file_name = get_output_file_name(slug, payload.form_params['file_name'], timestamp=True)
else:
file_name = get_output_file_name(slug, 'tabbed_spreadsheet.xlsx', timestamp=True)
workbook.save(file_name)
    response = send_email(
        to_emails=payload.form_params['email_address'],
        subject=payload.form_params['email_subject'],
        body=payload.form_params['email_body'],
        file_name=file_name,
        file_type='xlsx'
    )
    return {'response': response}
|
"""Classes for describing work and results.
"""
import enum
import json
import pathlib
class StrEnum(str, enum.Enum):
"An Enum subclass with str values."
class WorkerOutcome(StrEnum):
"""Possible outcomes for a worker.
"""
NORMAL = 'normal' # The worker exited normally, producing valid output
EXCEPTION = 'exception' # The worker exited with an exception
ABNORMAL = 'abnormal' # The worker did not exit normally or with an exception (e.g. a segfault)
NO_TEST = 'no-test' # The worker had no test to run
SKIPPED = 'skipped' # The job was skipped (worker was not executed)
class TestOutcome(StrEnum):
"""A enum of the possible outcomes for any mutant test run.
"""
SURVIVED = 'survived'
KILLED = 'killed'
INCOMPETENT = 'incompetent'
class WorkResult:
"""The result of a single mutation and test run.
"""
def __init__(self,
worker_outcome,
output=None,
test_outcome=None,
diff=None):
if worker_outcome is None:
raise ValueError('Worker outcome must always have a value.')
self._output = output
self._test_outcome = test_outcome
self._worker_outcome = worker_outcome
self._diff = diff
@property
def worker_outcome(self):
"A `WorkerOutcome` indicating how the worker finished."
return self._worker_outcome
@property
def test_outcome(self):
"A `TestOutcome` indicating how the test runner finished. Possibly `None`."
return self._test_outcome
@property
def output(self):
"Any output returned by the test command. Possibly `None`."
return self._output
@property
def diff(self):
"A sequence of strings containing the diff generated by the mutation. Possibly `None`."
return self._diff
def as_dict(self):
"Get the WorkResult as a dict."
return {
'output': self.output,
'test_outcome': self.test_outcome,
'worker_outcome': self.worker_outcome,
'diff': self.diff,
}
@property
def is_killed(self):
"Whether the mutation should be considered 'killed'"
return self.test_outcome != TestOutcome.SURVIVED
def __eq__(self, rhs):
return self.as_dict() == rhs.as_dict()
    def __ne__(self, rhs):
        return not self == rhs
class WorkItem:
"""Description of the work for a single mutation and test run.
"""
# pylint: disable=R0913
def __init__(self,
module_path=None,
operator_name=None,
occurrence=None,
start_pos=None,
end_pos=None,
job_id=None):
if start_pos[0] > end_pos[0]:
raise ValueError('Start line must not be after end line')
if start_pos[0] == end_pos[0]:
if start_pos[1] >= end_pos[1]:
raise ValueError(
'End position must come after start position.')
self._module_path = pathlib.Path(module_path)
self._operator_name = operator_name
self.occurrence = occurrence
self._start_pos = start_pos
self._end_pos = end_pos
self._job_id = job_id
@property
def module_path(self):
"pathlib.Path to module being mutated."
return self._module_path
@property
def operator_name(self):
"The name of the operator (i.e. as defined by the provider)"
return self._operator_name
@property
def start_pos(self):
"Start of the mutation location as a `(line, column)` tuple."
return self._start_pos
@property
def end_pos(self):
"""End of the mutation location as a `(line, column)` tuple.
Note that this represents the offset *one past* the end of the mutated
segment. If the mutated segment is at the end of a file, this offset
will be past the end of the file.
"""
return self._end_pos
@property
def job_id(self):
"The unique ID of the job"
return self._job_id
def as_dict(self):
"""Get fields as a dict.
"""
return {
'module_path': str(self.module_path),
'operator_name': self.operator_name,
'occurrence': self.occurrence,
'start_pos': self.start_pos,
'end_pos': self.end_pos,
'job_id': self.job_id,
}
def __eq__(self, rhs):
return self.as_dict() == rhs.as_dict()
    def __ne__(self, rhs):
        return not self == rhs
class WorkItemJsonEncoder(json.JSONEncoder):
"Custom JSON encoder for workitems and workresults."
def default(self, o): # pylint: disable=E0202
if isinstance(o, WorkItem):
return {"_type": "WorkItem", "values": o.as_dict()}
if isinstance(o, WorkResult):
return {"_type": "WorkResult", "values": o.as_dict()}
return super().default(o)
class WorkItemJsonDecoder(json.JSONDecoder):
"Custom JSON decoder for WorkItems and WorkResults."
def __init__(self):
json.JSONDecoder.__init__(self, object_hook=self._decode_work_items)
@staticmethod
def _decode_work_items(obj):
if (obj.get('_type') == 'WorkItem') and ('values' in obj):
values = obj['values']
return WorkItem(**values)
if (obj.get('_type') == 'WorkResult') and ('values' in obj):
values = obj['values']
return WorkResult(**values)
return obj
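# A minimal round-trip sketch for the encoder/decoder pair above; the module
# path, operator name and positions are made-up illustrative values. Note that
# JSON turns the position tuples into lists on the way back.
if __name__ == '__main__':
    _item = WorkItem(module_path='example.py',
                     operator_name='core/ReplaceTrue',
                     occurrence=0,
                     start_pos=(1, 0),
                     end_pos=(1, 4),
                     job_id='job-1')
    _encoded = json.dumps(_item, cls=WorkItemJsonEncoder)
    _decoded = json.loads(_encoded, cls=WorkItemJsonDecoder)
    assert _decoded.job_id == _item.job_id
    assert list(_decoded.start_pos) == list(_item.start_pos)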
|
"""
_InsertCMSSWVersion_
Oracle implementation of InsertCMSSWVersion
"""
from WMCore.Database.DBFormatter import DBFormatter
class InsertCMSSWVersion(DBFormatter):
def execute(self, binds, conn = None, transaction = False):
sql = """DECLARE
cnt NUMBER(1);
BEGIN
SELECT COUNT(*)
INTO cnt
FROM cmssw_version
WHERE name = :VERSION
;
IF (cnt = 0)
THEN
INSERT INTO cmssw_version
(ID, NAME)
VALUES(cmssw_version_SEQ.nextval, :VERSION)
;
END IF;
EXCEPTION
WHEN DUP_VAL_ON_INDEX THEN NULL;
END;
"""
self.dbi.processData(sql, binds, conn = conn,
transaction = transaction)
return
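# A usage sketch (the logger, dbi, conn and version string below are
# assumptions, not part of this module); the bind key must match the
# :VERSION placeholder in the PL/SQL block above:
#
#     InsertCMSSWVersion(logger, dbi).execute(binds={'VERSION': 'CMSSW_11_2_0'},
#                                             conn=conn, transaction=True)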
|
import datetime as dt
from dataclasses import dataclass, field
from typing import Any, ClassVar, Dict, List, Optional, cast
from ..types import VerificationDocument, VerificationDocumentStep
from .base import Resource
@dataclass
class Verification(Resource):
_endpoint: ClassVar[str] = '/v2/verifications'
id: str
expired: bool
steps: list
documents: List[VerificationDocument]
metadata: Dict[str, Dict[str, str]]
identity: Dict[str, str] = field(default_factory=dict)
hasProblem: Optional[bool] = None
computed: Optional[Dict[str, Any]] = None
obfuscatedAt: Optional[dt.datetime] = None
flow: Optional[Dict[str, Any]] = None
@classmethod
def retrieve(cls, verification_id: str, client=None) -> 'Verification':
client = client or cls._client
endpoint = f'{cls._endpoint}/{verification_id}'
resp = client.get(endpoint)
docs = []
for doc in resp['documents']:
doc['steps'] = [
VerificationDocumentStep(**step) for step in doc['steps']
]
docs.append(VerificationDocument(**doc))
resp['documents'] = docs
return cast('Verification', cls._from_dict(resp))
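# Usage sketch: the verification id and client below are hypothetical; any
# client passed to `retrieve` must expose a `get(endpoint)` method returning
# the decoded API payload, as the classmethod above assumes.
#
#     verification = Verification.retrieve('some-verification-id', client=my_client)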
|
# A mocked event controller to check whether our start and restart
# routines are invoked correctly
from ev_core.config import Config
class EventControllerMock:
def __init__(self, config: Config):
self.started = True
self.restarted_cnt = 0
self.config = config
def start(self):
self.restarted_cnt += 1
self.started = True
def stop(self):
self.started = False
def reload(self):
self.stop()
self.start()
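# Usage sketch (a hypothetical test snippet; the `config` object is assumed
# to come from the wider test suite):
#
#     controller = EventControllerMock(config)
#     controller.reload()
#     assert controller.started and controller.restarted_cnt == 1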
|
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.core.exceptions import ValidationError
from rest_framework import status
from rest_framework.test import APITestCase
from ensembl.production.dbcopy.models import RequestJob
User = get_user_model()
class RequestJobTest(APITestCase):
""" Test module for RequestJob model """
fixtures = ['ensembl_dbcopy']
# Test requestjob endpoint
def testRequestJobGetAll(self):
response = self.client.get(reverse('dbcopy_api:requestjob-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def testCreateRequestJob(self):
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{'src_host': 'mysql-ens-sta-1:4519', 'src_incl_db': 'homo_sapiens_core_99_38',
'tgt_host': 'mysql-ens-general-dev-1:4484', 'user': 'testuser'})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        last_rq_job = RequestJob.objects.all().order_by('-request_date').first()
        self.assertEqual("mysql-ens-sta-1:4519", last_rq_job.src_host)
        self.assertEqual("mysql-ens-general-dev-1:4484", last_rq_job.tgt_host)
        self.assertIn('job_id', response.data)
        # Test user email set default
        self.assertEqual("[email protected]", last_rq_job.email_list)
        self.assertEqual("testuser", last_rq_job.user.username)
def testCreateRequestJobBadRequest(self):
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{'src_host': '', 'src_incl_db': 'homo_sapiens_core_99_38',
'tgt_host': 'mysql-ens-general-dev-1:3306'})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn('src_host', response.data)
self.assertIn('user', response.data)
self.assertEqual('blank', response.data['src_host'][0].code)
self.assertEqual('required', response.data['user'][0].code)
def testSaveRequestJobEquivalentAdding(self):
job = RequestJob.objects.get(job_id='ddbdc15a-07af-11ea-bdcd-9801a79243a5')
job.status = 'Processing Requests'
job.save()
eq_job = RequestJob()
eq_job.src_host = job.src_host
eq_job.src_incl_db = job.src_incl_db
eq_job.tgt_host = job.tgt_host
eq_job.tgt_db_name = job.tgt_db_name
eq_job.username = job.username
with self.assertRaises(ValidationError):
eq_job.save()
def testSaveRequestJobEquivalentFromDB(self):
job = RequestJob.objects.get(job_id='ddbdc15a-07af-11ea-bdcd-9801a79243a5')
job.status = 'Processing Requests'
job.save()
eq_job = RequestJob.objects.get(job_id="8f084180-07ae-11ea-ace0-9801a79243a5")
eq_job.src_host = job.src_host
eq_job.src_incl_db = job.src_incl_db
eq_job.tgt_host = job.tgt_host
eq_job.tgt_db_name = job.tgt_db_name
eq_job.username = job.username
eq_job.save()
def testCreateRequestJobBadRequestEquivalentRunning(self):
job = RequestJob.objects.get(job_id='ddbdc15a-07af-11ea-bdcd-9801a79243a5')
job.status = 'Processing Requests'
job.save()
params = {
"src_host": job.src_host,
"src_incl_db": job.src_incl_db,
"tgt_host": job.tgt_host,
"tgt_db_name": job.tgt_db_name,
}
active_equivalent_jobs = list(filter(lambda x: x.is_active, RequestJob.objects.equivalent_jobs(**params)))
self.assertEqual(len(active_equivalent_jobs), 1)
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{**params, "user": "testuser"})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
error = response.json()["error"][0]
job_id = response.json()["job_id"][0]
self.assertRegex(error, r"^A job with the same parameters")
self.assertEqual(job_id, job.job_id)
def testCreateRequestJobEquivalentNotRunning(self):
job = RequestJob.objects.get(job_id='ddbdc15a-07af-11ea-bdcd-9801a79243a5')
job.status = 'Transfer Ended'
job.save()
params = {
"src_host": job.src_host,
"src_incl_db": job.src_incl_db,
"tgt_host": job.tgt_host,
"tgt_db_name": job.tgt_db_name,
}
active_equivalent_jobs = list(filter(lambda x: x.is_active, RequestJob.objects.equivalent_jobs(**params)))
self.assertEqual(len(active_equivalent_jobs), 0)
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{**params, "user": "testuser"})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def testCreateRequestJobEquivalentThreeParamsRunning(self):
job = RequestJob.objects.get(job_id='ddbdc15a-07af-11ea-bdcd-9801a79243a5')
job.status = 'Processing Requests'
job.save()
params = {
"src_host": job.src_host,
"src_incl_db": job.src_incl_db,
"tgt_host": job.tgt_host,
}
active_equivalent_jobs = list(filter(lambda x: x.is_active, RequestJob.objects.equivalent_jobs(**params)))
self.assertEqual(len(active_equivalent_jobs), 0)
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{**params, "user": "testuser"})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def testCreateRequestJobUser(self):
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{'src_host': 'mysql-ens-sta-1:4519', 'src_incl_db': 'homo_sapiens_core_99_38',
'tgt_host': 'mysql-ens-general-dev-1:4484', 'user': 'testuser'})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        last_rq_job = RequestJob.objects.all().order_by('-request_date').first()
        self.assertEqual("testuser", last_rq_job.user.username)
def testCreateRequestJobWrongUser(self):
response = self.client.post(reverse('dbcopy_api:requestjob-list'),
{'src_host': 'mysql-ens-sta-1:4519', 'src_incl_db': 'homo_sapiens_core_99_38',
'tgt_host': 'mysql-ens-general-dev-1:4484', 'user': 'inexistantuser'})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual('invalid', response.data['user'][0].code)
def testGetRequestJob(self):
response = self.client.get(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': '8f084180-07ae-11ea-ace0-9801a79243a5'}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def testGetRequestJobNotFound(self):
response = self.client.get(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': 'd662656c-0a18-11ea-ab6c-9801a79243a5'}))
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def testGetRequestJobDetail(self):
response = self.client.get(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': 'ddbdc15a-07af-11ea-bdcd-9801a79243a5'}))
response_dict = json.loads(response.content.decode('utf-8'))
self.assertIn('transfer_logs', response_dict)
def testPutRequestJob(self):
response = self.client.put(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': '8f084180-07ae-11ea-ace0-9801a79243a5'}),
{'src_host': 'mysql-ens-sta-1:4519', 'src_incl_db': 'homo_sapiens_core_99_38',
'tgt_host': 'mysql-ens-general-dev-2:4586,mysql-ens-general-dev-1:4484,', 'user': 'testuser'})
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def testPatchRequestJob(self):
response = self.client.patch(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': '8f084180-07ae-11ea-ace0-9801a79243a5'}),
{'src_incl_db': 'homo_sapiens_funcgen_99_38'})
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def testDeleteRequestJob(self):
response = self.client.delete(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': '8f084180-07ae-11ea-ace0-9801a79243a5'}))
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
job = RequestJob.objects.filter(job_id='8f084180-07ae-11ea-ace0-9801a79243a5').count()
# job has actually been deleted from DB
self.assertEqual(0, job)
def testDeleteRequestJobNotFound(self):
response = self.client.delete(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': '673f3b10-09e6-11ea-9206-9801a79243a5'}))
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def testDeleteRequestJobNotAcceptable(self):
req = RequestJob.objects.get(job_id='ddbdc15a-07af-11ea-bdcd-9801a79243a5')
req.status = 'Processing Requests'
req.save()
response = self.client.delete(
reverse('dbcopy_api:requestjob-detail', kwargs={'job_id': 'ddbdc15a-07af-11ea-bdcd-9801a79243a5'}))
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
# Test Source host endpoint
def testSourceHostGet(self):
response = self.client.get(reverse('dbcopy_api:srchost-detail', kwargs={'name': 'mysql-ens-sta-1'}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def testSourceHostGetNotFound(self):
response = self.client.get(reverse('dbcopy_api:srchost-detail', kwargs={'name': 'mysql-ens-compara-2'}))
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def testSourceHostGetMultiple(self):
        # Test getting 2 mysql-ens-sta servers
response = self.client.get(reverse('dbcopy_api:srchost-list'), {'name': 'mysql-ens-sta'})
self.assertEqual(len(response.data), 2)
# Test getting mysql-ens-general-dev-1 server
response = self.client.get(reverse('dbcopy_api:srchost-list'), {'name': 'mysql-ens-general'})
self.assertIsInstance(json.loads(response.content.decode('utf-8')), list)
self.assertEqual(len(response.data), 2)
# Test Target host endpoint
def testTargetHostGet(self):
logged = self.client.login(username='testuser', password='testgroup123')
self.assertTrue(logged)
response = self.client.get(reverse('dbcopy_api:tgthost-detail', kwargs={'name': 'mysql-ens-sta-1'}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def testTargetHostGetNotFound(self):
logged = self.client.login(username='testuser', password='testgroup123')
self.assertTrue(logged)
response = self.client.get(reverse('dbcopy_api:tgthost-detail', kwargs={'name': 'mysql-ens-compara-2'}))
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def testTargetHostGetMultipleWithAllowedUser(self):
logged = self.client.login(username='testuser', password='testgroup123')
self.assertTrue(logged)
# Test getting 2 mysql-ens-sta servers with allowed user
response = self.client.get(reverse('dbcopy_api:tgthost-list'), {'name': 'mysql-ens-sta'})
self.assertEqual(len(response.data), 2)
def testTargetHostGetMultipleWithNonAllowedUser(self):
# Test getting 2 mysql-ens-sta servers with non-allowed user
User.objects.get(username='testuser2')
self.client.login(username='testuser2', password='testgroup1234')
response = self.client.get(reverse('dbcopy_api:tgthost-list'), {'name': 'mysql-ens-sta'})
self.assertEqual(len(response.data), 1)
def testTargetHostGetMultipleServers(self):
# Test getting mysql-ens-general-dev-1 server
response = self.client.get(reverse('dbcopy_api:tgthost-list'), {'name': 'mysql-ens-general'})
self.assertEqual(len(response.data), 2)
def testRequestModelCleanRaises(self):
with self.assertRaises(ValidationError):
# test db_name repeated on same target
RequestJob.objects.create(src_host="host1:3306",
tgt_host="host4:3306,host1:3306",
src_incl_db="db1,db4",
tgt_db_name="db5,db1",
username='testuser')
with self.assertRaises(ValidationError):
            # test target db name not set at all (same target db names)
RequestJob.objects.create(src_host="host1:3306",
tgt_host="host1:3306,host3:3306",
src_incl_db="db1",
username='testuser')
with self.assertRaises(ValidationError):
# test target host contains src host and all db selected
RequestJob.objects.create(src_host="host1:3306",
tgt_host="host2:3306,host1:3306",
username='testuser')
with self.assertRaises(ValidationError):
# test target host contains src host and all db selected
RequestJob.objects.create(tgt_db_name="new_db_name",
tgt_host="host2:3306,host1:3306",
username='testuser')
def testRequestModelCleanSuccess(self):
# Test a normal job would pass.
job = RequestJob.objects.create(src_host="host2:3306",
tgt_host="host4:3306,host3:3306",
src_incl_db="db1,db4",
tgt_db_name="db5,db1",
username='testuser')
self.assertIsNotNone(job)
# test a job with same target but different db name would pass
job = RequestJob.objects.create(src_host="host2:3306",
tgt_host="host2:3306",
src_incl_db="db1",
tgt_db_name="db5",
username='testuser')
self.assertIsNotNone(job)
class LookupsTest(APITestCase):
fixtures = ('host_group',)
def testHostLookup(self):
response = self.client.get(reverse('ensembl_dbcopy:src-host-autocomplete'))
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.client.login(username='testusergroup', password='testgroup123')
response = self.client.get(reverse('ensembl_dbcopy:src-host-autocomplete'))
# retrieve all
data = json.loads(response.content)
self.assertEqual(len(data['results']), 10)
# filter query
response = self.client.get(reverse('ensembl_dbcopy:src-host-autocomplete') + '?q=sta-3')
data = json.loads(response.content)
self.assertEqual(len(data['results']), 2)
self.client.login(username='testusergroup2', password='testgroup1234')
response = self.client.get(reverse('ensembl_dbcopy:tgt-host-autocomplete'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
# retrieve all
data = json.loads(response.content)
self.assertEqual(len(data['results']), 40)
# filter query permission should not allow sta as target
response = self.client.get(reverse('ensembl_dbcopy:tgt-host-autocomplete') + '?q=sta-3')
self.assertEqual(response.status_code, status.HTTP_200_OK)
data = json.loads(response.content)
self.assertEqual(len(data['results']), 0)
class DBIntrospectTest(APITestCase):
databases = {'default', 'homo_sapiens'}
fixtures = ('introspect.homo_sapiens.json',)
def testDatabaseList(self):
# Test getting test Production dbs
args = {'host': 'localhost', 'port': 3306}
response = self.client.get(reverse('dbcopy_api:databaselist', kwargs=args),
{'search': 'test_homo'})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertGreaterEqual(len(response.data), 1)
self.assertEqual(response.data[0], 'test_homo_sapiens')
response = self.client.get(reverse('dbcopy_api:databaselist',
kwargs={**args, 'host': 'bad-host'}),
{'search': 'test_production_services'})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.get(reverse('dbcopy_api:databaselist', kwargs=args),
{'search': 'no_result_search'})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 0)
response = self.client.get(reverse('dbcopy_api:databaselist', kwargs=args),
{'matches[]': ['test_homo_sapiens']})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
response = self.client.get(reverse('dbcopy_api:databaselist', kwargs=args),
{'matches[]': ['no_match']})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 0)
def testTableList(self):
args = {'host': 'localhost',
'port': 3306,
'database': 'test_homo_sapiens'}
# Test getting meta_key table for Production dbs
response = self.client.get(reverse('dbcopy_api:tablelist', kwargs=args),
{'search': 'ass'})
response_list = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response_list), 2)
args['host'] = 'badhost-name'
response = self.client.get(reverse('dbcopy_api:tablelist', kwargs=args),
{'search': 'meta'})
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
args['host'] = 'localhost'
response = self.client.get(reverse('dbcopy_api:tablelist', kwargs=args),
{'search': 'unknown'})
response_list = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response_list), 0)
|
"""SCOPE:
1: Read the input given by the user
"""
print("Enter the number")
input_number=input()
print("The number is",input_number)
print(type(input_number))
input_number=int(input_number)
if(input_number>10):
print("It is greater than 10")
else:
print("It is less than 10")
|
from django.conf import settings
from disturbance import helpers
def disturbance_url(request):
template_group = 'disturbance'
TERMS = "/know/online-disturbance-apiary-terms-and-conditions"
    is_officer = False
    is_admin = False
    is_apiary_admin = False  # initialise here so the anonymous-user path cannot raise NameError below
    is_customer = False
    if request.user.is_authenticated:
        is_admin = helpers.is_disturbance_admin(request)
        is_apiary_admin = helpers.is_disturbance_admin(request)
        is_customer = helpers.is_customer(request)
return {
'APIARY_SEARCH': '/external/payment',
'APIARY_CONTACT': '/contact-us',
'APIARY_TERMS': TERMS,
'DEV_STATIC': settings.DEV_STATIC,
'DEV_STATIC_URL': settings.DEV_STATIC_URL,
'TEMPLATE_GROUP': template_group,
'SYSTEM_NAME': settings.SYSTEM_NAME,
'IS_OFFICER': is_officer,
'IS_ADMIN': is_admin,
'IS_APIARY_ADMIN': is_apiary_admin,
'IS_CUSTOMER': is_customer,
'PUBLIC_URL': settings.PUBLIC_URL
}
def template_context(request):
"""Pass extra context variables to every template.
"""
context = disturbance_url(request)
return context
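# To take effect, this module must be listed as a context processor in the
# Django settings; the dotted path below is illustrative and depends on where
# this file lives in the project:
#     TEMPLATES[0]['OPTIONS']['context_processors'].append(
#         'disturbance.context_processors.template_context')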
|
from setuptools import setup, find_packages
import archapp
setup(
name='archapp',
version=archapp.__version__,
#packages=['archapp'],
packages=find_packages(),
description='Archiver Appliance Python Interface',
author='Zachary Lentz',
author_email='[email protected]'
)
|
#!/home/bernard/acenv/bin/python3
import os, threading, sys, hashlib, uuid, pathlib
from indi_mr import mqtttoredis, mqtt_server, redis_server, tools
from indiredis import make_wsgi_app
from skipole import WSGIApplication, FailPage, GoTo, ValidateError, ServerError, use_submit_list, skis
from waitress import serve
mqtt_host = mqtt_server(host='localhost', port=1883)
redis_host = redis_server(host='localhost', port=6379)
# This service needs a redis connection to store cookies
rconn = tools.open_redis(redis_host)
PROJ_DATA={"rconn":rconn, # redis connection
"username":"localcontrol", # the username which must be used to log in
"password": "6f852ab4bb9e13ac5095377eddb251a09afd27dbb95c788e075ca63860f9ce8cac75fa9165bb739c0e629f2be201ddf57f261ab982cfd7f88687412ff0d1ea64"
}
# The password above is an hashed password, being the result of running
# python3 hashpassword.py, and copying the result here, currently password is 'remscope'
# Set a directory of your choice where blobs will be stored
BLOBS = '/home/bernard/indiblobs'
PROJECTFILES = os.path.dirname(os.path.realpath(__file__))
PROJECT = "indiclient"
def _is_user_logged_in(skicall):
received_cookies = skicall.received_cookies
if PROJECT not in received_cookies:
return False
# get cookie
rconn = skicall.proj_data["rconn"]
# the current cookiestring is stored in redis at key 'cookiestring'
cookievalue = rconn.get('cookiestring')
if not cookievalue:
return False
cookiestring = cookievalue.decode('utf-8')
if received_cookies[PROJECT] != cookiestring:
return False
return True
def _hash_password(username, password):
"Return hashed password, as a string, on failure return None"
seed_password = username + password
hashed_password = hashlib.sha512( seed_password.encode('utf-8') ).hexdigest()
return hashed_password
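# For reference, the hashed password stored in PROJ_DATA above is reproducible
# with this helper; per the comments above, _hash_password("localcontrol", "remscope")
# yields that sha512 hex digest.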
def _create_cookie(skicall):
"Generates a random cookie, store it in redis, and return the cookie"
rconn = skicall.proj_data["rconn"]
# generate a cookie string
cookiestring = uuid.uuid4().hex
rconn.set('cookiestring', cookiestring, ex=3600) # expire after one hour
return cookiestring
def start_call(called_ident, skicall):
"When a call is initially received this function is called."
# to serve static files, you can map a url to a server static directory
# the user does not have to be logged in to access these
servedfile = skicall.map_url_to_server("images", "/home/bernard/indiblobs")
if servedfile:
return servedfile
    if _is_user_logged_in(skicall):
        # The user is logged in, so do not show the index page, or check login page
        if (called_ident == (PROJECT, 1)) or (called_ident == (PROJECT, 10)):
            # instead jump straight to indi client
            return ('indiredis', 1)
    # any other page, such as css or image files, is ok, and a user who is not
    # yet logged in still needs the login page itself served
    return called_ident
# You may wish to apply the decorator '@use_submit_list' to the submit_data
# function below. See the skipole documentation for details.
def submit_data(skicall):
"This function is called when a Responder wishes to submit data for processing in some manner"
if skicall.ident_list[-1] == (PROJECT, 10):
# this call is to checklogin from the login page
skicall.call_data['authenticate'] = False
username = skicall.proj_data["username"]
if (("login", "input_text1") in skicall.call_data) and (skicall.call_data["login", "input_text1"] == username):
if ("login", "input_text2") in skicall.call_data:
password = skicall.call_data["login", "input_text2"]
hashed = _hash_password(username, password)
if hashed == skicall.proj_data["password"]:
skicall.call_data['authenticate'] = True
if skicall.call_data['authenticate']:
return
else:
raise FailPage("Invalid input")
if skicall.ident_list[-1] == (PROJECT, 20):
# this call is to populate the showfiles page
serverpath = pathlib.Path(BLOBS)
serverfiles = [f.name for f in serverpath.iterdir() if f.is_file()]
if not serverfiles:
skicall.page_data['nothingfound', 'show'] = True
skicall.page_data['filelinks', 'show'] = False
return
skicall.page_data['nothingfound', 'show'] = False
skicall.page_data['filelinks', 'show'] = True
# The widget has links formed from a list of lists
# 0 : The url, label or ident of the target page of the link
# 1 : The displayed text of the link
# 2 : If True, ident is appended to link even if there is no get field
# 3 : The get field data to send with the link
serverfiles.sort(reverse=True)
filelinks = []
for sf in serverfiles:
# create a link to urlfolder/sf
filelinks.append([ "images/" + sf, sf, False, ""])
skicall.page_data['filelinks', 'nav_links'] = filelinks
return
if skicall.ident_list[-1] == (PROJECT, 30):
# this call is to log out
skicall.call_data['logout'] = True
return
def end_call(page_ident, page_type, skicall):
"""This function is called at the end of a call prior to filling the returned page with skicall.page_data,
it can also return an optional session cookie string."""
if ('authenticate' in skicall.call_data) and skicall.call_data['authenticate']:
# a user has logged in, set a cookie
return _create_cookie(skicall)
if ('logout' in skicall.call_data) and skicall.call_data['logout']:
# a user has been logged out, set a new random cookie in redis, and an invalid cookie in the client
_create_cookie(skicall)
return "xxxxxxxx"
return
def check_cookies_function(received_cookies, proj_data):
"""Returns None if call can proceed to sub project"""
if PROJECT not in received_cookies:
# no cookie, must go to top login page
return (PROJECT, 1)
# get cookie
rconn = proj_data["rconn"]
# the current cookiestring is stored in redis at key 'cookiestring'
cookievalue = rconn.get('cookiestring')
if not cookievalue:
return (PROJECT, 1)
cookiestring = cookievalue.decode('utf-8')
if received_cookies[PROJECT] != cookiestring:
# invalid cookie, return to top page
return (PROJECT, 1)
return
# The above functions are required as arguments to the skipole.WSGIApplication object
# and will be called as required.
# create the wsgi application
application = WSGIApplication(project=PROJECT,
projectfiles=PROJECTFILES,
proj_data=PROJ_DATA,
start_call=start_call,
submit_data=submit_data,
end_call=end_call,
url="/")
skis_application = skis.makeapp()
application.add_project(skis_application, url='/lib')
indi_application = make_wsgi_app(redis_host, blob_folder=BLOBS)
application.add_project(indi_application, url='/indi', check_cookies=check_cookies_function)
from skipole import skiadmin, set_debug
set_debug(True)
skiadmin_application = skiadmin.makeapp(editedprojname=PROJECT)
application.add_project(skiadmin_application, url='/skiadmin')
# serve the application with the python waitress web server in its own thread
webapp = threading.Thread(target=serve, args=(application,), kwargs={'host':'0.0.0.0', 'port':8000})
# and start it
webapp.start()
# and start mqtttoredis
mqtttoredis('indi_localclient', mqtt_host, redis_host, blob_folder=BLOBS)
|
#!/usr/bin/env python3
import os, sys
import multiprocessing
import subprocess as sp
import shutil
import shlex
import time
import csv
import json
sys.path.append('/home/jrchang/workspace/gym-OptClang/gym_OptClang/envs/')
import RemoteWorker as rwork
def getMultiAppsTargets(path):
"""
path: the root path for "test-suite" to search ".test" file
"""
prog = rwork.Programs()
AllTargetsDict = prog.getAvailablePrograms()
ListOfAvailableTarget = list(AllTargetsDict.keys())
# search all test target in Apps
AppTargets = {}
test_pattern = '.test'
for root, dirs, files in os.walk(path):
for file in files:
if file.endswith(test_pattern):
# remove .test in the file name
file = file[:-5]
# filter out those are not in our consideration.
if file in ListOfAvailableTarget:
AppTargets[file] = root
return AppTargets
def Eval(TargetDict, threadNum):
"""
TargetDict = {"target": "target root path"}
threadNum: make -j[threadNum]
return BuildTimeDict = {"target": build-time}
"""
BuildTimeDict = {}
prevCwd = os.getcwd()
lit = os.getenv('LLVM_THESIS_lit', "Error")
CpuNum = multiprocessing.cpu_count()
for target, targetRoot in TargetDict.items():
isBuilt = False
measuredTime = 0
try:
os.chdir(targetRoot)
# make clean
os.system("make clean")
# build
try:
cmd = "taskset -c 0-{} make -j{}".format(threadNum-1, threadNum)
print('------------------------------------')
print("build cmd={}".format(cmd))
startTime = time.perf_counter()
p = sp.Popen(shlex.split(cmd), stdout=sp.PIPE, stderr= sp.PIPE)
out, err = p.communicate()
p.wait()
endTime = time.perf_counter()
                if err.decode('utf-8').strip() == "":
isBuilt = True
measuredTime = endTime - startTime
except Exception as e:
print("{} build failed: {}".format(target, e))
if isBuilt:
# verify
try:
cmd = "{} -j{} -q {}.test".format(lit, CpuNum, target)
print("verify cmd={}".format(cmd))
p = sp.Popen(shlex.split(cmd), stdout=sp.PIPE, stderr= sp.PIPE)
out, err = p.communicate()
p.wait()
                    if out.decode('utf-8').strip() == "" and err.decode('utf-8').strip() == "":
print("Verify successfully.")
print('------------------------------------')
print("{} use {} secs".format(target, measuredTime))
BuildTimeDict[target] = measuredTime
else:
BuildTimeDict[target] = 'Failed'
except Exception as e:
print("{} verified failed: {}".format(target, e))
except Exception as e:
print("{} unexpected failed: {}".format(target, e))
os.chdir(prevCwd)
return BuildTimeDict
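# Usage sketch (the target name and path are illustrative):
#     Eval({"sqlite3": "<test-suite>/build/MultiSource/Applications/sqlite3"}, 12)
# runs "make clean", a taskset-pinned "make -j12", then the lit check, and maps
# each target to its measured wall-clock build time (or 'Failed').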
def runEval(TargetRoot, key_1, key_2, jsonPath):
"""
TargetRoot: the root path in your test-suite/build
return {"target": {key_1: first_time, key_2: second_time}}
"""
# get all .test target in MultiSource/Application
Targets = getMultiAppsTargets(TargetRoot)
# Build, verify and log time
# 1 thread
BuildTimeDict_1 = Eval(Targets, 1)
with open(key_1 + ".json", 'w') as js:
json.dump(BuildTimeDict_1, js)
# 12 thread
BuildTimeDict_12 = Eval(Targets, 12)
with open(key_2 + ".json", 'w') as js:
json.dump(BuildTimeDict_12, js)
# combine the results
retDict = {}
for target, _time in BuildTimeDict_1.items():
retDict[target] = {}
retDict[target][key_1] = _time
for target, _time in BuildTimeDict_12.items():
if retDict.get(target) is None:
retDict[target] = {}
retDict[target][key_2] = _time
with open(jsonPath, 'w') as js:
json.dump(retDict, js)
return retDict
def WriteToCsv(writePath, Dict1, Dict2, keys_1, keys_2):
"""
Dict1 must contains all the "keys"
"""
    ResultDict = {key: {} for key in Dict1}  # dict.fromkeys(..., {}) would share a single dict across all keys
# write csv header
fieldnames = ['target', keys_1[0], keys_2[0], keys_1[1], keys_2[1]]
with open(writePath, 'w', newline='') as csv_file:
writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
writer.writeheader()
for key, _time in Dict1.items():
if Dict1.get(key) is not None:
if Dict1[key].get(keys_1[0]) is not None:
ResultDict[key][keys_1[0]] = Dict1[key][keys_1[0]]
else:
print("target: {} missing {}".format(key, keys_1[0]))
ResultDict[key][keys_1[0]] = -1
if Dict1[key].get(keys_1[1]) is not None:
ResultDict[key][keys_1[1]] = Dict1[key][keys_1[1]]
else:
print("target: {} missing {}".format(key, keys_1[1]))
ResultDict[key][keys_1[1]] = -1
else:
print("target: {} missing {} and {}".format(key, keys_1[0],keys_1[1]))
ResultDict[key][keys_1[0]] = -1
ResultDict[key][keys_1[1]] = -1
if Dict2.get(key) is not None:
if Dict2[key].get(keys_2[0]) is not None:
ResultDict[key][keys_2[0]] = Dict2[key][keys_2[0]]
else:
print("target: {} missing {}".format(key, keys_2[0]))
ResultDict[key][keys_2[0]] = -1
if Dict2[key].get(keys_2[1]) is not None:
ResultDict[key][keys_2[1]] = Dict2[key][keys_2[1]]
else:
print("target: {} missing {}".format(key, keys_2[1]))
ResultDict[key][keys_2[1]] = -1
else:
print("target: {} missing {} and {}".format(key, keys_2[0],keys_2[1]))
ResultDict[key][keys_2[0]] = -1
ResultDict[key][keys_2[1]] = -1
# write ResultDict to csv
    with open(writePath, 'a', newline='') as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        for key, tmp in ResultDict.items():
            tmp['target'] = key
            writer.writerow(tmp)
if __name__ == '__main__':
for i in range(10):
startTime = time.perf_counter()
'''
Measure the build time for original clang
'''
key_1 = "Original-1-thread"
key_2 = "Original-12-threads"
Orig_results = runEval("/home/jrchang/workspace/llvm-official/test-suite/build/MultiSource/Applications", key_1, key_2, "Original.json")
'''
Measure the build time for ABC
'''
key_3 = "ABC-1-thread"
key_4 = "ABC-12-threads"
ABC_results = runEval("/home/jrchang/workspace/llvm-thesis-inference/test-suite/build-worker-6/MultiSource/Applications", key_3, key_4, "ABC.json")
'''
If you already ran, just read the data.
'''
#Orig_results = json.load(open("Original.json"))
#ABC_results = json.load(open("ABC.json"))
# Merge all results into csv-format file
WriteToCsv("./raw-data/BuildTime/buildEval_" + str(i) + ".csv", Orig_results, ABC_results, [key_1, key_2], [key_3, key_4])
endTime = time.perf_counter()
print("The evaluation procedure takse:{} mins".format((endTime - startTime)/60))
|
#!/usr/bin/env python3
#
# ===============LICENSE_START=======================================================
# Acumos
# ===================================================================================
# Copyright (C) 2018 AT&T Intellectual Property. All rights reserved.
# ===================================================================================
# This Acumos software file is distributed by AT&T
# under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============LICENSE_END=========================================================
from flask_restplus import Resource
from modelbuilder.api.namespaces import model_builder_namespace as api
from modelbuilder.api.v2.serializers import builder_fields
from modelbuilder.api.v2.parsers import error_response_body, error_response_body_500
from modelbuilder.api.business import get_algorithms, create_builder, get_builder_status, export_model
@api.route('/algorithms')
class AlgorithmsResource(Resource):
@api.response(200, 'OK')
@api.response(400, 'Bad Request', error_response_body)
@api.response(404, 'Not Found')
@api.response(500, 'Internal Server Error', error_response_body_500)
def get(self):
"""Get the list of supported algorithms"""
return get_algorithms()
@api.route('/builders')
class BuildersResource(Resource):
@api.response(202, 'Accepted')
@api.response(400, 'Bad Request', error_response_body)
@api.response(404, 'Not Found')
@api.response(500, 'Internal Server Error', error_response_body_500)
@api.expect(builder_fields)
def post(self):
"""Create a model builder resource"""
return create_builder()
@api.route('/builders/<string:key>/status')
class BuildersStatusResource(Resource):
@api.response(200, 'Ok')
@api.response(404, 'Not Found')
@api.response(500, 'Internal Server Error', error_response_body_500)
def get(self, key):
"""Get the status for the builder"""
return get_builder_status(key)
@api.route('/builders/<string:key>/exporter')
class BuilderSaveResource(Resource):
@api.response(201, 'Ok')
@api.response(404, 'Not Found')
@api.response(500, 'Internal Server Error', error_response_body_500)
def post(self, key):
"""Export this model to the model manager service"""
return export_model(key)
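# Illustrative calls against these routes (any version prefix comes from how the
# namespace is mounted, so the exact paths are an assumption):
#     GET  .../algorithms              -> list of supported algorithms
#     POST .../builders                -> 202 Accepted with a builder resource
#     GET  .../builders/<key>/status   -> current status of that builder
#     POST .../builders/<key>/exporter -> export the model to the model manager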
|
import paho.mqtt.client as mqtt
import os
import serial
import time
import random
from time import strftime
from datetime import datetime
import requests
import json
import schedule
import numpy as np
import tensorflow as tf
model2 = tf.keras.models.load_model('./my_model')
def on_message(client, obj, msg):
print(msg.topic + " " + str(msg.qos) + " " + str(msg.payload))
def on_publish(client, obj, mid):
print("mid: " + str(mid))
# getting a dict with temperature, date and icon for the forecast
def day_forecast():
temp_day = []
for i in forecast_response['list']:
foo = '12:00:00'
if foo in i['dt_txt']:
dictor = {
'date': i['dt'],
'temp': i['main']['temp'],
'icon': i['weather'][0]['icon'],
'date_txt': i['dt_txt']
}
temp_day.append(dictor)
    # This loop selects every DT from the response and builds a list of them
temport = []
for d in temp_day:
temport.append(d['date'])
    # This loop converts each DT timestamp to a weekday name and builds a list of them
dates_formated = []
for value in temport:
dates_formated.append(
datetime.utcfromtimestamp(value).strftime('%A'))
return [temp_day, dates_formated]
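# day_forecast() relies on the 3-hourly, five-day forecast endpoint: keeping
# only the entries whose dt_txt contains "12:00:00" leaves one midday sample
# per day, and night_forecast() below does the same with the 03:00:00 entries.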
def night_forecast():
temp_night = []
for i in forecast_response['list']:
foo = '03:00:00'
if foo in i['dt_txt']:
dictor = {
'date': i['dt_txt'],
'temp': i['main']['temp'],
}
temp_night.append(dictor)
return temp_night
def send_mail(city, temperature, humidity, pressure, wind, description):
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
mail= MIMEMultipart()
sender_email = "[email protected]" # replace with sender mail
    rec_email = "[email protected]" # replace with receiver mail
password = "Passwd" # replace with sender mail password
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(sender_email, password)
mail['From']='Weather Notification System'
mail['To'] = rec_email
mail['Subject']='Weather App – Alert'
    temperature = str(temperature) + " C"
    humidity = str(humidity) + " %"
    pressure = str(pressure) + " hPa"
    wind = str(wind) + " m/s"
body=" City: "+str(city)+"\n Temperature: "+str(temperature)+"\n Humidity: "+str(humidity)+"\n Pressure: "+str(pressure)+"\n Wind: "+str(wind)+"\n Description: "+ str(description)
mail.attach(MIMEText(body,'plain'))
msg=mail.as_string()
server.sendmail(sender_email, rec_email, msg)
print('Mail Sent')
email = "Email Will Send Your Mail."
def email12():
global email
email = "Email Send At 12PM. Please Check Your Mail."
def email06():
global email
email = "Email Send At 06PM. Please Check Your Mail."
schedule.every().day.at("00:00").do(lambda: send_mail(city_float, temp_float, hum_float, pre_float, wind_float, des_float))
schedule.every().day.at("18:00").do(lambda: send_mail(city_float, temp_float, hum_float, pre_float, wind_float, des_float))
schedule.every().day.at("00:00").do(email12)
schedule.every().day.at("18:00").do(email06)
def generate_sensor_data():
global temp, hum, pre
temp = random.randint(20, 30)
hum = random.randint(60, 90)
pre = random.randint(1000, 1120)
def predict(temp_float, hum_float, pre_float):
    features = np.array([[temp_float, hum_float, pre_float]])
    pred = model2.predict_classes(features)
    if pred == [1]:
        suggestion = "Most Probably Today Will Rain. So, Don't Miss Your Jacket."
    elif pred == [2]:
        # 'elif' keeps the rain suggestion from being overwritten by the else branch
        suggestion = "Most Probably Today Will Snow."
    else:
        suggestion = "I Cannot Predict Whether Rain or Snow."
    return suggestion
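# Illustrative call (the values are invented): predict(25.0, 80.0, 1010.0)
# feeds [temperature, humidity, pressure] to the saved Keras model and returns
# one of the three suggestion strings above, depending on the predicted class.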
def check_temp(temp_float, temp):
    if temp_float > temp:
        instuction = "Outside Temperature Higher Than Inside."
    else:
        instuction = "Inside Temperature Higher Than Outside."
    return instuction
try:
mqttc = mqtt.Client()
mqttc.on_message = on_message
mqttc.on_publish = on_publish
# Connect
mqttc.username_pw_set("mqtt_username", "mqtt_passwd") # Replace with mqtt username and passwd
mqttc.connect('AWS_E2C_IP_address', 1883, 60) # Replace your AWS E2C IP_address
# Continue the network loop, exit when an error occurs
    while True:
global temp_float, hum_float, pre_float, wind_float, city_float, des_float
generate_sensor_data()
API_KEY = '30ad27b312182fa9f7569003a337536b'
# Replace your city name
city = 'Dambulla'
# getting api
url = f'http://api.openweathermap.org/data/2.5/weather?q={city}&units=metric&appid={API_KEY}'
response = requests.get(url).json()
        # If the city name is misspelled or unknown, skip this iteration
        if response.get('cod') != 200:
            message = response.get('message', '')
            print("OpenWeatherMap error: {}".format(message))
            time.sleep(1)
            continue
weather = {
'city': city,
'temperature': response['main']['temp'],
'humidity': response['main']['humidity'],
'pressure': response['main']['pressure'],
'wind': response['wind']['speed'],
'description': response['weather'][0]['description'],
'icon': response['weather'][0]['icon'],
}
temp_float = weather.get('temperature')
hum_float = weather.get('humidity')
pre_float = weather.get('pressure')
wind_float = weather.get('wind')
city_float = weather.get('city')
des_float = weather.get('description')
temp_int = round(temp_float)
        # This API returns the forecast for five days, with day and night entries
url_forecast = f'http://api.openweathermap.org/data/2.5/forecast?q={city}&units=metric&appid={API_KEY}'
forecast_response = requests.get(url_forecast).json()
day = day_forecast()
night = night_forecast()
prediction = predict(temp_float, hum_float, pre_float)
instuction = check_temp(temp_float, temp)
# print(prediction)
sensor = {
"temp": temp,
"hum": hum,
"pre": pre
}
api = {
"temperature": temp_int,
"humidity": weather.get('humidity'),
"pressure": weather.get('pressure'),
"wind": weather.get('wind'),
"city" :weather.get('city'),
"description": weather.get('description'),
"icon": weather.get('icon'),
"prediction": prediction,
"instuction": instuction,
"email": email
}
forecast = {
"day": day,
"night": night
}
mqttc.publish("sensor", (json.dumps(sensor)))
mqttc.publish("api", (json.dumps(api)))
mqttc.publish("forecast", (json.dumps(forecast)))
print('published')
schedule.run_pending()
time.sleep(1)
except Exception as e:
    # the loop above is meant to exit when an error occurs
    print("Stopping: {}".format(e))
    exit()
|
import numpy as np
from Puzzle.Enums import directions, Directions, TypeEdge, TypePiece, rotate_direction
class PuzzlePiece():
"""
    Wrapper used to store information about the pieces of the puzzle.
Contains the position of the piece in the puzzle graph, a list of edges,
the list of pixels composing the piece, the number of borders and the type
of the piece.
"""
def __init__(self, edges, img_piece):
self.position = (0, 0)
self.edges_ = edges
self.img_piece_ = img_piece # List of Pixels
self.nBorders_ = self.number_of_border()
self.type = TypePiece(self.nBorders_)
def number_of_border(self):
""" Fast computations of the nunmber of borders """
return len(list(filter(lambda x: x.type == TypeEdge.BORDER, self.edges_)))
def rotate_edges(self, r):
""" Rotate the edges """
for e in self.edges_:
e.direction = rotate_direction(e.direction, r)
    def edge_in_direction(self, direction):
        """ Return the edge facing `direction` """
        for e in self.edges_:
            if e.direction == direction:
                return e
def is_border_aligned(self, p2):
""" Find if a border of the piece is aligned with a border of `p2` """
for e in self.edges_:
if e.type == TypeEdge.BORDER and p2.edge_in_direction(e.direction).type == TypeEdge.BORDER:
return True
        return False
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.views.generic.edit import FormView, CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.decorators import login_required
# Create your views here.
from kurs import models
from kurs import forms
@login_required
def show_ctrls(request):
ctrls_view = models.CtrlBase.objects.all()
current_user = request.user
return render(request, 'show_ctrls.html', locals())
@login_required
def show_elements(request):
elements_view = models.ElementsBase.objects.all()
current_user = request.user
return render(request, 'show_elements.html', locals())
@login_required
def show_bom(request):
bom_view = models.CtrlBom.objects.all()
ctrls_view = models.CtrlBase.objects.all()
elements_view = models.ElementsBase.objects.all()
current_user = request.user
return render_to_response('show_boms.html', locals())
@login_required
def show_orders(request):
orders_view = models.Order.objects.all()
ctrls_view = models.CtrlBase.objects.all()
customer_view = models.SuppliersCustomersBase.objects.all()
current_user = request.user
return render_to_response('show_orders.html', locals())
@login_required
def show_suppliers(request):
suppliers_view = models.SuppliersCustomersBase.objects.filter(supplier_customer_identify=1)
current_user = request.user
return render_to_response('show_suppliers.html', locals())
@login_required
def show_clients(request):
clients_view = models.SuppliersCustomersBase.objects.filter(supplier_customer_identify=0)
current_user = request.user
return render_to_response('show_clients.html', locals())
@login_required
def show_purchases(request):
purchases_view = models.PurchasesConsumption.objects.filter(p_c=1)
elements_view = models.ElementsBase.objects.all()
invoices_view = models.Invoices.objects.all()
suppliers_view = models.SuppliersCustomersBase.objects.all()
current_user = request.user
return render_to_response("show_purchases.html", locals())
@login_required
def show_consumptions(request):
consumptions_view = models.PurchasesConsumption.objects.filter(p_c=0)
elements_view = models.ElementsBase.objects.all()
invoices_view = models.Invoices.objects.all()
suppliers_view = models.SuppliersCustomersBase.objects.all()
current_user = request.user
return render_to_response("show_consumptions.html", locals())
@login_required
def show_stock(request):
elements_view = models.ElementsBase.objects.all()
current_user = request.user
return render_to_response("show_stock.html", locals())
class NewCtrl(CreateView):
model = models.CtrlBase
form_class = forms.CtrlForm
success_url = '/added/'
class NewElement(CreateView):
model = models.ElementsBase
form_class = forms.ElementForm
success_url = '/added/'
class NewBom(CreateView):
model = models.CtrlBom
form_class = forms.CtrlBomForm
success_url = '/added/'
class NewOrder(CreateView):
model = models.Order
form_class = forms.OrderForm
success_url = '/added/'
class NewConsumption(CreateView):
model = models.PurchasesConsumption
form_class = forms.ConsumpForm
success_url = '/added/'
class NewPurchase(CreateView):
model = models.PurchasesConsumption
form_class = forms.PurchForm
success_url = '/added/'
class NewSupplier(CreateView):
model = models.SuppliersCustomersBase
form_class = forms.SuppForm
success_url = '/added/'
class NewCustomer(CreateView):
model = models.SuppliersCustomersBase
form_class = forms.CustomForm
success_url = '/added/'
|
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
|
from collections import namedtuple
Position = namedtuple('Position', ['line', 'column'])
class JavaToken():
def __init__(self, value, position=None):
self.value, self.position = value, position
def __str__(self):
if not self.position:
return '%s "%s"' % (self.__class__.__name__, self.value)
return '%s "%s" line %d, column %d' % (
self.__class__.__name__, self.value, self.position[0], self.position[1])
def __eq__(self, other):
raise NotImplementedError
class Keyword(JavaToken):
VALUES_SET = {'abstract', 'assert', 'boolean', 'break', 'byte', 'case',
'catch', 'char', 'class', 'const', 'continue', 'default',
'do', 'double', 'else', 'enum', 'extends', 'final',
'finally', 'float', 'for', 'goto', 'if', 'implements',
'import', 'instanceof', 'int', 'interface', 'long', 'native',
'new', 'package', 'private', 'protected', 'public', 'return',
'short', 'static', 'strictfp', 'super', 'switch',
'synchronized', 'this', 'throw', 'throws', 'transient', 'try',
'void', 'volatile', 'while'}
class SimpleType(Keyword):
VALUES_SET = {'boolean', 'byte', 'char', 'double', 'float', 'int', 'long', 'short'}
class Literal(JavaToken):
pass
# Literal includes integers, strings, bool, and so on
# For the moment it doesn't matter
class Comment(JavaToken):
pass
class Separator(JavaToken):
VALUES_SET = {'(', ')', '{', '}', '[', ']', ';', ',', '.'}
class Operator(JavaToken):
MAX_LEN = 4
VALUES_SET = {'>>>=', '>>=', '<<=', '%=', '^=', '|=', '&=', '/=',
'*=', '-=', '+=', '<<', '--', '++', '||', '&&', '!=',
'>=', '<=', '==', '%', '^', '|', '&', '/', '*', '-',
'+', ':', '?', '~', '!', '<', '>', '=', '...', '->', '::'}
PREFIX_SET = {'++', '--', '!', '~', '+', '-'}
ASSIGNMENT_SET = {'=', '+=', '-=', '*=', '/=', '&=', '|=', '^=', '%=', '<<=', '>>=', '>>>='}
def is_prefix(self):
return self.value in self.PREFIX_SET
def is_assignment(self):
return self.value in self.ASSIGNMENT_SET
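    # For example, Operator('+=').is_assignment() is True and
    # Operator('++').is_prefix() is True, while Operator('==') is neither.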
class Annotation(JavaToken):
pass
class Identifier(JavaToken):
pass
|
"""Freeze metadata from Python index server to test locally.
Inspired by index_from_rubygems.rb from CocoaPods/Resolver-Integration-Specs.
This only reads metadata from wheels compatible with the given platform, and
does not cover sdists at all.
"""
from __future__ import annotations
import argparse
import collections
import dataclasses
import email.parser
import itertools
import json
import logging
import os
import pathlib
import re
import sys
import urllib.parse
import zipfile
from typing import (
IO,
BinaryIO,
Dict,
FrozenSet,
Iterable,
Iterator,
List,
NamedTuple,
Optional,
Set,
Tuple,
Union,
cast,
)
import html5lib
import packaging.requirements
import packaging.specifiers  # used by WheelMatcher.rank and the InvalidSpecifier handler below
import packaging.tags
import packaging.utils
import packaging.version
import requests
logger = logging.getLogger()
PythonVersion = Union[Tuple[int], Tuple[int, int]]
def _parse_python_version(s: str) -> PythonVersion:
match = re.match(r"^(\d+)(?:\.(\d+))?$", s)
if not match:
raise ValueError(s)
major, *more = match.groups()
if more:
return (int(major), int(more[0]))
return (int(major),)
def _parse_output_path(s: str) -> Optional[pathlib.Path]:
if s == "-":
return None
if os.sep in s or (os.altsep and os.altsep in s):
return pathlib.Path(s)
return pathlib.Path(__file__).with_name("inputs").joinpath("index", s)
def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"package_names",
metavar="PACKAGE",
nargs="+",
type=packaging.utils.canonicalize_name,
)
parser.add_argument(
"--python-version",
dest="python_version",
type=_parse_python_version,
default=".".join(str(v) for v in sys.version_info[:2]),
)
parser.add_argument(
"--interpreter",
default=None,
)
parser.add_argument(
"--platform",
dest="platforms",
action="append",
default=None,
)
parser.add_argument(
"--output",
type=_parse_output_path,
required=True,
)
parser.add_argument(
"--overwrite",
action="store_true",
default=False,
)
return parser.parse_args(args)
def get_output_path(path: pathlib.Path, overwrite: bool) -> pathlib.Path:
if path.suffix != ".json":
path = path.with_name(path.name + ".json")
if path.is_file() and not overwrite:
raise FileExistsError(os.fspath(path))
path.parent.mkdir(parents=True, exist_ok=True)
return path
def _parse_tag(s: str) -> FrozenSet[packaging.tags.Tag]:
try:
return packaging.tags.parse_tag(s)
except ValueError:
raise ValueError(f"invalid tag {s!r}")
@dataclasses.dataclass()
class WheelMatcher:
required_python: packaging.version.Version
tags: Dict[packaging.tags.Tag, int]
@classmethod
def compatible_with(
cls,
python_version: PythonVersion,
impl: Optional[str],
plats: Optional[List[str]],
) -> WheelMatcher:
required_python = packaging.version.Version(
".".join(str(v) for v in python_version)
)
# TODO: Add ABI customization.
tag_it = itertools.chain(
packaging.tags.compatible_tags(python_version, impl, plats),
packaging.tags.cpython_tags(python_version, None, plats),
)
tags = {t: i for i, t in enumerate(tag_it)}
return cls(required_python, tags)
def rank(self, tag: str, requires_python: Optional[str]) -> Optional[int]:
if requires_python:
spec = packaging.specifiers.SpecifierSet(requires_python)
if self.required_python not in spec:
return None
ranks = [self.tags[t] for t in _parse_tag(tag) if t in self.tags]
if not ranks:
return None
return min(ranks)
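# Usage sketch (the values are illustrative): a matcher for CPython 3.9 on manylinux,
#     matcher = WheelMatcher.compatible_with((3, 9), None, ["manylinux2014_x86_64"])
#     matcher.rank("py3-none-any", ">=3.6")
# returns the tag's preference index, or None when the wheel is incompatible.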
@dataclasses.dataclass()
class HttpFile:
url: str
session: requests.Session
def __post_init__(self):
self._offset = 0
self._size = int(self.session.head(self.url).headers["Content-Length"])
def read(self, n=None):
if n is None:
end = self._size
else:
end = self._offset + n
headers = {"Range": f"bytes={self._offset}-{end - 1}"}
res = self.session.get(self.url, headers=headers)
data = res.content
self._offset += len(data)
return data
def seek(self, offset, whence=0):
if whence == 0:
self._offset = offset
elif whence == 1:
self._offset += offset
elif whence == 2:
self._offset = self._size + offset
else:
err = f"ValueError: invalid whence ({whence}, should be 0, 1 or 2)"
raise ValueError(err)
def seekable(self):
return True
def tell(self):
return self._offset
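# HttpFile gives zipfile a seekable, file-like view of a remote wheel, so only
# the central directory and the METADATA member are fetched via HTTP Range
# requests instead of downloading the whole archive; see iter_package_entries
# below for the actual use.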
def _parse_wheel_name(rest: str) -> Tuple[str, str, str]:
name, rest = rest.split("-", 1)
version, x, y, z = rest.rsplit("-", 3)
return name, version, f"{x}-{y}-{z}"
def _open_metadata(zf: zipfile.ZipFile, prefix: str) -> IO[bytes]:
for fn in zf.namelist():
if not fn.endswith(".dist-info/METADATA"):
continue
if packaging.utils.canonicalize_name(fn).startswith(prefix):
return zf.open(fn)
raise ValueError("Can't find metadata")
class PackageEntry(NamedTuple):
version: str
dependencies: List[str]
DistListMapping = Dict[str, List[Tuple[int, str]]]
@dataclasses.dataclass()
class Finder:
index_urls: List[str]
matcher: WheelMatcher
session: requests.Session
def collect_best_dist_urls(self, name: str) -> Dict[str, str]:
all_dists: DistListMapping = collections.defaultdict(list)
for index_url in self.index_urls:
            res = self.session.get(f"{index_url}/{name}")  # reuse the shared HTTP session
res.raise_for_status()
doc = html5lib.parse(res.content, namespaceHTMLElements=False)
for el in doc.findall(".//a"):
url = el.attrib["href"]
filename = urllib.parse.urlsplit(url).path.rsplit("/", 1)[-1]
wheel_name, ext = filename.rsplit(".", 1)
if ext != "whl":
continue
requires_python = el.attrib.get("data-requires-python")
name, version, tag = _parse_wheel_name(wheel_name)
try:
rank = self.matcher.rank(tag, requires_python)
except packaging.specifiers.InvalidSpecifier:
logger.critical(
"Dropping %s==%s; invalid Requires-Python %r",
name,
version,
requires_python,
)
continue
if rank is None:
continue
all_dists[version].append((rank, url))
urls = {version: min(dists)[1] for version, dists in all_dists.items()}
logger.info("%d URLs found for %s", len(urls), name)
return urls
def iter_package_entries(self, name: str) -> Iterator[PackageEntry]:
for version, url in self.collect_best_dist_urls(name).items():
http_file = cast(IO[bytes], HttpFile(url, self.session))
with zipfile.ZipFile(http_file) as zf:
with _open_metadata(zf, name) as f:
parser = email.parser.BytesParser()
data = parser.parse(cast(BinaryIO, f), headersonly=True)
dependencies: List[str] = data.get_all("Requires-Dist", [])
yield PackageEntry(version, dependencies)
def process_package_entry(
self, name: str, entry: PackageEntry
) -> Optional[Set[str]]:
more = set()
for dep in entry.dependencies:
try:
req = packaging.requirements.Requirement(dep)
except packaging.requirements.InvalidRequirement:
logger.critical(
"Dropping %s==%s; invalid dependency %r",
name,
entry.version,
dep,
)
return None
more.add(str(packaging.utils.canonicalize_name(req.name)))
return more
def find(self, package_names: Iterable[str]) -> dict:
data = {}
while package_names:
more: Set[str] = set()
logger.info("Discovering %s", ", ".join(package_names))
for name in package_names:
entries: Dict[str, dict] = {}
for e in self.iter_package_entries(name):
result = self.process_package_entry(name, e)
if result is None:
continue
more |= result
entries[e.version] = {"dependencies": e.dependencies}
data[name] = entries
package_names = {n for n in more if n not in data}
return data
def main(args: Optional[List[str]]) -> int:
options = parse_args(args)
if not options.output:
output_path: Optional[pathlib.Path] = None
else:
output_path = get_output_path(options.output, options.overwrite)
matcher = WheelMatcher.compatible_with(
options.python_version, options.interpreter, options.platforms
)
finder = Finder(["https://pypi.org/simple"], matcher, requests.Session())
data = finder.find(options.package_names)
if output_path is None:
json.dump(data, sys.stdout, indent=2)
print()
else:
with output_path.open("w") as f:
json.dump(data, f, indent="\t")
logger.info("Written: %s", os.fspath(output_path))
return 0
if __name__ == "__main__":
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
sys.exit(main(None))
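# Example invocation (the script name and package choice are illustrative):
#     python freeze.py packaging --python-version 3.9 --output packaging.json
# Per _parse_output_path above, a bare output filename lands in the sibling
# inputs/index/ directory next to this script.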
|
from numpy.lib.npyio import load
import torch
import torch.nn.functional as F
from utility_functions import AvgPool3D
import h5py
import imageio
import numpy as np
import matplotlib.pyplot as plt
from utility_functions import AvgPool2D
import os
from netCDF4 import Dataset
FlowSTSR_folder_path = os.path.dirname(os.path.abspath(__file__))
'''
load_folder = os.path.join(FlowSTSR_folder_path, "TrainingData", "Supernova", "0.h5")
f = h5py.File(load_folder, 'r')
rootgrp = Dataset("supernova.nc", "w", format="NETCDF4")
rootgrp.createDimension("x")
rootgrp.createDimension("y")
rootgrp.createDimension("z")
dim_0 = rootgrp.createVariable("supernova", np.float32, ("x","y","z"))
dim_0[:] = f['data'][0]
'''
'''
load_folder = os.path.join(FlowSTSR_folder_path, "TrainingData", "Supernova_raw")
save_folder = os.path.join(FlowSTSR_folder_path, "TrainingData", "Supernova")
i = 0
for filename in os.listdir(load_folder):
print(filename)
data = np.fromfile(os.path.join(load_folder, filename), dtype=np.float32)
data = data.reshape([432, 432, 432])
print(data.min())
print(data.max())
print(data.mean())
#data = np.log10(1+data)
data -= data.mean()
data *= ( 1 / (max(data.max(), abs(data.min()))+1e-6))
data = torch.tensor(data).unsqueeze(0).unsqueeze(0)
data = F.interpolate(data, mode="trilinear", size=[448, 448, 448]).numpy()[0,0]
f = h5py.File(os.path.join(save_folder, str(i)+".h5"), 'w')
#f_data = np.array(f['data'])
#oct_no = 0
#f_data -= f_data.mean()
#f_data *= (1 / (max(f_data.max(), abs(f_data.min()))+ 1e-6))
#del f['data']
f.create_dataset("data", data=np.expand_dims(data, 0))
for z in range(0, f_data.shape[1], 128):
for y in range(0, f_data.shape[2], 128):
for x in range(0, f_data.shape[3], 128):
d = f_data[:,z:z+128,y:y+128,x:x+128]
f_h5_oct = h5py.File(os.path.join(save_folder, str(oct_no)+"_"+filename), 'w')
f_h5_oct.create_dataset("data", data=d)
f_h5_oct.close()
oct_no += 1
f.close()
i += 1
'''
'''
load_folder = os.path.join(FlowSTSR_folder_path, "TrainingData", "Combustion_raw")
save_folder = os.path.join(FlowSTSR_folder_path, "TrainingData", "Combustion_vort")
imgs = []
for i in range(1, 123):
i_format = "%04d" % i
folder = os.path.join(load_folder, "jet_"+i_format)
load_name = "jet_vort_"+i_format+".dat"
save_name = "%04d.h5" % (i-1)
print("loading " + load_name)
data = np.fromfile(os.path.join(load_folder, folder, load_name), dtype=np.float32)
data = data.reshape([120, 720, 480])
print(data.min())
print(data.max())
print(data.mean())
data -= data.min()
#data = np.log10(1+data)
data = data / data.max()
print(data.shape)
data = torch.tensor(data).unsqueeze(0).unsqueeze(0)
data = F.interpolate(data, mode='trilinear', size=[128, 768, 512])
data = data[0].cpu().numpy()
print(data.shape)
imgs.append(data[0,64])
print("saving " + save_name)
f_h5 = h5py.File(os.path.join(save_folder, save_name), 'w')
f_h5.create_dataset("data", data=data)
f_h5.close()
print("Saving gif")
imageio.mimwrite("Combustion_vort.gif", imgs)
'''
'''
name = "dark_matter_density"
data = np.fromfile(os.path.join(FlowSTSR_folder_path, "InputData", "Nyx", name+".dat"), dtype=np.float32)
data = data.reshape([512, 512, 512])
print(data.min())
print(data.max())
print(data.mean())
data -= data.min()
data = np.log10(1+data)
data = data / data.max()
data -= data.mean()
data *= ( 1 / (max(data.max(), abs(data.min()))+1e-6))
print(data.shape)
rootgrp = Dataset(name+".nc", "w", format="NETCDF4")
rootgrp.createDimension("x")
rootgrp.createDimension("y")
rootgrp.createDimension("z")
dim_0 = rootgrp.createVariable(name, np.float32, ("x","y","z"))
dim_0[:] = data
'''
'''
results_powerspectrum = {
"xs" : [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, 40.0, 41.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0, 75.0, 76.0, 77.0, 78.0, 79.0, 80.0, 81.0, 82.0, 83.0, 84.0, 85.0, 86.0, 87.0, 88.0, 89.0, 90.0, 91.0, 92.0, 93.0, 94.0, 95.0, 96.0, 97.0, 98.0, 99.0, 100.0, 101.0, 102.0, 103.0, 104.0, 105.0, 106.0, 107.0, 108.0, 109.0, 110.0, 111.0, 112.0, 113.0, 114.0, 115.0, 116.0, 117.0, 118.0, 119.0, 120.0, 121.0, 122.0, 123.0, 124.0, 125.0, 126.0, 127.0, 128.0, 129.0, 130.0, 131.0, 132.0, 133.0, 134.0, 135.0, 136.0, 137.0, 138.0, 139.0, 140.0, 141.0, 142.0, 143.0, 144.0, 145.0, 146.0, 147.0, 148.0, 149.0, 150.0, 151.0, 152.0, 153.0, 154.0, 155.0, 156.0, 157.0, 158.0, 159.0, 160.0, 161.0, 162.0, 163.0, 164.0, 165.0, 166.0, 167.0, 168.0, 169.0, 170.0, 171.0, 172.0, 173.0, 174.0, 175.0, 176.0, 177.0, 178.0, 179.0, 180.0, 181.0, 182.0, 183.0, 184.0, 185.0, 186.0, 187.0, 188.0, 189.0, 190.0, 191.0, 192.0, 193.0, 194.0, 195.0, 196.0, 197.0, 198.0, 199.0, 200.0, 201.0, 202.0, 203.0, 204.0, 205.0, 206.0, 207.0, 208.0, 209.0, 210.0, 211.0, 212.0, 213.0, 214.0, 215.0, 216.0, 217.0, 218.0, 219.0, 220.0, 221.0, 222.0, 223.0, 224.0, 225.0, 226.0, 227.0, 228.0, 229.0, 230.0, 231.0, 232.0, 233.0, 234.0, 235.0, 236.0, 237.0, 238.0, 239.0, 240.0, 241.0, 242.0, 243.0, 244.0, 245.0, 246.0, 247.0, 248.0, 249.0, 250.0, 251.0, 252.0, 253.0, 254.0, 255.0, 256.0, 257.0, 258.0, 259.0, 260.0, 261.0, 262.0, 263.0, 264.0, 265.0, 266.0, 267.0, 268.0, 269.0, 270.0, 271.0, 272.0, 273.0, 274.0, 275.0, 276.0, 277.0, 278.0, 279.0, 280.0, 281.0, 282.0, 283.0, 284.0, 285.0, 286.0, 287.0, 288.0, 289.0, 290.0, 291.0, 292.0, 293.0, 294.0, 295.0, 296.0, 297.0, 298.0, 299.0, 300.0, 301.0, 302.0, 303.0, 304.0, 305.0, 306.0, 307.0, 308.0, 309.0, 310.0, 311.0, 312.0, 313.0, 314.0, 315.0, 316.0, 317.0, 318.0, 319.0, 320.0, 321.0, 322.0, 323.0, 324.0, 325.0, 326.0, 327.0, 328.0, 329.0, 330.0, 331.0, 332.0, 333.0, 334.0, 335.0, 336.0, 337.0, 338.0, 339.0, 340.0, 341.0, 342.0, 343.0, 344.0, 345.0, 346.0, 347.0, 348.0, 349.0, 350.0, 351.0, 352.0, 353.0, 354.0, 355.0, 356.0, 357.0, 358.0, 359.0, 360.0, 361.0, 362.0, 363.0, 364.0, 365.0, 366.0, 367.0, 368.0, 369.0, 370.0, 371.0, 372.0, 373.0, 374.0, 375.0, 376.0, 377.0, 378.0, 379.0, 380.0, 381.0, 382.0, 383.0, 384.0, 385.0, 386.0, 387.0, 388.0, 389.0, 390.0, 391.0, 392.0, 393.0, 394.0, 395.0, 396.0, 397.0, 398.0, 399.0, 400.0, 401.0, 402.0, 403.0, 404.0, 405.0, 406.0, 407.0, 408.0, 409.0, 410.0, 411.0, 412.0, 413.0, 414.0, 415.0, 416.0, 417.0, 418.0, 419.0, 420.0, 421.0, 422.0, 423.0, 424.0, 425.0, 426.0, 427.0, 428.0, 429.0, 430.0, 431.0, 432.0, 433.0, 434.0, 435.0, 436.0, 437.0, 438.0, 439.0, 440.0, 441.0, 442.0, 443.0, 444.0, 445.0, 446.0, 447.0, 448.0, 449.0, 450.0, 451.0, 452.0, 453.0, 454.0, 455.0, 456.0, 457.0, 458.0, 459.0, 460.0, 461.0, 462.0, 463.0, 464.0, 465.0, 466.0, 467.0, 468.0, 469.0, 470.0, 471.0, 472.0, 473.0, 474.0, 475.0, 476.0, 477.0, 478.0, 479.0, 480.0, 481.0, 482.0, 483.0, 484.0, 485.0, 486.0, 487.0, 488.0, 489.0, 490.0, 491.0, 492.0, 493.0, 494.0, 495.0, 496.0, 497.0, 498.0, 499.0, 500.0, 501.0, 502.0, 503.0, 504.0, 505.0, 506.0, 507.0, 508.0, 509.0, 510.0, 511.0],
"Ground truth": [1.1190683841705322, 0.3420126140117645, 0.9333255887031555, 1.5620728731155396, 1.6811823844909668, 2.0471885204315186, 2.0127644538879395, 2.205718517303467, 2.3285107612609863, 2.538517951965332, 2.5047643184661865, 2.498236656188965, 2.514432907104492, 2.6636266708374023, 2.662963390350342, 2.8065056800842285, 2.864065647125244, 2.9518051147460938, 3.0031092166900635, 3.0606651306152344, 3.1099603176116943, 3.1896238327026367, 3.196472406387329, 3.265127182006836, 3.221163749694824, 3.3132147789001465, 3.384387493133545, 3.4574427604675293, 3.376142740249634, 3.4250850677490234, 3.466907024383545, 3.5291590690612793, 3.601930618286133, 3.6391797065734863, 3.601001739501953, 3.6566579341888428, 3.6933250427246094, 3.736211061477661, 3.7532618045806885, 3.854665756225586, 3.8566954135894775, 3.90645694732666, 3.891075611114502, 3.910860061645508, 3.9208781719207764, 3.9504170417785645, 3.9425199031829834, 3.991117238998413, 4.017868518829346, 4.0647430419921875, 4.067299842834473, 4.077878475189209, 4.05474328994751, 4.081634521484375, 4.050901889801025, 4.0634846687316895, 4.095348834991455, 4.094532012939453, 4.0956525802612305, 4.124024868011475, 4.126298904418945, 4.132656097412109, 4.0990495681762695, 4.15346097946167, 4.089120864868164, 4.131629943847656, 4.105545997619629, 4.0649189949035645, 4.097238540649414, 4.112988471984863, 4.083864212036133, 4.0850653648376465, 4.084239959716797, 4.14586877822876, 4.110552787780762, 4.0778398513793945, 4.0708417892456055, 4.077771186828613, 4.0268754959106445, 4.011659622192383, 4.011858940124512, 4.018866062164307, 4.013390064239502, 4.033783912658691, 3.999823808670044, 4.001452445983887, 3.9851107597351074, 3.9653677940368652, 3.925686836242676, 3.9554500579833984, 3.961393117904663, 3.916207790374756, 3.8985559940338135, 3.8903772830963135, 3.852931261062622, 3.8683061599731445, 3.854796886444092, 3.8557546138763428, 3.835136651992798, 3.811753988265991, 3.7835757732391357, 3.807292938232422, 3.7804579734802246, 3.7565386295318604, 3.749875068664551, 3.7290899753570557, 3.692688465118408, 3.7154173851013184, 3.68202805519104, 3.6697309017181396, 3.654149055480957, 3.6538758277893066, 3.6413869857788086, 3.6132185459136963, 3.5981738567352295, 3.591278314590454, 3.5696606636047363, 3.552767038345337, 3.524082660675049, 3.5163803100585938, 3.5020384788513184, 3.4951729774475098, 3.4604597091674805, 3.4652695655822754, 3.442308187484741, 3.431004524230957, 3.4016897678375244, 3.374783515930176, 3.372143507003784, 3.372366189956665, 3.34720778465271, 3.3294551372528076, 3.3186697959899902, 3.3201980590820312, 3.2724483013153076, 3.250406265258789, 3.2459616661071777, 3.2337632179260254, 3.210862159729004, 3.1972506046295166, 3.185260772705078, 3.178865432739258, 3.1517069339752197, 3.1460461616516113, 3.12481689453125, 3.1197800636291504, 3.093331813812256, 3.0784103870391846, 3.056544303894043, 3.0535006523132324, 3.031879186630249, 3.0087976455688477, 2.9976024627685547, 2.9956202507019043, 2.9708242416381836, 2.948726177215576, 2.9309027194976807, 2.9230315685272217, 2.917128801345825, 2.901489496231079, 2.872803211212158, 2.8735508918762207, 2.857450485229492, 2.830136299133301, 2.8179712295532227, 2.814012050628662, 2.7855384349823, 2.7807164192199707, 2.7613062858581543, 2.751081943511963, 2.741682291030884, 2.726741313934326, 2.704336643218994, 2.6901416778564453, 2.6726255416870117, 2.662313461303711, 2.641780376434326, 2.628110647201538, 2.6177759170532227, 2.6156134605407715, 2.5985188484191895, 2.578557252883911, 
2.5638628005981445, 2.5618531703948975, 2.5376439094543457, 2.5236611366271973, 2.5087203979492188, 2.5025901794433594, 2.483574867248535, 2.4718785285949707, 2.455981731414795, 2.4522318840026855, 2.432283878326416, 2.418727397918701, 2.4100472927093506, 2.3955516815185547, 2.37978458404541, 2.3629322052001953, 2.3499863147735596, 2.346564292907715, 2.3332254886627197, 2.324185609817505, 2.3017940521240234, 2.293722152709961, 2.2805330753326416, 2.275031566619873, 2.266355037689209, 2.2468535900115967, 2.228368043899536, 2.22149658203125, 2.2054643630981445, 2.193699359893799, 2.18642520904541, 2.180067539215088, 2.1603341102600098, 2.1440157890319824, 2.1316323280334473, 2.124614715576172, 2.1163768768310547, 2.105872869491577, 2.089383363723755, 2.0855250358581543, 2.068859815597534, 2.057668447494507, 2.0414881706237793, 2.032466411590576, 2.020662307739258, 2.013077735900879, 2.0018367767333984, 1.993006944656372, 1.9785138368606567, 1.969189167022705, 1.95833420753479, 1.9510802030563354, 1.9384760856628418, 1.9260565042495728, 1.9182871580123901, 1.9099781513214111, 1.8929193019866943, 1.8888294696807861, 1.8793785572052002, 1.8702912330627441, 1.8617284297943115, 1.8521543741226196, 1.8379478454589844, 1.8265902996063232, 1.8141100406646729, 1.8109724521636963, 1.7976315021514893, 1.7858699560165405, 1.77964448928833, 1.7742376327514648, 1.7610981464385986, 1.7476470470428467, 1.7410534620285034, 1.733888030052185, 1.7177059650421143, 1.711566686630249, 1.7022755146026611, 1.6938519477844238, 1.683475375175476, 1.6756837368011475, 1.66288423538208, 1.658718466758728, 1.6499748229980469, 1.6431115865707397, 1.6306805610656738, 1.6191924810409546, 1.6145540475845337, 1.6099400520324707, 1.595442771911621, 1.5885342359542847, 1.5823520421981812, 1.5738065242767334, 1.5614428520202637, 1.5527968406677246, 1.5471256971359253, 1.537325143814087, 1.5250238180160522, 1.5206549167633057, 1.5111960172653198, 1.5057361125946045, 1.4965167045593262, 1.490788459777832, 1.4815080165863037, 1.4746391773223877, 1.4640076160430908, 1.457160234451294, 1.448843002319336, 1.4422194957733154, 1.434809684753418, 1.4261173009872437, 1.4200022220611572, 1.4154033660888672, 1.4036798477172852, 1.393580675125122, 1.3865587711334229, 1.3800013065338135, 1.3716802597045898, 1.3671883344650269, 1.3590185642242432, 1.3514574766159058, 1.3467581272125244, 1.3412702083587646, 1.3284497261047363, 1.325471043586731, 1.3168408870697021, 1.3105382919311523, 1.3025895357131958, 1.2964926958084106, 1.2893784046173096, 1.2835643291473389, 1.276824712753296, 1.2680318355560303, 1.263905644416809, 1.258460521697998, 1.2467734813690186, 1.2420029640197754, 1.234485149383545, 1.2292991876602173, 1.2234277725219727, 1.2177150249481201, 1.2119454145431519, 1.2068240642547607, 1.197357416152954, 1.1922489404678345, 1.1870543956756592, 1.1797062158584595, 1.171940803527832, 1.1681784391403198, 1.161798119544983, 1.15366530418396, 1.146681547164917, 1.1455373764038086, 1.1367378234863281, 1.1318409442901611, 1.1273504495620728, 1.1197967529296875, 1.1149868965148926, 1.1098885536193848, 1.1020715236663818, 1.0972886085510254, 1.0892186164855957, 1.0862195491790771, 1.0812530517578125, 1.0765764713287354, 1.0699241161346436, 1.0631858110427856, 1.0596129894256592, 1.0557236671447754, 1.0486197471618652, 1.042668342590332, 1.0372450351715088, 1.0340666770935059, 1.026733636856079, 1.023666262626648, 1.0181037187576294, 1.0113698244094849, 1.0057456493377686, 1.001365065574646, 0.9977527856826782, 0.9932143092155457, 
0.988821804523468, 0.9849964380264282, 0.9771963357925415, 0.97525954246521, 0.9684959650039673, 0.9636805057525635, 0.9594756960868835, 0.9553160667419434, 0.9498395919799805, 0.9436156749725342, 0.9402933120727539, 0.9379162788391113, 0.9327365159988403, 0.9280365705490112, 0.9213746786117554, 0.9198072552680969, 0.9153320789337158, 0.909796953201294, 0.9058825969696045, 0.9016543030738831, 0.8976033329963684, 0.8946702480316162, 0.8910138607025146, 0.8878934383392334, 0.8817130327224731, 0.8781323432922363, 0.8737807273864746, 0.8715190887451172, 0.866780161857605, 0.8626649379730225, 0.860140860080719, 0.8559972643852234, 0.8512244820594788, 0.8483637571334839, 0.8452695608139038, 0.8412681818008423, 0.8377372026443481, 0.8345264196395874, 0.8293238878250122, 0.8290265798568726, 0.8246049284934998, 0.8204882144927979, 0.8184759616851807, 0.8162283897399902, 0.8110785484313965, 0.8082807064056396, 0.8076949119567871, 0.8048509359359741, 0.7995688915252686, 0.7971093654632568, 0.7940529584884644, 0.7921943664550781, 0.7880988121032715, 0.7861512899398804, 0.7832455635070801, 0.7797336578369141, 0.7787905335426331, 0.7774345874786377, 0.7742193341255188, 0.77223140001297, 0.7704635858535767, 0.7684988379478455, 0.7648189663887024, 0.7648065090179443, 0.7629718780517578, 0.7606545686721802, 0.7585764527320862, 0.7572806477546692, 0.7546086311340332, 0.7538478374481201, 0.7529914975166321, 0.7520278692245483, 0.7515685558319092, 0.7493504285812378, 0.7462525367736816, 0.7479411363601685, 0.7471016049385071, 0.7466188669204712, 0.7458482980728149, 0.7455189228057861, 0.7447053790092468, 0.7445083856582642, 0.7447601556777954, 0.7460190057754517, 0.7449151277542114, 0.7455974221229553, 0.7451785206794739, 0.7469951510429382, 0.7494481801986694, 0.7494547367095947, 0.7504644989967346, 0.7523459792137146, 0.7533676028251648, 0.7557471990585327, 0.7569328546524048, 0.7594692707061768, 0.7615477442741394, 0.7657470107078552, 0.7682344913482666, 0.7709678411483765, 0.7746517062187195, 0.7800387740135193, 0.7848511934280396, 0.7890198230743408, 0.7936344146728516, 0.7995954155921936, 0.8055813908576965, 0.8128366470336914, 0.8191878795623779, 0.8277574777603149, 0.8362009525299072, 0.8454173803329468, 0.8551415205001831, 0.8661440014839172, 0.8768675327301025, 0.8884624242782593, 0.9010363221168518, 0.918041467666626, 0.9367120265960693, 0.9509989023208618, 0.9254101514816284, 0.8328577280044556, 0.6971551179885864, 0.59366774559021, 0.5412344932556152, 0.5084195733070374, 0.4840403199195862, 0.46631577610969543, 0.45144525170326233, 0.4398314654827118, 0.43015772104263306, 0.42178037762641907, 0.4137604534626007, 0.4069886803627014, 0.40074673295021057, 0.395257830619812, 0.38960590958595276, 0.38429105281829834, 0.3801017105579376, 0.3757082223892212, 0.37125539779663086, 0.3681381642818451, 0.3640318214893341, 0.36089059710502625, 0.3572271466255188, 0.35398179292678833, 0.35095149278640747, 0.3481188416481018, 0.34498322010040283, 0.3427697420120239],
"Ours": [1.1153228282928467, 0.34090104699134827, 0.9304943680763245, 1.5573272705078125, 1.6758836507797241, 2.0408053398132324, 2.006425142288208, 2.1985116004943848, 2.320852756500244, 2.5305397510528564, 2.496875047683716, 2.4901957511901855, 2.506537675857544, 2.655010938644409, 2.6539340019226074, 2.7967591285705566, 2.854279041290283, 2.9418861865997314, 2.9931249618530273, 3.0504517555236816, 3.0996084213256836, 3.178816795349121, 3.1851437091827393, 3.2540173530578613, 3.210718870162964, 3.3019816875457764, 3.373905658721924, 3.4469873905181885, 3.365607261657715, 3.414923667907715, 3.456472635269165, 3.5185256004333496, 3.5911662578582764, 3.6282572746276855, 3.59013032913208, 3.6463606357574463, 3.683981418609619, 3.7290332317352295, 3.7451581954956055, 3.8440325260162354, 3.847442626953125, 3.897752046585083, 3.883057117462158, 3.906029224395752, 3.9152119159698486, 3.9427499771118164, 3.936728000640869, 3.9846973419189453, 4.010962963104248, 4.057991981506348, 4.0607829093933105, 4.071920394897461, 4.047878265380859, 4.076672554016113, 4.046592712402344, 4.058632850646973, 4.090663909912109, 4.090540885925293, 4.090915203094482, 4.118472099304199, 4.120879173278809, 4.12725830078125, 4.096337795257568, 4.151799201965332, 4.08836555480957, 4.13215446472168, 4.10634708404541, 4.064346790313721, 4.095883369445801, 4.112642288208008, 4.084968566894531, 4.086372375488281, 4.085682392120361, 4.147841453552246, 4.11374044418335, 4.08182954788208, 4.079225540161133, 4.088042736053467, 4.035247802734375, 4.021348476409912, 4.022994518280029, 4.02926778793335, 4.022043228149414, 4.0482635498046875, 4.017740249633789, 4.017524242401123, 4.002788543701172, 3.9826741218566895, 3.9433817863464355, 3.981473445892334, 3.9912376403808594, 3.944359302520752, 3.9277195930480957, 3.923551559448242, 3.888425350189209, 3.9035427570343018, 3.891080141067505, 3.8936822414398193, 3.8742923736572266, 3.8523356914520264, 3.8271563053131104, 3.849806308746338, 3.8199501037597656, 3.795869827270508, 3.7883427143096924, 3.770404100418091, 3.732848644256592, 3.754577159881592, 3.7169692516326904, 3.7038183212280273, 3.6884007453918457, 3.6786253452301025, 3.659703016281128, 3.6306939125061035, 3.6139280796051025, 3.5972037315368652, 3.5640182495117188, 3.5416903495788574, 3.507223606109619, 3.499530792236328, 3.48062801361084, 3.4627771377563477, 3.4176933765411377, 3.408714532852173, 3.3786826133728027, 3.355195999145508, 3.3076860904693604, 3.2773783206939697, 3.269092559814453, 3.2521603107452393, 3.214777946472168, 3.18684720993042, 3.15631103515625, 3.1373634338378906, 3.0841126441955566, 3.047171115875244, 3.0256576538085938, 3.001328468322754, 2.9589123725891113, 2.9159340858459473, 2.8838462829589844, 2.8604300022125244, 2.8156261444091797, 2.7942371368408203, 2.747051239013672, 2.7138137817382812, 2.670997381210327, 2.6312925815582275, 2.590864896774292, 2.564603567123413, 2.521679162979126, 2.47790789604187, 2.4420883655548096, 2.416328191757202, 2.368852138519287, 2.3216724395751953, 2.2812752723693848, 2.2469561100006104, 2.2083239555358887, 2.1667633056640625, 2.118393659591675, 2.0920870304107666, 2.0534658432006836, 2.0046892166137695, 1.9649662971496582, 1.9365615844726562, 1.888371467590332, 1.8534724712371826, 1.813797950744629, 1.7801812887191772, 1.7457194328308105, 1.70719313621521, 1.671086072921753, 1.638359546661377, 1.60001802444458, 1.5695774555206299, 1.533945083618164, 1.5016539096832275, 1.4650049209594727, 1.4411805868148804, 1.410683035850525, 1.376370906829834, 
1.3433935642242432, 1.3191940784454346, 1.2900049686431885, 1.2610714435577393, 1.2302417755126953, 1.2054611444473267, 1.1760773658752441, 1.1485674381256104, 1.1211044788360596, 1.1028622388839722, 1.0798556804656982, 1.0536324977874756, 1.0297801494598389, 1.00953209400177, 0.9869029521942139, 0.9602900147438049, 0.9374580383300781, 0.9174153208732605, 0.8955039381980896, 0.8792177438735962, 0.8594746589660645, 0.8422635793685913, 0.8203250169754028, 0.8051974773406982, 0.7877455949783325, 0.7678631544113159, 0.749730110168457, 0.7361254692077637, 0.7218165397644043, 0.7057351469993591, 0.6909279227256775, 0.679057776927948, 0.6636543273925781, 0.6493905782699585, 0.6355352401733398, 0.6247724294662476, 0.6135773062705994, 0.6006838083267212, 0.5882638096809387, 0.5807201862335205, 0.5683357119560242, 0.5572512149810791, 0.5483596920967102, 0.5400669574737549, 0.5283457040786743, 0.5201305150985718, 0.5123773813247681, 0.5048538446426392, 0.4954215884208679, 0.48746538162231445, 0.4800589978694916, 0.4743598699569702, 0.46617409586906433, 0.4590623080730438, 0.4532712697982788, 0.44650328159332275, 0.43933895230293274, 0.43494218587875366, 0.4298359453678131, 0.4246175289154053, 0.4195297658443451, 0.415266752243042, 0.40991103649139404, 0.40447118878364563, 0.39901694655418396, 0.3962525725364685, 0.39106887578964233, 0.386704683303833, 0.3837285041809082, 0.3807882070541382, 0.3760018050670624, 0.3721970319747925, 0.3696250021457672, 0.36673808097839355, 0.3622777462005615, 0.35973986983299255, 0.3576853275299072, 0.35531824827194214, 0.352448046207428, 0.35022327303886414, 0.3471429944038391, 0.3462942838668823, 0.3442196846008301, 0.3433523178100586, 0.34128111600875854, 0.3384576439857483, 0.3376944959163666, 0.33681434392929077, 0.3337528705596924, 0.33340197801589966, 0.33275824785232544, 0.33121222257614136, 0.32958775758743286, 0.32840844988822937, 0.3276406526565552, 0.32626813650131226, 0.3249896168708801, 0.32487475872039795, 0.32280296087265015, 0.32224124670028687, 0.3210645914077759, 0.32047122716903687, 0.3192417621612549, 0.3181474804878235, 0.3160315155982971, 0.3153911828994751, 0.3146517276763916, 0.3135589063167572, 0.3125459849834442, 0.31119486689567566, 0.30966514348983765, 0.3093380928039551, 0.30769842863082886, 0.3065149784088135, 0.3054748475551605, 0.3045114576816559, 0.3031247854232788, 0.30188876390457153, 0.30010342597961426, 0.2989503741264343, 0.2982436418533325, 0.2971932888031006, 0.29501110315322876, 0.29465359449386597, 0.29316025972366333, 0.29215922951698303, 0.29055124521255493, 0.28896310925483704, 0.28788793087005615, 0.28697723150253296, 0.2854756712913513, 0.28388339281082153, 0.28250908851623535, 0.2812078893184662, 0.27883782982826233, 0.277726411819458, 0.2758181691169739, 0.2740240693092346, 0.2723538875579834, 0.2712932825088501, 0.2695227563381195, 0.2675762474536896, 0.265394926071167, 0.2640475332736969, 0.26242145895957947, 0.2606334090232849, 0.25847649574279785, 0.2570120692253113, 0.2550842761993408, 0.25285786390304565, 0.2507804036140442, 0.2497207671403885, 0.24733400344848633, 0.24500975012779236, 0.2432435154914856, 0.24159389734268188, 0.23940923810005188, 0.23743754625320435, 0.23546430468559265, 0.23370173573493958, 0.23107978701591492, 0.2296576350927353, 0.22751715779304504, 0.22556769847869873, 0.22350920736789703, 0.2212245762348175, 0.2196393758058548, 0.21803191304206848, 0.21571755409240723, 0.213881254196167, 0.21181795001029968, 0.21047206223011017, 0.2082580029964447, 0.20665214955806732, 0.20481657981872559, 
0.202865332365036, 0.20101895928382874, 0.19924373924732208, 0.19771181046962738, 0.19587865471839905, 0.19445990025997162, 0.1931648701429367, 0.19073542952537537, 0.18941225111484528, 0.1872359812259674, 0.1856788992881775, 0.18424083292484283, 0.18266473710536957, 0.18082359433174133, 0.17901328206062317, 0.17771995067596436, 0.1763811707496643, 0.17479191720485687, 0.17346076667308807, 0.17155960202217102, 0.17053812742233276, 0.16937805712223053, 0.1678336262702942, 0.16652624309062958, 0.1652628630399704, 0.16423648595809937, 0.16327372193336487, 0.16181880235671997, 0.16084891557693481, 0.15929532051086426, 0.1581839919090271, 0.15716151893138885, 0.1565023809671402, 0.1552121490240097, 0.154057577252388, 0.15326741337776184, 0.15239381790161133, 0.15136146545410156, 0.15054181218147278, 0.1497347503900528, 0.14888013899326324, 0.14802473783493042, 0.14726915955543518, 0.14606109261512756, 0.14556919038295746, 0.14463719725608826, 0.14386232197284698, 0.14328238368034363, 0.14262837171554565, 0.14163267612457275, 0.14101764559745789, 0.14057859778404236, 0.13993299007415771, 0.1391335427761078, 0.13869091868400574, 0.13792432844638824, 0.1372213065624237, 0.13645437359809875, 0.13610398769378662, 0.13545837998390198, 0.13474208116531372, 0.13437873125076294, 0.1339283585548401, 0.13320046663284302, 0.132838636636734, 0.13239337503910065, 0.13194067776203156, 0.13123728334903717, 0.1311550885438919, 0.13078373670578003, 0.130154550075531, 0.1296483725309372, 0.1294194608926773, 0.12897707521915436, 0.12878157198429108, 0.12839968502521515, 0.12813791632652283, 0.12794016301631927, 0.127616286277771, 0.12715895473957062, 0.12721049785614014, 0.12672457098960876, 0.12648063898086548, 0.12652838230133057, 0.12635426223278046, 0.12599827349185944, 0.12594787776470184, 0.12578772008419037, 0.12594535946846008, 0.12584179639816284, 0.12572622299194336, 0.1253417581319809, 0.12534385919570923, 0.12552067637443542, 0.12552782893180847, 0.1254911720752716, 0.12551414966583252, 0.12537963688373566, 0.12546998262405396, 0.12544065713882446, 0.12555497884750366, 0.1255529522895813, 0.12559042870998383, 0.1255866140127182, 0.12576580047607422, 0.12582141160964966, 0.1259608268737793, 0.12607309222221375, 0.12626877427101135, 0.12627995014190674, 0.12640318274497986, 0.1264580935239792, 0.12670743465423584, 0.126683309674263, 0.12687517702579498, 0.12701520323753357, 0.12725453078746796, 0.1273999810218811, 0.12762989103794098, 0.12778374552726746, 0.12795989215373993, 0.1280672550201416, 0.12846659123897552, 0.1287492960691452, 0.1290655881166458, 0.1292874813079834, 0.1296805441379547, 0.12978419661521912, 0.13002178072929382, 0.13022078573703766, 0.13058802485466003, 0.13087955117225647, 0.13125506043434143, 0.13132280111312866, 0.13167330622673035, 0.13202489912509918, 0.1323847472667694, 0.13260358572006226, 0.13301017880439758, 0.13318505883216858, 0.13359428942203522, 0.1339586079120636, 0.13416869938373566, 0.134586900472641, 0.13489389419555664, 0.1350838541984558, 0.1355653554201126, 0.13578404486179352, 0.13623575866222382, 0.13661232590675354, 0.1370251476764679, 0.13739144802093506, 0.13773508369922638, 0.13797834515571594, 0.13846638798713684],
"SR-octree":[2.17677903175354, 3.104686737060547, 4.043375015258789, 4.037208557128906, 4.919696807861328, 5.874845504760742, 6.2145538330078125, 6.847893714904785, 7.0682477951049805, 7.276698589324951, 7.544886589050293, 7.74085807800293, 7.777347087860107, 7.984922409057617, 8.12092399597168, 8.410907745361328, 8.589017868041992, 8.80884838104248, 9.000163078308105, 9.18309497833252, 9.34698486328125, 9.463083267211914, 9.539694786071777, 9.696228981018066, 9.7568941116333, 9.933374404907227, 9.937393188476562, 10.075197219848633, 10.151405334472656, 10.271442413330078, 10.316315650939941, 10.415536880493164, 10.535900115966797, 10.610088348388672, 10.73361587524414, 10.794353485107422, 10.822015762329102, 10.972721099853516, 11.053365707397461, 11.175329208374023, 11.213326454162598, 11.322216033935547, 11.321980476379395, 11.381351470947266, 11.37746524810791, 11.416399002075195, 11.463138580322266, 11.52442741394043, 11.55345344543457, 11.60187816619873, 11.626443862915039, 11.648893356323242, 11.609807014465332, 11.608047485351562, 11.581705093383789, 11.57864761352539, 11.59166431427002, 11.564367294311523, 11.611806869506836, 11.585509300231934, 11.54958724975586, 11.591119766235352, 11.512060165405273, 11.541635513305664, 11.442058563232422, 11.432317733764648, 11.370356559753418, 11.346770286560059, 11.323681831359863, 11.246341705322266, 11.24929428100586, 11.263555526733398, 11.189676284790039, 11.172561645507812, 11.127989768981934, 11.109527587890625, 11.025104522705078, 10.93754768371582, 10.87304973602295, 10.828706741333008, 10.773083686828613, 10.73055648803711, 10.691869735717773, 10.672197341918945, 10.602753639221191, 10.565139770507812, 10.472122192382812, 10.436323165893555, 10.384164810180664, 10.307401657104492, 10.244791030883789, 10.187444686889648, 10.133800506591797, 10.053133010864258, 9.98469066619873, 9.944353103637695, 9.868850708007812, 9.82454776763916, 9.760875701904297, 9.713533401489258, 9.636054992675781, 9.573599815368652, 9.500782012939453, 9.448432922363281, 9.378438949584961, 9.313541412353516, 9.23959732055664, 9.18528938293457, 9.09572982788086, 9.068514823913574, 8.998703956604004, 8.928522109985352, 8.880106925964355, 8.817352294921875, 8.758609771728516, 8.687564849853516, 8.623149871826172, 8.560752868652344, 8.500204086303711, 8.43880844116211, 8.361600875854492, 8.317346572875977, 8.25059700012207, 8.209114074707031, 8.130905151367188, 8.076295852661133, 8.017139434814453, 7.965965747833252, 7.907797813415527, 7.84065580368042, 7.799917697906494, 7.758687973022461, 7.68281364440918, 7.627373218536377, 7.573716163635254, 7.519899368286133, 7.457916259765625, 7.397641181945801, 7.358355522155762, 7.32036828994751, 7.266603469848633, 7.226200103759766, 7.178731441497803, 7.144039154052734, 7.082544326782227, 7.045069694519043, 6.994504928588867, 6.961493015289307, 6.912945747375488, 6.869272232055664, 6.82679557800293, 6.802300453186035, 6.76731014251709, 6.724225044250488, 6.694716453552246, 6.665427207946777, 6.637665271759033, 6.601127624511719, 6.570601463317871, 6.55035400390625, 6.521780967712402, 6.493141174316406, 6.459081649780273, 6.441119194030762, 6.4113569259643555, 6.389740467071533, 6.367619514465332, 6.360256195068359, 6.339751243591309, 6.32916259765625, 6.317852973937988, 6.301746368408203, 6.2907280921936035, 6.273906707763672, 6.252493858337402, 6.241593837738037, 6.233564376831055, 6.2279157638549805, 6.2212419509887695, 6.213734149932861, 6.20947790145874, 6.22147274017334, 6.211834907531738, 6.213722229003906, 
6.207303524017334, 6.221661567687988, 6.212750434875488, 6.214476108551025, 6.21366024017334, 6.223293781280518, 6.221900463104248, 6.224071025848389, 6.22590446472168, 6.234245300292969, 6.237458229064941, 6.233868598937988, 6.241415023803711, 6.249100685119629, 6.248523235321045, 6.256603240966797, 6.259876251220703, 6.273702621459961, 6.27950382232666, 6.289096832275391, 6.293007850646973, 6.297235488891602, 6.301527500152588, 6.298032760620117, 6.297462463378906, 6.294023513793945, 6.291060447692871, 6.304028511047363, 6.300370693206787, 6.30103063583374, 6.298976421356201, 6.296726226806641, 6.291236400604248, 6.282033443450928, 6.275025367736816, 6.267657279968262, 6.258029937744141, 6.253689289093018, 6.23329496383667, 6.222102642059326, 6.199103832244873, 6.181999683380127, 6.157290458679199, 6.129861831665039, 6.096798419952393, 6.0693511962890625, 6.038651466369629, 6.005161285400391, 5.967451095581055, 5.922616958618164, 5.8816070556640625, 5.838935852050781, 5.783500671386719, 5.735221862792969, 5.679076194763184, 5.627633094787598, 5.560635566711426, 5.496788024902344, 5.427298545837402, 5.342999458312988, 5.254882335662842, 5.17019510269165, 5.070696830749512, 4.9738569259643555, 4.871763229370117, 4.771126747131348, 4.658448219299316, 4.535346984863281, 4.411426544189453, 4.275084018707275, 4.1309332847595215, 4.0061116218566895, 3.914379119873047, 3.8543171882629395, 3.821730852127075, 3.8338818550109863, 3.8677101135253906, 3.9125382900238037, 3.9539642333984375, 4.002926826477051, 4.0449371337890625, 4.090041637420654, 4.138810634613037, 4.185399532318115, 4.234495162963867, 4.282010555267334, 4.3295440673828125, 4.379822731018066, 4.421647071838379, 4.465240955352783, 4.506595611572266, 4.540782928466797, 4.575828552246094, 4.611869812011719, 4.645750522613525, 4.678774833679199, 4.708827972412109, 4.747437477111816, 4.7737274169921875, 4.807931900024414, 4.831366539001465, 4.85777473449707, 4.8805317878723145, 4.900992393493652, 4.919943332672119, 4.936578273773193, 4.956294536590576, 4.975931644439697, 4.987401008605957, 5.0027666091918945, 5.011504650115967, 5.023746490478516, 5.032135963439941, 5.037472724914551, 5.047362327575684, 5.051031589508057, 5.057233810424805, 5.061892986297607, 5.061836242675781, 5.067837715148926, 5.067499160766602, 5.074305534362793, 5.0682053565979, 5.072011470794678, 5.070659637451172, 5.067062854766846, 5.064232349395752, 5.062520980834961, 5.061490058898926, 5.058897972106934, 5.058325290679932, 5.0545244216918945, 5.05312442779541, 5.048454284667969, 5.037303447723389, 5.040555477142334, 5.036755084991455, 5.039066314697266, 5.034225940704346, 5.033823013305664, 5.037308216094971, 5.036977767944336, 5.034346103668213, 5.034607410430908, 5.038334846496582, 5.03901481628418, 5.036465644836426, 5.045547008514404, 5.048652648925781, 5.05712890625, 5.063508987426758, 5.075639724731445, 5.085427284240723, 5.094447612762451, 5.106485843658447, 5.114947319030762, 5.125388145446777, 5.1417999267578125, 5.154247283935547, 5.1744208335876465, 5.18792724609375, 5.207650661468506, 5.229397773742676, 5.250720024108887, 5.271365165710449, 5.293094635009766, 5.319616794586182, 5.347279071807861, 5.3690032958984375, 5.396540641784668, 5.420563697814941, 5.449283599853516, 5.474859237670898, 5.501993179321289, 5.533157825469971, 5.559548377990723, 5.590087890625, 5.621253967285156, 5.648688316345215, 5.678748607635498, 5.698730945587158, 5.727315902709961, 5.745884895324707, 5.771259307861328, 5.7925214767456055, 5.8118896484375, 5.838423252105713, 
5.859399795532227, 5.8812761306762695, 5.905402660369873, 5.925880432128906, 5.955153942108154, 5.977754592895508, 6.005517959594727, 6.02662467956543, 6.052084922790527, 6.0818281173706055, 6.108105182647705, 6.13942813873291, 6.16900634765625, 6.197851657867432, 6.232394695281982, 6.262883186340332, 6.296454429626465, 6.319486618041992, 6.351753234863281, 6.381895065307617, 6.415424823760986, 6.443106651306152, 6.472728729248047, 6.512679100036621, 6.546327590942383, 6.583624839782715, 6.619935512542725, 6.654709815979004, 6.691723823547363, 6.724106788635254, 6.764636993408203, 6.7952985763549805, 6.840662479400635, 6.880825996398926, 6.916048526763916, 6.9608259201049805, 7.000340461730957, 7.038213729858398, 7.080545425415039, 7.117116928100586, 7.1634440422058105, 7.197575569152832, 7.242242813110352, 7.280797958374023, 7.32054328918457, 7.366442680358887, 7.4085798263549805, 7.4543046951293945, 7.496654510498047, 7.541134357452393, 7.584671974182129, 7.6252007484436035, 7.673555374145508, 7.71476411819458, 7.759086608886719, 7.8000168800354, 7.84334135055542, 7.881185054779053, 7.916384696960449, 7.9546966552734375, 7.992111682891846, 8.031181335449219, 8.076112747192383, 8.114800453186035, 8.159378051757812, 8.196464538574219, 8.238592147827148, 8.276259422302246, 8.319649696350098, 8.355762481689453, 8.391340255737305, 8.430968284606934, 8.470632553100586, 8.500429153442383, 8.538912773132324, 8.567869186401367, 8.603072166442871, 8.634054183959961, 8.664288520812988, 8.690605163574219, 8.71954345703125, 8.751075744628906, 8.771533012390137, 8.790977478027344, 8.815882682800293, 8.832954406738281, 8.855300903320312, 8.868273735046387, 8.889711380004883, 8.905672073364258, 8.920380592346191, 8.931053161621094, 8.941938400268555, 8.953032493591309, 8.960453033447266, 8.966425895690918, 8.966469764709473, 8.962377548217773, 8.963920593261719, 8.95942497253418, 8.946266174316406, 8.930642127990723, 8.920928955078125, 8.900790214538574, 8.878737449645996, 8.849403381347656, 8.828131675720215, 8.79581356048584, 8.76168441772461, 8.722474098205566, 8.68349552154541, 8.640190124511719, 8.5897798538208, 8.537193298339844, 8.476903915405273, 8.405702590942383, 8.338116645812988, 8.260357856750488, 8.182703018188477, 8.100356101989746, 8.015028953552246, 7.922852516174316, 7.824296951293945, 7.716063499450684, 7.601341247558594, 7.478263854980469, 7.349638938903809, 7.216497421264648, 7.072693824768066, 6.924648284912109, 6.76527214050293, 6.596034049987793, 6.420612335205078, 6.227963447570801, 6.034524917602539, 5.822513580322266, 5.598674774169922, 5.357320308685303, 5.093730449676514, 4.806214809417725, 4.531240463256836],
"SZ": [1.1190619468688965, 0.34201210737228394, 0.9333475828170776, 1.5620157718658447, 1.681032419204712, 2.0469706058502197, 2.0127103328704834, 2.206033706665039, 2.328901767730713, 2.538883686065674, 2.504638195037842, 2.4975924491882324, 2.5137884616851807, 2.663048028945923, 2.66184663772583, 2.805690050125122, 2.863844871520996, 2.9515700340270996, 3.002574920654297, 3.05953311920166, 3.107973337173462, 3.1872987747192383, 3.193516254425049, 3.2624454498291016, 3.2178006172180176, 3.308588981628418, 3.379387855529785, 3.4514007568359375, 3.369500160217285, 3.4174001216888428, 3.457951307296753, 3.5173583030700684, 3.5882511138916016, 3.623811960220337, 3.5844874382019043, 3.639864921569824, 3.674999713897705, 3.713590621948242, 3.72645902633667, 3.8242640495300293, 3.824152946472168, 3.87026309967041, 3.8501853942871094, 3.8670477867126465, 3.87192440032959, 3.8959081172943115, 3.8859715461730957, 3.929947853088379, 3.9506988525390625, 3.9905550479888916, 3.984234571456909, 3.989090919494629, 3.9621353149414062, 3.9815280437469482, 3.9457390308380127, 3.9499614238739014, 3.9740636348724365, 3.9659032821655273, 3.9558870792388916, 3.9726500511169434, 3.965010643005371, 3.961996555328369, 3.920492172241211, 3.961965560913086, 3.888455390930176, 3.9148354530334473, 3.8792693614959717, 3.8292994499206543, 3.846773386001587, 3.8487887382507324, 3.8078389167785645, 3.797238826751709, 3.7817800045013428, 3.8223776817321777, 3.774991512298584, 3.730078935623169, 3.705967426300049, 3.697174549102783, 3.6367387771606445, 3.6035876274108887, 3.5847620964050293, 3.574397325515747, 3.5516433715820312, 3.547213554382324, 3.4986047744750977, 3.480088472366333, 3.4480583667755127, 3.412999153137207, 3.358727216720581, 3.363582134246826, 3.3452863693237305, 3.2854080200195312, 3.249812364578247, 3.224419593811035, 3.172354221343994, 3.1627469062805176, 3.1287827491760254, 3.1047301292419434, 3.065045118331909, 3.0283141136169434, 2.9875917434692383, 2.980961322784424, 2.933614730834961, 2.894111156463623, 2.8704631328582764, 2.8376266956329346, 2.7843897342681885, 2.7737069129943848, 2.7284021377563477, 2.698416233062744, 2.668336868286133, 2.644864797592163, 2.613569736480713, 2.577439308166504, 2.5467350482940674, 2.516451835632324, 2.479862928390503, 2.451566219329834, 2.4199094772338867, 2.3976221084594727, 2.3656129837036133, 2.3447184562683105, 2.3087923526763916, 2.298741102218628, 2.2678747177124023, 2.246103286743164, 2.211454391479492, 2.1820895671844482, 2.1698617935180664, 2.1587369441986084, 2.1336708068847656, 2.10846209526062, 2.091322898864746, 2.0862746238708496, 2.0507583618164062, 2.0341908931732178, 2.026151180267334, 2.013186454772949, 1.9952553510665894, 1.977057695388794, 1.9700067043304443, 1.9659950733184814, 1.9434460401535034, 1.9407951831817627, 1.9257279634475708, 1.920248031616211, 1.9045746326446533, 1.896983027458191, 1.8830678462982178, 1.8751592636108398, 1.8624513149261475, 1.8502843379974365, 1.8464475870132446, 1.8492474555969238, 1.8319069147109985, 1.814847707748413, 1.8049674034118652, 1.8076651096343994, 1.8015028238296509, 1.789158582687378, 1.7714269161224365, 1.7646206617355347, 1.7562837600708008, 1.740386962890625, 1.7293846607208252, 1.72336745262146, 1.6951992511749268, 1.6874486207962036, 1.6726592779159546, 1.6632044315338135, 1.6531035900115967, 1.6374624967575073, 1.6177810430526733, 1.5999736785888672, 1.5852718353271484, 1.5754683017730713, 1.559812307357788, 1.5473434925079346, 1.5335639715194702, 1.532987117767334, 1.5183902978897095, 
1.5057533979415894, 1.5027167797088623, 1.5002946853637695, 1.4879395961761475, 1.4833570718765259, 1.4836971759796143, 1.484024167060852, 1.4773701429367065, 1.4800664186477661, 1.476729393005371, 1.4892044067382812, 1.4902658462524414, 1.4929455518722534, 1.5034515857696533, 1.5082266330718994, 1.5139641761779785, 1.5147325992584229, 1.5236027240753174, 1.5412383079528809, 1.5489773750305176, 1.5626559257507324, 1.5696327686309814, 1.58928644657135, 1.5965263843536377, 1.6088354587554932, 1.6241027116775513, 1.631760597229004, 1.6459665298461914, 1.6623057126998901, 1.6740193367004395, 1.6926474571228027, 1.7119553089141846, 1.7354670763015747, 1.742661952972412, 1.755727767944336, 1.7755937576293945, 1.7953863143920898, 1.8119704723358154, 1.830310583114624, 1.8414878845214844, 1.8616740703582764, 1.8796923160552979, 1.8994557857513428, 1.9166967868804932, 1.9364278316497803, 1.9453483819961548, 1.965328335762024, 1.978123426437378, 1.996690034866333, 2.015673875808716, 2.033355951309204, 2.0503053665161133, 2.0690176486968994, 2.076859951019287, 2.0888333320617676, 2.101884603500366, 2.1133601665496826, 2.120739459991455, 2.132235527038574, 2.1358509063720703, 2.144136428833008, 2.1499228477478027, 2.1540470123291016, 2.1492269039154053, 2.1441752910614014, 2.134638786315918, 2.136739730834961, 2.1230297088623047, 2.109149217605591, 2.1046860218048096, 2.1003265380859375, 2.085144519805908, 2.074223279953003, 2.066187858581543, 2.056112051010132, 2.0417754650115967, 2.037087917327881, 2.0283091068267822, 2.0150649547576904, 2.000258445739746, 1.9921152591705322, 1.9744106531143188, 1.9654525518417358, 1.954437255859375, 1.9428000450134277, 1.9261436462402344, 1.9171143770217896, 1.9140522480010986, 1.9042999744415283, 1.8850293159484863, 1.8793892860412598, 1.8692011833190918, 1.859171748161316, 1.8457446098327637, 1.8362152576446533, 1.831392526626587, 1.8193511962890625, 1.806980848312378, 1.806101679801941, 1.7964414358139038, 1.7908635139465332, 1.7779438495635986, 1.7726483345031738, 1.764747142791748, 1.756056308746338, 1.7478394508361816, 1.7419404983520508, 1.732616662979126, 1.7288529872894287, 1.7279545068740845, 1.7195398807525635, 1.7099456787109375, 1.7136808633804321, 1.7060167789459229, 1.6969823837280273, 1.6949162483215332, 1.692852258682251, 1.6855382919311523, 1.6804730892181396, 1.6770415306091309, 1.6766258478164673, 1.6761162281036377, 1.672552227973938, 1.6629319190979004, 1.6659969091415405, 1.6612406969070435, 1.6605236530303955, 1.6580346822738647, 1.6543668508529663, 1.6549453735351562, 1.6527307033538818, 1.6439101696014404, 1.6428707838058472, 1.6452714204788208, 1.6439049243927002, 1.636620283126831, 1.63687264919281, 1.6330581903457642, 1.630275011062622, 1.6241943836212158, 1.6214922666549683, 1.6174169778823853, 1.610004186630249, 1.6001245975494385, 1.5976614952087402, 1.5941195487976074, 1.5871977806091309, 1.5772569179534912, 1.5697910785675049, 1.5614454746246338, 1.551461935043335, 1.5398439168930054, 1.5318961143493652, 1.519102692604065, 1.5117474794387817, 1.4991230964660645, 1.4849053621292114, 1.4740794897079468, 1.4659497737884521, 1.453723669052124, 1.4402424097061157, 1.4239249229431152, 1.4151721000671387, 1.4031044244766235, 1.3928157091140747, 1.379800796508789, 1.3682942390441895, 1.3631805181503296, 1.3555231094360352, 1.342212200164795, 1.3350253105163574, 1.3305249214172363, 1.3306610584259033, 1.3229098320007324, 1.3186655044555664, 1.319215178489685, 1.3192389011383057, 1.3176418542861938, 1.3168869018554688, 1.323311448097229, 
1.3292877674102783, 1.3331258296966553, 1.3417787551879883, 1.3464759588241577, 1.3596137762069702, 1.3662841320037842, 1.3784514665603638, 1.394921064376831, 1.4103858470916748, 1.4228129386901855, 1.4383515119552612, 1.4565277099609375, 1.4764289855957031, 1.4967515468597412, 1.516669750213623, 1.5349936485290527, 1.5598375797271729, 1.580789566040039, 1.601300835609436, 1.6218500137329102, 1.6401358842849731, 1.6593494415283203, 1.681135892868042, 1.696661114692688, 1.7121673822402954, 1.7243638038635254, 1.7437570095062256, 1.7577836513519287, 1.7715823650360107, 1.7789584398269653, 1.7877416610717773, 1.8002862930297852, 1.8107786178588867, 1.819185733795166, 1.8323454856872559, 1.8419474363327026, 1.848493218421936, 1.8590577840805054, 1.8680301904678345, 1.8702025413513184, 1.8794498443603516, 1.8853495121002197, 1.893364429473877, 1.8965972661972046, 1.899944543838501, 1.9031002521514893, 1.9080784320831299, 1.9109441041946411, 1.9113426208496094, 1.9081871509552002, 1.910861611366272, 1.9091122150421143, 1.9098503589630127, 1.9054169654846191, 1.9079868793487549, 1.9055787324905396, 1.8974486589431763, 1.8971264362335205, 1.8959470987319946, 1.8905012607574463, 1.8901782035827637, 1.888779878616333, 1.8859139680862427, 1.8776710033416748, 1.8786239624023438, 1.8730659484863281, 1.8658636808395386, 1.8634743690490723, 1.8553334474563599, 1.8453938961029053, 1.8465522527694702, 1.8449242115020752, 1.8397514820098877, 1.8348169326782227, 1.829789161682129, 1.818169116973877, 1.8188124895095825, 1.8141300678253174, 1.810652494430542, 1.808395266532898, 1.8019081354141235, 1.7972506284713745, 1.7935072183609009, 1.7906689643859863, 1.7912800312042236, 1.7862417697906494, 1.7842061519622803, 1.7758066654205322, 1.776123046875, 1.778996467590332, 1.7771673202514648, 1.775059461593628, 1.7733936309814453, 1.7740062475204468, 1.7746773958206177, 1.772887110710144, 1.7773106098175049, 1.7788712978363037, 1.7794239521026611, 1.7782416343688965, 1.7823517322540283, 1.784118413925171, 1.7872034311294556, 1.7922935485839844, 1.7954142093658447, 1.7982006072998047, 1.8040978908538818, 1.8072601556777954, 1.8117246627807617, 1.8147165775299072, 1.8216161727905273, 1.825623631477356, 1.8294861316680908, 1.8323206901550293, 1.838452696800232, 1.8437917232513428, 1.846151351928711, 1.8450276851654053, 1.8512694835662842, 1.8559616804122925, 1.85966956615448, 1.8583629131317139, 1.860475778579712, 1.859307885169983, 1.8560261726379395, 1.8526322841644287, 1.8524503707885742, 1.8470311164855957, 1.84355628490448, 1.8375625610351562, 1.8294399976730347, 1.8221049308776855, 1.8162025213241577, 1.80690598487854, 1.8003500699996948, 1.7895920276641846, 1.7811152935028076, 1.770780086517334, 1.7577975988388062, 1.7454791069030762, 1.7303060293197632, 1.7174433469772339, 1.7074322700500488, 1.6924636363983154, 1.681823492050171, 1.6654127836227417, 1.6525027751922607, 1.6384811401367188, 1.622106909751892, 1.606553077697754, 1.5919227600097656]
}
'''
'''
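# --- Hedged sketch (not part of the original script): a minimal helper showing
# how a results dict of the shape used throughout this file -- an 'xs' key plus
# one list of y-values per method -- might be plotted for comparison. The use of
# matplotlib, the function name, the axis labels, the output filename, and the
# log-scale default are all illustrative assumptions, not the authors' code.
import matplotlib.pyplot as plt

def plot_results(results, xlabel='wavenumber bin', ylabel='P(k)', logy=True,
                 out='comparison.png'):
    """Plot every non-'xs' series in `results` against the shared 'xs' axis."""
    xs = results['xs']
    for name, ys in results.items():
        if name == 'xs':
            continue
        plt.plot(xs, ys, label=name)
    if logy:
        plt.yscale('log')  # the power-spectrum values below span ~8 orders of magnitude
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.legend()
    plt.savefig(out, bbox_inches='tight')
    plt.close()

# Usage (once the dict below has been defined), e.g.:
#     plot_results(results_powerspectrum)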
results_powerspectrum = {
'xs': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511],
'Raw data' :[25141876.0, 13286992.0, 8955820.0, 4851923.0, 3892812.25, 3050182.5, 2347255.25, 1896700.75, 1525954.875, 1263125.75, 1058105.75, 891347.375, 766699.75, 666951.0, 591555.5625, 529092.9375, 478589.5, 435338.75, 397212.75, 363648.96875, 334474.125, 307504.21875, 283811.53125, 262763.21875, 244826.90625, 227678.6875, 212548.703125, 199033.40625, 187594.328125, 176524.640625, 166688.90625, 157329.78125, 149208.984375, 141788.109375, 134911.921875, 128553.3984375, 122273.7109375, 117257.953125, 112486.9140625, 107833.2578125, 103159.265625, 98889.3125, 94595.4140625, 90664.0859375, 86916.09375, 83536.5234375, 80415.8671875, 77514.640625, 74906.3125, 72001.8359375, 69438.34375, 66829.453125, 64475.94921875, 62029.390625, 59832.98046875, 57838.61328125, 55881.015625, 54059.33984375, 52325.421875, 50618.49609375, 49016.87109375, 47423.12109375, 45925.94140625, 44478.4140625, 43019.53515625, 41686.44140625, 40323.84375, 39139.21875, 37970.7109375, 36828.15625, 35812.4765625, 34841.03125, 33878.5234375, 32892.22265625, 31993.111328125, 31135.845703125, 30199.095703125, 29315.689453125, 28486.3984375, 27708.53515625, 26912.701171875, 26211.685546875, 25568.484375, 24915.564453125, 24270.333984375, 23651.17578125, 23058.09765625, 22437.87109375, 21875.1171875, 21311.974609375, 20782.193359375, 20252.873046875, 19743.4375, 19256.00390625, 18794.548828125, 18340.013671875, 17896.013671875, 17477.669921875, 17076.3984375, 16677.59765625, 16275.0419921875, 15891.75390625, 15531.921875, 15168.0439453125, 14816.1650390625, 14477.1640625, 14148.21484375, 13833.9150390625, 13522.8515625, 13243.140625, 12952.943359375, 12656.0732421875, 12385.1787109375, 12118.4638671875, 11857.0888671875, 11587.1103515625, 11344.759765625, 11107.5771484375, 10871.8876953125, 10635.3154296875, 10405.09375, 10197.33203125, 9975.1748046875, 9770.193359375, 9574.0751953125, 9373.71484375, 9187.4384765625, 8998.6875, 8820.7265625, 8642.431640625, 8467.70703125, 8309.181640625, 8141.38623046875, 7977.49365234375, 7818.611328125, 7665.3271484375, 7511.1474609375, 7362.58056640625, 7221.4130859375, 7088.78173828125, 6956.4443359375, 6825.03271484375, 6700.505859375, 6573.017578125, 6450.48291015625, 6328.498046875, 6206.0390625, 6094.7724609375, 5981.8076171875, 5871.62744140625, 5766.6162109375, 5659.26318359375, 5558.939453125, 5455.3271484375, 5355.41357421875, 5261.1259765625, 5167.3896484375, 5075.70849609375, 4988.44091796875, 4901.6611328125, 4816.58056640625, 4729.3994140625, 4646.001953125, 4565.93701171875, 4486.60009765625, 4409.6845703125, 4334.63232421875, 4262.66748046875, 4190.36083984375, 4120.4677734375, 4050.70361328125, 3980.265625, 3912.138671875, 3845.581298828125, 3779.189208984375, 3715.713623046875, 3655.260498046875, 3595.952880859375, 3538.279541015625, 3481.411865234375, 3425.979248046875, 3370.122314453125, 3313.984619140625, 3259.69482421875, 3206.369873046875, 3154.7890625, 3104.523681640625, 3055.9814453125, 3008.705810546875, 2962.079833984375, 2915.131591796875, 2869.6875, 2824.535888671875, 2778.665771484375, 2733.961181640625, 2691.293701171875, 2649.86474609375, 2609.45263671875, 2569.23388671875, 2531.54052734375, 2492.306396484375, 2454.446533203125, 2417.775390625, 2381.2353515625, 2346.611572265625, 2310.54345703125, 2277.16064453125, 2242.473876953125, 2208.437744140625, 2175.02880859375, 2142.0078125, 2109.7685546875, 2078.712646484375, 2047.566650390625, 2017.1783447265625, 1987.4345703125, 1958.0875244140625, 1930.089599609375, 1902.3743896484375, 1875.690673828125, 
1849.054443359375, 1822.3514404296875, 1794.2344970703125, 1768.279296875, 1742.642578125, 1717.462158203125, 1692.822998046875, 1668.912353515625, 1646.3355712890625, 1623.7279052734375, 1600.619384765625, 1578.2537841796875, 1556.806884765625, 1534.783447265625, 1514.194091796875, 1493.203857421875, 1473.034423828125, 1453.0010986328125, 1432.9752197265625, 1414.0389404296875, 1394.798828125, 1375.8853759765625, 1356.8912353515625, 1338.320068359375, 1320.4765625, 1302.3670654296875, 1284.3868408203125, 1266.865966796875, 1250.555419921875, 1234.1126708984375, 1217.716796875, 1201.2408447265625, 1185.488037109375, 1169.4755859375, 1153.5521240234375, 1137.9691162109375, 1122.755859375, 1108.227783203125, 1093.362548828125, 1079.3084716796875, 1065.2711181640625, 1051.5191650390625, 1038.0482177734375, 1024.62939453125, 1011.54248046875, 998.5826416015625, 985.7373657226562, 973.33251953125, 960.98779296875, 948.6842651367188, 936.9301147460938, 925.10791015625, 913.5501098632812, 901.7206420898438, 890.2074584960938, 879.0147705078125, 867.3776245117188, 856.447021484375, 845.5843505859375, 835.097412109375, 824.7184448242188, 814.604248046875, 804.8720092773438, 794.9691772460938, 785.288818359375, 775.722412109375, 766.090087890625, 756.629638671875, 747.1991577148438, 738.093505859375, 729.378173828125, 720.3740844726562, 711.7059936523438, 702.715576171875, 694.3911743164062, 685.609375, 677.0723876953125, 668.9702758789062, 661.0812377929688, 653.4078369140625, 645.7487182617188, 638.44091796875, 630.90185546875, 623.3991088867188, 615.8792724609375, 608.6513671875, 601.4356689453125, 594.442626953125, 587.272216796875, 580.5233154296875, 573.7293090820312, 566.9773559570312, 560.4168701171875, 553.8124389648438, 547.5328369140625, 541.001708984375, 534.7037353515625, 528.7418823242188, 522.623779296875, 516.6343383789062, 510.79779052734375, 505.11456298828125, 499.5931091308594, 493.838134765625, 488.1368103027344, 482.5367126464844, 477.0275573730469, 471.5401916503906, 466.1739501953125, 460.876953125, 455.7603759765625, 450.8058166503906, 445.7428283691406, 440.95294189453125, 436.0252685546875, 431.24224853515625, 426.5335998535156, 421.7265930175781, 417.0493469238281, 412.28155517578125, 407.7283935546875, 403.2799072265625, 398.8334045410156, 394.5728759765625, 390.42376708984375, 386.2483215332031, 382.1973571777344, 378.0982666015625, 374.0193176269531, 369.95556640625, 366.00628662109375, 362.0304260253906, 358.15069580078125, 354.4022521972656, 350.563232421875, 346.8364562988281, 343.095947265625, 339.5651550292969, 335.9510498046875, 332.41156005859375, 329.0798645019531, 325.71551513671875, 322.4022521972656, 319.0783386230469, 315.8150939941406, 312.5162353515625, 309.1629638671875, 305.97344970703125, 302.8131103515625, 299.6266784667969, 296.5963439941406, 293.5699462890625, 290.66156005859375, 287.72503662109375, 284.82684326171875, 282.0689697265625, 279.2023620605469, 276.4267578125, 273.6347351074219, 270.9993896484375, 268.2995300292969, 265.6341857910156, 263.12451171875, 260.5594177246094, 258.14007568359375, 255.58432006835938, 253.18020629882812, 250.76470947265625, 248.32420349121094, 245.9781951904297, 243.6289520263672, 241.35421752929688, 239.12603759765625, 236.83966064453125, 234.6668243408203, 232.48683166503906, 230.33242797851562, 228.1403045654297, 226.03602600097656, 224.0401611328125, 221.9927520751953, 219.97579956054688, 218.04962158203125, 216.21824645996094, 214.32107543945312, 212.45217895507812, 210.67225646972656, 208.91578674316406, 
207.0709686279297, 205.23745727539062, 203.53610229492188, 201.83346557617188, 200.0789031982422, 198.37493896484375, 196.71322631835938, 195.14669799804688, 193.5537872314453, 192.01156616210938, 190.51034545898438, 189.04600524902344, 187.65777587890625, 186.22862243652344, 184.8418731689453, 183.44471740722656, 182.13967895507812, 180.82957458496094, 179.55355834960938, 178.34368896484375, 177.09344482421875, 175.91812133789062, 174.7257080078125, 173.57394409179688, 172.47311401367188, 171.37098693847656, 170.2907257080078, 169.26490783691406, 168.30007934570312, 167.37013244628906, 166.39134216308594, 165.5266876220703, 164.70570373535156, 163.8613739013672, 163.08193969726562, 162.29449462890625, 161.552734375, 160.8280487060547, 160.19752502441406, 159.5591583251953, 158.96743774414062, 158.4168701171875, 157.97557067871094, 157.50906372070312, 157.06729125976562, 156.717041015625, 156.31704711914062, 156.00025939941406, 155.70001220703125, 155.55184936523438, 155.39122009277344, 155.25001525878906, 155.2377166748047, 155.30703735351562, 155.43060302734375, 155.5107879638672, 155.71400451660156, 156.0399169921875, 156.40650939941406, 156.82205200195312, 157.3544464111328, 157.99932861328125, 158.7686767578125, 159.61036682128906, 160.59054565429688, 161.67189025878906, 162.86753845214844, 164.1967010498047, 165.6467742919922, 167.3438720703125, 169.35015869140625, 171.19827270507812, 170.92869567871094, 166.2805938720703, 156.95675659179688, 145.14138793945312, 131.62783813476562, 117.17093658447266, 103.91419219970703, 94.19029998779297, 88.37032318115234, 84.25625610351562, 81.04232025146484, 78.44087982177734, 76.24996948242188, 74.35597229003906, 72.68389892578125, 71.15137481689453, 69.74664306640625, 68.44426727294922, 67.24844360351562, 66.10746002197266, 65.03433227539062, 64.03384399414062, 63.08150100708008, 62.179588317871094, 61.30385208129883, 60.47306823730469, 59.66187286376953, 58.893035888671875, 58.153297424316406, 57.452980041503906, 56.77273941040039, 56.099998474121094],
'Ours' : [25086374.0, 13259168.0, 8937381.0, 4842663.0, 3885173.25, 3044032.0, 2342550.25, 1892818.625, 1522778.0, 1260462.625, 1055903.25, 889523.9375, 765123.625, 665592.6875, 590354.0625, 528018.5, 477596.375, 434427.5625, 396375.125, 362877.0, 333769.03125, 306860.1875, 283221.75, 262225.25, 244332.953125, 227216.96875, 212124.03125, 198633.875, 187228.3125, 176180.953125, 166365.25, 157027.890625, 148922.53125, 141518.578125, 134655.84375, 128305.8125, 122042.015625, 117035.9140625, 112280.5703125, 107639.8046875, 102974.2421875, 98719.1328125, 94437.6171875, 90519.3828125, 86774.4140625, 83404.03125, 80289.6328125, 77395.4921875, 74791.6640625, 71890.609375, 69333.796875, 66726.3046875, 64376.4609375, 61938.10546875, 59750.046875, 57760.75390625, 55806.99609375, 53990.32421875, 52257.95703125, 50550.44921875, 48947.95703125, 47358.5078125, 45864.76171875, 44419.671875, 42962.1171875, 41632.4375, 40274.40625, 39092.22265625, 37928.8125, 36785.4921875, 35773.8984375, 34801.140625, 33836.578125, 32848.4453125, 31947.669921875, 31093.68359375, 30159.134765625, 29277.740234375, 28450.388671875, 27675.751953125, 26881.939453125, 26182.416015625, 25539.0390625, 24889.9296875, 24246.859375, 23629.44921875, 23039.3828125, 22424.091796875, 21865.607421875, 21302.857421875, 20772.48828125, 20245.572265625, 19737.921875, 19250.611328125, 18788.515625, 18334.77734375, 17893.62890625, 17474.462890625, 17074.306640625, 16671.51171875, 16267.5830078125, 15884.5634765625, 15523.578125, 15161.1103515625, 14808.560546875, 14467.322265625, 14137.1337890625, 13820.8486328125, 13507.861328125, 13221.720703125, 12923.2060546875, 12623.49609375, 12347.96484375, 12075.1630859375, 11807.74609375, 11533.56640625, 11286.890625, 11043.265625, 10800.7578125, 10559.7890625, 10322.6728515625, 10106.7548828125, 9880.1884765625, 9670.41796875, 9469.931640625, 9263.955078125, 9071.1953125, 8876.318359375, 8691.021484375, 8506.337890625, 8322.4033203125, 8156.54150390625, 7983.7685546875, 7813.23193359375, 7647.44482421875, 7488.02490234375, 7328.13916015625, 7172.0400390625, 7025.375, 6889.3671875, 6752.07763671875, 6614.73291015625, 6483.64306640625, 6350.6962890625, 6221.7275390625, 6091.955078125, 5963.73779296875, 5847.77197265625, 5731.29443359375, 5616.068359375, 5505.533203125, 5396.95458984375, 5293.869140625, 5185.6181640625, 5083.86962890625, 4986.73681640625, 4890.8232421875, 4795.03857421875, 4704.6435546875, 4615.14453125, 4527.46630859375, 4439.59912109375, 4354.14208984375, 4271.55029296875, 4189.880859375, 4112.25439453125, 4037.536865234375, 3965.844970703125, 3894.762939453125, 3825.94677734375, 3757.956787109375, 3689.139892578125, 3621.111572265625, 3554.463623046875, 3488.527099609375, 3425.76611328125, 3366.484619140625, 3307.95703125, 3252.389404296875, 3196.420166015625, 3142.677001953125, 3090.153564453125, 3035.413330078125, 2983.51318359375, 2931.41162109375, 2883.7470703125, 2836.70849609375, 2789.6240234375, 2744.742431640625, 2701.097900390625, 2657.78955078125, 2615.054931640625, 2572.158935546875, 2530.02392578125, 2489.28173828125, 2450.86279296875, 2413.09375, 2376.168701171875, 2339.88037109375, 2305.100341796875, 2269.871826171875, 2234.5224609375, 2201.078125, 2167.69873046875, 2135.7890625, 2103.503173828125, 2074.215576171875, 2043.864013671875, 2013.9686279296875, 1984.635009765625, 1955.6505126953125, 1928.2210693359375, 1900.0267333984375, 1873.1285400390625, 1846.1243896484375, 1820.8114013671875, 1795.4609375, 1770.3046875, 1746.51904296875, 1722.0565185546875, 
1698.4722900390625, 1673.7496337890625, 1650.047607421875, 1627.31982421875, 1604.8199462890625, 1583.19189453125, 1561.8165283203125, 1540.7784423828125, 1520.5291748046875, 1500.7432861328125, 1480.2606201171875, 1460.7928466796875, 1442.187744140625, 1422.9251708984375, 1404.593505859375, 1385.6163330078125, 1367.61083984375, 1350.0335693359375, 1331.9691162109375, 1315.490478515625, 1298.43212890625, 1281.9635009765625, 1264.8807373046875, 1248.07568359375, 1231.839111328125, 1215.2337646484375, 1198.8941650390625, 1182.765625, 1167.81787109375, 1152.43017578125, 1137.1143798828125, 1122.1849365234375, 1107.65185546875, 1093.179931640625, 1078.48681640625, 1063.94140625, 1050.2298583984375, 1036.8875732421875, 1023.0238647460938, 1009.609130859375, 996.4658813476562, 983.659912109375, 970.4968872070312, 957.3748168945312, 944.7362060546875, 932.2721557617188, 919.7241821289062, 907.2108764648438, 895.1275634765625, 882.994384765625, 871.3276977539062, 859.210693359375, 847.6813354492188, 836.18115234375, 824.74462890625, 813.4380493164062, 801.924072265625, 790.7955932617188, 779.5589599609375, 768.5333862304688, 757.6715698242188, 746.6666259765625, 736.0463256835938, 725.6542358398438, 715.046875, 704.7596435546875, 694.5159301757812, 684.4671020507812, 674.334228515625, 664.23876953125, 654.5004272460938, 644.4861450195312, 634.5929565429688, 624.852294921875, 615.2882080078125, 605.7454223632812, 596.10888671875, 586.5631103515625, 577.4290161132812, 568.2418823242188, 559.0137329101562, 549.8805541992188, 540.94482421875, 532.1825561523438, 523.365966796875, 514.7274169921875, 506.15985107421875, 497.63250732421875, 489.0686340332031, 480.68609619140625, 472.1288757324219, 463.7294616699219, 455.5837707519531, 447.5718078613281, 439.807861328125, 431.8489990234375, 424.14007568359375, 416.4290771484375, 408.8225402832031, 401.33099365234375, 393.7798156738281, 386.5447692871094, 379.3397216796875, 372.09832763671875, 364.9993896484375, 357.9806823730469, 351.0247497558594, 344.1992492675781, 337.55670166015625, 330.99798583984375, 324.3707580566406, 317.9281311035156, 311.79241943359375, 305.4871826171875, 299.449462890625, 293.56378173828125, 287.6938781738281, 281.85736083984375, 276.0900573730469, 270.5283203125, 264.85797119140625, 259.41693115234375, 254.1158447265625, 248.9314422607422, 243.806396484375, 238.77020263671875, 233.95010375976562, 229.12110900878906, 224.50619506835938, 219.8014373779297, 215.31898498535156, 210.8824462890625, 206.5464630126953, 202.2780303955078, 198.03419494628906, 193.99423217773438, 189.9760284423828, 186.06637573242188, 182.1781463623047, 178.35256958007812, 174.6624755859375, 171.06314086914062, 167.5377960205078, 164.0326690673828, 160.65093994140625, 157.3340606689453, 154.03118896484375, 150.77818298339844, 147.6309356689453, 144.58497619628906, 141.5930938720703, 138.62576293945312, 135.77548217773438, 133.04818725585938, 130.33331298828125, 127.6618881225586, 125.04879760742188, 122.54822540283203, 120.07101440429688, 117.66300964355469, 115.30770111083984, 113.00035858154297, 110.7917251586914, 108.61622619628906, 106.48108673095703, 104.38042449951172, 102.34392547607422, 100.36337280273438, 98.39898681640625, 96.5145034790039, 94.67707824707031, 92.89352416992188, 91.1530532836914, 89.44609832763672, 87.7967529296875, 86.19510650634766, 84.62256622314453, 83.10785675048828, 81.59303283691406, 80.17215728759766, 78.75909423828125, 77.35926818847656, 76.00105285644531, 74.67333984375, 73.42906951904297, 72.18960571289062, 
70.99433135986328, 69.82556915283203, 68.71015930175781, 67.62393951416016, 66.54727172851562, 65.4766616821289, 64.46293640136719, 63.45573425292969, 62.49359893798828, 61.54044723510742, 60.64391326904297, 59.763179779052734, 58.89873504638672, 58.082725524902344, 57.272010803222656, 56.502044677734375, 55.733585357666016, 55.00220489501953, 54.294822692871094, 53.59550476074219, 52.9167594909668, 52.252777099609375, 51.61712646484375, 50.9974250793457, 50.386722564697266, 49.807403564453125, 49.25212860107422, 48.721946716308594, 48.19172286987305, 47.68341827392578, 47.20656204223633, 46.7345085144043, 46.280616760253906, 45.836936950683594, 45.40451431274414, 44.98884963989258, 44.57628631591797, 44.19237518310547, 43.79893493652344, 43.42437744140625, 43.07963562011719, 42.72689437866211, 42.38664627075195, 42.056575775146484, 41.73991012573242, 41.42827606201172, 41.126617431640625, 40.83439254760742, 40.559104919433594, 40.275238037109375, 40.00959396362305, 39.750423431396484, 39.48468780517578, 39.24017333984375, 38.99643325805664, 38.76231002807617, 38.52574920654297, 38.299686431884766, 38.089900970458984, 37.88383483886719, 37.680015563964844, 37.47666549682617, 37.28871154785156, 37.109962463378906, 36.92898178100586, 36.75177001953125, 36.57804489135742, 36.409141540527344, 36.245849609375, 36.08535385131836, 35.928245544433594, 35.776817321777344, 35.631954193115234, 35.504520416259766, 35.377323150634766, 35.24844741821289, 35.12152099609375, 35.00223159790039, 34.888038635253906, 34.76499938964844, 34.65043640136719, 34.53892135620117, 34.43434143066406, 34.332366943359375, 34.230281829833984, 34.14014434814453, 34.04792404174805, 33.959495544433594, 33.87434005737305, 33.789676666259766, 33.71051025390625, 33.62729263305664, 33.54425811767578, 33.46764373779297, 33.39337158203125, 33.31790542602539, 33.24443054199219, 33.17169952392578, 33.11022186279297, 33.04153823852539, 32.978519439697266, 32.921058654785156, 32.86847686767578, 32.816802978515625, 32.69873046875, 32.46784210205078],
'SR-octree' : [25132244.0, 13281402.0, 8951631.0, 4848903.5, 3889900.0, 3047375.5, 2344633.25, 1894093.25, 1523378.875, 1260615.625, 1055676.375, 888950.0625, 764311.0625, 664528.375, 589094.4375, 526597.3125, 476018.78125, 432721.59375, 394539.53125, 360944.1875, 331732.9375, 304748.0, 281050.5625, 259982.1875, 242004.734375, 224838.3125, 209688.21875, 196144.625, 184676.703125, 173590.15625, 163750.40625, 154362.96875, 146224.90625, 138787.3125, 131887.625, 125493.71875, 119187.34375, 114149.9140625, 109352.46875, 104672.2578125, 99990.1796875, 95717.953125, 91428.515625, 87486.453125, 83732.8828125, 80343.078125, 77218.3515625, 74311.84375, 71680.3203125, 68771.578125, 66201.0625, 63602.31640625, 61242.62109375, 58804.3203125, 56615.2265625, 54627.60546875, 52678.9453125, 50852.0078125, 49125.234375, 47421.32421875, 45822.37109375, 44238.76953125, 42744.640625, 41310.5, 39861.3203125, 38543.33203125, 37201.390625, 36024.76171875, 34872.0859375, 33740.90625, 32732.736328125, 31760.68359375, 30803.291015625, 29830.77734375, 28940.876953125, 28095.048828125, 27180.693359375, 26321.798828125, 25511.099609375, 24756.1796875, 23985.947265625, 23299.900390625, 22669.26953125, 22036.30078125, 21407.46875, 20800.984375, 20222.861328125, 19628.716796875, 19086.10546875, 18539.935546875, 18029.962890625, 17522.8125, 17039.341796875, 16570.013671875, 16129.7783203125, 15695.2978515625, 15273.40625, 14875.9296875, 14490.0205078125, 14106.98046875, 13722.509765625, 13363.3212890625, 13021.7060546875, 12681.376953125, 12351.5205078125, 12031.814453125, 11723.306640625, 11424.521484375, 11135.63671875, 10871.1240234375, 10597.515625, 10327.3798828125, 10076.9404296875, 9833.5859375, 9593.8486328125, 9349.9873046875, 9130.8212890625, 8914.978515625, 8702.6982421875, 8491.1259765625, 8283.2216796875, 8091.80517578125, 7895.46435546875, 7714.65283203125, 7541.0703125, 7363.92138671875, 7200.4365234375, 7037.5224609375, 6880.341796875, 6722.6484375, 6568.48779296875, 6428.87158203125, 6284.10498046875, 6142.3505859375, 6007.1416015625, 5876.869140625, 5747.47509765625, 5622.4150390625, 5504.80810546875, 5396.02587890625, 5285.89453125, 5176.8916015625, 5074.97412109375, 4970.1201171875, 4869.62451171875, 4769.798828125, 4671.8642578125, 4584.2373046875, 4495.5712890625, 4410.138671875, 4327.9765625, 4247.85009765625, 4173.33984375, 4095.212890625, 4022.4755859375, 3952.232666015625, 3885.27392578125, 3817.199951171875, 3753.499267578125, 3690.28955078125, 3627.267822265625, 3566.835205078125, 3506.520751953125, 3450.7236328125, 3394.744873046875, 3343.28955078125, 3294.80078125, 3247.318359375, 3202.055419921875, 3156.745849609375, 3113.868408203125, 3069.783935546875, 3027.1875, 2984.815673828125, 2943.79931640625, 2904.882568359375, 2868.63818359375, 2831.937744140625, 2797.76611328125, 2764.260498046875, 2731.87060546875, 2701.999267578125, 2670.38330078125, 2641.94189453125, 2612.248046875, 2585.61474609375, 2559.0361328125, 2531.64501953125, 2506.032958984375, 2481.2392578125, 2456.72509765625, 2431.816650390625, 2407.468017578125, 2383.45263671875, 2358.60693359375, 2336.810791015625, 2313.908935546875, 2293.759521484375, 2271.09814453125, 2250.71875, 2231.080322265625, 2210.462890625, 2192.728515625, 2171.860595703125, 2154.205322265625, 2134.06884765625, 2115.39501953125, 2093.932861328125, 2073.0205078125, 2053.96728515625, 2033.6912841796875, 2015.5574951171875, 1996.7813720703125, 1978.93212890625, 1959.3017578125, 1940.0799560546875, 1920.5614013671875, 1901.5845947265625, 1881.5557861328125, 
1860.809814453125, 1842.452392578125, 1822.9481201171875, 1802.7069091796875, 1780.9976806640625, 1760.1651611328125, 1739.3118896484375, 1717.093017578125, 1693.622802734375, 1671.4678955078125, 1648.776123046875, 1625.5469970703125, 1602.4229736328125, 1579.2845458984375, 1554.245849609375, 1530.4786376953125, 1505.9620361328125, 1480.3255615234375, 1455.20556640625, 1428.761962890625, 1403.357666015625, 1375.6943359375, 1347.50927734375, 1318.6904296875, 1288.283447265625, 1257.11376953125, 1226.1259765625, 1194.1226806640625, 1161.4693603515625, 1129.00390625, 1096.0333251953125, 1061.69970703125, 1026.0716552734375, 989.4236450195312, 950.93896484375, 912.0731811523438, 877.7230834960938, 850.851806640625, 830.9099731445312, 818.193359375, 814.0968017578125, 815.3104248046875, 818.0100708007812, 820.777099609375, 823.81787109375, 827.1791381835938, 829.7216796875, 832.7659912109375, 835.8765869140625, 839.984619140625, 842.9035034179688, 845.9434204101562, 848.7767944335938, 851.534912109375, 853.7897338867188, 854.3933715820312, 855.5598754882812, 856.0223388671875, 856.4060668945312, 856.35888671875, 855.9899291992188, 856.0278930664062, 855.8786010742188, 855.4097900390625, 854.5775756835938, 853.4462280273438, 851.8607177734375, 850.1112670898438, 847.6365356445312, 845.0194702148438, 842.2386474609375, 839.36767578125, 836.6060791015625, 832.9148559570312, 829.8011474609375, 826.0823364257812, 821.9711303710938, 817.8948974609375, 813.704833984375, 809.6802368164062, 805.043212890625, 800.3983764648438, 795.8177490234375, 790.9659423828125, 786.127685546875, 781.2378540039062, 776.3318481445312, 771.692626953125, 766.4392700195312, 761.2966918945312, 755.89306640625, 750.4918212890625, 745.7673950195312, 740.2044677734375, 735.2103881835938, 730.607177734375, 725.4835815429688, 720.821044921875, 715.47900390625, 710.2374267578125, 705.7545776367188, 700.5547485351562, 696.4874877929688, 691.8468627929688, 687.1177368164062, 683.3565673828125, 678.978271484375, 674.8321533203125, 670.4938354492188, 666.6170654296875, 663.0614013671875, 659.1465454101562, 655.7939453125, 652.7390747070312, 649.8187866210938, 646.8560180664062, 644.2083740234375, 641.6151123046875, 639.03662109375, 636.3818969726562, 633.7366333007812, 631.6297607421875, 629.4259643554688, 627.7952880859375, 626.0906372070312, 624.6311645507812, 623.42529296875, 622.0878295898438, 621.150390625, 620.0771484375, 619.2242431640625, 618.574951171875, 617.9061279296875, 617.3737182617188, 616.7476806640625, 616.2654418945312, 615.666015625, 615.3160400390625, 615.193359375, 614.7626342773438, 614.69970703125, 614.4883422851562, 614.3919677734375, 614.0614013671875, 613.3917236328125, 612.900390625, 611.992919921875, 611.1935424804688, 610.001953125, 609.1231689453125, 608.251708984375, 607.2630004882812, 606.2374877929688, 605.2764892578125, 604.6358642578125, 603.6674194335938, 602.890380859375, 602.3126220703125, 601.82275390625, 601.0425415039062, 600.3038330078125, 599.8391723632812, 599.4749145507812, 599.2013549804688, 598.7662963867188, 598.8283081054688, 598.6171264648438, 598.7136840820312, 598.5442504882812, 598.0870361328125, 597.9546508789062, 597.5980834960938, 597.6692504882812, 597.2802124023438, 597.1073608398438, 597.4594116210938, 597.7301025390625, 598.1304321289062, 598.1954345703125, 598.3310546875, 598.4630126953125, 598.682373046875, 598.9364624023438, 599.0576171875, 599.7174682617188, 600.5422973632812, 600.8466796875, 601.312255859375, 601.9002685546875, 602.3974609375, 602.8402709960938, 
602.9353637695312, 603.8067016601562, 604.2817993164062, 604.6519165039062, 605.2142944335938, 605.6998291015625, 606.3735961914062, 606.9798583984375, 607.5481567382812, 608.26318359375, 608.8380737304688, 609.190185546875, 609.953857421875, 610.4891967773438, 611.072265625, 611.5262451171875, 612.291015625, 612.6510620117188, 612.80908203125, 612.9273071289062, 612.9762573242188, 612.8861694335938, 612.9046630859375, 613.596923828125, 613.6477661132812, 613.8983764648438, 614.1630859375, 614.3814697265625, 614.7525024414062, 614.6544799804688, 614.6533813476562, 614.6949462890625, 614.7125244140625, 614.7023315429688, 614.3853759765625, 614.0360107421875, 613.8653564453125, 613.1323852539062, 612.5751342773438, 612.0900268554688, 611.28125, 610.6011352539062, 609.698974609375, 608.8304443359375, 607.5247802734375, 606.4237060546875, 605.121337890625, 603.8177490234375, 602.3216552734375, 600.9725341796875, 599.6211547851562, 597.8339233398438, 596.1483154296875, 594.11962890625, 592.159423828125, 590.2316284179688, 587.9564819335938, 585.444091796875, 582.9096069335938, 580.3082885742188, 577.7213745117188, 574.3182983398438, 570.9820556640625, 567.882080078125, 564.2346801757812, 560.601806640625, 556.4447631835938, 552.6826782226562, 548.4627075195312, 543.9164428710938, 539.3906860351562, 534.3480834960938, 529.59521484375, 524.3068237304688, 519.0333251953125, 513.223876953125, 506.9593505859375, 500.6521301269531, 494.1068420410156, 487.329345703125, 480.4949035644531, 473.3659362792969, 466.2189636230469, 458.52825927734375, 450.35748291015625, 441.810546875, 432.9241943359375, 423.8951721191406, 414.4730224609375, 404.65191650390625, 394.53326416015625, 383.9576721191406, 372.8827819824219, 361.3356018066406, 349.3583984375, 336.9060974121094, 323.8538513183594, 310.1408386230469, 295.6970520019531, 280.0346984863281, 263.290283203125, 247.6102294921875],
'SZ' : [25140042.0, 13286912.0, 8956124.0, 4852523.0, 3893000.0, 3050219.0, 2347155.75, 1896422.25, 1525565.375, 1262613.625, 1057567.75, 890780.125, 766139.0, 666395.5625, 590983.3125, 528536.375, 478005.6875, 434750.625, 396585.65625, 362984.28125, 333786.28125, 306795.5, 283104.6875, 262026.8125, 244060.625, 226894.46875, 211748.53125, 198202.3125, 186728.234375, 175627.453125, 165766.46875, 156399.390625, 148249.8125, 140812.6875, 133905.171875, 127526.09375, 121223.5390625, 116171.875, 111357.375, 106673.84375, 101971.7578125, 97672.765625, 93347.5, 89387.2734375, 85619.03125, 82202.578125, 79052.71875, 76116.0546875, 73472.875, 70546.171875, 67954.09375, 65320.1328125, 62936.2421875, 60465.26953125, 58244.21875, 56215.51953125, 54229.96484375, 52376.75390625, 50612.83984375, 48878.57421875, 47247.171875, 45628.64453125, 44105.515625, 42632.35546875, 41153.09765625, 39797.96875, 38408.25390625, 37198.73828125, 36010.72265625, 34848.60546875, 33810.24609375, 32809.77734375, 31826.859375, 30824.44921875, 29898.32421875, 29020.431640625, 28065.26171875, 27170.822265625, 26328.265625, 25533.955078125, 24729.111328125, 24009.876953125, 23353.111328125, 22680.88671875, 22022.736328125, 21383.53125, 20777.3125, 20148.2421875, 19575.09765625, 19006.8359375, 18470.498046875, 17941.900390625, 17430.849609375, 16939.578125, 16467.470703125, 16009.341796875, 15559.2939453125, 15137.4365234375, 14728.4052734375, 14327.4375, 13930.9677734375, 13550.0498046875, 13193.0849609375, 12834.2021484375, 12489.310546875, 12155.6298828125, 11834.2138671875, 11525.1181640625, 11220.875, 10943.6748046875, 10662.91796875, 10378.0224609375, 10114.8857421875, 9856.4765625, 9606.1884765625, 9352.0341796875, 9118.373046875, 8894.2119140625, 8672.5849609375, 8455.7021484375, 8243.958984375, 8052.16162109375, 7852.7783203125, 7667.26904296875, 7489.75732421875, 7311.4853515625, 7144.90478515625, 6978.806640625, 6822.53857421875, 6665.439453125, 6513.84912109375, 6376.3486328125, 6235.0595703125, 6095.16064453125, 5962.1494140625, 5836.36669921875, 5709.18115234375, 5586.2724609375, 5473.818359375, 5369.4814453125, 5263.53173828125, 5157.97998046875, 5062.037109375, 4964.03955078125, 4866.73583984375, 4772.02490234375, 4678.68603515625, 4594.560546875, 4510.03564453125, 4427.63623046875, 4350.654296875, 4271.654296875, 4199.3017578125, 4123.06396484375, 4050.28076171875, 3984.61181640625, 3916.016357421875, 3851.46826171875, 3790.540771484375, 3729.816650390625, 3669.4658203125, 3606.742919921875, 3549.422119140625, 3491.42919921875, 3433.53955078125, 3379.34765625, 3326.68212890625, 3277.40185546875, 3227.268798828125, 3180.39306640625, 3131.591064453125, 3084.559814453125, 3038.613037109375, 2991.469970703125, 2946.02978515625, 2902.512939453125, 2861.303466796875, 2821.4873046875, 2783.010498046875, 2747.431884765625, 2712.817626953125, 2677.81005859375, 2645.70654296875, 2612.67138671875, 2583.577392578125, 2552.575439453125, 2523.707275390625, 2496.275634765625, 2470.456298828125, 2447.026123046875, 2420.758544921875, 2399.16259765625, 2377.2412109375, 2355.558349609375, 2335.3779296875, 2315.52783203125, 2298.0341796875, 2279.113525390625, 2262.22998046875, 2246.837890625, 2230.16064453125, 2215.310791015625, 2200.21875, 2185.416259765625, 2170.30859375, 2155.8837890625, 2142.55908203125, 2128.677734375, 2116.7421875, 2104.771484375, 2094.0546875, 2082.614501953125, 2070.75, 2059.3076171875, 2046.712890625, 2036.5228271484375, 2024.990234375, 2015.2982177734375, 2005.3648681640625, 1995.4364013671875, 
1985.745361328125, 1975.5531005859375, 1964.674560546875, 1953.6842041015625, 1944.018310546875, 1933.4049072265625, 1923.262451171875, 1913.514892578125, 1904.1666259765625, 1894.1085205078125, 1883.1842041015625, 1872.7230224609375, 1863.2755126953125, 1852.108642578125, 1841.407958984375, 1830.5950927734375, 1820.1683349609375, 1808.6009521484375, 1795.8079833984375, 1783.733154296875, 1770.9658203125, 1757.185791015625, 1743.0224609375, 1729.4954833984375, 1715.373046875, 1699.881591796875, 1684.73486328125, 1669.5843505859375, 1653.6278076171875, 1637.039794921875, 1620.498046875, 1604.31396484375, 1588.1661376953125, 1572.4183349609375, 1557.7603759765625, 1543.1405029296875, 1528.9083251953125, 1514.121826171875, 1498.9931640625, 1484.047119140625, 1468.71337890625, 1453.3782958984375, 1438.87939453125, 1425.650634765625, 1412.1048583984375, 1399.462646484375, 1387.0877685546875, 1374.842041015625, 1363.2413330078125, 1350.42041015625, 1339.0184326171875, 1326.730712890625, 1314.91796875, 1303.4271240234375, 1291.5242919921875, 1280.949951171875, 1269.4776611328125, 1259.0809326171875, 1248.303955078125, 1238.0615234375, 1227.9815673828125, 1217.5364990234375, 1207.5390625, 1197.33203125, 1187.800537109375, 1178.13330078125, 1168.038330078125, 1158.6199951171875, 1149.876953125, 1140.857421875, 1132.2454833984375, 1123.378662109375, 1114.7076416015625, 1106.116455078125, 1097.3651123046875, 1089.2049560546875, 1081.0947265625, 1072.9744873046875, 1065.365234375, 1057.7127685546875, 1050.0205078125, 1042.334228515625, 1034.7769775390625, 1027.7977294921875, 1021.0133666992188, 1014.3549194335938, 1007.9764404296875, 1001.3951416015625, 994.8221435546875, 988.298583984375, 982.1041259765625, 975.85546875, 969.9302368164062, 964.248779296875, 958.7221069335938, 953.208984375, 947.3475341796875, 941.79736328125, 936.5003662109375, 931.0283203125, 925.47705078125, 920.4869995117188, 915.7028198242188, 910.82763671875, 905.5574340820312, 900.7262573242188, 895.9791259765625, 890.5762329101562, 885.690185546875, 880.7027587890625, 875.8668823242188, 870.8870239257812, 866.111572265625, 861.6068115234375, 856.7537231445312, 851.8948974609375, 847.0258178710938, 842.700927734375, 837.9742431640625, 833.6423950195312, 829.2338256835938, 825.2230834960938, 821.3671264648438, 817.3464965820312, 813.3553466796875, 809.4246826171875, 805.8170166015625, 801.78076171875, 797.7027587890625, 793.72216796875, 790.3976440429688, 786.8432006835938, 783.23095703125, 780.1178588867188, 777.2537841796875, 774.6553955078125, 771.734619140625, 769.3119506835938, 767.2828979492188, 765.045654296875, 762.9378051757812, 760.9224853515625, 758.83935546875, 757.1056518554688, 755.05322265625, 753.4863891601562, 752.1709594726562, 750.9159545898438, 750.089111328125, 748.9017333984375, 748.0357666015625, 747.5089111328125, 746.8538818359375, 746.1165161132812, 745.4715576171875, 744.9641723632812, 744.5673217773438, 743.9049072265625, 743.268798828125, 742.7949829101562, 742.3162841796875, 741.8408813476562, 741.076416015625, 740.113037109375, 739.5142211914062, 738.7108154296875, 737.664794921875, 736.6333618164062, 735.7399291992188, 734.6648559570312, 733.427978515625, 732.364990234375, 731.1055908203125, 729.7080688476562, 728.3575439453125, 727.30615234375, 726.0611572265625, 724.6103515625, 723.4706420898438, 722.3829956054688, 721.2230224609375, 719.8196411132812, 718.3875732421875, 716.820556640625, 715.3382568359375, 713.7517700195312, 712.394775390625, 710.7255249023438, 709.1192626953125, 
707.6550903320312, 706.1121215820312, 704.6255493164062, 702.6729736328125, 701.2367553710938, 699.6781616210938, 698.0101318359375, 696.1544799804688, 694.4179077148438, 692.7006225585938, 690.8418579101562, 689.0565795898438, 687.46875, 685.9212646484375, 684.2257690429688, 682.6304321289062, 680.8450927734375, 679.3353881835938, 677.6156005859375, 675.8213500976562, 674.0729370117188, 672.4496459960938, 670.9967651367188, 669.0643920898438, 667.4789428710938, 665.774658203125, 664.129638671875, 662.5123901367188, 660.8478393554688, 659.385986328125, 657.8442993164062, 656.4347534179688, 654.9685668945312, 653.5841064453125, 652.05810546875, 650.5470581054688, 649.0493774414062, 647.568115234375, 646.0777587890625, 644.5557861328125, 643.2374877929688, 641.9409790039062, 640.453125, 639.1685180664062, 637.9728393554688, 636.580322265625, 635.3001708984375, 634.0682373046875, 632.79638671875, 631.6026611328125, 630.2958374023438, 629.013671875, 627.8966064453125, 626.5953979492188, 625.5006103515625, 624.3063354492188, 623.3893432617188, 622.3204956054688, 621.2110595703125, 620.1679077148438, 619.1118774414062, 618.04541015625, 616.84326171875, 615.9456787109375, 614.9801635742188, 613.9580078125, 612.8753051757812, 611.7736206054688, 610.8096923828125, 609.7308349609375, 608.4657592773438, 607.2495727539062, 606.0509643554688, 604.784423828125, 603.2223510742188, 601.7694702148438, 600.5296020507812, 599.2265625, 597.947509765625, 596.8887939453125, 595.603271484375, 594.39306640625, 593.1397094726562, 591.9876708984375, 590.8253173828125, 589.5977172851562, 588.550048828125, 587.3819580078125, 586.1613159179688, 584.9620971679688, 583.6386108398438, 582.28466796875, 581.0192260742188, 579.824462890625, 578.66357421875, 577.47900390625, 576.4356689453125, 575.3408813476562, 574.2741088867188, 573.1587524414062, 571.9419555664062, 570.5778198242188, 568.8909301757812]
}
font = {#'font.family' : 'normal',
#'font.weight' : 'bold',
'font.size' : 18}
plt.rcParams.update(font)
xs = results_powerspectrum['xs']
GT_freqs = results_powerspectrum['Raw data']
NN_freqs = results_powerspectrum['Ours']
SZ_freqs = results_powerspectrum['SZ']
octree_freqs = results_powerspectrum['SR-octree']
plt.plot(xs, np.array(GT_freqs), label="Raw data", color="red")
plt.plot(xs, np.array(NN_freqs), label="Ours", color="blue")
plt.plot(xs, np.array(octree_freqs), label="SR-octree", color="gray")
plt.plot(xs, np.array(SZ_freqs), label="SZ", color="green")
#plt.legend()
plt.xlabel("Wavenumber")
plt.title("Iso3D magnitude")
plt.ylabel("Power")
plt.yscale("log")
plt.xscale("log")
#plt.xscale("log")
#plt.xticks(xs, xs_labels)
#plt.title(d)
#plt.savefig(os.path.join(save_folder, metric+"_psnr.png"))
plt.tight_layout()
plt.savefig("powerspectra.png")  # save before show(): show() can clear the figure in some backends
plt.show()
plt.clf()
xs = np.array(xs)
spot = (xs >= 60) & (xs <= 300)
plt.plot(xs[spot], np.array(GT_freqs)[spot], label="Raw data", color="red")
plt.plot(xs[spot], np.array(NN_freqs)[spot], label="Ours", color="blue")
plt.plot(xs[spot], np.array(octree_freqs)[spot], label="SR-octree", color="gray")
plt.plot(xs[spot], np.array(SZ_freqs)[spot], label="SZ", color="green")
#plt.title("Iso3D magnitude")
#plt.ylabel("Power")
#plt.xlabel("Wavenumber")
#plt.legend()
plt.yscale("log")
plt.xscale("log")
plt.savefig("powerspectra_zoom.png")  # save before show(), as above
plt.show()
'''
results_psnr = {
"Iso2D magnitude": {
"NN": [48.04, 39.64, 32.28, 27.57, 24.64, 22.42],
"Bilinear interpolation": [41.79, 33.97, 28.89, 25.52, 23.16, 21.30],
"Bicubic interpolation": [43.52, 34.87, 29.31, 25.75, 23.31, 21.47]
},
"Iso3D magnitude": {
"NN": [52.55, 41.83, 34.44, 29.83],
"Trilinear interpolation": [43.11, 35.58, 30.77, 27.62]
},
"Mixing3D pressure": {
#"NN_old": [51.52, 45.99, 39.68, 36.90, 31.59],
"NN":[53.38, 46.536, 40.001, 34.998, 31.59],
"Trilinear interpolation": [51.00, 42.37, 36.75, 33.14, 30.72]
},
"Mixing2D magnitude": {
"NN":[45.43, 39.12, 32.86, 26.67, 22.50, 19.63],
"Bilinear interp.": [44.08, 34.91, 28.48, 23.88, 20.90, 18.96],
"Bicubic interp.": [45.36, 35.83, 29.13, 24.29, 21.14, 19.09]
},
"Vorts": {
"NN":[39.73, 35.17, 25.59, 19.06],
"Trilinear interpolation": [36.90, 27.94, 22.24, 19.34]
},
"Plume":{
"NN":[50.55, 42.90, 37.53, 33.71],
"Trilinear interpolation":[47.33, 40.18, 34.82, 29.26]
}
}
results_inner_psnr = {
"Mixing2D magnitude": {
"NN":[57.11, 45.16, 33.93, 26.83, 22.63, 19.71],
"Bilinear interp.": [43.92, 34.77, 28.36, 23.86, 20.90, 18.98],
"Bicubic interp.": [45.25, 35.71, 29.01, 24.25, 21.15, 19.12]
}
}
results_ssim = {
"Iso2D magnitude": {
"NN": [0.990, 0.926, 0.704, 0.474, 0.355, 0.310],
"Bilinear interpolation": [0.978, 0.820, 0.575, 0.401, 0.326, 0.299],
"Bicubic interpolation": [0.980, 0.854, 0.607, 0.416, 0.332, 0.302]
},
"Iso3D magnitude": {
"NN": [0.995, 0.930, 0.692, 0.432],
"Trilinear interpolation": [0.961, 0.791, 0.519, 0.330]
},
"Mixing3D pressure": {
#"NN_old": [0.999, 0.999, 0.996, 0.984, 0.962],
"NN":[0.9997, 0.9992, 0.9953, 0.9829, 0.962],
"Trilinear interpolation": [0.999, 0.997, 0.990, 0.974, 0.953]
},
"Mixing2D magnitude": {
"NN":[0.997, 0.975, 0.834, 0.576, 0.372, 0.239],
"Bilinear interp.": [0.978, 0.879, 0.669, 0.448, 0.298, 0.194],
"Bicubic interp.": [0.983, 0.900, 0.700, 0.471, 0.314, 0.207]
},
"Vorts": {
"NN":[0.961, 0.920, 0.576, 0.150],
"Trilinear interpolation": [0.933, 0.698, 0.348, 0.145]
},
"Plume":{
"NN":[0.997, 0.986, 0.955, 0.906],
"Trilinear interpolation":[0.994, 0.967, 0.902, 0.746]
}
}
font = {#'font.family' : 'normal',
#'font.weight' : 'bold',
'font.size' : 18}
plt.rcParams.update(font)
fig, ax1 = plt.subplots()
d = "Vorts"
markers = {
"NN" : "^",
"Bilinear interpolation": "s",
"Trilinear interpolation": "s",
"Bicubic interpolation": "o",
"Bilinear interp.": "s",
"Bicubic interp.": "o"
}
colors = {
"NN" : "blue",
"Bilinear interpolation": "green",
"Trilinear interpolation": "orange",
"Bicubic interpolation": "red",
"Bilinear interp.": "green",
"Bicubic interp.": "red"
}
for method in results_psnr[d].keys():
xs = []
for i in range(len(results_psnr[d][method])):
xs.append(int(2**(i+1)))
ax1.plot(xs, results_psnr[d][method], label=method,
marker=markers[method], color = colors[method])
ax2 = ax1.twinx()
for method in results_ssim[d].keys():
xs = []
for i in range(len(results_ssim[d][method])):
xs.append(int(2**(i+1)))
ax2.plot(xs, results_ssim[d][method], label=method, marker=markers[method],
color = colors[method], linestyle="--")
xs_labels = []
for i in range(len(xs)):
xs_labels.append(str(xs[i]))
print(xs)
#plt.legend()
ax1.set_ylabel("PSNR (dB)")
ax2.set_ylabel("SSIM")
ax2.set_ylim(0.15)
#plt.ylabel("PSNR (dB)")
plt.xscale("log")
plt.xticks(xs, xs_labels)
plt.xlabel("SR scale factor")
ax1.set_xlabel("SR scale factor")
plt.title(d)
fig.tight_layout()
#plt.savefig(os.path.join(save_folder, metric+"_psnr.png"))
plt.show()
'''
files_to_convert = [
"isomag2D_compressiontest"
]
file_loc_base = os.path.join(FlowSTSR_folder_path, "TestingData", "octree_files")
f = h5py.File(os.path.join(file_loc_base, files_to_convert[0]+".h5"), 'r')
d = np.array(f['data'])
#d[0].tofile("512cubed.dat")
rootgrp = Dataset(files_to_convert[0]+".nc", "w", format="NETCDF4")
rootgrp.createDimension("u")
rootgrp.createDimension("v")
#rootgrp.createDimension("w")
rootgrp.createDimension("channels", d.shape[0])
dim_0 = rootgrp.createVariable("velocity magnitude", np.float32, ("u","v"))
dim_0[:] = d[0]
#dim_1 = rootgrp.createVariable("v", np.float32, ("u","v", "w"))
#dim_1[:] = d[1]
#dim_2 = rootgrp.createVariable("w", np.float32, ("u","v", "w"))
#dim_2[:] = d[2]
'''
'''
def to_netcdf(vf, name):
rootgrp = Dataset(name+".nc", "w", format="NETCDF4")
if(len(vf.shape) == 3):
rootgrp.createDimension("u")
rootgrp.createDimension("v")
rootgrp.createDimension("channels", vf.shape[0])
for i in range(vf.shape[0]):
dim_i = rootgrp.createVariable("dim"+str(i), np.float32, ("u","v"))
dim_i[:] = vf[i]
if(len(vf.shape) == 4):
rootgrp.createDimension("u")
rootgrp.createDimension("v")
rootgrp.createDimension("w")
rootgrp.createDimension("channels", vf.shape[0])
for i in range(vf.shape[0]):
dim_i = rootgrp.createVariable("dim"+str(i), np.float32, ("u","v", "w"))
dim_i[:] = vf[i]
files_to_convert = [
"isomag2D_compressiontest",
"isomag3D_compressiontest",
"mixing3D_compressiontest",
"iso3DVF_compressiontest"
]
file_loc_base = os.path.join(FlowSTSR_folder_path, "TestingData", "octree_files")
for name in files_to_convert:
print("Loading " + name)
f = h5py.File(os.path.join(file_loc_base, name+".h5"), 'r')
d = np.array(f['data'])
to_netcdf(d, name)
print("Finished " + name)
'''
'''
VF_folder = os.path.join(FlowSTSR_folder_path, "TestingData", "iso1024")
new_VF_folder = os.path.join(FlowSTSR_folder_path, "TestingData", "iso3DVF")
#mixing_folder = os.path.join(FlowSTSR_folder_path, "InputData", "mixing_p")
#new_mixing_folder = os.path.join(FlowSTSR_folder_path, "InputData", "mix_p")
for filename in os.listdir(VF_folder):
file_loc = os.path.join(VF_folder, filename)
f = h5py.File(file_loc, 'r+')
d = np.array(f.get('data'))
f.close()
octant_no = 0
for x in range(0, d.shape[0], int(d.shape[0]/2)):
x_end = x+int(d.shape[0]/2)
for y in range(0, d.shape[1], int(d.shape[1]/2)):
y_end = y+int(d.shape[1]/2)
for z in range(0, d.shape[2], int(d.shape[2]/2)):
z_end = z+int(d.shape[2]/2)
print("Saving octant " + str(octant_no))
f_h5 = h5py.File(os.path.join(new_VF_folder, "vf_ts"+filename+\
"_octant"+str(octant_no)+'.h5'), 'w')
a = d[x:x_end, y:y_end, z:z_end,:]
a = np.transpose(a, (3, 0, 1, 2))
f_h5.create_dataset("data", data=a)
f_h5.close()
octant_no += 1
'''
'''
for filename in os.listdir(mixing_folder):
file_loc = os.path.join(mixing_folder, filename)
f = h5py.File(file_loc, 'r+')
d = np.expand_dims(f.get('data')[:,:,:,0], axis=0).astype(np.float32)
f.close()
file_loc = os.path.join(new_mixing_folder, filename)
f = h5py.File(file_loc, 'w')
f.create_dataset("data", data=d)
f.close()
'''
'''
# This simply converts a vector field from some .h5 files
# to their magnitude fields, and also splits it into octants before
# saving.
FlowSTSR_folder_path = os.path.dirname(os.path.abspath(__file__))
location = os.path.join(FlowSTSR_folder_path, "InputData", "iso1024_VF")
save_location = os.path.join(FlowSTSR_folder_path, "InputData", "iso1024_magfield")
for filename in os.listdir(location):
print("Loading " + filename)
f = h5py.File(os.path.join(location, filename), 'r')
fname = filename.split(".")[0]
data = np.array(f['data'])
print("Loaded velocity field " + str(data.shape))
mag_field = np.linalg.norm(data, axis=0)
print("Converted to velocity magnitude " + str(mag_field.shape))
octant_no = 0
for x_start, x_end in [(0, int(mag_field.shape[0]/2)), (int(mag_field.shape[0]/2), mag_field.shape[0])]:
for y_start, y_end in [(0, int(mag_field.shape[1]/2)), (int(mag_field.shape[1]/2), mag_field.shape[1])]:
for z_start, z_end in [(0, int(mag_field.shape[2]/2)), (int(mag_field.shape[2]/2), mag_field.shape[2])]:
print("Saving octant " + str(octant_no))
f_h5 = h5py.File(os.path.join(save_location, "v_mag_ts"+fname+"_octant"+str(octant_no)+'.h5'), 'w')
f_h5.create_dataset("data", data=mag_field[x_start:x_end, y_start:y_end, z_start:z_end])
f_h5.close()
octant_no += 1
'''
'''
# Experiment to see if the distribution of downscaled frames that are
# downscaled with a method that doesn't follow downscale(x, S) =
# downscale(downscale(x, S/2), S/2).
# Compare distributions with PCA? T-SNE? Just mean and variance?
#
from download_JHUTDB import get_full_frame_parallel
frames = []
name = "isotropic1024"
startts = 1
endts = 1001
ts_skip = 10
ds = 32
ds_once_data = []
ds_many_data = []
ds_once_stats = []
ds_many_stats = []
for i in range(startts, endts, ts_skip):
print("TS " + str(i))
f = get_full_frame_parallel(0, 1024, 1,#x
0, 1024, 1, #y
512, 513, 1, #z
name, i,
"u", 3,
64)
f = f[:,:,0,:].astype(np.float32)
f_img = f.copy()
f_img[:,:,0] -= f_img[:,:,0].min()
f_img[:,:,0] *= (255.0/f_img[:,:,0].max())
f_img[:,:,1] -= f_img[:,:,1].min()
f_img[:,:,1] *= (255.0/f_img[:,:,1].max())
f_img[:,:,2] -= f_img[:,:,2].min()
f_img[:,:,2] *= (255.0/f_img[:,:,2].max())
f_img = f_img.astype(np.uint8)
imageio.imwrite("full_res.png", f_img)
f = f.swapaxes(0,2).swapaxes(1,2)
f = torch.from_numpy(f).unsqueeze(0)
f_downscaled_once = F.interpolate(f.clone(), mode="bilinear", align_corners=True, scale_factor=1/ds)
f_downscaled_many = f.clone()
curr_s = 1
while(curr_s < ds):
f_downscaled_many = F.interpolate(f_downscaled_many, mode="bilinear", align_corners=True, scale_factor=1/2)
curr_s *= 2
ds_once_data.append(f_downscaled_once.clone().view(1, -1).cpu().numpy())
ds_many_data.append(f_downscaled_many.clone().view(1, -1).cpu().numpy())
ds_once_stats.append(np.array([f_downscaled_once.min(), f_downscaled_once.max(), f_downscaled_once.mean(), f_downscaled_once.std()]))
ds_many_stats.append(np.array([f_downscaled_many.min(), f_downscaled_many.max(), f_downscaled_many.mean(), f_downscaled_many.std()]))
print("DS_once min/max: %0.03f/%0.03f, mean/std: %0.03f/%0.03f" % \
(f_downscaled_once.min(), f_downscaled_once.max(), f_downscaled_once.mean(), f_downscaled_once.std()))
print("DS_many min/max: %0.03f/%0.03f, mean/std: %0.03f/%0.03f" % \
(f_downscaled_many.min(), f_downscaled_many.max(), f_downscaled_many.mean(), f_downscaled_many.std()))
ds_once_img = f_downscaled_once[0].permute(1, 2, 0).cpu().numpy()
ds_once_img[:,:,0] -= ds_once_img[:,:,0].min()
ds_once_img[:,:,0] *= (255.0/ds_once_img[:,:,0].max())
ds_once_img[:,:,1] -= ds_once_img[:,:,1].min()
ds_once_img[:,:,1] *= (255.0/ds_once_img[:,:,1].max())
ds_once_img[:,:,2] -= ds_once_img[:,:,2].min()
ds_once_img[:,:,2] *= (255.0/ds_once_img[:,:,2].max())
ds_once_img = ds_once_img.astype(np.uint8)
imageio.imwrite("downscaled_once.png", ds_once_img)
ds_many_img = f_downscaled_many[0].permute(1, 2, 0).cpu().numpy()
ds_many_img[:,:,0] -= ds_many_img[:,:,0].min()
ds_many_img[:,:,0] *= (255.0/ds_many_img[:,:,0].max())
ds_many_img[:,:,1] -= ds_many_img[:,:,1].min()
ds_many_img[:,:,1] *= (255.0/ds_many_img[:,:,1].max())
ds_many_img[:,:,2] -= ds_many_img[:,:,2].min()
ds_many_img[:,:,2] *= (255.0/ds_many_img[:,:,2].max())
ds_many_img = ds_many_img.astype(np.uint8)
imageio.imwrite("downscaled_many.png", ds_many_img)
ds_once_stats = np.array(ds_once_stats)
ds_many_stats = np.array(ds_many_stats)
ds_once_data = np.concatenate(ds_once_data, axis=0)
ds_many_data = np.concatenate(ds_many_data, axis=0)
all_data = np.concatenate([ds_once_data, ds_many_data], axis=0)
from sklearn.decomposition import PCA
pca = PCA(n_components=2, svd_solver='full')
pca.fit(all_data)
all_data_transformed = pca.transform(all_data)
plt.scatter(all_data_transformed[:ds_once_data.shape[0],0], all_data_transformed[:ds_once_data.shape[0],1],
color='red', label='downscaled once', marker='x',alpha=0.5)
plt.scatter(all_data_transformed[ds_once_data.shape[0]:,0], all_data_transformed[ds_once_data.shape[0]:,1],
color='blue', label='downscaled many times', marker='o',alpha=0.5)
plt.legend()
plt.xlabel("PCA dimension 1")
plt.ylabel("PCA dimension 2")
plt.title("PCA decomposition of 2D "+str(ds)+"x downscaled fluid frames slices")
plt.show()
plt.clf()
plt.plot()
plt.plot(np.arange(0, ds_once_stats.shape[0]), ds_once_stats[:,0], marker='x', label='downscaled once minimum velocity component')
plt.plot(np.arange(0, ds_many_stats.shape[0]), ds_many_stats[:,0], marker='o', label='downscaled many minimum velocity component')
plt.plot(np.arange(0, ds_once_stats.shape[0]), ds_once_stats[:,1], marker='x', label='downscaled once maximum velocity component')
plt.plot(np.arange(0, ds_many_stats.shape[0]), ds_many_stats[:,1], marker='o', label='downscaled many maximum velocity component')
plt.plot(np.arange(0, ds_once_stats.shape[0]), ds_once_stats[:,2], marker='x', label='downscaled once mean velocity component')
plt.plot(np.arange(0, ds_many_stats.shape[0]), ds_many_stats[:,2], marker='o', label='downscaled many mean velocity component')
plt.plot(np.arange(0, ds_once_stats.shape[0]), ds_once_stats[:,3], marker='x', label='downscaled once std of velocity component')
plt.plot(np.arange(0, ds_many_stats.shape[0]), ds_many_stats[:,3], marker='o', label='downscaled many std of velocity component')
plt.legend()
plt.xlabel("Simulation timestep")
plt.ylabel("m/s")
plt.title("Min/max/mean/std of data downscaled by a factor of " + str(ds) + "x once or a factor of 2x " + str(int(np.log(ds)/np.log(2))) + " times")
plt.show()
'''
'''
# This experiment shows the seams between leaf nodes of a quadtree
# when they are upscaled separately
skip = 32
ds = 8
a = imageio.imread("./TestingData/quadtree_images/Lenna.jpg").astype(np.float32)
b = torch.tensor(a).cuda().permute(2, 0, 1).unsqueeze(0)
c = F.interpolate(b.clone()[:,:,::ds,::ds], mode="bilinear", scale_factor=ds, align_corners=True)
c = c[0].permute(1, 2, 0).cpu().numpy()
imageio.imwrite("Lenna_noseams.jpg", c)
a[::skip, :, :] = np.array([0, 0, 0])
a[:, ::skip, :] = np.array([0, 0, 0])
imageio.imwrite("Lenna_cutput.jpg", a)
for x in range(0, b.shape[2], skip):
for y in range(0, b.shape[3], skip):
b[:,:,x:x+skip,y:y+skip] = F.interpolate(b[:,:,x:x+skip:ds,y:y+skip:ds],
scale_factor=ds, mode="bilinear", align_corners=True)
b = b[0].permute(1, 2, 0).cpu().numpy()
imageio.imwrite("Lenna_seams.jpg", b)
'''
'''
# This experiment tests which downscaling methods follow have the property
# downscale(x, S) = downscale(downscale(x, S/2), S/2)
a = torch.randn([1, 1, 16, 16]).cuda()
b = a.clone()
a = F.interpolate(a, scale_factor=0.5, mode='bilinear', align_corners=True)
a = F.interpolate(a, scale_factor=0.5, mode='bilinear', align_corners=True)
b = F.interpolate(b, scale_factor=0.25, mode='bilinear', align_corners=True)
print("Bilinear interpolation difference: " +str((b-a).sum()))
a = torch.randn([1, 1, 16, 16]).cuda()
b = a.clone()
a = F.interpolate(a, scale_factor=0.5, mode='bicubic', align_corners=True)
a = F.interpolate(a, scale_factor=0.5, mode='bicubic', align_corners=True)
b = F.interpolate(b, scale_factor=0.25, mode='bicubic', align_corners=True)
print("Bicubic interpolation difference: " +str((b-a).sum()))
a = torch.randn([1, 1, 16, 16]).cuda()
b = a.clone()
a = AvgPool2D(a, 2)
a = AvgPool2D(a, 2)
b = AvgPool2D(b, 4)
print("Avgerage pooling difference: " +str((b-a).sum()))
a = torch.randn([1, 1, 16, 16]).cuda()
b = a.clone()
a = a[:,:,::2,::2]
a = a[:,:,::2,::2]
b = b[:,:,::4,::4]
print("Subsampling difference: " +str((b-a).sum()))
'''
'''
f = h5py.File('bigboy.h5', 'r')
data = torch.tensor(f['data']).type(torch.FloatTensor).cuda()
f.close()
data = data.unsqueeze(0)
data_mag = torch.linalg.norm(data, dim=1)[0]
data_mag /= data_mag.max()
image_out = torch.zeros(data_mag.shape).cuda()
image_out = image_out.unsqueeze(2)
image_out = image_out.repeat(1, 1, 3)
'''
#plt.hist(data_mag.cpu().numpy().flatten(), bins=25, density=True, cumulative=True)
#plt.show()
'''
# black white red
color_mapping_keys = [0.01, 0.3, 0.6]
color_mapping_values = [torch.from_numpy(np.array([0.0, 0.0, 0.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([200.0, 200.0, 200.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([200.0, 0.0, 0.0])).type(torch.FloatTensor).cuda()]
'''
'''
# rainbow R O Y G B I V R
color_mapping_keys = [0.0, 0.08, 0.16, 0.25, 0.3, 0.35, 0.4, 1.0]
color_mapping_values = [torch.from_numpy(np.array([128, 0.0, 0.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([255.0, 136.0, 0.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([255.0, 255.0, 0.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([0.0, 255.0, 0.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([0.0, 0.0, 255.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([0.0, 255.0, 200.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([128.0, 76.0, 128.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([128.0, 0.0, 0.0])).type(torch.FloatTensor).cuda()]
'''
'''
# another blk Y G B I V R
color_mapping_keys = [0.0, 0.21, 0.41, 0.6]
color_mapping_values = [torch.from_numpy(np.array([9.0, 171.0, 166.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([0.0, 0.0, 0.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([121.0, 9.0, 9.0])).type(torch.FloatTensor).cuda(),
torch.from_numpy(np.array([255.0, 255.0, 255.0])).type(torch.FloatTensor).cuda(),
]
image_out[data_mag < color_mapping_keys[0]] = color_mapping_values[0]
for i in range(len(color_mapping_keys)-1):
ratios = data_mag.clone()
ratios -= color_mapping_keys[i]
ratios *= 1 / (color_mapping_keys[i+1] - color_mapping_keys[i])
ratios = ratios.type(torch.FloatTensor).cuda()
    cmap = (1-ratios).view(ratios.shape[0], ratios.shape[1], 1).repeat(1, 1, 3) \
        * color_mapping_values[i].view(1, 1, 3).repeat(ratios.shape[0], ratios.shape[1], 1)
    cmap += color_mapping_values[i+1].view(1, 1, 3).repeat(ratios.shape[0], ratios.shape[1], 1) \
        * ratios.view(ratios.shape[0], ratios.shape[1], 1).repeat(1, 1, 3)
indices = torch.bitwise_and(data_mag >= color_mapping_keys[i],
data_mag < color_mapping_keys[i+1])
image_out[indices] = cmap[indices]
image_out[data_mag > color_mapping_keys[-1]] = color_mapping_values[-1]
img = image_out.cpu().numpy().astype(np.uint8)
imageio.imwrite("bigboy.jpg", img)
''' |
'''
Unit tests for tree_util.py.
'''
import unittest
import tree_util
class TreeUtilTest(unittest.TestCase):
def setUp(self):
self.root = tree_util.Node({'id':0, 'form':'improve'}, [])
self.root.add_child(tree_util.Node({'id':1, 'form':'economy'}))
self.root.add_child(tree_util.Node({'id':2, 'form':'to'}))
def test_find(self):
testcases = [('id', 2), ('form', 'economy'), ('pos', 1), ('form', 'one')]
self.assertEqual(self.root.find('id', 2)[0].label['id'], 2)
self.assertEqual(self.root.find('form', 'economy')[0].label['form'], 'economy')
self.assertEqual(len(self.root.find('pos', 1)), 0)
self.assertEqual(len(self.root.find('form', 'one')), 0)
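    def test_add_child(self):
        # A supplementary check (a sketch): it assumes find() also sees
        # children added after construction, as exercised in setUp above.
        self.root.add_child(tree_util.Node({'id': 3, 'form': 'one'}))
        self.assertEqual(len(self.root.find('form', 'one')), 1)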
if __name__ == '__main__':
unittest.main() |
"""
This project lets you practice NESTED LOOPS (i.e., loops within loops)
in the context of SEQUENCES OF SUB-SEQUENCES.
Authors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher,
Mark Hays, Amanda Stouder, Derek Whitley, their colleagues,
and PUT_YOUR_NAME_HERE.
""" # TODO: 1. PUT YOUR NAME IN THE ABOVE LINE.
import time
import testing_helper
def main():
""" Calls the other functions to test them. """
print()
print("Un-comment and re-comment calls in MAIN one by one as you work.")
# run_test_sum_numbers()
# run_test_multiply_by_c()
def run_test_sum_numbers():
""" Tests the sum_numbers function. """
# -------------------------------------------------------------------------
# TODO: 2. Implement this TEST function.
# It TESTS the sum_numbers function defined below.
# Include at least ** 4 ** tests (we wrote 3 for you).
# -------------------------------------------------------------------------
print()
print('--------------------------------------------------')
print('Testing the sum_numbers function:')
print('--------------------------------------------------')
format_string = ' sum_numbers( {} )'
test_results = [0, 0] # Number of tests passed, failed.
# Test 1:
expected = 38
print_expected_result_of_test([[(3, 1, 4), (10, 10), [1, 2, 3, 4]]],
expected, test_results, format_string)
actual = sum_numbers([(3, 1, 4),
(10, 10),
[1, 2, 3, 4]])
print_actual_result_of_test(expected, actual, test_results)
# Test 2:
expected = 5
print_expected_result_of_test([([], [5], [])],
expected, test_results, format_string)
actual = sum_numbers(([], [5], []))
print_actual_result_of_test(expected, actual, test_results)
# Test 3:
expected = 105
print_expected_result_of_test([[(5, 0, 4), (10,), (), (8, 3, 2, 10, 10),
(3, 5), (1, 2, 3, 4, 5, 6, 7, 8, 9)]],
expected, test_results, format_string)
actual = sum_numbers([(5, 0, 4),
(10,),
(),
(8, 3, 2, 10, 10),
(3, 5),
(1, 2, 3, 4, 5, 6, 7, 8, 9)])
print_actual_result_of_test(expected, actual, test_results)
# -------------------------------------------------------------------------
# TODO: 2 (continued): Add your ADDITIONAL test here:
# -------------------------------------------------------------------------
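    # Test 4 (a sample additional test, left as a commented-out sketch;
    # it assumes sum_numbers returns 0 for an empty outer sequence):
    # expected = 0
    # print_expected_result_of_test([[]], expected, test_results,
    #                               format_string)
    # actual = sum_numbers([])
    # print_actual_result_of_test(expected, actual, test_results)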
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def sum_numbers(seq_seq):
"""
Returns the sum of the numbers in the given sequence
of subsequences. For example, if the given argument is:
[(3, 1, 4), (10, 10), [1, 2, 3, 4]]
then this function returns 38
(which is 3 + 1 + 4 + 10 + 10 + 1 + 2 + 3 + 4).
Preconditions: the given argument is a sequences of sequences,
and each item in the subsequences is a number.
"""
# -------------------------------------------------------------------------
# TODO: 3. Implement and test this function.
# Note that you should write its TEST function first (above).
# __
# NOTE: This is a classic SEQUENCE of SEQUENCES problem:
# -- Each loop is simply the pattern you have seen many times.
# -- But INSIDE the OUTER loop and BEFORE the INNER loop,
# you can 'extract' the current (OUTER loop) SUB-list
# to loop through it in the INNER loop.
# -- See m2r_nested_loops_in_sequences as needed.
# -------------------------------------------------------------------------
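    # A possible solution sketch (left commented out so the TODO above
    # remains an exercise; it assumes the preconditions in the docstring):
    # total = 0
    # for subsequence in seq_seq:
    #     for number in subsequence:
    #         total = total + number
    # return total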
def run_test_multiply_by_c():
""" Tests the multiply_by_c function. """
# -------------------------------------------------------------------------
# We have supplied tests for you. No additional tests are required,
# although you are welcome to supply more tests if you choose.
# -------------------------------------------------------------------------
print()
print('------------------------------------------------------')
print('Testing the multiply_by_c function:')
print('------------------------------------------------------')
format_string = ' multiply_by_c( {}, {} )'
test_results = [0, 0] # Number of tests passed, failed.
# -------------------------------------------------------------------------
# Test 1: Tests whether the function MUTATES the sub-lists correctly.
seq_of_lists = ([10, 3, 101], [8, 0])
c = 3 # Each number in each sub-list should be multiplied by this
# After the function call, seq_of_lists should be as follows:
expected = ([30, 9, 303], [24, 0])
print_expected_result_of_test([c, seq_of_lists], expected,
test_results, format_string)
actual = multiply_by_c(c,
seq_of_lists)
print_actual_result_of_test(expected, seq_of_lists, test_results)
    print('The above is for seq_of_lists (whose lists should be MUTATED).')
# Test 2: (a continuation of Test 1)
# Tests whether the function does not RETURN a value (i.e., returns None)
print_expected_result_of_test([c, seq_of_lists], None,
test_results, format_string)
print_actual_result_of_test(None, actual, test_results)
print('The above is for the RETURNED VALUE, which should be')
print('the constant None, NOT the STRING "None".')
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# Test 3: Tests whether the function MUTATES the sub-lists correctly.
seq_of_lists = ([4, 2, 1], [8, 0], [1, 2, 3, 4, 5], [], [101])
c = 2 # Each number in each sub-list should be multiplied by this
# After the function call, seq_of_lists should be as follows:
expected = ([8, 4, 2], [16, 0], [2, 4, 6, 8, 10], [], [202])
print_expected_result_of_test([c, seq_of_lists], expected,
test_results, format_string)
actual = multiply_by_c(c,
seq_of_lists)
print_actual_result_of_test(expected, seq_of_lists, test_results)
    print('The above is for seq_of_lists (whose lists should be MUTATED).')
# Test 4: (a continuation of Test 3)
# Tests whether the function does not RETURN a value (i.e., returns None)
print_expected_result_of_test([c, seq_of_lists], None,
test_results, format_string)
print_actual_result_of_test(None, actual, test_results)
print('The above is for the RETURNED VALUE, which should be')
print('the constant None, NOT the STRING "None".')
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# Test 5: Tests whether the function MUTATES the sub-lists correctly.
seq_of_lists = [[], [1], [20, 2, 30, 4, 100, 8, 2, 2, 2], [], [300],
[5, 5], [], [-10, 4]]
c = 100 # Each number in each sub-list should be multiplied by this
# After the function call, seq_of_lists should be as follows:
expected = [[], [100], [2000, 200, 3000, 400, 10000, 800, 200, 200, 200],
[], [30000], [500, 500], [], [-1000, 400]]
print_expected_result_of_test([c, seq_of_lists], expected,
test_results, format_string)
actual = multiply_by_c(c,
seq_of_lists)
print_actual_result_of_test(expected, seq_of_lists, test_results)
    print('The above is for seq_of_lists (whose lists should be MUTATED).')
# Test 6: (a continuation of Test 5)
# Tests whether the function does not RETURN a value (i.e., returns None)
print_expected_result_of_test([c, seq_of_lists], None,
test_results, format_string)
print_actual_result_of_test(None, actual, test_results)
print('The above is for the RETURNED VALUE, which should be')
print('the constant None, NOT the STRING "None".')
# -------------------------------------------------------------------------
# SUMMARY of test results:
print_summary_of_test_results(test_results)
def multiply_by_c(c, sequence_of_lists):
"""
What comes in:
-- a number c
-- a sequence of lists, with each item in the lists being a number
What goes out: Nothing (i.e. None).
Side effects: MUTATES the given lists by multiplying
each item in the lists by the given number c.
For example, consider the following code:
seq_of_lists = ([4, 2, 1], [8, 0], [1, 2, 3, 4, 5], [], [101])
v = multiply_by_c(2,
seq_of_lists)
After the above code runs,
v (the returned value) should be None
and seq_of_lists should be:
([8, 4, 2], [16, 0], [2, 4, 6, 8, 10], [], [202]).
Type hints:
      :type c: float
:type sequence_of_lists: sequence of lists of numbers
"""
# -------------------------------------------------------------------------
# TODO: 4. Implement and test this function.
# ** READ THE TESTS that have been written for you (ABOVE).
# ** ASK QUESTIONS if you do not understand the TESTS (ABOVE).
# -------------------------------------------------------------------------
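    # A possible solution sketch (left commented out so the TODO above
    # remains an exercise). It mutates each sub-list in place and,
    # by falling off the end, returns None:
    # for sub_list in sequence_of_lists:
    #     for k in range(len(sub_list)):
    #         sub_list[k] = sub_list[k] * c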
###############################################################################
# Our tests use the following to print error messages in red.
# Do NOT change it. You do NOT have to do anything with it.
###############################################################################
def print_expected_result_of_test(arguments, expected,
test_results, format_string, suffix=''):
testing_helper.print_expected_result_of_test(arguments, expected,
test_results, format_string,
suffix)
def print_actual_result_of_test(expected, actual, test_results,
precision=None):
testing_helper.print_actual_result_of_test(expected, actual,
test_results, precision)
def print_summary_of_test_results(test_results):
testing_helper.print_summary_of_test_results(test_results)
# To allow color-coding the output to the console:
USE_COLORING = True # Change to False to revert to OLD style coloring
testing_helper.USE_COLORING = USE_COLORING
if USE_COLORING:
# noinspection PyShadowingBuiltins
print = testing_helper.print_colored
else:
# noinspection PyShadowingBuiltins
print = testing_helper.print_uncolored
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# The try .. except prevents error messages on the console from being
# intermingled with ordinary output to the console.
# -----------------------------------------------------------------------------
try:
main()
except Exception:
print('ERROR - While running this test,', color='red')
print('your code raised the following exception:', color='red')
print()
time.sleep(1)
raise
|
"""
Objects that represent -- and generate code for -- C/C++ Python extension modules.
Modules and Sub-modules
=======================
A L{Module} object takes care of generating the code for a Python
module. The way a Python module is organized is as follows. There is
one "root" L{Module} object. There can be any number of
L{SubModule}s. Sub-modules themselves can have additional sub-modules.
Calling L{Module.generate} on the root module will trigger code
generation for the whole module, not only functions and types, but
also all its sub-modules.
In Python, a sub-module will appear as a I{built-in} Python module
that is available as an attribute of its parent module. For instance,
a module I{foo} having a sub-module I{xpto} appears like this::
|>>> import foo
|>>> foo.xpto
|<module 'foo.xpto' (built-in)>
Modules and C++ namespaces
==========================
Modules can be associated with specific C++ namespaces. This means,
for instance, that any C++ class wrapped inside that module must
belong to that C++ namespace. Example::
|>>> from cppclass import *
|>>> mod = Module("foo", cpp_namespace="::foo")
|>>> mod.add_class("Bar")
|<pybindgen.CppClass 'foo::Bar'>
When we have a toplevel C++ namespace which contains another nested
namespace, we want to wrap the nested namespace as a Python
sub-module. The method L{ModuleBase.add_cpp_namespace} makes it easy
to create sub-modules for wrapping nested namespaces. For instance::
|>>> from cppclass import *
|>>> mod = Module("foo", cpp_namespace="::foo")
|>>> submod = mod.add_cpp_namespace('xpto')
|>>> submod.add_class("Bar")
|<pybindgen.CppClass 'foo::xpto::Bar'>
"""
from pybindgen.function import Function, OverloadedFunction, CustomFunctionWrapper
from pybindgen.typehandlers.base import CodeBlock, DeclarationsScope, ReturnValue, TypeHandler
from pybindgen.typehandlers.codesink import MemoryCodeSink, CodeSink, FileCodeSink, NullCodeSink
from pybindgen.cppclass import CppClass
from pybindgen.cppexception import CppException
from pybindgen.enum import Enum
from pybindgen.container import Container
from pybindgen.converter_functions import PythonToCConverter, CToPythonConverter
from pybindgen import utils
import warnings
import traceback
import collections
class MultiSectionFactory(object):
"""
Abstract base class for objects providing support for
multi-section code generation, i.e., splitting the generated C/C++
code into multiple files. The generated code will generally have
the following structure:
1. For each section there is one source file specific to that section;
2. There is a I{main} source file, e.g. C{foomodule.cc}. Code
that does not belong to any section will be included in this
main file;
3. Finally, there is a common header file, (e.g. foomodule.h),
which is included by the main file and section files alike.
Typically this header file contains function prototypes and
type definitions.
@see: L{Module.generate}
"""
def get_section_code_sink(self, section_name):
"""
Create and/or return a code sink for a given section.
:param section_name: name of the section
:return: a L{CodeSink} object that will receive generated code belonging to the section C{section_name}
"""
raise NotImplementedError
def get_main_code_sink(self):
"""
Create and/or return a code sink for the main file.
"""
raise NotImplementedError
def get_common_header_code_sink(self):
"""
Create and/or return a code sink for the common header.
"""
raise NotImplementedError
def get_common_header_include(self):
"""
Return the argument for an #include directive to include the common header.
:returns: a string with the header name, including surrounding
"" or <>. For example, '"foomodule.h"'.
"""
raise NotImplementedError
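# An illustrative concrete factory (a sketch, not part of pybindgen's API;
# the file-naming scheme is an assumption for illustration only). It writes
# one source file per section, plus a main file and a shared header:
#
#   class SimpleFileMultiSectionFactory(MultiSectionFactory):
#       def __init__(self, basename):
#           self._basename = basename
#           self._main = FileCodeSink(open(basename + "module.cc", "wt"))
#           self._header = FileCodeSink(open(basename + "module.h", "wt"))
#           self._sections = {}
#       def get_section_code_sink(self, section_name):
#           # create the per-section file lazily, on first use
#           if section_name not in self._sections:
#               f = open("%s_%s.cc" % (self._basename, section_name), "wt")
#               self._sections[section_name] = FileCodeSink(f)
#           return self._sections[section_name]
#       def get_main_code_sink(self):
#           return self._main
#       def get_common_header_code_sink(self):
#           return self._header
#       def get_common_header_include(self):
#           return '"%smodule.h"' % self._basename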
class _SinkManager(object):
"""
Internal abstract base class for bridging differences between
multi-file and single-file code generation.
"""
def get_code_sink_for_wrapper(self, wrapper):
"""
:param wrapper: wrapper object
:returns: (body_code_sink, header_code_sink)
"""
raise NotImplementedError
def get_includes_code_sink(self):
raise NotImplementedError
def get_main_code_sink(self):
raise NotImplementedError
def close(self):
raise NotImplementedError
class _MultiSectionSinkManager(_SinkManager):
"""
Sink manager that deals with multi-section code generation.
"""
def __init__(self, multi_section_factory):
super(_MultiSectionSinkManager, self).__init__()
self.multi_section_factory = multi_section_factory
utils.write_preamble(self.multi_section_factory.get_common_header_code_sink())
self.multi_section_factory.get_main_code_sink().writeln(
"#include %s" % self.multi_section_factory.get_common_header_include())
self._already_initialized_sections = {}
self._already_initialized_sections['__main__'] = True
def get_code_sink_for_wrapper(self, wrapper):
header_sink = self.multi_section_factory.get_common_header_code_sink()
section = getattr(wrapper, "section", None)
if section is None:
return self.multi_section_factory.get_main_code_sink(), header_sink
else:
section_sink = self.multi_section_factory.get_section_code_sink(section)
if section not in self._already_initialized_sections:
self._already_initialized_sections[section] = True
section_sink.writeln("#include %s" % self.multi_section_factory.get_common_header_include())
return section_sink, header_sink
def get_includes_code_sink(self):
return self.multi_section_factory.get_common_header_code_sink()
def get_main_code_sink(self):
return self.multi_section_factory.get_main_code_sink()
def close(self):
pass
class _MonolithicSinkManager(_SinkManager):
"""
Sink manager that deals with single-section monolithic code generation.
"""
def __init__(self, code_sink):
super(_MonolithicSinkManager, self).__init__()
self.final_code_sink = code_sink
self.null_sink = NullCodeSink()
self.includes = MemoryCodeSink()
self.code_sink = MemoryCodeSink()
utils.write_preamble(code_sink)
def get_code_sink_for_wrapper(self, dummy_wrapper):
return self.code_sink, self.code_sink
def get_includes_code_sink(self):
return self.includes
def get_main_code_sink(self):
return self.code_sink
def close(self):
self.includes.flush_to(self.final_code_sink)
self.code_sink.flush_to(self.final_code_sink)
class ModuleBase(dict):
"""
ModuleBase objects can be indexed dictionary style to access contained types. Example::
>>> from enum import Enum
>>> from cppclass import CppClass
>>> m = Module("foo", cpp_namespace="foo")
>>> subm = m.add_cpp_namespace("subm")
>>> c1 = m.add_class("Bar")
>>> c2 = subm.add_class("Zbr")
>>> e1 = m.add_enum("En1", ["XX"])
>>> e2 = subm.add_enum("En2", ["XX"])
>>> m["Bar"] is c1
True
>>> m["foo::Bar"] is c1
True
>>> m["En1"] is e1
True
>>> m["foo::En1"] is e1
True
>>> m["badname"]
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
KeyError: 'badname'
>>> m["foo::subm::Zbr"] is c2
True
>>> m["foo::subm::En2"] is e2
True
"""
def __init__(self, name, parent=None, docstring=None, cpp_namespace=None):
"""
Note: this is an abstract base class, see L{Module}
:param name: module name
:param parent: parent L{module<Module>} (i.e. the one that contains this submodule) or None if this is a root module
:param docstring: docstring to use for this module
:param cpp_namespace: C++ namespace prefix associated with this module
:return: a new module object
"""
super(ModuleBase, self).__init__()
self.parent = parent
self.docstring = docstring
self.submodules = []
self.enums = []
self.typedefs = [] # list of (wrapper, alias) tuples
self._forward_declarations_declared = False
self.cpp_namespace = cpp_namespace
if self.parent is None:
error_return = 'return MOD_ERROR;'
self.after_forward_declarations = MemoryCodeSink()
else:
self.after_forward_declarations = None
self.parent.submodules.append(self)
error_return = 'return NULL;'
self.prefix = None
self.init_function_name = None
self._name = None
self.name = name
path = self.get_namespace_path()
if path and path[0] == '::':
del path[0]
self.cpp_namespace_prefix = '::'.join(path)
self.declarations = DeclarationsScope()
self.functions = collections.OrderedDict() # name => OverloadedFunction
self.classes = []
self.containers = []
self.exceptions = []
self.before_init = CodeBlock(error_return, self.declarations)
self.after_init = CodeBlock(error_return, self.declarations,
predecessor=self.before_init)
self.c_function_name_transformer = None
self.set_strip_prefix(name + '_')
if parent is None:
self.header = MemoryCodeSink()
self.body = MemoryCodeSink()
self.one_time_definitions = {}
self.includes = []
else:
self.header = parent.header
self.body = parent.body
self.one_time_definitions = parent.one_time_definitions
self.includes = parent.includes
self._current_section = '__main__'
def get_current_section(self):
return self.get_root()._current_section
current_section = property(get_current_section)
def begin_section(self, section_name):
"""
Declare that types and functions registered with the module in
the future belong to the section given by that section_name
parameter, until a matching end_section() is called.
.. note::
:meth:`begin_section`/:meth:`end_section` are silently ignored
unless a :class:`MultiSectionFactory` object is used as code
generation output.
"""
if self.current_section != '__main__':
raise ValueError("begin_section called while current section not ended")
if section_name == '__main__':
raise ValueError ("__main__ not allowed as section name")
assert self.parent is None
self._current_section = section_name
def end_section(self, section_name):
"""
Declare the end of a section, i.e. further types and functions
will belong to the main module.
:param section_name: name of section; must match the one in
the previous :meth:`begin_section` call.
"""
assert self.parent is None
if self._current_section != section_name:
raise ValueError("end_section called for wrong section: expected %r, got %r"
% (self._current_section, section_name))
self._current_section = '__main__'
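    # Typical usage (a sketch; sections only take effect when generating
    # through a MultiSectionFactory):
    #
    #   mod.begin_section('section1')
    #   mod.add_function(...)   # wrappers registered here go to 'section1'
    #   mod.end_section('section1')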
def get_name(self):
return self._name
def set_name(self, name):
self._name = name
if self.parent is None:
self.prefix = self.name.replace('.', '_')
self.init_function_name = "init%s" % (self.name.split('.')[-1],)
else:
self.prefix = self.parent.prefix + "_" + self.name
self.init_function_name = "init%s" % (self.prefix,)
name = property(get_name, set_name)
def get_submodule(self, submodule_name):
"get a submodule by its name"
for submodule in self.submodules:
if submodule.name == submodule_name:
return submodule
raise ValueError("submodule %s not found" % submodule_name)
def get_root(self):
":return: the root :class:`Module` (even if it is self)"
root = self
while root.parent is not None:
root = root.parent
return root
def set_strip_prefix(self, prefix):
"""Sets the prefix string to be used when transforming a C
function name into the python function name; the given prefix
string is removed from the C function name."""
def strip_prefix(c_name):
"""A C funtion name transformer that simply strips a
common prefix from the name"""
if c_name.startswith(prefix):
return c_name[len(prefix):]
else:
return c_name
self.c_function_name_transformer = strip_prefix
def set_c_function_name_transformer(self, transformer):
"""Sets the function to be used when transforming a C function
        name into the python function name; the given function
is called like this::
python_name = transformer(c_name)
"""
self.c_function_name_transformer = transformer
def add_include(self, include):
"""
Adds an additional include directive, needed to compile this python module
:param include: the name of the header file to include, including
surrounding "" or <>.
"""
include = utils.ascii(include)
assert include.startswith('"') or include.startswith('<')
assert include.endswith('"') or include.endswith('>')
if include not in self.includes:
self.includes.append(include)
def _add_function_obj(self, wrapper):
assert isinstance(wrapper, Function)
name = utils.ascii(wrapper.custom_name)
if name is None:
name = self.c_function_name_transformer(wrapper.function_name)
name = utils.get_mangled_name(name, wrapper.template_parameters)
try:
overload = self.functions[name]
except KeyError:
overload = OverloadedFunction(name)
self.functions[name] = overload
wrapper.module = self
wrapper.section = self.current_section
overload.add(wrapper)
def add_function(self, *args, **kwargs):
"""
Add a function to the module/namespace. See the documentation for
:meth:`Function.__init__` for information on accepted parameters.
"""
if len(args) >= 1 and isinstance(args[0], Function):
func = args[0]
warnings.warn("add_function has changed API; see the API documentation",
DeprecationWarning, stacklevel=2)
if len(args) == 2:
func.custom_name = args[1]
elif 'name' in kwargs:
assert len(args) == 1
func.custom_name = kwargs['name']
else:
assert len(args) == 1
assert len(kwargs) == 0
else:
try:
func = Function(*args, **kwargs)
except utils.SkipWrapper:
return None
self._add_function_obj(func)
return func
def add_custom_function_wrapper(self, *args, **kwargs):
"""
Add a function, using custom wrapper code, to the module/namespace. See the documentation for
:class:`pybindgen.function.CustomFunctionWrapper` for information on accepted parameters.
"""
try:
func = CustomFunctionWrapper(*args, **kwargs)
except utils.SkipWrapper:
return None
self._add_function_obj(func)
return func
def register_type(self, name, full_name, type_wrapper):
"""
Register a type wrapper with the module, for easy access in
the future. Normally should not be called by the programmer,
as it is meant for internal pybindgen use and called automatically.
:param name: type name without any C++ namespace prefix, or None
:param full_name: type name with a C++ namespace prefix, or None
:param type_wrapper: the wrapper object for the type (e.g. L{CppClass} or L{Enum})
"""
module = self
if name:
module[name] = type_wrapper
if full_name:
while module is not None:
module[full_name] = type_wrapper
module = module.parent
def _add_class_obj(self, class_):
"""
Add a class to the module.
:param class_: a CppClass object
"""
assert isinstance(class_, CppClass)
class_.module = self
class_.section = self.current_section
self.classes.append(class_)
self.register_type(class_.name, class_.full_name, class_)
def add_class(self, *args, **kwargs):
"""
Add a class to the module. See the documentation for
L{CppClass.__init__} for information on accepted parameters.
"""
if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], CppClass):
cls = args[0]
warnings.warn("add_class has changed API; see the API documentation",
DeprecationWarning, stacklevel=2)
else:
cls = CppClass(*args, **kwargs)
self._add_class_obj(cls)
return cls
def add_struct(self, *args, **kwargs):
"""
Add a struct to the module.
In addition to the parameters accepted by
L{CppClass.__init__}, this method accepts the following
keyword parameters:
- no_constructor (bool): if True, the structure will not
have a constructor by default (if omitted, it will be
considered to have a trivial constructor).
- no_copy (bool): if True, the structure will not
have a copy constructor by default (if omitted, it will be
considered to have a simple copy constructor).
"""
try:
no_constructor = kwargs['no_constructor']
except KeyError:
no_constructor = False
else:
del kwargs['no_constructor']
try:
no_copy = kwargs['no_copy']
except KeyError:
no_copy = False
else:
del kwargs['no_copy']
struct = CppClass(*args, **kwargs)
struct.stack_where_defined = traceback.extract_stack()
self._add_class_obj(struct)
if not no_constructor:
struct.add_constructor([])
if not no_copy:
struct.add_copy_constructor()
return struct
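    # Example (a sketch): mod.add_struct('Point', no_copy=True) adds a
    # struct with a default trivial constructor but no copy constructor.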
def add_cpp_namespace(self, name):
"""
Add a nested module namespace corresponding to a C++
namespace. If the requested namespace was already added, the
existing module is returned instead of creating a new one.
:param name: name of C++ namespace (just the last component,
not full scoped name); this also becomes the name of the
submodule.
:return: a L{SubModule} object that maps to this namespace.
"""
name = utils.ascii(name)
try:
return self.get_submodule(name)
except ValueError:
module = SubModule(name, parent=self, cpp_namespace=name)
module.stack_where_defined = traceback.extract_stack()
return module
def _add_enum_obj(self, enum):
"""
Add an enumeration.
"""
assert isinstance(enum, Enum)
self.enums.append(enum)
enum.module = self
enum.section = self.current_section
self.register_type(enum.name, enum.full_name, enum)
def add_enum(self, *args, **kwargs):
"""
Add an enumeration to the module. See the documentation for
L{Enum.__init__} for information on accepted parameters.
"""
if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], Enum):
enum = args[0]
warnings.warn("add_enum has changed API; see the API documentation",
DeprecationWarning, stacklevel=2)
else:
enum = Enum(*args, **kwargs)
enum.stack_where_defined = traceback.extract_stack()
self._add_enum_obj(enum)
return enum
def _add_container_obj(self, container):
"""
Add a container to the module.
:param container: a L{Container} object
"""
assert isinstance(container, Container)
container.module = self
container.section = self.current_section
self.containers.append(container)
self.register_type(container.name, container.full_name, container)
def add_container(self, *args, **kwargs):
"""
Add a container to the module. See the documentation for
L{Container.__init__} for information on accepted parameters.
"""
try:
container = Container(*args, **kwargs)
except utils.SkipWrapper:
return None
container.stack_where_defined = traceback.extract_stack()
self._add_container_obj(container)
return container
def _add_exception_obj(self, exc):
assert isinstance(exc, CppException)
exc.module = self
exc.section = self.current_section
self.exceptions.append(exc)
self.register_type(exc.name, exc.full_name, exc)
def add_exception(self, *args, **kwargs):
"""
Add a C++ exception to the module. See the documentation for
L{CppException.__init__} for information on accepted parameters.
"""
exc = CppException(*args, **kwargs)
self._add_exception_obj(exc)
return exc
def declare_one_time_definition(self, definition_name):
"""
        Internal helper method for code generation to coordinate
generation of code that can only be defined once per compilation unit
(note: assuming here one-to-one mapping between 'module' and
'compilation unit').
:param definition_name: a string that uniquely identifies the code
definition that will be added. If the given definition was
already declared KeyError is raised.
>>> module = Module('foo')
>>> module.declare_one_time_definition("zbr")
>>> module.declare_one_time_definition("zbr")
Traceback (most recent call last):
...
KeyError: 'zbr'
>>> module.declare_one_time_definition("bar")
"""
definition_name = utils.ascii(definition_name)
if definition_name in self.one_time_definitions:
raise KeyError(definition_name)
self.one_time_definitions[definition_name] = None
def generate_forward_declarations(self, code_sink):
"""(internal) generate forward declarations for types"""
assert not self._forward_declarations_declared
if self.classes or self.containers or self.exceptions:
code_sink.writeln('/* --- forward declarations --- */')
code_sink.writeln()
for class_ in [c for c in self.classes if c.import_from_module]:
class_.generate_forward_declarations(code_sink, self)
for class_ in [c for c in self.classes if not c.import_from_module]:
class_.generate_forward_declarations(code_sink, self)
for container in self.containers:
container.generate_forward_declarations(code_sink, self)
for exc in self.exceptions:
exc.generate_forward_declarations(code_sink, self)
## recurse to submodules
for submodule in self.submodules:
submodule.generate_forward_declarations(code_sink)
self._forward_declarations_declared = True
def get_module_path(self):
"""Get the full [module, submodule, submodule,...] path """
names = [self.name]
parent = self.parent
while parent is not None:
names.insert(0, parent.name)
parent = parent.parent
return names
def get_namespace_path(self):
"""Get the full [root_namespace, namespace, namespace,...] path (C++)"""
if not self.cpp_namespace:
names = []
else:
if self.cpp_namespace == '::':
names = []
else:
names = self.cpp_namespace.split('::')
if not names[0]:
del names[0]
parent = self.parent
while parent is not None:
if parent.cpp_namespace and parent.cpp_namespace != '::':
parent_names = parent.cpp_namespace.split('::')
if not parent_names[0]:
del parent_names[0]
names = parent_names + names
parent = parent.parent
return names
def do_generate(self, out, module_file_base_name=None):
"""(internal) Generates the module."""
assert isinstance(out, _SinkManager)
if self.parent is None:
## generate the include directives (only the root module)
forward_declarations_sink = MemoryCodeSink()
if not self._forward_declarations_declared:
self.generate_forward_declarations(forward_declarations_sink)
self.after_forward_declarations.flush_to(forward_declarations_sink)
if self.parent is None:
for include in self.includes:
out.get_includes_code_sink().writeln("#include %s" % include)
self.includes = None
forward_declarations_sink.flush_to(out.get_includes_code_sink())
else:
assert module_file_base_name is None, "only root modules can generate with alternate module_file_base_name"
## generate the submodules
for submodule in self.submodules:
submodule.do_generate(out)
m = self.declarations.declare_variable('PyObject*', 'm')
assert m == 'm'
if module_file_base_name is None:
mod_init_name = '.'.join(self.get_module_path())
else:
mod_init_name = module_file_base_name
self.before_init.write_code('#if PY_VERSION_HEX >= 0x03000000')
self.before_init.write_code(
"m = PyModule_Create(&%s_moduledef);"
% (self.prefix))
self.before_init.write_code('#else')
self.before_init.write_code(
"m = Py_InitModule3((char *) \"%s\", %s_functions, %s);"
% (mod_init_name, self.prefix,
self.docstring and '"'+self.docstring+'"' or 'NULL'))
self.before_init.write_code('#endif')
self.before_init.write_error_check("m == NULL")
main_sink = out.get_main_code_sink()
## generate the function wrappers
py_method_defs = []
if self.functions:
main_sink.writeln('/* --- module functions --- */')
main_sink.writeln()
for func_name, overload in self.functions.items():
sink, header_sink = out.get_code_sink_for_wrapper(overload)
sink.writeln()
try:
utils.call_with_error_handling(overload.generate, (sink,), {}, overload)
except utils.SkipWrapper:
continue
try:
utils.call_with_error_handling(overload.generate_declaration, (main_sink,), {}, overload)
except utils.SkipWrapper:
continue
sink.writeln()
py_method_defs.append(overload.get_py_method_def(func_name))
del sink
## generate the function table
main_sink.writeln("static PyMethodDef %s_functions[] = {"
% (self.prefix,))
main_sink.indent()
for py_method_def in py_method_defs:
main_sink.writeln(py_method_def)
main_sink.writeln("{NULL, NULL, 0, NULL}")
main_sink.unindent()
main_sink.writeln("};")
## generate the classes
if self.classes:
main_sink.writeln('/* --- classes --- */')
main_sink.writeln()
for class_ in [c for c in self.classes if c.import_from_module]:
sink, header_sink = out.get_code_sink_for_wrapper(class_)
sink.writeln()
class_.generate(sink, self)
sink.writeln()
for class_ in [c for c in self.classes if not c.import_from_module]:
sink, header_sink = out.get_code_sink_for_wrapper(class_)
sink.writeln()
class_.generate(sink, self)
sink.writeln()
## generate the containers
if self.containers:
main_sink.writeln('/* --- containers --- */')
main_sink.writeln()
for container in self.containers:
sink, header_sink = out.get_code_sink_for_wrapper(container)
sink.writeln()
container.generate(sink, self)
sink.writeln()
## generate the exceptions
if self.exceptions:
main_sink.writeln('/* --- exceptions --- */')
main_sink.writeln()
for exc in self.exceptions:
sink, header_sink = out.get_code_sink_for_wrapper(exc)
sink.writeln()
exc.generate(sink, self)
sink.writeln()
# typedefs
for (wrapper, alias) in self.typedefs:
if isinstance(wrapper, CppClass):
cls = wrapper
cls.generate_typedef(self, alias)
## generate the enums
if self.enums:
main_sink.writeln('/* --- enumerations --- */')
main_sink.writeln()
for enum in self.enums:
sink, header_sink = out.get_code_sink_for_wrapper(enum)
sink.writeln()
enum.generate(sink)
enum.generate_declaration(header_sink, self)
sink.writeln()
## register the submodules
if self.submodules:
submodule_var = self.declarations.declare_variable('PyObject*', 'submodule')
for submodule in self.submodules:
self.after_init.write_code('%s = %s();' % (
submodule_var, submodule.init_function_name))
self.after_init.write_error_check('%s == NULL' % submodule_var)
self.after_init.write_code('Py_INCREF(%s);' % (submodule_var,))
self.after_init.write_code('PyModule_AddObject(m, (char *) "%s", %s);'
% (submodule.name, submodule_var,))
## flush the header section
self.header.flush_to(out.get_includes_code_sink())
## flush the body section
self.body.flush_to(main_sink)
## now generate the module init function itself
main_sink.writeln('#if PY_VERSION_HEX >= 0x03000000\n'
'static struct PyModuleDef %s_moduledef = {\n'
' PyModuleDef_HEAD_INIT,\n'
' "%s",\n'
' %s,\n'
' -1,\n'
' %s_functions,\n'
'};\n'
'#endif' % (self.prefix, mod_init_name,
self.docstring and '"'+self.docstring+'"' or 'NULL',
self.prefix))
main_sink.writeln()
if self.parent is None:
main_sink.writeln('''
#if PY_VERSION_HEX >= 0x03000000
#define MOD_ERROR NULL
#define MOD_INIT(name) PyObject* PyInit_##name(void)
#define MOD_RETURN(val) val
#else
#define MOD_ERROR
#define MOD_INIT(name) void init##name(void)
#define MOD_RETURN(val)
#endif
#if defined(__cplusplus)
extern "C"
#endif
#if defined(__GNUC__) && __GNUC__ >= 4
__attribute__ ((visibility("default")))
#endif
''')
else:
main_sink.writeln("static PyObject *")
if self.parent is None:
main_sink.writeln("MOD_INIT(%s)" % (self.name,))
elif module_file_base_name is None:
main_sink.writeln("%s(void)" % (self.init_function_name,))
else:
main_sink.writeln("init%s(void)" % (module_file_base_name,))
main_sink.writeln('{')
main_sink.indent()
self.declarations.get_code_sink().flush_to(main_sink)
self.before_init.sink.flush_to(main_sink)
self.after_init.write_cleanup()
self.after_init.sink.flush_to(main_sink)
if self.parent is not None:
main_sink.writeln("return m;")
else:
main_sink.writeln("return MOD_RETURN(m);")
main_sink.unindent()
main_sink.writeln('}')
def __repr__(self):
return "<pybindgen.module.Module %r>" % self.name
def add_typedef(self, wrapper, alias):
"""
Declares an equivalent to a typedef in C::
typedef Foo Bar;
:param wrapper: the wrapper object to alias (Foo in the example)
:param alias: name of the typedef alias
@note: only typedefs for CppClass objects have been
implemented so far; others will be implemented in the future.
"""
assert isinstance(wrapper, CppClass)
alias = utils.ascii(alias)
self.typedefs.append((wrapper, alias))
self.register_type(alias, alias, wrapper)
wrapper.register_alias(alias)
full_name = '::'.join(self.get_namespace_path() + [alias])
wrapper.register_alias(full_name)
class Module(ModuleBase):
def __init__(self, name, docstring=None, cpp_namespace=None):
"""
:param name: module name
:param docstring: docstring to use for this module
:param cpp_namespace: C++ namespace prefix associated with this module
"""
super(Module, self).__init__(name, docstring=docstring, cpp_namespace=cpp_namespace)
def generate(self, out, module_file_base_name=None):
"""Generates the module
:type out: a file object, L{FileCodeSink}, or L{MultiSectionFactory}
:param module_file_base_name: base name of the module file.
This is useful when we want to produce a _foo module that will
        be imported into a foo module, to avoid making all types'
        docstrings contain _foo.Xpto instead of foo.Xpto.
"""
if hasattr(out, 'write'):
out = FileCodeSink(out)
if isinstance(out, CodeSink):
sink_manager = _MonolithicSinkManager(out)
elif isinstance(out, MultiSectionFactory):
sink_manager = _MultiSectionSinkManager(out)
else:
raise TypeError
self.do_generate(sink_manager, module_file_base_name)
sink_manager.close()
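    # A minimal usage sketch (an illustration, not part of this class): the
    # header name is hypothetical, and add_include/add_function are assumed
    # to be defined elsewhere on this class.
    #
    #   import sys
    #   mod = Module('example')
    #   mod.add_include('"example.h"')
    #   mod.generate(sys.stdout)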
def get_python_to_c_type_converter_function_name(self, value_type):
"""
Internal API, do not use.
"""
assert isinstance(value_type, TypeHandler)
ctype = value_type.ctype
mangled_ctype = utils.mangle_name(str(ctype))
converter_function_name = "_wrap_convert_py2c__%s" % mangled_ctype
return converter_function_name
def generate_python_to_c_type_converter(self, value_type, code_sink):
"""
Generates a python-to-c converter function for a given type
        and returns the name of the generated function. If called
        multiple times for the same type, the converter function is
        generated only the first time.
Use: this method is to be considered pybindgen internal, used
by code generation modules.
:type value_type: L{ReturnValue}
:type code_sink: L{CodeSink}
:returns: name of the converter function
"""
assert isinstance(value_type, TypeHandler)
converter_function_name = self.get_python_to_c_type_converter_function_name(value_type)
try:
self.declare_one_time_definition(converter_function_name)
except KeyError:
return converter_function_name
else:
converter = PythonToCConverter(value_type, converter_function_name)
self.header.writeln("\n%s;\n" % converter.get_prototype())
code_sink.writeln()
converter.generate(code_sink, converter_function_name)
code_sink.writeln()
return converter_function_name
def get_c_to_python_type_converter_function_name(self, value_type):
"""
Internal API, do not use.
"""
assert isinstance(value_type, TypeHandler)
ctype = value_type.ctype
mangled_ctype = utils.mangle_name(str(ctype))
converter_function_name = "_wrap_convert_c2py__%s" % mangled_ctype
return converter_function_name
def generate_c_to_python_type_converter(self, value_type, code_sink):
"""
Generates a c-to-python converter function for a given type
        and returns the name of the generated function. If called
        multiple times for the same type, the converter function is
        generated only the first time.
Use: this method is to be considered pybindgen internal, used
by code generation modules.
:type value_type: L{ReturnValue}
:type code_sink: L{CodeSink}
:returns: name of the converter function
"""
assert isinstance(value_type, TypeHandler)
converter_function_name = self.get_c_to_python_type_converter_function_name(value_type)
try:
self.declare_one_time_definition(converter_function_name)
except KeyError:
return converter_function_name
else:
converter = CToPythonConverter(value_type, converter_function_name)
self.header.writeln("\n%s;\n" % converter.get_prototype())
code_sink.writeln()
converter.generate(code_sink)
code_sink.writeln()
return converter_function_name
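    # Illustrative naming: for ctype 'uint32_t' the two converters above
    # would be named _wrap_convert_py2c__uint32_t and
    # _wrap_convert_c2py__uint32_t (the exact form depends on
    # utils.mangle_name).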
class SubModule(ModuleBase):
def __init__(self, name, parent, docstring=None, cpp_namespace=None):
"""
:param parent: parent L{module<Module>} (i.e. the one that contains this submodule)
:param name: name of the submodule
:param docstring: docstring to use for this module
:param cpp_namespace: C++ namespace component associated with this module
"""
super(SubModule, self).__init__(name, parent, docstring=docstring, cpp_namespace=cpp_namespace)
|
import sys
import click
import logging
import os
from datetime import datetime
from trimbot_modules import Configuration, Session, Recipe, V3Api, ResourceServiceFactory, CheckAction, NoCheckAction
def configure_logging(trace):
if trace:
logFormatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s (%(name)s)")
else:
logFormatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
today = datetime.now()
timestamp = today.strftime("%Y%m%d%H%M%S")
if not os.path.exists('./logs'):
os.makedirs('./logs')
fileHandler = logging.FileHandler(f"./logs/trimbot_{timestamp}.log")
fileHandler.setFormatter(logFormatter)
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(logFormatter)
logging.getLogger().setLevel(logging.INFO)
logging.getLogger().addHandler(fileHandler)
logging.getLogger().addHandler(consoleHandler)
def create_child_session(profile, workspace):
workspace_profile = workspace.get_profile()
if workspace_profile:
profile = workspace_profile
child_session = Session(profile, workspace.get_role_arn(), workspace.get_external_id())
caller_account_id = child_session.get_connected_account_id()
if workspace.get_account() != caller_account_id:
raise RuntimeError(
            f'Connected account id {caller_account_id} differs from turbot expected account id {workspace.get_account()}')
return child_session
def create_v3_api(configuration, workspace):
configuration_host = configuration.get_turbot_host()
configuration_access_key = configuration.get_turbot_access_key()
configuration_secret_access_key = configuration.get_turbot_secret_access_key()
configuration_verify_ssl = configuration.get_verify_ssl()
workspace_host = workspace.get_turbot_host()
workspace_access_key = workspace.get_turbot_access_key()
workspace_secret_access_key = workspace.get_turbot_secret_access_key()
workspace_verify_ssl = workspace.get_verify_ssl()
host = workspace_host if workspace_host else configuration_host
access_key = workspace_access_key if workspace_access_key else configuration_access_key
secret_access_key = workspace_secret_access_key if workspace_secret_access_key else configuration_secret_access_key
    verify_ssl = workspace_verify_ssl if workspace_verify_ssl is not None else configuration_verify_ssl
if not host or not access_key or not secret_access_key:
return None
return V3Api(
host,
access_key,
secret_access_key,
verify_ssl
)
def load_recipe(configuration, workspace):
configuration_recipe = configuration.get_recipe()
workspace_recipe = workspace.get_recipe()
recipe_location = workspace_recipe if workspace_recipe else configuration_recipe
recipe = Recipe(recipe_location)
recipe.load()
return recipe
def resolve_profile(configuration, workspace):
configuration_profile = configuration.get_profile()
workspace_profile = workspace.get_profile()
return workspace_profile if workspace_profile else configuration_profile
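# Illustrative precedence (applies to all three resolvers above): a value
# set on the workspace, e.g. a workspace-level profile, always wins over the
# corresponding global configuration value.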
@click.command()
@click.option('-f', '--config-file', type=click.File('r'), required=True, help='/path/to/a/configuration/file.yml')
@click.option('-a', '--approve', is_flag=True, default=False, help='If set, destructive changes will be applied')
@click.option('-t', '--trace', is_flag=True, default=False, help='Adds more detailed logging')
@click.option('-c', '--check', is_flag=True, default=False, help='Runs action check only')
def cli(config_file, approve, trace, check):
try:
dry_run = not approve
configure_logging(trace)
        logging.info('Started TrimBot')
action = CheckAction() if check else NoCheckAction(dry_run)
configuration = Configuration(config_file)
for workspace in configuration.workspaces:
try:
# Note, these two values may be empty
turbot_account_id = workspace.get_turbot_account()
turbot_cluster_id = workspace.get_turbot_cluster()
if turbot_account_id and turbot_cluster_id:
logging.info(f"Processing account {turbot_account_id} for cluster {turbot_cluster_id}")
else:
logging.info(f"Processing account {workspace.get_account()}")
profile = resolve_profile(configuration, workspace)
v3_api = create_v3_api(configuration, workspace)
child_session = create_child_session(profile, workspace)
master_session = Session(profile)
factory = ResourceServiceFactory(
master_session,
child_session,
v3_api,
turbot_account_id,
turbot_cluster_id
)
recipe = load_recipe(configuration, workspace)
for recipe_resource in recipe.resources:
service = factory.create_resource_service(recipe_resource)
if not action.should_process(recipe_resource["actions"]):
continue
if service.is_global_service():
logging.info(
f"Processing global resource named '{service.get_user_defined_name()}' for service {service.get_service_name()} and resource {service.get_resource_name()}")
action.run_action(service, child_session.get_default_region(), recipe_resource["actions"])
logging.info(
f"Completed - Processing global resource named '{service.get_user_defined_name()}' for service {service.get_service_name()} and resource {service.get_resource_name()}")
else:
logging.info(
f"Processing resource named '{service.get_user_defined_name()}' for service {service.get_service_name()} and resource {service.get_resource_name()}")
regions = child_session.get_regions()
for region in regions:
logging.info(f"Processing region {region}")
action.run_action(service, region, recipe_resource["actions"])
logging.info(
f"Completed - Processing resource named '{service.get_user_defined_name()}' for service {service.get_service_name()} and resource {service.get_resource_name()}")
if turbot_account_id and turbot_cluster_id:
logging.info(f"Completed - Processing account {turbot_account_id} for cluster {turbot_cluster_id}")
else:
logging.info(f"Completed - Processing account {workspace.get_account()}")
except Exception as e:
logging.error(f'Ignoring workspace for account {workspace.get_account()}')
logging.error(e)
        logging.info('TrimBot completed')
except Exception as e:
        logging.error('Unexpected exception:')
logging.error(e)
if __name__ == "__main__":
cli()
|
from django.db import models
class ProgrammingLanguage(models.Model):
name = models.CharField(max_length=200, null=False, unique=True)
def __str__(self):
return self.name
@property
def link(self):
return f'/?language={self.name}&rate=all#search-section-form'
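    # Illustrative: for name 'Python' this yields
    # '/?language=Python&rate=all#search-section-form'.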
@staticmethod
def get_other_default_language():
return ProgrammingLanguage.objects.get_or_create(name='Other')[0]
COMPLEXITY_LEVEL = [
(1, 'Very Easy'),
(2, 'Easy'),
(3, 'Medium'),
(4, 'Hard'),
(5, 'Very Hard'),
]
class IssueRate(models.Model):
rate = models.IntegerField(choices=COMPLEXITY_LEVEL)
|
#This code includes the logistic regression algorithm for the classification of the Japanese credit dataset.
#goto http://ataspinar.com for a detailed explanation of the math behind logistic regression
#goto https://github.com/taspinar/siml for the full code
#It was used during hackathon4 of the Eindhoven Data Science group: https://www.meetup.com/Eindhoven-Data-Science-Meetup/events/234115346/
import pandas as pd
import random
import numpy as np
datafile = './japanese_credit.data'
df = pd.read_csv(datafile, header=None)
column_values = list(df.columns.values)
categorical_columns = [0,3,4,5,6,8,9,11,12]
str_cols = [0,1,3,4,5,6,8,9,11,12,13]
int_columns = [10,13,14]
float_columns = [1,2,7]
#first we select only the rows which do not contain any invalid values
for col in str_cols:
df = df[df[col] != '?']
#columns containing categorical values are expanded to k different columns with binary values (k is the number of categories)
for col in categorical_columns:
    col_values = list(set(df[col].values))
for col_value in col_values:
if col_value != '?':
df.loc[df[col] == col_value, str(col)+'_is_'+col_value] = 1
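#e.g. a categorical column 0 holding values 'a' and 'b' is expanded into
#binary columns '0_is_a' and '0_is_b'; rows that do not match stay NaN and
#are set to 0 by df.fillna(0) further below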
#remove original columns
for col in categorical_columns:
del df[col]
#derive a binary 'label' column from column 15 ('+' becomes 1; everything else becomes 0 after the fillna(0) below)
df.loc[df[15] == '+', 'label'] = 1
del df[15]
#normalize the columns with integer values by the mean value
for col in int_columns:
df[col] = df[col].apply(int)
col_values = list(df[col].values)
mean = np.mean(map(int,col_values))
df[col] = df[col].apply(lambda x: x/float(mean))
#normalize the columns with float values by the mean value
for col in float_columns:
df[col] = df[col].apply(float)
col_values = list(df[col].values)
mean = np.mean(map(float,col_values))
df[col] = df[col].apply(lambda x: x/mean)
df = df.fillna(0)
#create a training and a test set
indices = df.index.values
random.shuffle(indices)
no_training_examples = int(0.7*len(indices))
df_training = df.loc[indices[:no_training_examples]]
df_test = df.loc[indices[no_training_examples:]]
#create and fill the Y matrices of the training and test set
Y = df_training['label'].values
Y_test = df_test['label'].values
del df_training['label']
del df_test['label']
#create the X matrices of the training and test set and initialize with zero
no_features = len(df_training.columns.values)
no_test_examples = len(df_test.index.values)
X = np.zeros(shape=(no_training_examples, no_features))
X_test = np.zeros(shape=(no_test_examples,no_features))
#fill the X matrices
col_values = df_training.columns.values
for ii in range(0,len(col_values)):
col = col_values[ii]
X[:,ii] = df_training[col].values
X_test[:,ii] = df_test[col].values
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import numpy as np
import os.path
import random
import navpy
import simplekml
sys.path.append('../lib')
import Pose
import ProjectMgr
import SRTM
import transformations
# for all the images in the project image_dir, compute the camera
# poses from the aircraft pose (and camera mounting transform).
# Project the image plane onto an SRTM (DEM) surface for our best
# layout guess (at this point, before we do any matching/bundle
# adjustment work).
parser = argparse.ArgumentParser(description='Set the initial camera poses.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
ref = proj.ned_reference_lla
# setup SRTM ground interpolator
sss = SRTM.NEDGround( ref, 2000, 2000, 30 )
# start a new kml file
kml = simplekml.Kml()
camw, camh = proj.cam.get_image_params()
for image in proj.image_list:
print image.name
scale = float(image.width) / float(camw)
K = proj.cam.get_K(scale)
IK = np.linalg.inv(K)
corner_list = []
corner_list.append( [0, image.height] )
corner_list.append( [image.width, image.height] )
corner_list.append( [image.width, 0] )
corner_list.append( [0, 0] )
proj_list = proj.projectVectors( IK, image, corner_list )
print "proj_list:\n", proj_list
#pts = proj.intersectVectorsWithGroundPlane(image.camera_pose['ned'],
# g, proj_list)
pts = sss.interpolate_vectors(image.camera_pose, proj_list)
#print "pts (ned):\n", pts
corners_lonlat = []
for ned in pts:
print ned
lla = navpy.ned2lla([ned], ref[0], ref[1], ref[2])
corners_lonlat.append([lla[1], lla[0]])
ground = kml.newgroundoverlay(name=image.name)
ground.icon.href = "Images/" + image.name
ground.gxlatlonquad.coords.addcoordinates(corners_lonlat)
filename = args.project + "/GroundOverlay.kml"
kml.save(filename)
|
from pynput.keyboard import Key, Controller, Listener
import socket
UDP_IP_ADDRESS = "127.0.0.1"
UDP_PORT_NO = 6150
SOCK = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def sendjump(sock):
sock.sendto(("JUMP!").encode(), (UDP_IP_ADDRESS, UDP_PORT_NO) )
print("Jump Action Triggered!")
def on_press(key):
if key == Key.up:
sendjump(SOCK)
print('{0} pressed'.format(
key))
def on_release(key):
print('{0} release'.format(
key))
if key == Key.esc:
# Stop listener
return False
# Collect events until released
with Listener(
on_press=on_press,
on_release=on_release) as listener:
    listener.join()
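# A minimal matching receiver sketch (an assumption, not part of this
# script; it reuses the constants defined above):
#
#   import socket
#   sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   sock.bind((UDP_IP_ADDRESS, UDP_PORT_NO))
#   while True:
#       data, addr = sock.recvfrom(1024)
#       print(data.decode())
|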
from shared_foundation.models.user import SharedUser
from shared_foundation.models.abstract_thing import AbstractSharedThing
from shared_foundation.models.abstract_contact_point import AbstractSharedContactPoint
from shared_foundation.models.abstract_postal_address import AbstractSharedPostalAddress
from shared_foundation.models.abstract_geo_coorindate import AbstractSharedGeoCoordinate
from shared_foundation.models.opening_hours_specification import SharedOpeningHoursSpecification
from shared_foundation.models.academy import SharedAcademy
from shared_foundation.models.academy import SharedAcademyDomain
|
from __future__ import absolute_import
from __future__ import division
from multiprocessing import cpu_count, Pool
import time, signal
import numpy as np
from .decision_tree import DecisionTree
from .util import iterate_with_progress
#################################
# Multi-process funcs & klasses #
#################################
class KeyboardInterruptError(Exception): pass
def train_tree(args):
try:
tree, data, labels = args
tree.train(data, labels)
return tree
except KeyboardInterrupt:
raise KeyboardInterruptError()
def prune_tree(args):
try:
tree, data, labels = args
tree.prune(data, labels)
return tree
except KeyboardInterrupt:
raise KeyboardInterruptError()
class RandomForest:
def __init__(self, impurity, segmentor, **kwargs):
self._impurity = impurity
self._segmentor = segmentor
self._num_trees = kwargs.get('num_trees', 10)
assert self._num_trees > 0
self._max_depth = kwargs.get('max_depth', None)
self._min_samples = kwargs.get('min_samples', 2)
self._trees = []
def train(self, data, labels):
self._klasses = np.unique(labels)
print 'Prepare parallel training.'
args_list = []
for _ in iterate_with_progress(xrange(self._num_trees)):
sampled_data, sampled_labels = self._sample_data_labels(data, labels)
tree = DecisionTree(self._impurity,
self._segmentor,
max_depth=self._max_depth,
min_samples=self._min_samples)
args_list.append([tree, sampled_data, sampled_labels])
num_processes = cpu_count()
print 'Train in parallel with {0} processes.'.format(num_processes)
pool = Pool(num_processes)
try:
start = time.time()
self._trees = pool.map(train_tree, args_list)
print 'Training takes {0} seconds.'.format(int(time.time() - start))
pool.close()
except KeyboardInterrupt:
pool.terminate()
except Exception, e:
pool.terminate()
finally:
pool.join()
def predict(self, data):
if not self._trees:
raise StandardError("Random forest has not been trained.")
def draw_votes(probs):
avg_probs = {}
for klass in self._klasses:
total_prob = sum([prob.get(klass, 0.0) for prob in probs])
avg_probs[klass] = total_prob / self._num_trees
return max(avg_probs, key=lambda klass : avg_probs[klass])
tree_results = np.array([tree.predict(data, True) for tree in self._trees])
return np.apply_along_axis(draw_votes, 0, tree_results)
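    # Illustrative soft-voting example: with num_trees == 2 and per-tree
    # probabilities {'a': 0.6, 'b': 0.4} and {'a': 0.2, 'b': 0.8}, the
    # averages are {'a': 0.4, 'b': 0.6}, so 'b' is returned for that sample.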
def score(self, data, labels):
if not self._trees:
raise StandardError("Random forest has not been trained.")
predictions = self.predict(data)
correct_count = np.count_nonzero(predictions == labels)
return correct_count / labels.shape[0]
def prune(self, data, labels):
args_list = []
for tree in self._trees:
args_list.append([tree, data, labels])
num_processes = cpu_count()
print 'Prune in parallel with {0} processes.'.format(num_processes)
pool = Pool(num_processes)
try:
start = time.time()
self._trees = pool.map(prune_tree, args_list)
print 'Pruning takes {0} seconds.'.format(int(time.time() - start))
pool.close()
return self.score(data, labels)
except KeyboardInterrupt:
pool.terminate()
except Exception, e:
pool.terminate()
finally:
pool.join()
def _sample_data_labels(self, data, labels):
num_data = len(data)
assert num_data == len(labels)
data_indices = np.random.choice(num_data, num_data)
sampled_data = data[data_indices,:]
sampled_labels = labels[data_indices]
return sampled_data, sampled_labels
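    # Note: np.random.choice(num_data, num_data) samples indices *with
    # replacement* (a bootstrap sample); on average only about 63% of the
    # distinct rows appear in each tree's training set.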
|
from jet_bridge import fields
class SqlParamsSerializers(fields.CharField):
def to_internal_value_item(self, value):
value = super(SqlParamsSerializers, self).to_internal_value_item(value)
if value is None:
return []
# value = list(filter(lambda x: x != '', value.split(',')))
value = value.split(',')
return dict([['param_{}'.format(i), x] for i, x in enumerate(value)])
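        # Illustrative: 'a,b' -> {'param_0': 'a', 'param_1': 'b'}.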
def to_representation_item(self, value):
return list(value)
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext as _
from django.db import models
from filebrowser.fields import FileBrowseField
from const.purpose import *
from crm import models as crmmodels
class XSLFile(models.Model):
title = models.CharField(verbose_name = _("Title"), max_length=100, blank=True, null=True)
xslfile = FileBrowseField(verbose_name=_("XSL File"), max_length=200)
class Meta:
app_label = "djangoUserExtension"
        #app_label_koalix = _('Django User Extension')
verbose_name = _('XSL File')
verbose_name_plural = _('XSL Files')
def __unicode__(self):
return str(self.id) + ' ' + self.title
class UserExtension(models.Model):
user = models.ForeignKey('auth.User')
defaultTemplateSet = models.ForeignKey('TemplateSet')
defaultCurrency = models.ForeignKey('crm.Currency')
class Meta:
app_label = "djangoUserExtension"
        #app_label_koalix = _('Django User Extension')
        verbose_name = _('User Extension')
        verbose_name_plural = _('User Extensions')
def __unicode__(self):
return str(self.id) + ' ' + self.user.__unicode__()
class TemplateSet(models.Model):
organisationname = models.CharField(verbose_name = _("Name of the Organisation"), max_length=200)
title = models.CharField(verbose_name = _("Title"), max_length=100)
invoiceXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Invoice"), related_name="db_reltemplateinvoice")
quoteXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Quote"), related_name="db_reltemplatequote")
purchaseorderXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Purchaseorder"), related_name="db_reltemplatepurchaseorder")
purchaseconfirmationXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Purchase Confirmation"), related_name="db_reltemplatepurchaseconfirmation")
    deilveryorderXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Delivery Order"), related_name="db_reltemplatedeliveryorder")
profitLossStatementXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Profit Loss Statement"), related_name="db_reltemplateprofitlossstatement")
balancesheetXSLFile = models.ForeignKey(XSLFile, verbose_name=_("XSL File for Balancesheet"), related_name="db_reltemplatebalancesheet")
logo = FileBrowseField(verbose_name=_("Logo for the PDF generation"), blank=True, null=True, max_length=200)
bankingaccountref = models.CharField(max_length=60, verbose_name=_("Reference to Banking Account"), blank=True, null=True)
addresser = models.CharField(max_length=200, verbose_name=_("Addresser"), blank=True, null=True)
fopConfigurationFile = FileBrowseField(verbose_name=_("FOP Configuration File"), blank=True, null=True, max_length=200)
footerTextsalesorders = models.TextField(verbose_name=_("Footer Text On Salesorders"), blank=True, null=True)
headerTextsalesorders = models.TextField(verbose_name=_("Header Text On Salesorders"), blank=True, null=True)
headerTextpurchaseorders = models.TextField(verbose_name=_("Header Text On Purchaseorders"), blank=True, null=True)
footerTextpurchaseorders = models.TextField(verbose_name=_("Footer Text On Purchaseorders"), blank=True, null=True)
pagefooterleft = models.CharField(max_length=40, verbose_name=_("Page Footer Left"), blank=True, null=True)
pagefootermiddle = models.CharField(max_length=40, verbose_name=_("Page Footer Middle"), blank=True, null=True)
class Meta:
app_label = "djangoUserExtension"
        #app_label_koalix = _('Django User Extension')
verbose_name = _('Templateset')
verbose_name_plural = _('Templatesets')
def __unicode__(self):
return str(self.id) + ' ' + self.title
class UserExtensionPostalAddress(crmmodels.PostalAddress):
purpose = models.CharField(verbose_name=_("Purpose"), max_length=1, choices=PURPOSESADDRESSINUSEREXTENTION)
userExtension = models.ForeignKey(UserExtension)
def __unicode__(self):
return self.name + ' ' + self.prename
class Meta:
app_label = "djangoUserExtension"
        #app_label_koalix = _('Django User Extension')
        verbose_name = _('Postal Address for User Extension')
        verbose_name_plural = _('Postal Addresses for User Extension')
class UserExtensionPhoneAddress(crmmodels.PhoneAddress):
purpose = models.CharField(verbose_name=_("Purpose"), max_length=1, choices=PURPOSESADDRESSINUSEREXTENTION)
userExtension = models.ForeignKey(UserExtension)
def __unicode__(self):
return self.phone
class Meta:
app_label = "djangoUserExtension"
        #app_label_koalix = _('Django User Extension')
        verbose_name = _('Phone Number for User Extension')
        verbose_name_plural = _('Phone Numbers for User Extension')
class UserExtensionEmailAddress(crmmodels.EmailAddress):
purpose = models.CharField(verbose_name=_("Purpose"), max_length=1, choices=PURPOSESADDRESSINUSEREXTENTION)
userExtension = models.ForeignKey(UserExtension)
def __unicode__(self):
return self.email
class Meta:
app_label = "djangoUserExtension"
        #app_label_koalix = _('Django User Extension')
        verbose_name = _('Email Address for User Extension')
        verbose_name_plural = _('Email Addresses for User Extension')
|
from collections import namedtuple
import hashlib
import logging
import mimetypes
import os
import subprocess32 as subprocess
import time
from dropbox.client import DropboxClient
from dropbox.rest import ErrorResponse
from app import analytics
from app import celery
from app import db
from app import emailer
from app import filesystem
from app import redis
from app.models import User, Book
log = logging.getLogger()
# Lock expires in 30 minutes, in case there are lots of epubs to convert.
LOCK_EXPIRE = 60 * 30
# Sendgrid only allows 20MB at a time; after encoding to email text, that
# is more like 15MB. Mailgun is about 25MB. And we can only email 25 books
# at a time.
# Lower ATTACHMENTS_LIMIT to prevent users from hogging the celery workers.
ATTACHMENTS_LIMIT = 5
CONVERTIBLE_ATTACHMENTS_LIMIT = 1
ATTACHMENTS_SIZE_LIMIT = 25 * (10**6)
AMAZON_SIZE_LIMIT = 50 * (10**6)
# Try to send a file this many times before giving up. Sending a file means
# successful Dropbox download, file conversion, and correct response from
# SendGrid or Mailgun.
MAX_SEND_ATTEMPTS = 10
# Number of seconds to wait before timing out calibre conversion
CONVERSION_TIMEOUT = 1200
################################
# Book mimetypes
################################
# Amazon doesn't support these formats, but BookDrop does!
EPUB_MIMETYPE = 'application/epub+zip'
CBR_MIMETYPE = 'application/x-cbr'
CBZ_MIMETYPE = 'application/x-cbz'
AZW_MIMETYPE = 'application/vnd.amazon.ebook' # not a real mimetype, but we need to recognize it.
CONVERTIBLE_MIMETYPES = {EPUB_MIMETYPE,
CBR_MIMETYPE,
CBZ_MIMETYPE,
AZW_MIMETYPE,
}
MOBI_MIMETYPE = 'application/x-mobipocket-ebook'
# Supported filetypes.
# According to:
# http://www.amazon.com/gp/help/customer/display.html?nodeId=200375630
BOOK_MIMETYPES = CONVERTIBLE_MIMETYPES.union({
MOBI_MIMETYPE,
'text/plain',
'application/pdf',
'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/rtf',
'text/html',
'image/jpeg',
'image/gif',
'image/x-ms-bmp',
'image/png',
})
mimetypes.add_type(MOBI_MIMETYPE, '.mobi')
mimetypes.add_type(MOBI_MIMETYPE, '.prc')
mimetypes.add_type(AZW_MIMETYPE, '.azw')
mimetypes.add_type(AZW_MIMETYPE, '.azw1')
mimetypes.add_type(AZW_MIMETYPE, '.azw3')
mimetypes.add_type(EPUB_MIMETYPE, '.epub')
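# After the registrations above, e.g. mimetypes.guess_type('a.azw3')[0]
# yields AZW_MIMETYPE, so mimetypes_filter() and convert_to_mobi_path()
# below treat Kindle-format files as convertible.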
@celery.task(ignore_result=True)
def upload_welcome_pdf(dropbox_id):
user = User.query.filter_by(dropbox_id=dropbox_id,
active=True).first()
if user is None:
return False
    # If we've already sent the welcome PDF before, the Dropbox webhook
    # won't trigger, so do it here.
if user.uploaded_welcome_pdf:
return kindlebox(dropbox_id)
analytics.track(str(user.id), 'Sent welcome pdf')
client = DropboxClient(user.access_token)
try:
with open('app/static/bookdrop_welcome.pdf', 'rb') as f:
response = client.put_file('Welcome to BookDrop.pdf', f, overwrite=True)
if response:
log.info(u"Welcome PDF sent to user ID {0}.".format(user.id))
else:
raise Exception("No response received after sending welcome PDF")
user.set_uploaded_welcome_pdf()
db.session.commit()
except:
log.error((u"Welcome PDF failed for user ID "
"{0}.").format(user.id), exc_info=True)
return False
return True
def _kindlebox(user, client):
"""
The main body of a `kindlebox` task. Processes a single Dropbox delta for
the given user. Adds and deletes any books from the database, and updates
the user's Dropbox API cursor.
"""
try:
delta = client.delta(user.cursor)
except ErrorResponse as e:
log.info(u"Marking user id {0} inactive due to {1}".format(user.id, e.error_msg))
user.active = False
db.session.commit()
return True
# Process delta to get added and removed books. Also download any newly
# added books and get the hashes.
    # NOTE: a book may fail to download here, in which case its `book_hash`
    # is None. We still add it to the database in case it can be downloaded
    # later.
added_books = get_added_books(delta['entries'], user.id, client)
removed_books = get_removed_books(delta['entries'], user.id)
log.debug(u"Delta contains {0} added books, {1} removed "
"books".format(len(added_books), len(removed_books)))
# If there are no more changes to process, update the cursor and we are
# done.
if len(added_books) == 0 and len(removed_books) == 0:
user.cursor = delta['cursor']
db.session.commit()
return True
# Add and delete books from the database.
for book in added_books:
db.session.add(book)
for book in removed_books:
db.session.delete(book)
# Update the Dropbox delta cursor in database.
user.cursor = delta['cursor']
db.session.merge(user)
db.session.commit()
return False
@celery.task(ignore_result=True)
def kindlebox(dropbox_id):
"""
Task that continually processes any Dropbox changes for the user associated
with the given dropbox ID until there are no more changes. Any books
removed from Dropbox are also deleted from the database. The first
`ATTACHMENTS_LIMIT` books out of the books added to Dropbox are sent. The
rest of the books are queued.
"""
# Only process Dropbox changes for active users.
user = User.query.filter_by(dropbox_id=dropbox_id, active=True).first()
if user is None:
return
kindlebox_lock = acquire_kindlebox_lock(user.id)
# Another worker is taking care of it, so I'm done.
if kindlebox_lock is None:
log.debug(u"Unable to acquire kindlebox lock for user id "
"{0}".format(user.id))
return
log.info(u"Processing dropbox webhook for user id {0}".format(user.id))
# Loop until there is no delta.
# NOTE: There is a slight chance of a race condition between dropbox
# webhook and two celery workers that would result in a delta getting
# dropped, but hopefully this is better than cluttering the task queues.
client = DropboxClient(user.access_token)
try:
while True:
log.debug(u"Processing one kindlebox iteration for user id "
"{0}".format(user.id))
done = _kindlebox(user, client)
if done:
break
except:
log.error((u"Failed to process dropbox webhook for user id "
"{0}.").format(user.id), exc_info=True)
kindlebox_lock.release()
clear_user_files(user.id, u'kindlebox')
if user.active:
send_books(user.id)
def _send_books(user, books):
"""
Helper function for the `send_books` celery task. Download, if necessary,
and email all the given user's books. Mark each book as `unsent` or not in
the database.
"""
client = DropboxClient(user.access_token)
email_from = user.emailer
email_to = [row.kindle_name for row in user.kindle_names.all()]
attachments = []
attachment_size = 0
for book in books:
# If there's an error downloading or converting the book, don't try
# to send it.
download_book(client, book, u'send_books')
if book.book_hash is None:
continue
error = convert_book(book)
if error:
log.error(u"Failed to ebook-convert {book} for user id {user_id}\n"
"STDERR: {stderr}\n".format(book=book.pathname,
user_id=user.id,
stderr=error))
continue
# If the next book added will put us over the attachment size limit,
# send this batch.
# NOTE: An individual book with size over the limit will still get sent
# using this code. We want to do this in case it actually is possible
# to send the file (who knows what sendgrid's limits are?).
if (attachment_size + book.get_size() > ATTACHMENTS_SIZE_LIMIT and
len(attachments) > 0):
email_attachments(email_from, email_to, attachments, user.id)
attachments = []
attachment_size = 0
attachments.append(book)
attachment_size += book.get_size()
if len(attachments) > 0:
email_attachments(email_from, email_to, attachments, user.id)
@celery.task(ignore_result=True)
def send_books(user_id, min_book_id=0, convert=False):
"""
Task to send any books associated with the given user ID that are marked as
`unsent`. Sends a batch of at most `ATTACHMENTS_LIMIT` books, all with
Book.id greater than or equal to the given `min_book_id`. Download books.
Convert books if `convert` is True.
The task queues another `send_books` task for the next batch of (distinct)
books.
"""
send_lock = acquire_send_books_lock(user_id)
if send_lock is None:
return
# Only resend books for active users.
user = User.query.filter_by(id=user_id, active=True).first()
if user is None:
return
# Get the next batch of books that haven't been sent yet and are still
# under the maximum number of send attempts.
unsent_books_query = (user.books.filter_by(unsent=True)
.filter(Book.num_attempts < MAX_SEND_ATTEMPTS)
.order_by(Book.id))
unsent_books = unsent_books_query.filter(Book.id >= min_book_id).all()
# Only short-circuit if there are no new books at all to send, not just
# ones that don't need conversion.
if len(unsent_books) == 0 and min_book_id == 0:
send_lock.release()
clear_user_files(user.id, u'send_books')
return
# Send either books that need conversion or books that don't.
compatible_books, convertible_books = [], []
for book in unsent_books:
if convert_to_mobi_path(book.pathname) is None:
compatible_books.append(book)
else:
convertible_books.append(book)
if convert:
unsent_books = convertible_books[:CONVERTIBLE_ATTACHMENTS_LIMIT]
else:
unsent_books = compatible_books[:ATTACHMENTS_LIMIT]
log_string = ['{' + str(i) + '}' for i in range(len(unsent_books))]
if len(unsent_books) > 0:
log_string = ' '.join(log_string).format(*[book.id for book in unsent_books])
if convert:
log_string += ', with conversion'
log.info(u"Processing book resend for user id {0}, book ids {1}".format(user_id, log_string))
# Re-download and convert books that failed to send before.
try:
_send_books(user, unsent_books)
# TODO: Reset all attempts to 0 before release.
for book in unsent_books:
book.num_attempts += 1
db.session.commit()
except:
log.error(u"Failed to resend books for user id {0}".format(user_id),
exc_info=True)
next_unsent_book = None
if len(unsent_books) > 0:
# If there are any more books to send after this batch, requeue them.
next_unsent_book = unsent_books_query.filter(Book.id > unsent_books[-1].id).first()
send_lock.release()
# For some reason, calibre is leaving a lot of garbage files...
filesystem.clear_calibre_files()
clear_user_files(user.id, u'send_books')
if next_unsent_book is None and not convert:
send_books.apply_async((user_id, ),
{'convert': True},
queue='conversion')
elif next_unsent_book is not None:
queue_kwarg = {}
if convert:
queue_kwarg['queue'] = 'conversion'
send_books.apply_async((user_id, ),
{
'min_book_id': next_unsent_book.id,
'convert': convert,
},
**queue_kwarg)
def get_added_books(delta_entries, user_id, client):
"""
Return a list of Books. All books in this list have the correct mimetype,
are under the size limit, and don't have a duplicate hash in the database
(i.e. not a filepath rename).
"""
added_entries = []
for entry in delta_entries:
pathname, metadata = entry
pathname = canonicalize(pathname)
# First check that it's not a removed pathname.
if metadata is None:
continue
# Check that pathname is a file, has an okay mimetype and is under the
# size limit.
if (metadata['is_dir'] or not mimetypes_filter(pathname) or
metadata['bytes'] > AMAZON_SIZE_LIMIT):
continue
book = Book(user_id,
pathname,
metadata['bytes'])
download_book(client, book, u'kindlebox')
# Make sure that the book is not a duplicate of a previously added book
# (probably a renamed file).
duplicate = (Book.query.filter_by(user_id=user_id)
.filter_by(book_hash=book.book_hash).first())
if (duplicate is not None):
book.unsent = duplicate.unsent
added_entries.append(book)
return added_entries
def get_removed_books(delta_entries, user_id):
"""
Return a list of Books whose paths were deleted during this delta.
"""
removed_entries = [canonicalize(entry[0]) for entry in delta_entries if
entry[1] is None]
if len(removed_entries) > 0:
return (Book.query.filter_by(user_id=user_id)
.filter(Book.pathname.in_(removed_entries)).all())
else:
return []
def convert_book(book):
"""
Attempt to convert any books of type in `CONVERTIBLE_MIMETYPES` to .mobi,
in the same folder as the given temporary path.
"""
tmp_path = book.get_tmp_pathname(u'send_books')
mobi_tmp_path = convert_to_mobi_path(tmp_path)
if mobi_tmp_path is None:
return None
log.info(u"Converting book for user id {0}".format(book.user_id))
try:
subprocess.check_output(['ebook-convert', tmp_path, mobi_tmp_path],
timeout=CONVERSION_TIMEOUT)
except subprocess.CalledProcessError as e:
return e.output
except subprocess.TimeoutExpired as e:
return "Timed out converting book"
except Exception as e:
        return str(e)
def download_book(client, book, tag):
"""
Download the given book from the Dropbox client to a temporary path. Make
all the directories in the given book path at the temporary root folder if
they don't already exist.
Set the book's hash of the downloaded file.
"""
# Make all the necessary nested directories in the temporary directory.
tmp_path = book.get_tmp_pathname(tag)
try:
book_dir = os.path.dirname(tmp_path)
if not os.path.exists(book_dir):
os.makedirs(book_dir)
except OSError:
log.error(u"Error creating directories for book {0}".format(book.pathname),
exc_info=True)
try:
md5 = hashlib.md5()
        with open(tmp_path, 'wb') as tmp_book:
with client.get_file(book.pathname) as book_file:
data = book_file.read()
tmp_book.write(data)
md5.update(data)
book.book_hash = md5.hexdigest()
except:
log.error(u"Failed to download book {book_path} for user id "
"{user_id}".format(book_path=book.pathname,
user_id=book.user_id), exc_info=True)
return None
def email_attachments(email_from, email_to, attachments, user_id):
"""
Given a 'from' email address and a list of 'to' email addresses, try to
email as many of the attachments in the given list as possible. For each
attachment, add the book to the user associated with the given ID and mark
whether it was successfully emailed or not.
"""
attachment_paths = []
for book in attachments:
tmp_path = book.get_tmp_pathname(u'send_books')
# If this book got converted, get the .mobi path instead.
mobi_tmp_path = convert_to_mobi_path(tmp_path)
if mobi_tmp_path is not None:
tmp_path = mobi_tmp_path
attachment_paths.append(tmp_path)
log.debug(u"Sending email to " + ' '.join(email_to) + " " + ' '.join(attachment_paths))
try:
# First try to batch email.
_email_attachments(email_from, email_to, attachment_paths)
for book in attachments:
book.mark_unsent(False)
except:
log.error(u"Failed to send books for user id {0}".format(user_id),
exc_info=True)
# If fail to batch email, try sending individually instead.
for book, path in zip(attachments, attachment_paths):
try:
_email_attachments(email_from, email_to, [path])
book.mark_unsent(False)
except:
log.error(u"Failed to resend book for user id {0}".format(user_id),
exc_info=True)
book.mark_unsent(True)
def _email_attachments(email_from, email_to, attachment_paths):
status, message = emailer.send_mail(email_from, email_to,
attachment_paths)
if status != 200:
raise KindleboxException(message)
def convert_to_mobi_path(path):
if mimetypes.guess_type(path)[0] in CONVERTIBLE_MIMETYPES:
stripped_path = os.path.splitext(path)[0]
return u'{path}.mobi'.format(path=stripped_path)
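# Illustrative: convert_to_mobi_path(u'dir/book.epub') -> u'dir/book.mobi',
# while paths whose mimetype is already Kindle-compatible fall through and
# return None, which callers treat as "no conversion needed".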
def canonicalize(pathname):
return pathname.lower()
def mimetypes_filter(path):
return mimetypes.guess_type(path)[0] in BOOK_MIMETYPES
def _acquire_lock(method_name, user_id):
# Lock per user.
lock_id = '{0}-lock-{1}'.format(method_name, user_id)
lock = redis.lock(lock_id, timeout=LOCK_EXPIRE)
# If non-blocking and unable to acquire lock, discard the task and hope
# that another worker finishes it.
if not lock.acquire(blocking=False):
log.debug(u"Couldn't acquire lock {0}.".format(lock_id))
return None
log.debug(u"Lock {0} acquired.".format(lock_id))
return lock
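# Illustrative: _acquire_lock('kindlebox', 42) uses the redis key
# 'kindlebox-lock-42'; callers are responsible for calling release() on the
# returned lock (see kindlebox() and send_books() above).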
def acquire_kindlebox_lock(user_id):
"""
"""
return _acquire_lock(kindlebox.__name__,
user_id)
def acquire_send_books_lock(user_id):
"""
"""
return _acquire_lock(send_books.__name__,
user_id)
def clear_user_files(user_id, task):
"""
Clears as many temporary files as possible for the given `user_id` and
celery `task`. If `task` is not one of 'kindlebox' or 'send_books', does
nothing.
"""
if task == u'kindlebox':
acquire_method = acquire_send_books_lock
elif task == u'send_books':
acquire_method = acquire_kindlebox_lock
else:
return
task_directory = filesystem.get_user_directory(user_id, task)
filesystem.clear_directory(task_directory)
# May be downloading books to send, so don't clear the upper-level
# directory yet.
lock = acquire_method(user_id)
if lock is not None:
user_directory = filesystem.get_user_directory(user_id)
filesystem.clear_empty_directory(user_directory)
lock.release()
class KindleboxException(Exception):
pass
|
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'testsecretkey'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
|
#!/bin/python3
"""
Parameters:
id: The Bugzilla bug ID to send the attachment to
image: The image to use (defaults to quay.io/kubevirt/must-gather)
api-key: Use a generated API key instead of a username and login
log-folder: Use a specific folder for storing the output of must-gather
Requirements:
Openshift 4.1+
Python 3.6+
A Bugzilla account for www.bugzilla.redhat.com
This script attaches the result of the must-gather command, as executed
by the kubevirt must-gather image, to the supplied bugzilla id on the
Red Hat bugzilla website.
It first creates an output subdirectory in the working directory named
gather-files/ and then executes the following command:
'oc adm must-gather --image=quay.io/kubevirt/must-gather
--dest-dir=gather-files/' and pipes the output to
gather-files/must-gather.log
In order to meet the maximum attachment file sizes, logs are trimmed to the
last n seconds (defaulting to 30 minutes)
It then creates a time-stamped archive file to compress the attachment
and prepare it for upload.
After doing so, the attachment is encoded as a base64 string.
In order to authenticate against the Bugzilla website, a username and
password are prompted. A POST request is then sent to the Bugzilla
website. If there are any errors (invalid ID or invalid login), the
script prompts for those and retries the request until there are no
errors.
"""
import argparse
import os
import shutil
import itertools
import re
import subprocess
import tarfile
import datetime
import base64
from getpass import getpass
import requests
NUM_SECONDS = 30 * 60 # 30 minutes
BUGZILLA_URL = "https://bugzilla.redhat.com"
HEADERS = {'Content-type': 'application/json'}
LOGFOLDER = "gather-files/"
OUTPUT_FILE = "must-gather.log"
ARCHIVE_NAME = "must-gather"
MAX_ARCHIVE_SIZE = 19.5 * 1024 * 1024 #19.5 MB as bytes
IMAGE = "quay.io/kubevirt/must-gather"
NODELOG_TIMESTAMP_REGEX = re.compile(r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) \d+ \d+:\d+:\d+")
NODELOG_TIMESTAMP_FORMAT = "%b %d %H:%M:%S"
PODLOG_TIMESTAMP_REGEX = re.compile(r"^\d{4}-\d{2}-\d{2}T\d+:\d+:\d+")
PODLOG_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%S"
_current_time = datetime.datetime.utcnow()
def main():
"""Main function"""
# Start with getting command-line argument(s)
parser = argparse.ArgumentParser(description="Sends the result of must-gather to Bugzilla.")
parser.add_argument("ID", metavar="id", type=int,
help="The ID of the bug in Bugzilla")
parser.add_argument("--image", metavar="image", action="append",
help="The image to use for must-gather. If none supplied, defaults to quay.io/kubevirt/must-gather")
parser.add_argument("--image-stream", metavar="image-stream", action="append",
help="Specify an image stream to pass to must-gather")
parser.add_argument("--api-key", metavar="api-key",
help="Bugzilla API key. Can also be set using BUGZILLA_API_KEY environment variable")
parser.add_argument("-t", "--time", type=int, help="Number of minutes to use for trimming the log files. Defaults to 30")
parser.add_argument("--log-folder", metavar="log-folder",
help="Optional destination for the must-gather output (defaults to creating gather-files/ in the local directory)")
parser.add_argument("-r", "--reuse-must-gather", action="store_true",
help="Use this to skip rerunning must-gather and just attach what is already gathered")
parser.add_argument("-i", "--interactive", action="store_true",
help="Use this flag to prompt for a username and password. Using this will prompt for retries if the login is unsuccessful")
args = parser.parse_args()
bug_id = args.ID
if not check_bug_exists(bug_id):
print("Bug not found in Bugzilla")
exit(1)
if args.time:
num_seconds = args.time * 60
else:
num_seconds = NUM_SECONDS
# If an image or an image stream is supplied, use that, if not, use the default
if args.image:
images = args.image
else:
        if args.image_stream is None:
images = [IMAGE]
else:
images = []
# If a folder is supplied, use that, otherwise use the default in the local folder
if args.log_folder:
logfolder = args.log_folder
else:
logfolder = LOGFOLDER
api_key = os.environ.get('BUGZILLA_API_KEY', "")
if args.api_key:
api_key = args.api_key
# If there is no API key provided, prompt for a login
    use_api_key = bool(api_key)
if not use_api_key:
if args.interactive:
bugzilla_username = input("Enter Bugzilla username: ")
bugzilla_password = getpass(prompt="Enter Bugzilla password: ")
else:
print("No API key supplied and not in interactive mode.")
exit(1)
if not args.reuse_must_gather:
run_must_gather(images, logfolder, args.image_stream)
else:
print("Using must-gather results located in %s." % logfolder)
    #Trim the log files to the last num_seconds seconds
trim_logs(logfolder, num_seconds)
# Create a time-stamped archive name
archive_name = ARCHIVE_NAME + "-%s.tar.gz" % _current_time.strftime("%Y-%m-%d_%H:%M:%SZ")
# Add all the files in the log folder to a new archive file, except for the hidden ones
with tarfile.open(archive_name, "w:gz") as tar:
print("Creating archive: " + archive_name)
tar.add(logfolder,
filter=filter_hidden)
# Now that the archive is created, move the files back in place of the trimmed versions
restore_hidden_files(logfolder)
    if os.path.getsize(archive_name) > MAX_ARCHIVE_SIZE:
        print("Archive %s is too large to upload (exceeds %.1f MB)." % (archive_name, MAX_ARCHIVE_SIZE / (1024*1024)))
        exit(1)
print("Preparing to send the data to " + BUGZILLA_URL)
file_data = ""
with open(archive_name, "rb") as data_file:
file_data = base64.b64encode(data_file.read()).decode()
comment = generate_comment(num_seconds)
# Send the data to the target URL (depending on whether using API key or not)
if use_api_key:
authentication = {"api_key": api_key}
else:
authentication = {"username": bugzilla_username, "password:": bugzilla_password}
resp = send_data(bug_id, archive_name, file_data, comment, authentication)
resp_json = resp.json()
# Handle the potential errors
while "error" in resp_json:
# Using an api key will disable retries, so just output the error message
if use_api_key:
print(resp_json["message"])
exit(1)
# 300: invalid username or password
if resp_json["code"] == 300:
print("Incorrect username or password.")
bugzilla_username = input("Username (leave blank to exit): ")
if bugzilla_username == "":
print("Username left blank, exiting")
exit(0)
bugzilla_password = getpass(prompt="Password: ")
authentication = {"username": bugzilla_username, "password:": bugzilla_password}
resp = send_data(bug_id, archive_name, file_data, comment, authentication)
resp_json = resp.json()
# 101: Invalid bug id
elif resp_json["code"] == 101:
print("Invalid bug id")
new_bug_id = input("Enter a new bug id (leave blank to exit): ")
if new_bug_id == "":
print("ID left blank, exiting")
exit(0)
bug_id, valid = try_parse_int(new_bug_id)
# Try and see if the new supplied ID is a positive integer
while not valid or bug_id <= 0:
print("Could not parse bug id as valid, try again")
new_bug_id = input("Enter a new bug id (leave blank to exit): ")
if new_bug_id == "":
print("ID left blank, exiting")
exit(0)
bug_id, valid = try_parse_int(new_bug_id)
resp = send_data(bug_id, archive_name, file_data, comment, authentication)
resp_json = resp.json()
else:
print("Error: " + resp_json["message"])
exit(1)
print("File successfully uploaded to Bugzilla")
def run_must_gather(images, logfolder, image_streams):
# If the log folder already exists, delete it
if os.path.isdir(logfolder):
shutil.rmtree(logfolder)
# Make a new log folder
os.mkdir(logfolder)
image_args = []
if images is not None:
for image in images:
image_args.append("--image=" + image)
if image_streams is not None:
for image_stream in image_streams:
image_args.append("--image-stream=" + image_stream)
# Open the output file
with open(logfolder + OUTPUT_FILE, "w+") as out_file:
# Run oc adm must-gather with the appropriate image and dest-dir
print("Running must-gather")
try:
subprocess.run(
["oc", "adm", "must-gather",
"--dest-dir=" + logfolder] + image_args,
stdout=out_file, stderr=subprocess.PIPE, check=True)
except subprocess.CalledProcessError as e:
print("Error in the execution of must-gather: ")
print(e.stderr.decode("utf-8"))
exit(1)
def trim_logs(logfolder, num_seconds):
global _deadline
_deadline = _current_time - datetime.timedelta(seconds = num_seconds)
for subdir, _, files in os.walk(logfolder):
for file in files:
if file == "must-gather.log": #Ignore the log made by capturing the output of must-gather
continue
full_path = os.path.join(subdir, file)
if ".log" in file:
trim_from_back(full_path, pod_condition(full_path))
#trim_file_by_time(os.path.join(subdir, file), num_seconds, PODLOG_TIMESTAMP_REGEX, PODLOG_TIMESTAMP_FORMAT)
elif "kubelet" in file or "NetworkManager" in file:
trim_from_back(full_path, node_condition(full_path))
#trim_file_by_time(os.path.join(subdir, file), num_seconds, NODELOG_TIMESTAMP_REGEX, NODELOG_TIMESTAMP_FORMAT)
def try_parse_int(value):
"""Tries to parse the value as an int"""
try:
return int(value), True
except ValueError:
return value, False
def send_data(bug_id, file_name, file_data, comment, authentication):
"""Sends the data to Bugzilla with the relevant information"""
url = BUGZILLA_URL + '/rest/bug/%s/attachment' % bug_id
data = {
**authentication,
"ids": [bug_id],
"comment": comment,
"summary": "Result from must-gather command",
"content_type": "application/gzip",
"file_name": file_name,
"data": file_data
}
return requests.post(url, json=data, headers=HEADERS)
"""Enum for the possible outputs of the line condition functions"""
LINE_LATER = 1
LINE_EARLIER = 0
LINE_NO_TIMESTAMP = -1
"""Regex and format for reading the header of a node log"""
HEADER_REGEX = re.compile(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} \w{3}")
HEADER_FORMAT = "%Y-%m-%d %H:%M:%S %Z"
def node_condition(filename):
"""Returns a line condition function based on the timestamp in the header"""
with open(filename, "r") as file:
header_timestamps = HEADER_REGEX.findall(file.readline())
log_end = datetime.datetime.strptime(header_timestamps[1], HEADER_FORMAT)
def check_line(line):
#Empty string means end of file, otherwise it would be '\n'
if line == '':
return LINE_LATER
regex_result = NODELOG_TIMESTAMP_REGEX.search(line)
if regex_result:
timestamp = datetime.datetime.strptime(regex_result.group(0), NODELOG_TIMESTAMP_FORMAT)
#Since there's no year provided, default it to the log end's year
timestamp = timestamp.replace(year=log_end.year)
#If that made the timestamp greater than the log end, it means it was from a previous year, so set it to be the year before the log end
if timestamp > log_end:
timestamp = timestamp.replace(year=log_end.year - 1)
# Check whether the line is earlier or later than the deadline
if timestamp < _deadline:
return LINE_EARLIER
else:
return LINE_LATER
else:
return LINE_NO_TIMESTAMP
return check_line
def pod_condition(filename):
"""Returns a condition function for checking the lines of a pod log"""
def check_line(line):
#Empty string means end of file, otherwise it would be '\n'
if line == '':
return LINE_LATER
regex_result = PODLOG_TIMESTAMP_REGEX.search(line)
if regex_result:
timestamp = datetime.datetime.strptime(regex_result.group(0), PODLOG_TIMESTAMP_FORMAT)
if timestamp < _deadline:
return LINE_EARLIER
else:
return LINE_LATER
else:
return LINE_NO_TIMESTAMP
return check_line
"""Size of chunk to use for reading from the back of a log file."""
CHUNK_SIZE = 65536
def trim_from_back(filename, condition):
"""Scans chunks of data from the back of the file until it's reached a point that's earlier than the deadline.
It then reads forward line by line until it reaches the correct point to trim."""
print("Trimming %s" % filename)
with open(filename, "r+") as file:
file.seek(0, os.SEEK_END)
curr_chunk_start = file.tell() - CHUNK_SIZE
condition_result = LINE_LATER
while curr_chunk_start > 0:
file.seek(curr_chunk_start)
file.readline() #Discard this since it's most likely a partial line
condition_result = condition(file.readline()) #This is the first full line in the chunk
while condition_result == LINE_NO_TIMESTAMP:
condition_result = condition(file.readline()) # Read until there's a line that has a timestamp
if condition_result == LINE_EARLIER:
break
curr_chunk_start -= CHUNK_SIZE
#At this point the curr_chunk_start is either less than zero, or the chunk contains the first line later than the deadline
if curr_chunk_start < 0:
curr_chunk_start = 0
file.seek(curr_chunk_start)
trim_start = curr_chunk_start
while condition_result != LINE_LATER:
line = file.readline()
trim_start += len(line)
condition_result = condition(line)
# trim_start is now right before the last line that was later than the deadline
if trim_start == 0:
return
# Since this file will be trimmed, create a hidden copy of it
hidden_filename = os.path.join(os.path.dirname(filename), "." + os.path.basename(filename))
shutil.copy(filename, hidden_filename)
file.seek(trim_start)
# Read the data we want to keep
content_to_keep = file.read()
file.seek(0)
file.truncate(0)
file.write("This file was trimmed to only contain lines since %s\n" % _deadline.strftime("%Y-%m-%d %H:%M:%SZ"))
file.write(content_to_keep)
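# Usage sketch (hypothetical path; assumes trim_logs has already set _deadline):
#
#   trim_from_back("must-gather/pod.log", pod_condition("must-gather/pod.log"))
#
# After trimming, the untrimmed original survives as "must-gather/.pod.log"
# until restore_hidden_files() renames it back.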
def check_bug_exists(bug_id):
"""Checks whether the bug exists in Bugzilla"""
url = BUGZILLA_URL + '/rest/bug/%s' % bug_id
return "error" not in requests.get(url).json()
def generate_comment(num_seconds):
"""Creates the comment text for the attachment"""
comment = ""
comment += "Result from running must-gather"
comment += "Log files were trimmed to the last %d" % num_seconds
return comment
def filter_hidden(file):
"""Filters out hidden files so that the untrimmed ones won't be added to the archive"""
return file if os.path.basename(os.path.normpath(file.name))[0] != "." else None
def restore_hidden_files(logfolder):
"""Finds any hidden files and renames them to their original name"""
for subdir, _, files in os.walk(logfolder):
for file in files:
# If the file is hidden, it was a trimmed file so restore it
if file[0] == ".":
shutil.move(os.path.join(subdir, file), os.path.join(subdir, file[1:]))
main()
|
import doctest
import os
import sys
from glob import glob
from unittest import TestSuite, defaultTestLoader
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
def suite():
result = TestSuite()
result.addTest(doctest.DocTestSuite('django_any.xunit'))
result.addTest(doctest.DocTestSuite('django_any.forms'))
for filename in glob(os.path.join(TESTS_ROOT, '*.py')):
if filename.endswith('__init__.py'):
continue
module_name = 'testapp.tests.%s' % \
os.path.splitext(os.path.basename(filename))[0]
__import__(module_name)
result.addTest(
defaultTestLoader.loadTestsFromModule(sys.modules[module_name]))
return result
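# A minimal way to run this suite directly (assuming Django settings for the
# test project are already configured):
#
#   if __name__ == '__main__':
#       import unittest
#       unittest.TextTestRunner(verbosity=2).run(suite())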
|
import numpy as np
import tensorflow as tf
from absl.testing import absltest
import lm.examples
import lm.tf
class TestTF(absltest.TestCase):
def test_as_dataset(self):
def infeed(params):
def simplegen():
for i in range(batch_size):
yield lm.examples.Seq2SeqSimpleExample(
np.ones(8, dtype=np.int64) * i, np.zeros(8, dtype=np.int64)
).serialize()
ds = lm.tf.from_generator(lambda: simplegen)
ds = ds.batch(params["batch_size"])
return ds
batch_size = 8
for ex in lm.tf.consume(infeed, params=dict(batch_size=batch_size)):
self.assertEqual(ex.shape, (batch_size,))
break
if __name__ == "__main__":
absltest.main()
|
import pytest
import six
from mock import Mock
from thefuck import conf
@pytest.fixture
def load_source(mocker):
return mocker.patch('thefuck.conf.load_source')
@pytest.fixture
def environ(monkeypatch):
data = {}
monkeypatch.setattr('thefuck.conf.os.environ', data)
return data
@pytest.mark.usefixtures('environ')
def test_settings_defaults(load_source, settings):
load_source.return_value = object()
settings.init()
for key, val in conf.DEFAULT_SETTINGS.items():
assert getattr(settings, key) == val
@pytest.mark.usefixtures('environ')
class TestSettingsFromFile(object):
def test_from_file(self, load_source, settings):
load_source.return_value = Mock(rules=['test'],
wait_command=10,
require_confirmation=True,
no_colors=True,
priority={'vim': 100},
exclude_rules=['git'])
settings.init()
assert settings.rules == ['test']
assert settings.wait_command == 10
assert settings.require_confirmation is True
assert settings.no_colors is True
assert settings.priority == {'vim': 100}
assert settings.exclude_rules == ['git']
def test_from_file_with_DEFAULT(self, load_source, settings):
load_source.return_value = Mock(rules=conf.DEFAULT_RULES + ['test'],
wait_command=10,
exclude_rules=[],
require_confirmation=True,
no_colors=True)
settings.init()
assert settings.rules == conf.DEFAULT_RULES + ['test']
@pytest.mark.usefixtures('load_source')
class TestSettingsFromEnv(object):
def test_from_env(self, environ, settings):
environ.update({'THEFUCK_RULES': 'bash:lisp',
'THEFUCK_EXCLUDE_RULES': 'git:vim',
'THEFUCK_WAIT_COMMAND': '55',
'THEFUCK_REQUIRE_CONFIRMATION': 'true',
'THEFUCK_NO_COLORS': 'false',
'THEFUCK_PRIORITY': 'bash=10:lisp=wrong:vim=15'})
settings.init()
assert settings.rules == ['bash', 'lisp']
assert settings.exclude_rules == ['git', 'vim']
assert settings.wait_command == 55
assert settings.require_confirmation is True
assert settings.no_colors is False
assert settings.priority == {'bash': 10, 'vim': 15}
def test_from_env_with_DEFAULT(self, environ, settings):
environ.update({'THEFUCK_RULES': 'DEFAULT_RULES:bash:lisp'})
settings.init()
assert settings.rules == conf.DEFAULT_RULES + ['bash', 'lisp']
class TestInitializeSettingsFile(object):
def test_ignore_if_exists(self, settings):
settings_path_mock = Mock(is_file=Mock(return_value=True), open=Mock())
settings.user_dir = Mock(joinpath=Mock(return_value=settings_path_mock))
settings._init_settings_file()
assert settings_path_mock.is_file.call_count == 1
assert not settings_path_mock.open.called
def test_create_if_doesnt_exists(self, settings):
settings_file = six.StringIO()
settings_path_mock = Mock(
is_file=Mock(return_value=False),
open=Mock(return_value=Mock(
__exit__=lambda *args: None, __enter__=lambda *args: settings_file)))
settings.user_dir = Mock(joinpath=Mock(return_value=settings_path_mock))
settings._init_settings_file()
settings_file_contents = settings_file.getvalue()
assert settings_path_mock.is_file.call_count == 1
assert settings_path_mock.open.call_count == 1
assert conf.SETTINGS_HEADER in settings_file_contents
for setting in conf.DEFAULT_SETTINGS.items():
assert '# {} = {}\n'.format(*setting) in settings_file_contents
settings_file.close()
|
from typing import Any, List, Optional
import json
def load_json(file) -> Optional[Any]:
json_obj = None
with open(file) as f:
json_obj = json.load(f)
return json_obj
def write_json(json_obj, file):
with open(file, 'w') as f:
json.dump(json_obj, f)
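# Round-trip sketch (hypothetical file name):
#   write_json({"a": 1}, "tmp.json")
#   assert load_json("tmp.json") == {"a": 1}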
def create_json_of_types():
"""
The source of the types (little pre-processing step):
https://github.com/filipekiss/pokemon-type-chart/blob/master/types.json
:return:
"""
file: str = "types.json"
json_obj = load_json(file)
new_json_obj = {}
for type_obj in json_obj:
new_json_obj[type_obj['name']] = {}
for attribute in ["immunes", "weaknesses", "strengths"]:
new_json_obj[type_obj['name']][attribute] = type_obj[attribute]
write_json(new_json_obj, "types-after-gen6.json")
def read_file(file: str) -> List[str]:
text: List[str] = []
with open(file) as f:
text = f.read().splitlines()
return text
|
import matplotlib.pyplot as plt
from tkinter import *
from tkinter.filedialog import askopenfilename
from PIL import Image, ImageTk
import matplotlib.image as mpimg
from scipy import misc
import math
import numpy as np
import sys as sys
from point import P2_Point
from point import R2_Point
import copy
def normalizeImg(image):
(row_num, col_num, _) = image.shape
maxVal = 0
minValNonZero = sys.maxsize
for j in range(0, col_num):
for i in range(0, row_num):
pxValue = image[i][j][0]
if maxVal < pxValue:
maxVal = pxValue
if minValNonZero > pxValue > 0:
minValNonZero = pxValue
    minValNonZero = 0  # note: this overrides the minimum computed above, anchoring normalization at zero
for j in range(0, col_num):
for i in range(0, row_num):
pxValue = image[i][j][0]
if pxValue > 0:
image[i][j][0] = int((pxValue-minValNonZero)*255/(maxVal-minValNonZero))
image[i][j][1] = int((pxValue-minValNonZero)*255/(maxVal-minValNonZero))
image[i][j][2] = int((pxValue-minValNonZero)*255/(maxVal-minValNonZero))
#print("pxValnorm = ", image[i][j][0])
return image
def rotation(vs, theta):
xs = vs.x
ys = vs.y
cosT = math.cos(theta)
sinT = math.sin(theta)
xu = xs*cosT - ys*sinT
yu = xs*sinT + ys*cosT
vu = R2_Point(xu, yu)
return vu
def makeP2Line(P0, v, angle):
    v.r2Normalize()
v_angle = rotation(v, angle)
(xv, yv) = v_angle.toTuple()
(x0, y0, _) = P0.toTuple()
p2Line = P2_Point(yv, -xv, -yv*x0 + xv*y0)
return p2Line
filename = askopenfilename(filetypes=[("all files","*"),("Bitmap Files","*.bmp; *.dib"),
("JPEG", "*.jpg; *.jpe; *.jpeg; *.jfif"),
("PNG", "*.png"), ("TIFF", "*.tiff; *.tif")])
image = misc.imread(filename, mode = 'RGB')
#plt.imshow(image)
(ySize, xSize, _) = image.shape
# Horizon line
(x0, y0) = (0, 38)
(xf, yf) = (785, 38)
#plt.plot([x0, xf], [y0, yf])
# origin (0, 0)
p00 = R2_Point(0,0)
p00.to_img_coord(xSize, ySize)
#plt.plot([p00.x], [p00.y], 'x')
p00 = p00.toP2_Point()
# Vanishing points
pfb = R2_Point(785, 38)#(665,38) #(785, 38)
pfb.to_cartesian_coord(xSize, ySize)
PFn = copy.deepcopy(pfb)
pfb = pfb.toP2_Point()
rfb = p00.cross(pfb)
pfa = R2_Point(0,38)#(194,38) #(0,38)
pfa.to_cartesian_coord(xSize, ySize)
PF0 = copy.deepcopy(pfa)
pfa = pfa.toP2_Point()
rfa = p00.cross(pfa)
vh = PFn - PF0
vh.r2Normalize()
p = rfa.cross(rfb)
p.normalize()
#plt.plot([p.x], [p.y], 'ro')
tMax = 180
sMax = 160
dTheta = (PFn.euclideanDistance(PF0))/(tMax)
ds = np.pi/sMax #3.14159265/sMax
(xb0, yb0) = (300,104)
(xbf, ybf) = (527,241)
shapeResult = (sMax, tMax, 3)
sinograma = np.zeros(shapeResult)
for t in range(0, tMax):
theta = t*dTheta
PFt = PF0 + theta*vh
#PFt.to_img_coord(xSize, ySize)
#plt.plot([PFt.x], [PFt.y], 'x')
for s in range(1, sMax):
countR = sinograma[s][t][0]
countG = sinograma[s][t][1]
countB = sinograma[s][t][2]
angle_s = s*ds
        # compute the vanishing line
Rts = makeP2Line(PFt.toP2_Point(), vh, angle_s)
Rts.normalize()
        bottomSideLine = P2_Point(0, 1, ySize/2)  # bottom edge of the image
bottomSideLine.normalize()
pbound = Rts.cross(bottomSideLine)
pboundImg = pbound.toR2_Point()
pboundImg.to_img_coord(xSize, ySize)
PFImg = copy.deepcopy(PFt)
PFImg.to_img_coord(xSize, ySize)
#plt.plot([PFImg.x, pboundImg.x], [PFImg.y, pboundImg.y], 'r--')
#
raySize = PFt.euclideanDistance(pbound)
nu = 100
du = raySize/nu
vu = pboundImg - PFImg
vu.r2Normalize()
for u in range(0, nu):
Pxy = PFImg + du*u*vu
#Pxy.to_img_coord(width, height)
            xIdx = int(Pxy.x)  # cast to int: numpy arrays cannot be indexed with floats
            yIdx = int(Pxy.y)
#print("img: x,y = %f,%f" %(Pxy.x, Pxy.y))
if xb0 <= xIdx < xbf and yb0 <= yIdx < ybf:
#if 0 <= xIdx < xSize and 0 <= yIdx < ySize:
#plt.plot([xIdx], [yIdx], 'r.')
countR = countR + image[yIdx][xIdx][0]
countG = countG + image[yIdx][xIdx][1]
countB = countB + image[yIdx][xIdx][2]
sinograma[s][t][0] = countR
sinograma[s][t][1] = countG
sinograma[s][t][2] = countB
sinograma = normalizeImg(sinograma)
plt.imshow(image)
#plt.imshow(sinograma)
#for x in range(xb0, xbf):
# for y in range(yb0, ybf):
# Pxy = R2_Point(x, y)
# Pxy.to_cartesian_coord(xSize, ySize)
plt.show()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Output formatters for JSON.
"""
import json
from . import base
from cliff import columns
class JSONFormatter(base.ListFormatter, base.SingleFormatter):
def add_argument_group(self, parser):
group = parser.add_argument_group(title='json formatter')
group.add_argument(
'--noindent',
action='store_true',
dest='noindent',
help='whether to disable indenting the JSON'
)
def emit_list(self, column_names, data, stdout, parsed_args):
items = []
for item in data:
items.append(
{n: (i.machine_readable()
if isinstance(i, columns.FormattableColumn)
else i)
for n, i in zip(column_names, item)}
)
indent = None if parsed_args.noindent else 2
json.dump(items, stdout, indent=indent)
def emit_one(self, column_names, data, stdout, parsed_args):
one = {
n: (i.machine_readable()
if isinstance(i, columns.FormattableColumn)
else i)
for n, i in zip(column_names, data)
}
indent = None if parsed_args.noindent else 2
json.dump(one, stdout, indent=indent)
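# Usage sketch (not part of cliff itself): emitting a single row to stdout.
#
#   import argparse, sys
#   formatter = JSONFormatter()
#   formatter.emit_one(['name', 'size'], ['cube.zarr', 42], sys.stdout,
#                      argparse.Namespace(noindent=False))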
|
from typing import ClassVar, List, Optional
from pycfmodel.model.base import CustomModel
from pycfmodel.model.resources.properties.policy_document import PolicyDocument
from pycfmodel.model.resources.resource import Resource
from pycfmodel.model.types import Resolvable, ResolvableStr, ResolvableStrOrList
from pycfmodel.model.utils import OptionallyNamedPolicyDocument
class IAMPolicyProperties(CustomModel):
"""
Properties:
- Groups: Friendly name of the IAM groups to attach the policy to.
- PolicyDocument: A [policy document][pycfmodel.model.resources.properties.policy_document.PolicyDocument] object.
- PolicyName: Name of the policy.
- Roles: Friendly name of the IAM roles to attach the policy to.
- Users: Friendly name of the IAM users to attach the policy to.
More info at [AWS Docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iam-policy.html)
"""
Groups: Optional[ResolvableStrOrList] = None
PolicyDocument: Resolvable[PolicyDocument]
PolicyName: ResolvableStr
Roles: Optional[ResolvableStrOrList] = None
Users: Optional[ResolvableStrOrList] = None
class IAMPolicy(Resource):
"""
Properties:
    - Properties: An [IAM Policy properties][pycfmodel.model.resources.iam_policy.IAMPolicyProperties] object.
More info at [AWS Docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iam-policy.html)
"""
TYPE_VALUE: ClassVar = "AWS::IAM::Policy"
Type: str = TYPE_VALUE
Properties: Resolvable[IAMPolicyProperties]
@property
def policy_documents(self) -> List[OptionallyNamedPolicyDocument]:
return [
OptionallyNamedPolicyDocument(
name=self.Properties.PolicyName, policy_document=self.Properties.PolicyDocument
)
]
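# Construction sketch (pycfmodel models are pydantic-based; values illustrative):
#
#   policy = IAMPolicy(
#       Properties={
#           "PolicyName": "root",
#           "PolicyDocument": {"Version": "2012-10-17", "Statement": []},
#       }
#   )
#   policy.policy_documents  # -> [OptionallyNamedPolicyDocument(name="root", ...)]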
|
# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2018 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
from functools import lru_cache
import datetime
from QUANTAXIS.QAARP.QAPortfolio import QA_Portfolio
from QUANTAXIS.QAARP.QAUser import QA_User
from QUANTAXIS.QAEngine.QAEvent import QA_Event
from QUANTAXIS.QAFetch.QAQuery_Advance import (QA_fetch_stock_day_adv,
QA_fetch_stock_min_adv)
from QUANTAXIS.QAMarket.QAMarket import QA_Market
from QUANTAXIS.QAMarket.QAShipaneBroker import QA_SPEBroker
from QUANTAXIS.QAUtil import (QA_Setting, QA_util_log_info,
QA_util_mongo_initial)
from QUANTAXIS.QAUtil.QAError import (QAError_database_connection,
QAError_market_enging_down,
QAError_web_connection)
from QUANTAXIS.QAUtil.QAParameter import (AMOUNT_MODEL, BROKER_EVENT,
BROKER_TYPE, ENGINE_EVENT, FREQUENCE,
MARKET_TYPE, ORDER_DIRECTION,
ORDER_MODEL)
from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_tradetime
class QATrade_Realtime():
    def __init__(self, market_type, frequence):
self.user = QA_User()
self.if_settled = False
self.account = None
self.portfolio = None
self.market = QA_Market()
self.market_type = market_type
self.frequence = frequence
self.broker = QA_SPEBroker()
self.broker_name = 'shipane_broker'
self.ingest_data = None
@property
def now(self):
return datetime.datetime.now()
    def load_account(self, account):
        # Register a new QAAccount under the broker name in the session dict; session maps { 'cookie': QAAccount }
        self.market.login(self.broker_name, account.account_cookie, account)
def start_market(self):
"""
start the market thread and register backtest broker thread
QAMarket 继承QATrader, QATrader 中有 trade_engine属性 , trade_engine类型是QA_Engine从 QA_Thread继承
"""
# 启动 trade_engine 线程
self.market.start()
# 注册 backtest_broker ,并且启动和它关联线程QAThread 存放在 kernels 词典中, { 'broker_name': QAThread }
self.market.register(self.broker_name, self.broker)
def run(self):
"""generator driven data flow
"""
        # a settlement event is only triggered when the date changes
        _date = None
        while QA_util_if_tradetime(self.now):
            for data in self.ingest_data:  # for each piece of data in ingest_data
                # <class 'QUANTAXIS.QAData.QADataStruct.QA_DataStruct_Stock_day'>
                date = data.date[0]
                if self.market_type == MARKET_TYPE.STOCK_CN:  # if this is the stock market
                    if _date != date:  # a new date has arrived
                        # the previous trading day is over,
                        # so send settle events to the broker and account
try:
self.market.trade_engine.join()
# time.sleep(2)
self.market._settle(self.broker_name)
except Exception as e:
raise e
                # funds, indices, futures
                elif self.market_type in [MARKET_TYPE.FUND_CN, MARKET_TYPE.INDEX_CN, MARKET_TYPE.FUTURE_CN]:
self.market._settle(self.broker_name)
# print(data)
self.broker.run(
QA_Event(event_type=ENGINE_EVENT.UPCOMING_DATA, market_data=data))
                # create an UPCOMING_DATA event and put it on the execution queue
self.market.upcoming_data(self.broker_name, data)
self.market.trade_engine.join()
_date = date
|
from __future__ import annotations
import logging
from pathlib import Path
import bokeh
import bokeh.layouts
import bokeh.palettes
import pandas
from annofabcli.statistics.histogram import get_histogram_figure, get_sub_title_from_series
logger = logging.getLogger(__name__)
class Task:
"""'タスクlist.csv'に該当するデータフレームをラップしたクラス"""
def __init__(self, df: pandas.DataFrame) -> None:
self.df = df
def _validate_df_for_output(self, output_file: Path) -> bool:
if len(self.df) == 0:
logger.warning(f"データが0件のため、{output_file} は出力しません。")
return False
return True
def plot_histogram_of_worktime(
self,
output_file: Path,
bins: int = 20,
):
"""作業時間に関する情報をヒストグラムでプロットする。
Args:
output_file (Path): [description]
bins (int, optional): [description]. Defaults to 20.
"""
if not self._validate_df_for_output(output_file):
return
logger.debug(f"{output_file} を出力します。")
df = self.df
histogram_list = [
dict(
title="教師付作業時間",
column="annotation_worktime_hour",
),
dict(
title="検査作業時間",
column="inspection_worktime_hour",
),
dict(
title="受入作業時間",
column="acceptance_worktime_hour",
),
dict(title="総作業時間", column="sum_worktime_hour"),
]
figure_list = []
decimals = 2
for hist in histogram_list:
column = hist["column"]
title = hist["title"]
sub_title = get_sub_title_from_series(df[column], decimals=decimals)
fig = get_histogram_figure(
df[column], x_axis_label="作業時間[hour]", y_axis_label="タスク数", title=title, sub_title=sub_title, bins=bins
)
figure_list.append(fig)
        # Plot inspection worktime, excluding tasks that were inspected automatically
df_ignore_inspection_skipped = df.query("inspection_worktime_hour.notnull() and not inspection_is_skipped")
sub_title = get_sub_title_from_series(
df_ignore_inspection_skipped["inspection_worktime_hour"], decimals=decimals
)
figure_list.append(
get_histogram_figure(
df_ignore_inspection_skipped["inspection_worktime_hour"],
x_axis_label="作業時間[hour]",
y_axis_label="タスク数",
title="検査作業時間(自動検査されたタスクを除外)",
sub_title=sub_title,
bins=bins,
)
)
df_ignore_acceptance_skipped = df.query("acceptance_worktime_hour.notnull() and not acceptance_is_skipped")
sub_title = get_sub_title_from_series(
df_ignore_acceptance_skipped["acceptance_worktime_hour"], decimals=decimals
)
figure_list.append(
get_histogram_figure(
df_ignore_acceptance_skipped["acceptance_worktime_hour"],
x_axis_label="作業時間[hour]",
y_axis_label="タスク数",
title="受入作業時間(自動受入されたタスクを除外)",
sub_title=sub_title,
bins=bins,
)
)
bokeh_obj = bokeh.layouts.gridplot(figure_list, ncols=3)
output_file.parent.mkdir(exist_ok=True, parents=True)
bokeh.plotting.reset_output()
bokeh.plotting.output_file(output_file, title=output_file.stem)
bokeh.plotting.save(bokeh_obj)
def plot_histogram_of_others(
self,
output_file: Path,
bins: int = 20,
):
"""アノテーション数や、検査コメント数など、作業時間以外の情報をヒストグラムで表示する。
Args:
output_file (Path): [description]
bins (int, optional): [description]. Defaults to 20.
"""
if not self._validate_df_for_output(output_file):
return
logger.debug(f"{output_file} を出力します。")
df = self.df
histogram_list = [
dict(column="annotation_count", x_axis_label="アノテーション数", title="アノテーション数"),
dict(column="input_data_count", x_axis_label="画像枚数", title="画像枚数"),
dict(column="inspection_count", x_axis_label="検査コメント数", title="検査コメント数"),
dict(
column="input_data_count_of_inspection",
x_axis_label="指摘を受けた画像枚数",
title="指摘を受けた画像枚数",
),
            # elapsed days
dict(
column="diff_days_to_first_inspection_started",
x_axis_label="最初の検査を着手するまでの日数",
title="最初の検査を着手するまでの日数",
),
dict(
column="diff_days_to_first_acceptance_started",
x_axis_label="最初の受入を着手するまでの日数",
title="最初の受入を着手するまでの日数",
),
dict(
column="diff_days_to_first_acceptance_completed",
x_axis_label="初めて受入完了状態になるまでの日数",
title="初めて受入完了状態になるまでの日数",
),
            # number of rejections
dict(
column="number_of_rejections_by_inspection",
x_axis_label="検査フェーズでの差し戻し回数",
title="検査フェーズでの差し戻し回数",
),
dict(
column="number_of_rejections_by_acceptance",
x_axis_label="受入フェーズでの差し戻し回数",
title="受入フェーズでの差し戻し回数",
),
]
figure_list = []
for hist in histogram_list:
column = hist["column"]
title = hist["title"]
x_axis_label = hist["x_axis_label"]
ser = df[column].dropna()
sub_title = get_sub_title_from_series(ser, decimals=2)
fig = get_histogram_figure(
ser, x_axis_label=x_axis_label, y_axis_label="タスク数", title=title, sub_title=sub_title, bins=bins
)
figure_list.append(fig)
bokeh_obj = bokeh.layouts.gridplot(figure_list, ncols=4)
output_file.parent.mkdir(exist_ok=True, parents=True)
bokeh.plotting.reset_output()
bokeh.plotting.output_file(output_file, title=output_file.stem)
bokeh.plotting.save(bokeh_obj)
|
from math import ceil
from typing import List, Tuple
# noinspection PyTypeChecker
# tag::calculator_class[]
class Calculator:
freq_items = [
(u"Año", 1, 10),
("Semestre", 2, 10),
("Cuatrimestre", 3, 12),
("Bimestre", 6, 12),
("Mes", 12, 12),
("Quincena", 24, 12),
("Bi-semana", 26, 13),
("Semana", 52, 13),
(u"Día", 365, 15),
]
def __init__(self, **kwargs):
self.freq = self.get_int(kwargs.get("freq", 12))
self.num_of_years = self.get_float(kwargs.get("num_of_years", 0))
self.rate = self.get_float(kwargs.get("rate", 0))
self.time_scale, rows_per_page = self.get_time_scale(self.freq)
self.periods = self.get_periods() # end::calculator_class[]
self.periods_a = self.get_periods_a()
self.periods_m = self.get_periods_m()
self.num_of_years_t = self.num_of_years
self.nper_t = 0
self.interests = []
self.balances = []
@staticmethod
def get_float(val: str) -> float:
return float(str(val).replace(",", ""))
@staticmethod
def get_int(val: str) -> int:
return int(str(val).replace(",", ""))
def get_periods(self) -> List[int]:
return [x + 1 for x in range(ceil(self.freq * self.num_of_years))]
def get_periods_a(self) -> List[int]:
return [x + 1 for x in range(ceil(1 * self.num_of_years))]
def get_periods_m(self) -> List[int]:
return [x + 1 for x in range(ceil(12 * self.num_of_years))]
    def get_time_scale(self, freq: int) -> Tuple[str, int]:
        f = freq if freq else 12
        for item in self.freq_items:
            if item[1] == f:
                return item[0], item[2]
        # Note: falls through (returning None) if freq is not one of the listed frequencies
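    # Example (values illustrative): a two-year monthly schedule.
    #
    #   calc = Calculator(freq=12, num_of_years=2, rate=5)
    #   calc.time_scale          # -> "Mes"
    #   len(calc.get_periods())  # -> 24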
|
import chess
import chess.pgn
import chess.engine
import sys, os
import random
import time
board = chess.Board()
print('legal_moves: ', list(board.legal_moves))
print(list(board.legal_moves)[0])
print(type(list(board.legal_moves)[0]))
m = list(board.legal_moves)[0]
print('m', m)
print('uci', m.uci())
print('san', board.san(m))
#sys.exit(0)
board.push_san("e4")
print(board)
print('out:', board.outcome())
print(board.fen())
STOCKFISH = '/opt/homebrew/bin/stockfish'
engine = chess.engine.SimpleEngine.popen_uci(STOCKFISH)
board = chess.Board('r1bq1b1r/pppnpkpp/8/3n4/3P4/8/PPP2PPP/RNBQKB1R w KQ - 0 7')
print(board)
limit = chess.engine.Limit(time=0.1)
print('before: ', limit)
foo = engine.play(board, limit)
print('result foo:', foo)
print('result foo:', foo.move)
print('result foo/type:', type(foo.move))
print('result foo:', foo.ponder)
print('result foo:', foo.info)
# help(foo)
#sys.exit(0)
#
print('more')
moves = []
limit = chess.engine.Limit(depth=1)
board = chess.Board('r1bq1b1r/pppnpkpp/8/3n4/3P4/8/PPP2PPP/RNBQKB1R w KQ - 0 7')
board = chess.Board()
t1 = time.time()
while board.outcome() is None:
ply = len(moves)
rnd = ''
if ply % 7 == 0:
rnd = '*'
move = random.choice(list(board.legal_moves))
else:
foo = engine.play(board, limit)
move = foo.move
uci = str(move)
san = board.san(move)
#print(uci, san, move, rnd)
moves.append(san)
board.push(move)
print('dt: ', time.time() - t1)
print('moves: ', moves)
print('final: ', board.outcome())
print(board)
print(board.fen())
print()
col = 0
strs = []
for i, san in enumerate(moves):
if i % 2 == 0:
s = f' {int(1+(i/2))}.'
strs.append(s)
col += len(s)
strs.append(' ' + san)
col += 1 + len(san)
if col >= 72:
print(''.join(strs))
col = 0
strs = []
engine.quit()
|
# -*- coding: utf-8 -*-
'''
© 2012-2013 eBay Software Foundation
Authored by: Tim Keefer
Licensed under CDDL 1.0
'''
import os
import sys
import gevent
from optparse import OptionParser
sys.path.insert(0, '%s/../' % os.path.dirname(__file__))
from common import dump
from ebaysdk.finding import Connection as finding
from ebaysdk.http import Connection as html
from ebaysdk.exception import ConnectionError
def init_options():
usage = "usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("-d", "--debug",
action="store_true", dest="debug", default=False,
help="Enabled debugging [default: %default]")
parser.add_option("-y", "--yaml",
dest="yaml", default='ebay.yaml',
help="Specifies the name of the YAML defaults file. [default: %default]")
parser.add_option("-a", "--appid",
dest="appid", default=None,
help="Specifies the eBay application id to use.")
(opts, args) = parser.parse_args()
return opts, args
def run(opts):
timeout = gevent.Timeout(4)
timeout.start()
try:
calls = []
for page in range(1, 10):
api = finding(debug=opts.debug, appid=opts.appid,
config_file=opts.yaml)
call = gevent.spawn(api.execute,
'findItemsAdvanced',
{'keywords': 'python',
'paginationInput': {'pageNumber': page}})
calls.append(call)
gevent.joinall(calls)
try:
call_results = [c.get() for c in calls]
toprated = 0
for resp in call_results:
for item in resp.reply.searchResult.item:
if item.topRatedListing == 'true':
toprated += 1
print("Top Rated Listings: %s" % toprated)
except ConnectionError as e:
print("%s" % e)
except gevent.timeout.Timeout as e:
print("Calls reached timeout threshold: %s" % e)
finally:
timeout.cancel()
if __name__ == "__main__":
(opts, args) = init_options()
run(opts)
|
"""articles_column
Revision ID: 49f02f2c2ea
Revises: 171e70161dd
Create Date: 2016-03-19 22:38:51.402128
"""
# revision identifiers, used by Alembic.
revision = '49f02f2c2ea'
down_revision = '171e70161dd'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('article_columns',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('articles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=64), nullable=True),
sa.Column('body', sa.Text(), nullable=True),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.Column('author_id', sa.Integer(), nullable=True),
sa.Column('article_column_id', sa.Integer(), nullable=True),
sa.Column('index', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['article_column_id'], ['article_columns.id'], ),
sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_articles_timestamp'), 'articles', ['timestamp'], unique=False)
op.drop_table('article_column')
op.drop_table('article')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('article',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('title', sa.VARCHAR(length=64), nullable=True),
sa.Column('body', sa.TEXT(), nullable=True),
sa.Column('timestamp', sa.DATETIME(), nullable=True),
sa.Column('author_id', sa.INTEGER(), nullable=True),
sa.Column('index', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('article_column',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('title', sa.VARCHAR(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.drop_index(op.f('ix_articles_timestamp'), table_name='articles')
op.drop_table('articles')
op.drop_table('article_columns')
### end Alembic commands ###
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-10 10:39
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Alias',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('source', models.CharField(blank=True, max_length=128)),
],
),
migrations.CreateModel(
name='DkimDomain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('selector', models.CharField(max_length=63)),
('private_key', models.TextField()),
('public_key', models.TextField()),
],
),
migrations.CreateModel(
name='Domain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=63, unique=True)),
('relay', models.BooleanField()),
],
),
migrations.CreateModel(
name='Recipient',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('address', models.CharField(max_length=192)),
('action', models.CharField(choices=[('REJECT', 'REJECT'), ('DEFER', 'DEFER'), ('421', '421'), ('521', '521')], max_length=8)),
('message', models.CharField(max_length=512)),
],
),
migrations.CreateModel(
name='SenderCredential',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=128)),
('password', models.CharField(max_length=106)),
('relayhost', models.CharField(max_length=192)),
('domain_name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pdje.Domain', to_field='name')),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('password', models.CharField(max_length=106)),
],
),
migrations.AddField(
model_name='dkimdomain',
name='domain_name',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pdje.Domain', to_field='name'),
),
migrations.AddField(
model_name='alias',
name='domain',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pdje.Domain'),
),
migrations.AddField(
model_name='alias',
name='target',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pdje.User'),
),
]
|
class DFDefault:
def __repr__(self):
return "DEFAULT_VAL"
def __str__(self):
return self.__repr__()
def __bool__(self):
return False
DEFAULT_VAL = DFDefault() # used for detecting when no parameter was passed without using 'None' or alternatives.
EMPTY_ARGS = dict(items=tuple())
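# Typical sentinel usage (illustrative):
#
#   def fetch(key, default=DEFAULT_VAL):
#       if default is DEFAULT_VAL:
#           ...  # no default was passed, so even an explicit None can be distinguished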
|
'''
language: python
Description:
version: beta
Author: xiaoshuyui
Date: 2021-01-06 10:51:46
LastEditors: xiaoshuyui
LastEditTime: 2021-02-20 09:41:13
'''
import sys
sys.path.append("..")
import datetime
import importlib
import inspect
from devtool.devTool import DevTool
# import devtool.tests.utils.func1
from devtool.utils.common import (match_datetime, validate_date,
validate_datetime)
if __name__ == "__main__":
# name = 'devtool.tests.utils.func1'
# module = importlib.import_module(name)
# # print(module)
# # # print(dir(devtool.tests.utils.func1))
# member_list = inspect.getmembers(module, predicate=inspect.isfunction)
# for v,_ in member_list:
# print(v)
DevTool.exec('devtool')
print(DevTool.storage)
DevTool.analysis()
DevTool.grep('this','or','123','True')
DevTool.treeWithState('devtool')
# a = match_datetime('2021-01-07 21:19:35,345 - DevTool - ERROR - __main__.test4 Traceback (most recent call last):')
# b = validate_date('2021-01-07 '.strip())
# # print(b)
# print(str(datetime.datetime.now())[:11] + '00:00:00')
# c = validate_datetime(str(datetime.datetime.now())[:11] + '00:00:00')
# print(c)
# DevTool.logFilter('ERROR','INFO',start='122',since='1998-01-01')
# print(DevTool.logFilter.__annotations__)
|
import asyncio
from typing import Dict, List, Union
import dramatiq
from . import crud, models, settings, utils
MAX_RETRIES = 3
STATUS_MAPPING = {
0: "Pending",
3: "complete",
2: "invalid",
1: "expired",
4: "In progress",
5: "Failed",
"Paid": "complete",
"Pending": "Pending",
"Unknown": "invalid",
"Expired": "expired",
}
@dramatiq.actor(actor_name="poll_updates", max_retries=MAX_RETRIES)
@settings.run_sync
async def poll_updates(obj: Union[int, models.Invoice], task_wallets: Dict[str, str]):
obj = await models.Invoice.get(obj)
if not obj:
return
await crud.invoice_add_related(obj)
if settings.TEST:
await asyncio.sleep(1)
await obj.update(status="test").apply()
await utils.publish_message(obj.id, {"status": "test"})
return
payment_methods = await models.PaymentMethod.query.where(models.PaymentMethod.invoice_id == obj.id).gino.all()
if not payment_methods:
return
for ind, method in enumerate(payment_methods):
payment_methods[ind].coin = settings.get_coin(method.currency, task_wallets[method.currency])
await process_invoice(obj, task_wallets, payment_methods)
async def process_invoice(
invoice: models.Invoice, task_wallets: Dict[str, str], payment_methods: List[models.PaymentMethod], notify: bool = True
):
while not settings.shutdown.is_set():
for method in payment_methods:
invoice_data = await method.coin.getrequest(method.payment_address)
if invoice_data["status"] != "Pending" and invoice_data["status"] != 0:
status = invoice_data["status"]
if isinstance(status, int):
status = STATUS_MAPPING[status]
elif isinstance(status, str) and status in STATUS_MAPPING:
status = STATUS_MAPPING[status]
if not status:
status = "expired"
await invoice.update(status=status, discount=method.discount).apply()
if status == "complete":
await invoice.update(paid_currency=method.currency).apply()
if notify:
await invoice_notification(invoice, status)
return
await asyncio.sleep(1)
poll_updates.send_with_options(args=(invoice.id, task_wallets), delay=1000) # to run on next startup
async def invoice_notification(invoice: models.Invoice, status: str):
await crud.invoice_add_related(invoice)
await utils.publish_message(invoice.id, {"status": status})
await utils.send_ipn(invoice, status)
if status == "complete":
store = await models.Store.get(invoice.store_id)
await crud.store_add_related(store)
await utils.notify(store, await utils.get_notify_template(store, invoice))
if invoice.products:
if utils.check_ping(
store.email_host,
store.email_port,
store.email_user,
store.email_password,
store.email,
store.email_use_ssl,
):
messages = []
for product_id in invoice.products:
product = await models.Product.get(product_id)
relation = (
await models.ProductxInvoice.query.where(models.ProductxInvoice.invoice_id == invoice.id)
.where(models.ProductxInvoice.product_id == product_id)
.gino.first()
)
quantity = relation.count
messages.append(await utils.get_product_template(store, product, quantity))
utils.send_mail(
store,
invoice.buyer_email,
await utils.get_store_template(store, messages),
)
@dramatiq.actor(actor_name="sync_wallet", max_retries=0)
@settings.run_sync
async def sync_wallet(model: Union[int, models.Wallet]):
test = settings.TEST
model = await models.Wallet.get(model)
if not model:
return
coin = settings.get_coin(model.currency, model.xpub)
balance = await coin.balance()
await model.update(balance=balance["confirmed"]).apply()
if test:
await asyncio.sleep(1)
await utils.publish_message(model.id, {"status": "success", "balance": balance["confirmed"]})
|
# The MIT License (MIT)
# Copyright (c) 2019 by the xcube development team and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import click
# noinspection PyShadowingBuiltins,PyUnusedLocal
@click.command(name="vars2dim")
@click.argument('cube')
@click.option('--variable', '--var', metavar='VARIABLE',
default='data',
help='Name of the new variable that includes all variables. Defaults to "data".')
@click.option('--dim_name', '-D', metavar='DIM-NAME',
default='var',
help='Name of the new dimension into variables. Defaults to "var".')
@click.option('--output', '-o', metavar='OUTPUT',
help="Output path. If omitted, 'INPUT-vars2dim.FORMAT' will be used.")
@click.option('--format', '-f', metavar='FORMAT', type=click.Choice(['zarr', 'netcdf']),
help="Format of the output. If not given, guessed from OUTPUT.")
def vars2dim(cube, variable, dim_name, output=None, format=None):
"""
Convert cube variables into new dimension.
    Moves all variables of CUBE into a single new variable <var-name>
    with a new dimension DIM-NAME and writes the results to OUTPUT.
"""
from xcube.core.dsio import guess_dataset_format
from xcube.core.dsio import open_dataset, write_dataset
from xcube.core.vars2dim import vars_to_dim
import os
if not output:
dirname = os.path.dirname(cube)
basename = os.path.basename(cube)
basename, ext = os.path.splitext(basename)
output = os.path.join(dirname, basename + '-vars2dim' + ext)
format_name = format if format else guess_dataset_format(output)
with open_dataset(input_path=cube) as ds:
converted_dataset = vars_to_dim(ds, dim_name=dim_name, var_name=variable)
write_dataset(converted_dataset, output_path=output, format_name=format_name)
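# Example invocation (paths illustrative), assuming this command is wired into
# the xcube CLI:
#
#   xcube vars2dim input.zarr --var data -D var -o input-vars2dim.zarr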
|
import tensorflow as tf
import numpy as np
from A3CAgent.helpers import *
from A3CAgent.AC_Network import AC_Network
from A3CAgent.Worker import Worker
import os
class A3CAgent:
def __init__(self):
self.first_init = True
self.roll_out_steps = 5
self.this_level = 0
self.worker_number = 0
def init(self, sso, elapsed_timer):
        s_size = sso.observation.shape[0] * sso.observation.shape[1] * sso.observation.shape[2]  # flattened observation size (height * width * channels)
        a_size = len(sso.availableActions)  # number of available actions
s_shape = sso.observation.shape
if self.first_init or not (s_size == self.s_size and a_size == self.a_size and s_shape == self.s_shape) :
            # START OF FIRST INIT
tf.reset_default_graph()
self.first_init = False
            if len(sso.availableActions) > 0:
                self.roll_out_steps = len(sso.availableActions) * 3
self.gamma = .99 # discount rate for advantage estimation and reward discounting
self.prv_observation = None
self.prv_score = 0
self.prv_action = 0
            self.s_size = sso.observation.shape[0] * sso.observation.shape[1] * sso.observation.shape[2]  # flattened observation size (height * width * channels)
            self.a_size = len(sso.availableActions)  # number of available actions
self.s_shape = sso.observation.shape
# self.model_path = 'tensor_flow/'
# if not os.path.exists(self.model_path):
# os.makedirs(self.model_path)
with tf.device("/cpu:0"):
self.trainer = tf.train.AdamOptimizer(learning_rate=1e-4)
self.master_network = AC_Network(self.s_size, self.a_size, 'global', None, self.s_shape) # Generate global network
self.session = tf.Session()
self.session.run(tf.global_variables_initializer())
print("A3CAgent Init ran for first time")
#END OF FIRST INIT
#Start new worker for level
self.worker_number += 1
print("Worker starting : "+str(self.worker_number))
self.Worker = Worker(self.worker_number, self.s_size, self.a_size, self.trainer, self.s_shape,self.session)
#Initialize Workers local variables
self.session.run(tf.variables_initializer(
tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, "worker_"+str(self.worker_number))
))
self.prv_observation = sso.observation
print("A3CAgent Init ran")
def act(self, sso, elapsed_timer):
reward = sso.gameScore - self.prv_score
        if sso.gameTick == 1:  # first action of the level
action = self.Worker.work(self.gamma, self.session, self.prv_observation , self.prv_action, reward, sso.observation, game_over=False, do_train=False, first_run=True)
else:
            if sso.gameTick % self.roll_out_steps == 0:  # roll out on the experience gained and train
action = self.Worker.work(self.gamma, self.session, self.prv_observation , self.prv_action, reward, sso.observation, game_over=False, do_train=True, first_run=False)
            else:  # collect experience
action = self.Worker.work(self.gamma, self.session, self.prv_observation , self.prv_action, reward, sso.observation, game_over=False, do_train=False, first_run=False)
self.prv_observation = sso.observation
self.prv_score = sso.gameScore
self.prv_action = action
return sso.availableActions[action]
def result(self, sso, elapsed_timer):
reward = sso.gameScore - self.prv_score
self.Worker.work(self.gamma, self.session, self.prv_observation , self.prv_action, reward, sso.observation, game_over=True, do_train=True, first_run=False)
self.this_level += 1
if self.this_level > 2:
self.this_level = 0
print("A3CAgent Result ran")
return self.this_level |
from __future__ import print_function
import httplib2
import os
import gspread
import praw
# https://github.com/burnash/gspread
from apiclient import discovery
from apiclient import errors
import oauth2client
from oauth2client import client
from oauth2client import tools
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive.metadata.readonly https://spreadsheets.google.com/feeds https://docs.google.com/feeds'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Drive API Python Quickstart'
LOSEIT_ID = '1-EKK8u-6lP7eaaMSmuPeadhg44rgyhkf0EMXPo7wHgw'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'drive-python-quickstart.json')
print (credential_path)
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
message = """Hello {name}.
Week 9 final check-ins have been active since last Friday, but you have not weighed in yet!
Please go to the [link for the week 9 weigh ins ](http://goo.gl/forms/aJnKJjTVGW)
and enter your reddit user name ({name}), and your current weight.
You have until end of day Thursday to weigh in for Week 9.
***ALSO-> You may also be interested in the next 2016 New Year, New you challenge. [Google form] (http://goo.gl/forms/VnWeWOBl14) or [Reddit post] (https://www.reddit.com/r/loseit/comments/401074/challenge_as_the_winter_2015_challenge_comes_to_a/)
This is just a PM from a reminder program written by jeffles2, I am not affiliated with the actual contest organizers.. just a zealous Icicle.
"""
skip_list = ['Gigi tweekz', 'TourmanlineTart', 'ThunderCatsServent', 'Pirite', 'lositinSD', 'JohnS1821', 'evitable_betrayal', 'Doktor_Rutherz', 'Brick_Pudding']
def main():
r = praw.Reddit(user_agent='Send message to loseit weight loss challenge subscribers who have not weighed in by /u/jeffles2')
username = raw_input('Enter your reddit username: ')
password = raw_input('Enter your password: ')
r.login(username, password)
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v2', http=http)
try:
file = service.files().get(fileId=LOSEIT_ID).execute()
print('Title: %s' % file['title'])
print('MIME type: %s' % file['mimeType'])
#for k,v in file.iteritems():
# print (k,v)
selflink = file['selfLink']
print ('selflink %s' % selflink)
# import pdb;pdb.set_trace()
# print ('Export Links %s' % file['exportLinks'])
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
gc = gspread.authorize(credentials)
sheet = gc.open("/r/loseit Holiday 2015 Challenge (Responses)")
wks = sheet.get_worksheet(1)
# import pdb; pdb.set_trace()
# for row_num in range(wks.row_count):
# row = wks.row_values(row_num)
# print (row)
# exit()
team_list = wks.range('A2:A3500')
name_list = wks.range('B2:B3500')
week1_weight_list = wks.range('AC2:AC3500')
week2_weight_list = wks.range('AD2:AD3500') #TODO Currently week
week3_weight_list = wks.range('AE2:AE3500') #TODO Currently week
total = 0
for team, name, weight1, weight2, weight3 in zip(team_list, name_list, week1_weight_list, week2_weight_list, week3_weight_list ):
if weight3.value == '' and team.value != 'TEAM ICICLE':
if weight1.value == '' and weight2.value == '':
continue
recipient = name.value
if recipient in skip_list:
#print (recipient, " is in the skip list")
continue
custom_message = message.format(name=recipient)
            if True:  # TODO: update to current week column and message title below
try:
r.send_message(recipient, 'Lose It - Week 9 weigh in reminder', custom_message)
except praw.errors.InvalidUser:
print ("\n!!! ", recipient, " is an Invalid User")
continue
import time
time.sleep(2)
print ("/u/{name} {team}".format(name=recipient, team=team.value))
total +=1
print ("")
print ("Total is ", total)
if __name__ == '__main__':
main() |
#!/usr/bin/env python3
# -*- Coding: UTF-8 -*- #
# -*- System: Linux -*- #
# -*- Usage: *.py -*- #
# Owner: Jacob B. Sanders
# Source: code.cloud-technology.io
# License: BSD 2-Clause License
"""
...
"""
from . import *
import Database.SQL as SQL
import Database.Association
import Database.User.Models.Base as Base
import Database.User.Schemas.Nexus as Type
import Database.User.Models.Name
import Database.User.Models.Note
import Database.Business.Models.Company
import Database.Business.Models.Unit
import Database.User.Models.Verification
class Table(Base.Table, SQL.Base):
"""
A User SQL ORM Class
Properties:
ID: Column - Primary Key
Email: Column - Indexable User Property Representing an Email Address.
Note that the email address is not a requirement.
Token-Safe N-Bytes:
Return a random URL-safe text string, containing nbytes random bytes. The text is Base64 encoded,
so on average each byte results in approximately 1.3 characters. If nbytes is None or not
supplied, a reasonable default is used. --> 32 // 1.333 := 24
Example:
>>> import secrets
...
>>> print(32 // 1.333) # --> 24
...
>>> String = secrets.token_urlsafe(24)
...
>>> print(String, "Length", "-", len(String))
"""
__tablename__ = "User"
__mapper_args__ = {"eager_defaults": True}
Username: Column = Column(
key = "Username",
name = "Username",
type_ = String,
index = True,
quote = True,
unique = True,
default = None,
nullable = False,
primary_key = False,
autoincrement = False
)
Email: Column = Column(
key = "Email",
name = "Email-Address",
type_ = String,
index = True,
quote = True,
unique = True,
default = None,
nullable = False,
primary_key = False,
autoincrement = False
)
Password: Column = Column(
key = "Password",
name = "Password",
type_ = String,
index = False,
quote = True,
unique = False,
default = None,
nullable = False,
primary_key = False,
autoincrement = False
)
Salt: Column = Column(
key = "Salt",
name = "Salt",
type_ = String,
index = False,
quote = True,
unique = False,
default = None,
nullable = False,
primary_key = False,
autoincrement = False
)
Active: Column = Column(
key = "Active",
name = "Active",
type_ = Boolean,
index = True,
quote = True,
unique = False,
default = None,
nullable = False,
primary_key = False,
autoincrement = False
)
Token: Column = Column(
key = "Token",
name = "Token",
type_ = String,
index = False,
quote = True,
unique = True,
default = lambda: Token(),
nullable = True,
primary_key = False,
autoincrement = False
)
GISO: Column = Column(
key = "GISO",
name = "GISO",
type_ = Enumeration(ISO5218,
unique = False,
nullable = False,
name = "Gender-Code"
), index = True,
quote = True,
unique = False,
default = 0x0,
nullable = True,
primary_key = False,
autoincrement = False
)
Preferred: Column = Column(
key = "Preferred",
name = "Preferred-Name",
type_ = String,
index = False,
quote = True,
unique = False,
default = "N/A",
nullable = True,
primary_key = False,
autoincrement = False
)
Name = Relationship(Database.User.Models.Name.Table, cascade = "all, delete-orphan", uselist = True, lazy = "joined")
Note = Relationship(Database.User.Models.Note.Table, cascade = "all, delete-orphan", uselist = True, lazy = "joined")
BCFK = Column(UUID, Foreign("Business-Company.ID"),
key = "Company-Foreign-Key",
name = "Company-Foreign-Key",
index = True,
quote = True,
default = None,
nullable = True
    )
Company: Database.Business.Models.Company.Table = Relationship(Database.Business.Models.Company.Table, single_parent = True, back_populates = "User", lazy = "joined")
BUFK = Column(UUID, Foreign("Business-Unit.ID"),
key = "Business-Unit-Foreign-Key",
name = "Business-Unit-Foreign-Key",
index = True,
quote = True,
default = None,
nullable = True
    )
Unit: Database.Business.Models.Unit.Table = Relationship(Database.Business.Models.Unit.Table, single_parent = True, back_populates = "User", lazy = "joined")
UVFK = Column(UUID, Foreign("User-Verification.ID"),
key = "VFK",
name = "User-Verification-Foreign-Key",
index = True,
quote = True,
default = None,
nullable = True
    )
Verification = Relationship(Database.User.Models.Verification.Table, cascade = "all, delete-orphan", uselist = False, single_parent = True, back_populates = "User", lazy = "joined")
HTML: Column = Column(
key = "HTML-Foreign-Key",
name = "HTML-Foreign-Key",
type_ = String,
index = True,
quote = True,
unique = True,
default = None,
nullable = True,
primary_key = False,
autoincrement = False
)
Meta: MetaData = Table.metadata
__all__ = [
"Table",
"Meta"
]
|
"""
Blink LED - timer callback
On-board LED is connected to GPIO2.
"""
from machine import Pin, Timer
led = Pin(2, Pin.OUT)
my_timer = Timer(0) # using timer 0
# define callback function
def toggle_led(t):
led.value(not led.value()) # reverse led pin state
my_timer.init(period = 1000, callback = toggle_led) # 1000ms
while True:
pass # do nothing |
import click
from model.MyModel import MyModel
from model.utils.lr_schedule import LRSchedule
from model.utils.Config import Config
from model.pre.data import DataFrameDataset
from model.pre.transforms import trans_train, trans_valid
from model.pre.split_data import generate_split
from torch.utils.data import TensorDataset, DataLoader, Dataset
from sklearn.model_selection import train_test_split
import pandas as pd
@click.command()
@click.option('--data', default="config/data.json",
help='Path to data json config')
@click.option('--training', default="config/training.json",
help='Path to training json config')
@click.option('--model', default="config/model.json",
help='Path to model json config')
@click.option('--output', default="results/local/",
help='Dir for results and model weights')
def main(data, training, model, output):
# Load configs
dir_output = output
config = Config([data, training, model])
config.save(dir_output)
# Load datasets
# train_ids, cv_ids, train_labels, cv_labels = generate_split(train_label_path, wsi_path)
labels = pd.read_csv(config.path_label_train)
train, val = train_test_split(labels, stratify=labels.label, test_size=0.2)
print(len(train), len(val))
dataset_train = DataFrameDataset(df_data=train, data_dir=config.dir_images_train, transform=trans_train)
dataset_valid = DataFrameDataset(df_data=val, data_dir=config.dir_images_train, transform=trans_valid)
loader_train = DataLoader(dataset=dataset_train, batch_size=config.batch_size, shuffle=True, num_workers=3)
loader_valid = DataLoader(dataset=dataset_valid, batch_size=config.batch_size//2, shuffle=False, num_workers=3)
# Define learning rate schedule
n_batches_epoch = len(loader_train)
lr_schedule = LRSchedule(lr_init=config.lr_init,
start_decay=config.start_decay*n_batches_epoch,
end_decay=config.end_decay*n_batches_epoch,
end_warm=config.end_warm*n_batches_epoch,
lr_warm=config.lr_warm,
lr_min=config.lr_min)
# Build model and train
model = MyModel(config, dir_output)
model.build_train(config)
model.restore()
model.train(config, loader_train, loader_valid, lr_schedule)
if __name__ == "__main__":
main() |
def numbers():
return [1, 2, 3]
def main():
print("Hello World!")
if __name__ == '__main__':
main() |
import sqlite3
from datetime import datetime  # the timestamp comes from the machine this code runs on
def handleWriting(next_pin_values):
# print(next_pin_values)
    p0, p1, p2, p3, p4, sink = next_pin_values  # sink is a throwaway value
    date_value = str(datetime.now())  # current date and time as a string
db_conn = sqlite3.connect('movement_db')
db_curr = db_conn.cursor()
statement0 = f'''
INSERT INTO movement_table0 VALUES({p0}, '{date_value}')
'''
statement1 = f'''
INSERT INTO movement_table1 VALUES({p1}, '{date_value}')
'''
statement2 = f'''
INSERT INTO movement_table2 VALUES({p2}, '{date_value}')
'''
statement3 = f'''
INSERT INTO movement_table3 VALUES({p3}, '{date_value}')
'''
statement4 = f'''
INSERT INTO movement_table4 VALUES({p4}, '{date_value}')
'''
    # print(statement0)
db_curr.execute(statement0)
db_curr.execute(statement1)
db_curr.execute(statement2)
db_curr.execute(statement3)
db_curr.execute(statement4)
db_conn.commit()
db_conn.close()
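    # A parameterized variant (a sketch, assuming the same five movement tables)
    # would avoid interpolating values directly into the SQL string:
    #
    #   for i, p in enumerate((p0, p1, p2, p3, p4)):
    #       db_curr.execute(
    #           "INSERT INTO movement_table{} VALUES(?, ?)".format(i),
    #           (p, date_value),
    #       )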
if __name__ == "__main__":
    handleWriting([1000, 2000, 3000, 4000, 5000]) # exercise the code with sample dummy values |
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Intel Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class MessageDataMonitor(object):
def __init__(self):
self.device_id = None
self.resource_uri = None
self.interval = None
self.requester_name = None
self.push_url = None
self.local_path = None
self.monitor_id = None
self.sequence = None
self.process = None
def equals(self, obj):
if self == obj:
return True
        if obj is None or self.__class__ != obj.__class__:
            return False
other = obj
if other.local_path == self.local_path:
return True
return False
    def hash_code(self):
        # str has no hash_code() method; use Python's built-in hash instead
        return 0 if self.local_path is None else hash(self.local_path)
def to_json_string(self):
bf = []
bf.append("{")
bf.append("\"di\":\"" + self.device_id + "\",")
bf.append("\"ri\":\"" + self.resource_uri + "\",")
if self.sequence is not None:
bf.append('"sequence":{},'.format(self.sequence))
if self.process is not None and self.process is True:
bf.append('"process":"{}",'.format(self.process))
bf.append('"interval":{},'.format(self.interval))
bf.append("\"requester\":\"" + self.requester_name + "\",")
bf.append("\"purl\":\"" + self.push_url + "\"")
bf.append("}")
return "".join(bf)
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import scriptcontext as sc
import compas_rhino
from compas_3gs.diagrams import FormNetwork
from compas.geometry import Translation
from compas_3gs.algorithms import volmesh_dual_network
from compas_3gs.rhino import relocate_formdiagram
__commandname__ = "TGS_primal"
def RunCommand(is_interactive):
if '3GS' not in sc.sticky:
compas_rhino.display_message('3GS has not been initialised yet.')
return
scene = sc.sticky['3GS']['scene']
# get ForceVolMeshObject from scene
objects = scene.find_by_name('force')
if not objects:
compas_rhino.display_message("There is no ForceDiagram in the scene.")
return
force = objects[0]
# make FormNetwork
form = volmesh_dual_network(force.diagram, cls=FormNetwork)
# set dual/primal
form.dual = force.diagram
force.diagram.primal = form
# add FormNetworkObject
translation = relocate_formdiagram(force.diagram, form)
form.transform(Translation.from_vector(translation))
form.update_angle_deviations()
scene.add_formnetwork(form, name='form', layer='3GS::FormDiagram')
# update
scene.update()
scene.save()
print('Dual diagram successfully created.')
# ==============================================================================
# Main
# ==============================================================================
if __name__ == "__main__":
RunCommand(True)
|
from django.shortcuts import get_object_or_404
from rest_framework import mixins, views, viewsets
from ..models import Comment, ReplyComment
from .serializers import CommentSerializer, ReplyCommentSerializer
class CommentViewSet(
mixins.CreateModelMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
viewsets.GenericViewSet,
):
queryset = Comment.objects.filter(is_approved=True)
serializer_class = CommentSerializer
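# The ReplyComment imports above are otherwise unused; a companion viewset in
# the same style would plausibly look like this (a sketch, not part of the
# original file):
class ReplyCommentViewSet(
    mixins.CreateModelMixin,
    mixins.ListModelMixin,
    mixins.RetrieveModelMixin,
    viewsets.GenericViewSet,
):
    queryset = ReplyComment.objects.all()
    serializer_class = ReplyCommentSerializer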
|
import math
from typing import Dict, List, Tuple
def required_ore(
ingredients: Dict[str, Tuple[int, List[Tuple[int, str]]]], fuel: int
) -> int:
required = {"FUEL": fuel}
stock: Dict[str, int] = {}
while {k for k, v in required.items() if v > 0} != {"ORE"}:
next_required = next(k for k in required if k != "ORE")
required_amount = required.pop(next_required)
amount, required_ingredients = ingredients[next_required]
multiplier = math.ceil(required_amount / amount)
for ing_amount, ingredient in required_ingredients:
amnt = ing_amount * multiplier
if ingredient in stock:
to_rm = min(stock[ingredient], amnt)
stock[ingredient] -= to_rm
amnt -= to_rm
if stock[ingredient] == 0:
stock.pop(ingredient)
if amnt:
required[ingredient] = amnt + required.get(ingredient, 0)
        if (left_over := amount * multiplier - required_amount):
stock[next_required] = left_over + stock.get(next_required, 0)
return required["ORE"]
def part2(ingredients: Dict[str, Tuple[int, List[Tuple[int, str]]]]) -> int:
ore = 1000000000000
lo, hi = 2, ore
while lo < hi:
mid = (lo + hi) // 2
if ore < required_ore(ingredients, mid):
hi = mid
else:
lo = mid + 1
return lo - 1
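# required_ore is monotone in the fuel amount, so part2 bisects on the largest
# fuel whose ore cost stays within the 10^12 budget; the loop exits with lo one
# past the answer, hence the `lo - 1`.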
def main() -> None:
with open("input.txt") as f:
inp = [
x.split()
for x in f.read().strip().replace(",", "").replace("=>", "").split("\n")
]
ingredients: Dict[
str, Tuple[int, List[Tuple[int, str]]]
    ] = {  # product -> (amount, list of (amount, ingredient))
r[-1][1]: (r[-1][0], r[:-1])
for r in [list(zip(map(int, r[::2]), r[1::2])) for r in inp]
}
print("Part 1:", required_ore(ingredients, 1))
print("Part 2:", part2(ingredients))
main()
|
# Nicholas Novak
# Introduction to Python course assignment
#Open the included file, read its lines, and close it
dictionary_file = open("lab5_dictionary.txt", "r")
dictionary_lines = dictionary_file.readlines()
dictionary_file.close()
# In this assignment, I created a function to find the longest word in a dictionary text file along with any ties for the longest word in that file.
# Notably, each line of the dictionary ends in '\n' which means the ending characters have to be stripped before adding a word to the output list.
## Created function:
def long_word(file_name):
    with open(file_name, 'r') as infile:
        words = infile.read().split() #The with-block closes the file automatically after reading it.
    max_len = len(max(words, key=len)) #This creates an integer equal to the length of the longest word in the file
    return [i for i in words if len(i) == max_len] #Return statement that returns a list of all words tied for the maximum length.
long_word("lab5_dictionary.txt") # Calling created function for the included text file.
#-----------------------------------------
# Next, the dictionary will be iterated through to find words containing the most amount of unique letters.
# A list will be output that contains all words tied for most unique letters.
import string
alphabet_string = string.ascii_lowercase #All letters of the alphabet as a string (printed for reference)
print(alphabet_string)
# Function to open the file
def open_dict(filename):
    with open(filename, 'r') as f: #Open the file; it closes automatically when the block exits
        words = f.readlines()
    return words
#Function to determine the amount of unique characters in a word
def uniqueness(word):
    uniques = set() #A set stores each character at most once, so it collects the distinct characters more efficiently than a manual loop-and-check.
    for character in word:
        uniques.add(character)
    return word, len(uniques)
#Function to determine the word(s) with the most unique characters in the dictionary file
def unique_words(filename):
    dictionary = open_dict(filename) #Read the file into a list of lines
    most_unique, unique_count = uniqueness(dictionary[0])
    #Seed the running maximum with the first word's score
print("Most unique words list:\n")
for word in dictionary[1:]:
        curr_word, curr_count = uniqueness(word)
        #Update the running maximum when the current word ties or beats it
        if curr_count >= unique_count:
            most_unique, unique_count = curr_word, curr_count
if curr_count == 17:
            #Running the program once without this check showed 17 to be the maximum count;
            #therefore, any word with 17 unique characters is reported.
print('This unique word is', most_unique, 'with', unique_count, 'unique characters\n')
unique_words('lab5_dictionary.txt')
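# A two-pass variant (a sketch) would avoid the hard-coded 17: first compute
# best = max(len(set(w.strip())) for w in open_dict('lab5_dictionary.txt')),
# then print every word whose unique-character count equals best.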
#-----------------------------------------
# For the final exercise, I determined if an anagram of a user input exists within the dictionary file.
#
# For example, the following words do have anagrams in the included dictionary file:
#
# 1) restful
#
# 2) bluster
#
# 3) binary
#
# Return all anagrams found.
# Provided function
def is_anagram(string1,string2):
return sorted(string1) == sorted(string2) # Determines if a sorted word is equal to another sorted word. Basically, is it an anagram?
def anagram_finder():
with open("lab5_dictionary.txt", 'r') as infile2:
dictionary_file_2 = infile2.readlines()
    user_input = input("Please enter the word you would like to find an anagram for: ") # User input
print("anagrams below:\n\n")
ana_list = [] # Create list for results
    for s in dictionary_file_2:
        if is_anagram(s.rstrip(), user_input): # Strip the newline so every line, including the last, compares correctly
            print(s) # Display found string
            ana_list.append(s.rstrip()) # Strip the newline characters before appending to the list
return ana_list # Return stripped list
print(anagram_finder()) # Run the function
|
import collections
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.keras import activations
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.layers.recurrent import DropoutRNNCellMixin
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import tf_logging as logging
# ************************************************************
# Note : We modified the original TensorFlow source code so that
# the GRU_CELL uses a CNN. This was done to avoid the out-of-memory
# issue in TensorFlow's TimeDistributed layer.
#
# Original source code available at :
# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/keras/layers/recurrent_v2.py
#
# ************************************************************
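# RECURRENT_DROPOUT_WARNING_MSG is a module-level constant in the original
# TensorFlow source; it is re-declared here (text copied from that file, to
# the best of our knowledge) so this standalone copy does not raise a
# NameError when the warning path is taken.
RECURRENT_DROPOUT_WARNING_MSG = (
    'RNN `implementation=2` is not supported when `recurrent_dropout` is set. '
    'Using `implementation=1`.')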
class CNN_GRU_Cell(DropoutRNNCellMixin, Layer):
"""Cell class for the GRU layer.
Arguments:
units: Positive integer, dimensionality of the output space.
activation: Activation function to use.
Default: hyperbolic tangent (`tanh`).
If you pass None, no activation is applied
(ie. "linear" activation: `a(x) = x`).
recurrent_activation: Activation function to use
for the recurrent step.
Default: hard sigmoid (`hard_sigmoid`).
If you pass `None`, no activation is applied
(ie. "linear" activation: `a(x) = x`).
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the `kernel` weights matrix,
used for the linear transformation of the inputs.
recurrent_initializer: Initializer for the `recurrent_kernel`
weights matrix,
used for the linear transformation of the recurrent state.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer function applied to
the `kernel` weights matrix.
recurrent_regularizer: Regularizer function applied to
the `recurrent_kernel` weights matrix.
bias_regularizer: Regularizer function applied to the bias vector.
kernel_constraint: Constraint function applied to
the `kernel` weights matrix.
recurrent_constraint: Constraint function applied to
the `recurrent_kernel` weights matrix.
bias_constraint: Constraint function applied to the bias vector.
dropout: Float between 0 and 1.
Fraction of the units to drop for the linear transformation of the inputs.
recurrent_dropout: Float between 0 and 1.
Fraction of the units to drop for
the linear transformation of the recurrent state.
implementation: Implementation mode, either 1 or 2.
Mode 1 will structure its operations as a larger number of
smaller dot products and additions, whereas mode 2 will
batch them into fewer, larger operations. These modes will
have different performance profiles on different hardware and
for different applications.
reset_after: GRU convention (whether to apply reset gate after or
before matrix multiplication). False = "before" (default),
True = "after" (CuDNN compatible).
Call arguments:
inputs: A 2D tensor.
states: List of state tensors corresponding to the previous timestep.
training: Python boolean indicating whether the layer should behave in
training mode or in inference mode. Only relevant when `dropout` or
`recurrent_dropout` is used.
"""
def __init__(self,
cnn,
units,
ip_dims,
activation='tanh',
recurrent_activation='hard_sigmoid',
use_bias=True,
kernel_initializer='glorot_uniform',
recurrent_initializer='orthogonal',
bias_initializer='zeros',
kernel_regularizer=None,
recurrent_regularizer=None,
bias_regularizer=None,
kernel_constraint=None,
recurrent_constraint=None,
bias_constraint=None,
dropout=0.,
recurrent_dropout=0.,
implementation=1,
reset_after=False,
**kwargs):
super().__init__(**kwargs)
#changes here
self.cnn = cnn
self.ip_dims = ip_dims
self.units = units
self.activation = activations.get(activation)
self.recurrent_activation = activations.get(recurrent_activation)
self.use_bias = use_bias
self.kernel_initializer = initializers.get(kernel_initializer)
self.recurrent_initializer = initializers.get(recurrent_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.recurrent_constraint = constraints.get(recurrent_constraint)
self.bias_constraint = constraints.get(bias_constraint)
self.dropout = min(1., max(0., dropout))
self.recurrent_dropout = min(1., max(0., recurrent_dropout))
if self.recurrent_dropout != 0 and implementation != 1:
logging.debug(RECURRENT_DROPOUT_WARNING_MSG)
self.implementation = 1
else:
self.implementation = implementation
self.reset_after = reset_after
self.state_size = self.units
self.output_size = self.units
@tf_utils.shape_type_conversion
def build(self, input_shape):
#changes here
input_dim = self.ip_dims
self.kernel = self.add_weight(
shape=(input_dim, self.units * 3),
name='kernel',
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint)
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units * 3),
name='recurrent_kernel',
initializer=self.recurrent_initializer,
regularizer=self.recurrent_regularizer,
constraint=self.recurrent_constraint)
if self.use_bias:
if not self.reset_after:
bias_shape = (3 * self.units,)
else:
# separate biases for input and recurrent kernels
# Note: the shape is intentionally different from CuDNNGRU biases
# `(2 * 3 * self.units,)`, so that we can distinguish the classes
# when loading and converting saved weights.
bias_shape = (2, 3 * self.units)
self.bias = self.add_weight(shape=bias_shape,
name='bias',
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint)
#caching_device=default_caching_device)
else:
self.bias = None
self.built = True
    def call(self, inputs, states, training=None):
h_tm1 = states[0] # previous memory
# changes here
inputs = self.cnn(inputs)
dp_mask = self.get_dropout_mask_for_cell(inputs, training, count=3)
rec_dp_mask = self.get_recurrent_dropout_mask_for_cell(
h_tm1, training, count=3)
if self.use_bias:
if not self.reset_after:
input_bias, recurrent_bias = self.bias, None
else:
input_bias, recurrent_bias = array_ops.unstack(self.bias)
if self.implementation == 1:
if 0. < self.dropout < 1.:
inputs_z = inputs * dp_mask[0]
inputs_r = inputs * dp_mask[1]
inputs_h = inputs * dp_mask[2]
else:
inputs_z = inputs
inputs_r = inputs
inputs_h = inputs
x_z = K.dot(inputs_z, self.kernel[:, :self.units])
x_r = K.dot(inputs_r, self.kernel[:, self.units:self.units * 2])
x_h = K.dot(inputs_h, self.kernel[:, self.units * 2:])
if self.use_bias:
x_z = K.bias_add(x_z, input_bias[:self.units])
x_r = K.bias_add(x_r, input_bias[self.units: self.units * 2])
x_h = K.bias_add(x_h, input_bias[self.units * 2:])
if 0. < self.recurrent_dropout < 1.:
h_tm1_z = h_tm1 * rec_dp_mask[0]
h_tm1_r = h_tm1 * rec_dp_mask[1]
h_tm1_h = h_tm1 * rec_dp_mask[2]
else:
h_tm1_z = h_tm1
h_tm1_r = h_tm1
h_tm1_h = h_tm1
recurrent_z = K.dot(h_tm1_z, self.recurrent_kernel[:, :self.units])
recurrent_r = K.dot(h_tm1_r,
self.recurrent_kernel[:, self.units:self.units * 2])
if self.reset_after and self.use_bias:
recurrent_z = K.bias_add(recurrent_z, recurrent_bias[:self.units])
recurrent_r = K.bias_add(recurrent_r,
recurrent_bias[self.units:self.units * 2])
z = self.recurrent_activation(x_z + recurrent_z)
r = self.recurrent_activation(x_r + recurrent_r)
# reset gate applied after/before matrix multiplication
if self.reset_after:
recurrent_h = K.dot(h_tm1_h, self.recurrent_kernel[:, self.units * 2:])
if self.use_bias:
recurrent_h = K.bias_add(recurrent_h, recurrent_bias[self.units * 2:])
recurrent_h = r * recurrent_h
else:
recurrent_h = K.dot(r * h_tm1_h,
self.recurrent_kernel[:, self.units * 2:])
hh = self.activation(x_h + recurrent_h)
else:
if 0. < self.dropout < 1.:
inputs = inputs * dp_mask[0]
# inputs projected by all gate matrices at once
matrix_x = K.dot(inputs, self.kernel)
if self.use_bias:
# biases: bias_z_i, bias_r_i, bias_h_i
matrix_x = K.bias_add(matrix_x, input_bias)
x_z, x_r, x_h = array_ops.split(matrix_x, 3, axis=-1)
if self.reset_after:
# hidden state projected by all gate matrices at once
matrix_inner = K.dot(h_tm1, self.recurrent_kernel)
if self.use_bias:
matrix_inner = K.bias_add(matrix_inner, recurrent_bias)
else:
# hidden state projected separately for update/reset and new
matrix_inner = K.dot(h_tm1, self.recurrent_kernel[:, :2 * self.units])
recurrent_z, recurrent_r, recurrent_h = array_ops.split(
matrix_inner, [self.units, self.units, -1], axis=-1)
z = self.recurrent_activation(x_z + recurrent_z)
r = self.recurrent_activation(x_r + recurrent_r)
if self.reset_after:
recurrent_h = r * recurrent_h
else:
recurrent_h = K.dot(r * h_tm1,
self.recurrent_kernel[:, 2 * self.units:])
hh = self.activation(x_h + recurrent_h)
# previous and candidate state mixed by update gate
h = z * h_tm1 + (1 - z) * hh
return h, [h]
def get_config(self):
config = {
'units': self.units,
'activation': activations.serialize(self.activation),
'recurrent_activation':
activations.serialize(self.recurrent_activation),
'use_bias': self.use_bias,
'kernel_initializer': initializers.serialize(self.kernel_initializer),
'recurrent_initializer':
initializers.serialize(self.recurrent_initializer),
'bias_initializer': initializers.serialize(self.bias_initializer),
'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
'recurrent_regularizer':
regularizers.serialize(self.recurrent_regularizer),
'bias_regularizer': regularizers.serialize(self.bias_regularizer),
'kernel_constraint': constraints.serialize(self.kernel_constraint),
'recurrent_constraint':
constraints.serialize(self.recurrent_constraint),
'bias_constraint': constraints.serialize(self.bias_constraint),
'dropout': self.dropout,
'recurrent_dropout': self.recurrent_dropout,
'implementation': self.implementation,
'reset_after': self.reset_after
}
        base_config = super(CNN_GRU_Cell, self).get_config()
return dict(list(base_config.items()) + list(config.items())) |
import os
import random
from django.db import models
from contests.models import Submission, User, Contest
VERDICT = (
('RTE', 'Run Time Error'),
('MLE', 'Memory Limit Exceeded'),
('TLE', 'Time Limit Exceeded'),
('WA', 'Wrong Answer'),
('CE', 'Compilation Error'),
('IE', 'Internal Error'),
('AC', 'Accepted'),
('PENDING', 'Pending'),
)
def spec_upload_path(instance, filename):
alphanum = '1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'
random_str = ''.join(random.choice(alphanum) for _ in range(64))
return os.path.join("{}-{}-{}".format('gradingspec', instance.id, random_str), filename)
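# Note: instance.id is None for a GradingGroup that has not been saved yet, so
# attaching the spec file before the first save would produce a directory name
# containing "None". Saving the model before assigning the file avoids this.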
class GradingGroup(models.Model):
submission = models.ForeignKey(
Submission, on_delete=models.CASCADE, related_name='grading_groups')
issued_time = models.DateTimeField(auto_now_add=True)
verdict = models.CharField(
max_length=32, choices=VERDICT, default='PENDING')
finish_time = models.DateTimeField(blank=True, null=True)
    # contains the tcgen, solution, and checker spec for the submission
spec = models.FileField(upload_to=spec_upload_path)
grading_size = models.IntegerField()
def __str__(self):
return "{} - Grading Group #{}".format(self.submission, self.id)
class Grading(models.Model):
# filled when inserted
grading_group = models.ForeignKey(
GradingGroup, on_delete=models.CASCADE, related_name='gradings')
# for optimization
contest = models.ForeignKey(
Contest, on_delete=models.CASCADE, related_name='gradings')
verdict = models.CharField(
max_length=32, choices=VERDICT, default='PENDING')
grader_group = models.IntegerField()
# filled when claimed
claimed_at = models.DateTimeField(blank=True, null=True)
claimed_by = models.ForeignKey(
User, null=True, blank=True, on_delete=models.SET_NULL)
# filled when finished
finish_at = models.DateTimeField(blank=True, null=True)
output = models.FileField(null=True, blank=True)
def __str__(self):
return "{} - Grading #{}".format(self.grading_group, self.id)
|