ext (stringclasses, 9 values) | sha (stringlengths, 40–40) | content (stringlengths, 3–1.04M) |
---|---|---|
py | b40d1d28cec687536a099d0425b6c0303b145975 | n = float(input('Digite um numero: '))
n1 = n * 2
n2 = n * 3
n3 = n ** (1/2)
print('O numero é {}, o dobro é {}, o triplo é {} e a raiz quadrada é {:.2f}'.format(n, n1, n2, n3)) |
py | b40d1e218d01e8e31934f016ad6ed4afb79db00e | class Solution:
def frequencySort(self, s: str) -> str:
# count occurrences of each character in s
# append characters to the answer in descending frequency order
answer = ""
freqDict = {}
for char in s:
if char not in freqDict:
freqDict[char] = 1
else:
freqDict[char] += 1
for item in sorted(freqDict.items(), key=lambda item: item[1], reverse=True):
answer += (item[0] * item[1])
return answer
|
py | b40d1fdf18da25bdeef17cea84e0784503b6d34d | # Module for working with MongoDB
from pymongo import MongoClient
from config import DATABASE_NAME, DATABASE_COLLECTION_NAME
client = MongoClient('localhost', 27017)
db = client[DATABASE_NAME]
music = db[DATABASE_COLLECTION_NAME]
def select_all():
""" Получаем все строки """
return music.find({})
def select_single(rownum):
""" Получаем одну строку с номером rownum """
return music.find_one({"id": rownum})
def count_rows():
""" Считаем количество строк """
rows = music.count_documents({})
print('Mongo rows: ' + str(rows))
return rows
def close():
""" Закрываем текущее соединение с БД """
client.close() |
py | b40d200493e64dc5f3c791884007d22fb2dd2eee | # Variable
idade = int(input('Digite o ano de nascimento: '))
# Variables
classificacao = ['Mirim', 'Infantil', 'Junior', 'Sênior', 'Master']
idade = 2020 - idade
# Conditionals
if idade <= 9:
print(f"A sua categoria: {classificacao[0]}")
elif 9 < idade <= 14:
print(f"A sua categoria: {classificacao[1]}")
elif 15 <= idade < 19:
print(f"A sua categoria: {classificacao[2]}")
elif 19 <= idade < 20:
print(f"A sua categoria: {classificacao[3]}")
else:
print(f"A sua categoria: {classificacao[4]}")
|
py | b40d20a3e8a48b45659c949474965e84d3a3aad4 | from setuptools import setup, find_packages
setup(
name="breakzip",
version="0.1",
description="",
author="Michael Terrell",
author_email="[email protected]",
license="MIT",
packages=find_packages(),
tests_require=["pytest"],
extras_require={"testing": ["pytest-mock"]},
entry_points={
"console_scripts": ["breakzip=breakzip.command_line:main"],
},
zip_safe=False,
)
|
py | b40d20bbab0dd32043f9f4f6e9dd78833db79204 | import enum
import sys
from advent import dates
from advent import overview
from advent import show
from advent import test
from advent.door import Door
USAGE = """
Usage:
Run `python advent.py` to get an overview of all days
Run `python advent.py show 2` to show the task for day 2.
Change the number for other days ;)
Run `python advent.py test 2` to test your solution for day 2.
Run `python advent.py reset 2` to reset your solution for day 2.
You probably don't need this :D
"""
INVALID_DAY = -1
class Command(enum.Enum):
Overview = 'overview'
Show = 'show'
Test = 'test'
Reset = 'reset'
class Overview(object):
def __init__(self):
pass
class Show(object):
def __init__(self, day):
pass
class Test(object):
def __init__(self, day):
pass
class InvalidArgs(Exception):
pass
def parse_args(args):
nargs = len(args)
if nargs <= 1:
return INVALID_DAY, Command.Overview
if nargs == 2:
try:
day = int(args[1])
cmd = Command.Show
except ValueError:
day = dates.today()
try:
cmd = Command(args[1].lower())
except ValueError:
raise InvalidArgs()
elif nargs == 3:
try:
cmd = Command(args[1].lower())
day = int(args[2])
except ValueError:
raise InvalidArgs()
else:
raise InvalidArgs()
return day, cmd
def main():
try:
day, cmd = parse_args(sys.argv)
if cmd == Command.Overview:
overview.show()
return
door = Door.for_day(day)
if cmd == Command.Show:
show.present(door)
elif cmd == Command.Test:
test.test(door)
elif cmd == Command.Reset:
where = door.reset()
print('Tuerchen {} wiederhergestellt!'.format(day))
print('Das Tuerchen findest du wie vorher unter {} :)'.format(where.file_path))
except InvalidArgs:
print(USAGE)
except dates.DayCheckFailed as e:
print('Na na na!')
print('Nicht so neugierig, das ist noch nicht dran!')
print('Nur noch {} mal schlafen bis das dran ist :)'.format(e.days_to_go))
if __name__ == '__main__':
main()
|
py | b40d220ed66a3f0dff364f6805841a564c95c9b2 | """
Time: O(N)
Space: O(F), storing number of factors for N.
"""
class Solution(object):
def kthFactor(self, n, k):
factors = []
for i in xrange(1, n+1):
if n%i==0:
factors.append(i)
if len(factors)==k: return factors[-1]
return -1
"""
Time: O(N^1/2)
Space: O(F), storing number of factors for N.
"""
class Solution(object):
def kthFactor(self, n, k):
factors1 = []
factors2 = []
for i in xrange(1, int(n**0.5)+1):
if n%i==0:
factors1.append(i)
if i!=n/i: factors2.append(n/i)
factors = factors1+factors2[::-1]
return factors[k-1] if k-1<len(factors) else -1 |
py | b40d22e5b08ced3929cb6d3fb96b7e3dbb710bb7 | """Tests for run from CLI, interactive, without an EE.
"""
import pytest
from ..._interactions import Command
from ..._interactions import Step
from ..._interactions import add_indicies
from ..._interactions import step_id
from .base import BaseClass
from .base import base_steps
from .base import inventory_path
from .base import playbook_path
cmdline = f"{playbook_path} -i {inventory_path}"
CLI = Command(subcommand="run", cmdline=cmdline, execution_environment=False).join()
initial_steps = (
Step(
user_input=CLI,
comment="ansible-navigator run playbook",
search_within_response=["COMPLETE", "SUCCESSFUL"],
),
)
steps = add_indicies(initial_steps + base_steps)
@pytest.mark.parametrize("step", steps, ids=step_id)
class Test(BaseClass):
"""Run the tests for run from CLI, interactive, without an EE."""
UPDATE_FIXTURES = False
|
py | b40d22f8edcb0d534b378a7226d123b6201d5df4 | # util/__init__.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from collections import defaultdict as defaultdict
from functools import partial as partial
from functools import update_wrapper as update_wrapper
from ._collections import coerce_generator_arg as coerce_generator_arg
from ._collections import coerce_to_immutabledict as coerce_to_immutabledict
from ._collections import column_dict as column_dict
from ._collections import column_set as column_set
from ._collections import EMPTY_DICT as EMPTY_DICT
from ._collections import EMPTY_SET as EMPTY_SET
from ._collections import FacadeDict as FacadeDict
from ._collections import flatten_iterator as flatten_iterator
from ._collections import has_dupes as has_dupes
from ._collections import has_intersection as has_intersection
from ._collections import IdentitySet as IdentitySet
from ._collections import ImmutableContainer as ImmutableContainer
from ._collections import immutabledict as immutabledict
from ._collections import ImmutableProperties as ImmutableProperties
from ._collections import LRUCache as LRUCache
from ._collections import merge_lists_w_ordering as merge_lists_w_ordering
from ._collections import ordered_column_set as ordered_column_set
from ._collections import OrderedDict as OrderedDict
from ._collections import OrderedIdentitySet as OrderedIdentitySet
from ._collections import OrderedProperties as OrderedProperties
from ._collections import OrderedSet as OrderedSet
from ._collections import PopulateDict as PopulateDict
from ._collections import Properties as Properties
from ._collections import ScopedRegistry as ScopedRegistry
from ._collections import sort_dictionary as sort_dictionary
from ._collections import ThreadLocalRegistry as ThreadLocalRegistry
from ._collections import to_column_set as to_column_set
from ._collections import to_list as to_list
from ._collections import to_set as to_set
from ._collections import unique_list as unique_list
from ._collections import UniqueAppender as UniqueAppender
from ._collections import update_copy as update_copy
from ._collections import WeakPopulateDict as WeakPopulateDict
from ._collections import WeakSequence as WeakSequence
from ._preloaded import preload_module as preload_module
from ._preloaded import preloaded as preloaded
from .compat import arm as arm
from .compat import b as b
from .compat import b64decode as b64decode
from .compat import b64encode as b64encode
from .compat import cmp as cmp
from .compat import cpython as cpython
from .compat import dataclass_fields as dataclass_fields
from .compat import decode_backslashreplace as decode_backslashreplace
from .compat import dottedgetter as dottedgetter
from .compat import has_refcount_gc as has_refcount_gc
from .compat import inspect_getfullargspec as inspect_getfullargspec
from .compat import local_dataclass_fields as local_dataclass_fields
from .compat import osx as osx
from .compat import py38 as py38
from .compat import py39 as py39
from .compat import pypy as pypy
from .compat import win32 as win32
from .concurrency import await_fallback as await_fallback
from .concurrency import await_only as await_only
from .concurrency import greenlet_spawn as greenlet_spawn
from .concurrency import is_exit_exception as is_exit_exception
from .deprecations import became_legacy_20 as became_legacy_20
from .deprecations import deprecated as deprecated
from .deprecations import deprecated_cls as deprecated_cls
from .deprecations import deprecated_params as deprecated_params
from .deprecations import deprecated_property as deprecated_property
from .deprecations import moved_20 as moved_20
from .deprecations import warn_deprecated as warn_deprecated
from .langhelpers import add_parameter_text as add_parameter_text
from .langhelpers import as_interface as as_interface
from .langhelpers import asbool as asbool
from .langhelpers import asint as asint
from .langhelpers import assert_arg_type as assert_arg_type
from .langhelpers import attrsetter as attrsetter
from .langhelpers import bool_or_str as bool_or_str
from .langhelpers import chop_traceback as chop_traceback
from .langhelpers import class_hierarchy as class_hierarchy
from .langhelpers import classproperty as classproperty
from .langhelpers import clsname_as_plain_name as clsname_as_plain_name
from .langhelpers import coerce_kw_type as coerce_kw_type
from .langhelpers import constructor_copy as constructor_copy
from .langhelpers import constructor_key as constructor_key
from .langhelpers import counter as counter
from .langhelpers import create_proxy_methods as create_proxy_methods
from .langhelpers import decode_slice as decode_slice
from .langhelpers import decorator as decorator
from .langhelpers import dictlike_iteritems as dictlike_iteritems
from .langhelpers import duck_type_collection as duck_type_collection
from .langhelpers import ellipses_string as ellipses_string
from .langhelpers import EnsureKWArg as EnsureKWArg
from .langhelpers import format_argspec_init as format_argspec_init
from .langhelpers import format_argspec_plus as format_argspec_plus
from .langhelpers import generic_fn_descriptor as generic_fn_descriptor
from .langhelpers import generic_repr as generic_repr
from .langhelpers import get_annotations as get_annotations
from .langhelpers import get_callable_argspec as get_callable_argspec
from .langhelpers import get_cls_kwargs as get_cls_kwargs
from .langhelpers import get_func_kwargs as get_func_kwargs
from .langhelpers import getargspec_init as getargspec_init
from .langhelpers import has_compiled_ext as has_compiled_ext
from .langhelpers import HasMemoized as HasMemoized
from .langhelpers import hybridmethod as hybridmethod
from .langhelpers import hybridproperty as hybridproperty
from .langhelpers import inject_docstring_text as inject_docstring_text
from .langhelpers import iterate_attributes as iterate_attributes
from .langhelpers import map_bits as map_bits
from .langhelpers import md5_hex as md5_hex
from .langhelpers import memoized_instancemethod as memoized_instancemethod
from .langhelpers import memoized_property as memoized_property
from .langhelpers import MemoizedSlots as MemoizedSlots
from .langhelpers import method_is_overridden as method_is_overridden
from .langhelpers import methods_equivalent as methods_equivalent
from .langhelpers import (
monkeypatch_proxied_specials as monkeypatch_proxied_specials,
)
from .langhelpers import non_memoized_property as non_memoized_property
from .langhelpers import NoneType as NoneType
from .langhelpers import only_once as only_once
from .langhelpers import (
parse_user_argument_for_enum as parse_user_argument_for_enum,
)
from .langhelpers import PluginLoader as PluginLoader
from .langhelpers import portable_instancemethod as portable_instancemethod
from .langhelpers import quoted_token_parser as quoted_token_parser
from .langhelpers import safe_reraise as safe_reraise
from .langhelpers import set_creation_order as set_creation_order
from .langhelpers import string_or_unprintable as string_or_unprintable
from .langhelpers import symbol as symbol
from .langhelpers import TypingOnly as TypingOnly
from .langhelpers import (
unbound_method_to_callable as unbound_method_to_callable,
)
from .langhelpers import walk_subclasses as walk_subclasses
from .langhelpers import warn as warn
from .langhelpers import warn_exception as warn_exception
from .langhelpers import warn_limited as warn_limited
from .langhelpers import wrap_callable as wrap_callable
|
py | b40d2462ec80a51d30eb6150fc3f20c780decb7b | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#
# Copyright 2021 The NiPreps Developers <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# We support and encourage derived works from this project, please read
# about our expectations at
#
# https://www.nipreps.org/community/licensing/
#
import re
from pathlib import Path
import simplejson as json
from mriqc import config
from mriqc.utils.misc import BIDS_COMP
from nipype.interfaces.base import (
BaseInterfaceInputSpec,
DynamicTraitedSpec,
File,
SimpleInterface,
Str,
TraitedSpec,
Undefined,
isdefined,
traits,
)
class IQMFileSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
in_file = Str(mandatory=True, desc="path of input file")
subject_id = Str(mandatory=True, desc="the subject id")
modality = Str(mandatory=True, desc="the qc type")
session_id = traits.Either(None, Str, usedefault=True)
task_id = traits.Either(None, Str, usedefault=True)
acq_id = traits.Either(None, Str, usedefault=True)
rec_id = traits.Either(None, Str, usedefault=True)
run_id = traits.Either(None, traits.Int, usedefault=True)
dataset = Str(desc="dataset identifier")
metadata = traits.Dict()
provenance = traits.Dict()
root = traits.Dict(desc="output root dictionary")
out_dir = File(desc="the output directory")
_outputs = traits.Dict(value={}, usedefault=True)
def __setattr__(self, key, value):
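# Attributes that are not declared traits are captured in self._outputs,
# so dynamically added IQM fields end up in the output JSON.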
if key not in self.copyable_trait_names():
if not isdefined(value):
super(IQMFileSinkInputSpec, self).__setattr__(key, value)
self._outputs[key] = value
else:
if key in self._outputs:
self._outputs[key] = value
super(IQMFileSinkInputSpec, self).__setattr__(key, value)
class IQMFileSinkOutputSpec(TraitedSpec):
out_file = File(desc="the output JSON file containing the IQMs")
class IQMFileSink(SimpleInterface):
input_spec = IQMFileSinkInputSpec
output_spec = IQMFileSinkOutputSpec
expr = re.compile("^root[0-9]+$")
def __init__(self, fields=None, force_run=True, **inputs):
super(IQMFileSink, self).__init__(**inputs)
if fields is None:
fields = []
self._out_dict = {}
# Initialize fields
fields = list(set(fields) - set(self.inputs.copyable_trait_names()))
self._input_names = fields
undefined_traits = {key: self._add_field(key) for key in fields}
self.inputs.trait_set(trait_change_notify=False, **undefined_traits)
if force_run:
self._always_run = True
def _add_field(self, name, value=Undefined):
self.inputs.add_trait(name, traits.Any)
self.inputs._outputs[name] = value
return value
def _gen_outfile(self):
out_dir = Path()
if isdefined(self.inputs.out_dir):
out_dir = Path(self.inputs.out_dir)
# Crawl back to the BIDS root
path = Path(self.inputs.in_file)
for i in range(1, 4):
if str(path.parents[i].name).startswith("sub-"):
bids_root = path.parents[i + 1]
break
in_file = str(path.relative_to(bids_root))
# Build path and ensure directory exists
bids_path = out_dir / in_file.replace("".join(Path(in_file).suffixes), ".json")
bids_path.parent.mkdir(parents=True, exist_ok=True)
self._results["out_file"] = str(bids_path)
return self._results["out_file"]
def _run_interface(self, runtime):
out_file = self._gen_outfile()
if isdefined(self.inputs.root):
self._out_dict = self.inputs.root
root_adds = []
for key, val in list(self.inputs._outputs.items()):
if not isdefined(val) or key == "trait_added":
continue
if self.expr.match(key) is not None:
root_adds.append(key)
continue
key, val = _process_name(key, val)
self._out_dict[key] = val
for root_key in root_adds:
val = self.inputs._outputs.get(root_key, None)
if isinstance(val, dict):
self._out_dict.update(val)
else:
config.loggers.interface.warning(
'Output "%s" is not a dictionary (value="%s"), '
"discarding output.",
root_key,
str(val),
)
# Fill in the "bids_meta" key
id_dict = {}
for comp in list(BIDS_COMP.keys()):
comp_val = getattr(self.inputs, comp, None)
if isdefined(comp_val) and comp_val is not None:
id_dict[comp] = comp_val
id_dict["modality"] = self.inputs.modality
if isdefined(self.inputs.metadata) and self.inputs.metadata:
id_dict.update(self.inputs.metadata)
if self._out_dict.get("bids_meta") is None:
self._out_dict["bids_meta"] = {}
self._out_dict["bids_meta"].update(id_dict)
if isdefined(self.inputs.dataset):
self._out_dict["bids_meta"]["dataset"] = self.inputs.dataset
# Fill in the "provenance" key
# Predict QA from IQMs and add to metadata
prov_dict = {}
if isdefined(self.inputs.provenance) and self.inputs.provenance:
prov_dict.update(self.inputs.provenance)
if self._out_dict.get("provenance") is None:
self._out_dict["provenance"] = {}
self._out_dict["provenance"].update(prov_dict)
with open(out_file, "w") as f:
f.write(
json.dumps(
self._out_dict,
sort_keys=True,
indent=2,
ensure_ascii=False,
)
)
return runtime
def _process_name(name, val):
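# Expand dotted keys into nested dicts, e.g. ("a.b.c", val) -> ("a", {"b": {"c": val}}).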
if "." in name:
newkeys = name.split(".")
name = newkeys.pop(0)
nested_dict = {newkeys.pop(): val}
for nk in reversed(newkeys):
nested_dict = {nk: nested_dict}
val = nested_dict
return name, val
|
py | b40d2557a238d022403c367c8f95d5e14ec950ac | from fixtures import fin, handle_args, try_
def top(arg):
try:
try_(arg)
finally:
fin()
handle_args(top)
|
py | b40d25cee41320617df504b0b400cf8255488de9 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for metadata service."""
import base64
import hashlib
import hmac
import re
try:
import cPickle as pickle
except ImportError:
import pickle
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
import webob
from nova.api.metadata import base
from nova.api.metadata import handler
from nova.api.metadata import password
from nova import block_device
from nova.compute import flavors
from nova.conductor import api as conductor_api
from nova import context
from nova import db
from nova.db.sqlalchemy import api
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova import objects
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_network
from nova.tests.unit.objects import test_security_group
from nova.virt import netutils
CONF = cfg.CONF
USER_DATA_STRING = ("This is an encoded string")
ENCODE_USER_DATA_STRING = base64.b64encode(USER_DATA_STRING)
def fake_inst_obj(context):
inst = objects.Instance(
context=context,
id=1,
uuid='b65cee2f-8c69-4aeb-be2f-f79742548fc2',
project_id='test',
key_name="mykey",
key_data="ssh-rsa AAAAB3Nzai....N3NtHw== someuser@somehost",
host='test',
launch_index=1,
reservation_id='r-xxxxxxxx',
user_data=ENCODE_USER_DATA_STRING,
image_ref=7,
vcpus=1,
fixed_ips=[],
root_device_name='/dev/sda1',
hostname='test.novadomain',
display_name='my_displayname',
metadata={},
default_ephemeral_device=None,
default_swap_device=None,
system_metadata={})
nwinfo = network_model.NetworkInfo([])
inst.info_cache = objects.InstanceInfoCache(context=context,
instance_uuid=inst.uuid,
network_info=nwinfo)
with mock.patch.object(inst, 'save'):
inst.set_flavor(flavors.get_default_flavor())
return inst
def return_non_existing_address(*args, **kwarg):
raise exception.NotFound()
def fake_InstanceMetadata(stubs, inst_data, address=None,
sgroups=None, content=None, extra_md=None,
vd_driver=None, network_info=None):
content = content or []
extra_md = extra_md or {}
if sgroups is None:
sgroups = [dict(test_security_group.fake_secgroup,
name='default')]
def sg_get(*args, **kwargs):
return sgroups
stubs.Set(api, 'security_group_get_by_instance', sg_get)
return base.InstanceMetadata(inst_data, address=address,
content=content, extra_md=extra_md,
vd_driver=vd_driver, network_info=network_info)
def fake_request(stubs, mdinst, relpath, address="127.0.0.1",
fake_get_metadata=None, headers=None,
fake_get_metadata_by_instance_id=None):
def get_metadata_by_remote_address(address):
return mdinst
app = handler.MetadataRequestHandler()
if fake_get_metadata is None:
fake_get_metadata = get_metadata_by_remote_address
if stubs:
stubs.Set(app, 'get_metadata_by_remote_address', fake_get_metadata)
if fake_get_metadata_by_instance_id:
stubs.Set(app, 'get_metadata_by_instance_id',
fake_get_metadata_by_instance_id)
request = webob.Request.blank(relpath)
request.remote_addr = address
if headers is not None:
request.headers.update(headers)
response = request.get_response(app)
return response
class MetadataTestCase(test.TestCase):
def setUp(self):
super(MetadataTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
def test_can_pickle_metadata(self):
# Make sure that InstanceMetadata can be pickled. This is
# required for memcache backend to work correctly.
md = fake_InstanceMetadata(self.stubs, self.instance.obj_clone())
pickle.dumps(md, protocol=0)
def test_user_data(self):
inst = self.instance.obj_clone()
inst['user_data'] = base64.b64encode("happy")
md = fake_InstanceMetadata(self.stubs, inst)
self.assertEqual(
md.get_ec2_metadata(version='2009-04-04')['user-data'], "happy")
def test_no_user_data(self):
inst = self.instance.obj_clone()
inst.user_data = None
md = fake_InstanceMetadata(self.stubs, inst)
obj = object()
self.assertEqual(
md.get_ec2_metadata(version='2009-04-04').get('user-data', obj),
obj)
def test_security_groups(self):
inst = self.instance.obj_clone()
sgroups = [dict(test_security_group.fake_secgroup, name='default'),
dict(test_security_group.fake_secgroup, name='other')]
expected = ['default', 'other']
md = fake_InstanceMetadata(self.stubs, inst, sgroups=sgroups)
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['security-groups'], expected)
def test_local_hostname_fqdn(self):
md = fake_InstanceMetadata(self.stubs, self.instance.obj_clone())
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-hostname'],
"%s.%s" % (self.instance['hostname'], CONF.dhcp_domain))
def test_format_instance_mapping(self):
# Make sure that _format_instance_mappings works.
ctxt = None
instance_ref0 = objects.Instance(**{'id': 0,
'uuid': 'e5fe5518-0288-4fa3-b0c4-c79764101b85',
'root_device_name': None,
'default_ephemeral_device': None,
'default_swap_device': None})
instance_ref1 = objects.Instance(**{'id': 0,
'uuid': 'b65cee2f-8c69-4aeb-be2f-f79742548fc2',
'root_device_name': '/dev/sda1',
'default_ephemeral_device': None,
'default_swap_device': None})
def fake_bdm_get(ctxt, uuid, use_slave=False):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 87654321,
'snapshot_id': None,
'no_device': None,
'source_type': 'volume',
'destination_type': 'volume',
'delete_on_termination': True,
'device_name': '/dev/sdh'}),
fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': None,
'snapshot_id': None,
'no_device': None,
'source_type': 'blank',
'destination_type': 'local',
'guest_format': 'swap',
'delete_on_termination': None,
'device_name': '/dev/sdc'}),
fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': None,
'snapshot_id': None,
'no_device': None,
'source_type': 'blank',
'destination_type': 'local',
'guest_format': None,
'delete_on_termination': None,
'device_name': '/dev/sdb'})]
self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
fake_bdm_get)
expected = {'ami': 'sda1',
'root': '/dev/sda1',
'ephemeral0': '/dev/sdb',
'swap': '/dev/sdc',
'ebs0': '/dev/sdh'}
conductor_api.LocalAPI()
self.assertEqual(base._format_instance_mapping(ctxt,
instance_ref0), block_device._DEFAULT_MAPPINGS)
self.assertEqual(base._format_instance_mapping(ctxt,
instance_ref1), expected)
def test_pubkey(self):
md = fake_InstanceMetadata(self.stubs, self.instance.obj_clone())
pubkey_ent = md.lookup("/2009-04-04/meta-data/public-keys")
self.assertEqual(base.ec2_md_print(pubkey_ent),
"0=%s" % self.instance['key_name'])
self.assertEqual(base.ec2_md_print(pubkey_ent['0']['openssh-key']),
self.instance['key_data'])
def test_image_type_ramdisk(self):
inst = self.instance.obj_clone()
inst['ramdisk_id'] = 'ari-853667c0'
md = fake_InstanceMetadata(self.stubs, inst)
data = md.lookup("/latest/meta-data/ramdisk-id")
self.assertIsNotNone(data)
self.assertTrue(re.match('ari-[0-9a-f]{8}', data))
def test_image_type_kernel(self):
inst = self.instance.obj_clone()
inst['kernel_id'] = 'aki-c2e26ff2'
md = fake_InstanceMetadata(self.stubs, inst)
data = md.lookup("/2009-04-04/meta-data/kernel-id")
self.assertTrue(re.match('aki-[0-9a-f]{8}', data))
self.assertEqual(
md.lookup("/ec2/2009-04-04/meta-data/kernel-id"), data)
inst.kernel_id = None
md = fake_InstanceMetadata(self.stubs, inst)
self.assertRaises(base.InvalidMetadataPath,
md.lookup, "/2009-04-04/meta-data/kernel-id")
def test_check_version(self):
inst = self.instance.obj_clone()
md = fake_InstanceMetadata(self.stubs, inst)
self.assertTrue(md._check_version('1.0', '2009-04-04'))
self.assertFalse(md._check_version('2009-04-04', '1.0'))
self.assertFalse(md._check_version('2009-04-04', '2008-09-01'))
self.assertTrue(md._check_version('2008-09-01', '2009-04-04'))
self.assertTrue(md._check_version('2009-04-04', '2009-04-04'))
def test_InstanceMetadata_uses_passed_network_info(self):
network_info = []
self.mox.StubOutWithMock(netutils, "get_injected_network_template")
netutils.get_injected_network_template(network_info).AndReturn(False)
self.mox.ReplayAll()
base.InstanceMetadata(fake_inst_obj(self.context),
network_info=network_info)
def test_InstanceMetadata_invoke_metadata_for_config_drive(self):
inst = self.instance.obj_clone()
inst_md = base.InstanceMetadata(inst)
for (path, value) in inst_md.metadata_for_config_drive():
self.assertIsNotNone(path)
def test_InstanceMetadata_queries_network_API_when_needed(self):
network_info_from_api = []
self.mox.StubOutWithMock(netutils, "get_injected_network_template")
netutils.get_injected_network_template(
network_info_from_api).AndReturn(False)
self.mox.ReplayAll()
base.InstanceMetadata(fake_inst_obj(self.context))
def test_local_ipv4(self):
nw_info = fake_network.fake_get_instance_nw_info(self.stubs,
num_networks=2)
expected_local = "192.168.1.100"
md = fake_InstanceMetadata(self.stubs, self.instance,
network_info=nw_info, address="fake")
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(expected_local, data['meta-data']['local-ipv4'])
def test_local_ipv4_from_nw_info(self):
nw_info = fake_network.fake_get_instance_nw_info(self.stubs,
num_networks=2)
expected_local = "192.168.1.100"
md = fake_InstanceMetadata(self.stubs, self.instance,
network_info=nw_info)
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-ipv4'], expected_local)
def test_local_ipv4_from_address(self):
expected_local = "fake"
md = fake_InstanceMetadata(self.stubs, self.instance,
network_info=[], address="fake")
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-ipv4'], expected_local)
class OpenStackMetadataTestCase(test.TestCase):
def setUp(self):
super(OpenStackMetadataTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
def test_top_level_listing(self):
# request for /openstack/<version>/ should show metadata.json
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
result = mdinst.lookup("/openstack")
# trailing / should not affect anything
self.assertEqual(result, mdinst.lookup("/openstack/"))
# the 'content' should not show up in directory listing
self.assertNotIn(base.CONTENT_DIR, result)
self.assertIn('2012-08-10', result)
self.assertIn('latest', result)
def test_version_content_listing(self):
# request for /openstack/<version>/ should show metadata.json
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
listing = mdinst.lookup("/openstack/2012-08-10")
self.assertIn("meta_data.json", listing)
def test_returns_apis_supported_in_havana_version(self):
mdinst = fake_InstanceMetadata(self.stubs, self.instance)
havana_supported_apis = mdinst.lookup("/openstack/2013-10-17")
self.assertEqual([base.MD_JSON_NAME, base.UD_NAME, base.PASS_NAME,
base.VD_JSON_NAME], havana_supported_apis)
def test_returns_apis_supported_in_folsom_version(self):
mdinst = fake_InstanceMetadata(self.stubs, self.instance)
folsom_supported_apis = mdinst.lookup("/openstack/2012-08-10")
self.assertEqual([base.MD_JSON_NAME, base.UD_NAME],
folsom_supported_apis)
def test_returns_apis_supported_in_grizzly_version(self):
mdinst = fake_InstanceMetadata(self.stubs, self.instance)
grizzly_supported_apis = mdinst.lookup("/openstack/2013-04-04")
self.assertEqual([base.MD_JSON_NAME, base.UD_NAME, base.PASS_NAME],
grizzly_supported_apis)
def test_metadata_json(self):
inst = self.instance.obj_clone()
content = [
('/etc/my.conf', "content of my.conf"),
('/root/hello', "content of /root/hello"),
]
mdinst = fake_InstanceMetadata(self.stubs, inst,
content=content)
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
mdjson = mdinst.lookup("/openstack/latest/meta_data.json")
mddict = jsonutils.loads(mdjson)
self.assertEqual(mddict['uuid'], self.instance['uuid'])
self.assertIn('files', mddict)
self.assertIn('public_keys', mddict)
self.assertEqual(mddict['public_keys'][self.instance['key_name']],
self.instance['key_data'])
self.assertIn('launch_index', mddict)
self.assertEqual(mddict['launch_index'], self.instance['launch_index'])
# verify that each of the things we put in content
# resulted in an entry in 'files', that their content
# there is as expected, and that /content lists them.
for (path, content) in content:
fent = [f for f in mddict['files'] if f['path'] == path]
self.assertEqual(1, len(fent))
fent = fent[0]
found = mdinst.lookup("/openstack%s" % fent['content_path'])
self.assertEqual(found, content)
def test_extra_md(self):
# make sure extra_md makes it through to metadata
inst = self.instance.obj_clone()
extra = {'foo': 'bar', 'mylist': [1, 2, 3],
'mydict': {"one": 1, "two": 2}}
mdinst = fake_InstanceMetadata(self.stubs, inst, extra_md=extra)
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
mddict = jsonutils.loads(mdjson)
for key, val in extra.iteritems():
self.assertEqual(mddict[key], val)
def test_password(self):
# make sure extra_md makes it through to metadata
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
result = mdinst.lookup("/openstack/latest/password")
self.assertEqual(result, password.handle_password)
def test_userdata(self):
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
userdata_found = mdinst.lookup("/openstack/2012-08-10/user_data")
self.assertEqual(USER_DATA_STRING, userdata_found)
# since we had user-data in this instance, it should be in listing
self.assertIn('user_data', mdinst.lookup("/openstack/2012-08-10"))
inst.user_data = None
mdinst = fake_InstanceMetadata(self.stubs, inst)
# since this instance had no user-data it should not be there.
self.assertNotIn('user_data', mdinst.lookup("/openstack/2012-08-10"))
self.assertRaises(base.InvalidMetadataPath,
mdinst.lookup, "/openstack/2012-08-10/user_data")
def test_random_seed(self):
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
# verify that 2013-04-04 has the 'random' field
mdjson = mdinst.lookup("/openstack/2013-04-04/meta_data.json")
mddict = jsonutils.loads(mdjson)
self.assertIn("random_seed", mddict)
self.assertEqual(len(base64.b64decode(mddict["random_seed"])), 512)
# verify that older versions do not have it
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
self.assertNotIn("random_seed", jsonutils.loads(mdjson))
def test_no_dashes_in_metadata(self):
# top level entries in meta_data should not contain '-' in their name
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
mdjson = jsonutils.loads(
mdinst.lookup("/openstack/latest/meta_data.json"))
self.assertEqual([], [k for k in mdjson.keys() if k.find("-") != -1])
def test_vendor_data_presence(self):
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
# verify that 2013-10-17 has the vendor_data.json file
result = mdinst.lookup("/openstack/2013-10-17")
self.assertIn('vendor_data.json', result)
# verify that older versions do not have it
result = mdinst.lookup("/openstack/2013-04-04")
self.assertNotIn('vendor_data.json', result)
def test_vendor_data_response(self):
inst = self.instance.obj_clone()
mydata = {'mykey1': 'value1', 'mykey2': 'value2'}
class myVdriver(base.VendorDataDriver):
def __init__(self, *args, **kwargs):
super(myVdriver, self).__init__(*args, **kwargs)
data = mydata.copy()
uuid = kwargs['instance']['uuid']
data.update({'inst_uuid': uuid})
self.data = data
def get(self):
return self.data
mdinst = fake_InstanceMetadata(self.stubs, inst, vd_driver=myVdriver)
# verify that 2013-10-17 has the vendor_data.json file
vdpath = "/openstack/2013-10-17/vendor_data.json"
vd = jsonutils.loads(mdinst.lookup(vdpath))
# the instance should be passed through, and our class copies the
# uuid through to 'inst_uuid'.
self.assertEqual(vd['inst_uuid'], inst['uuid'])
# check the other expected values
for k, v in mydata.items():
self.assertEqual(vd[k], v)
class MetadataHandlerTestCase(test.TestCase):
"""Test that metadata is returning proper values."""
def setUp(self):
super(MetadataHandlerTestCase, self).setUp()
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
self.mdinst = fake_InstanceMetadata(self.stubs, self.instance,
address=None, sgroups=None)
def test_callable(self):
def verify(req, meta_data):
self.assertIsInstance(meta_data, CallableMD)
return "foo"
class CallableMD(object):
def lookup(self, path_info):
return verify
response = fake_request(self.stubs, CallableMD(), "/bar")
self.assertEqual(response.status_int, 200)
self.assertEqual(response.body, "foo")
def test_root(self):
expected = "\n".join(base.VERSIONS) + "\nlatest"
response = fake_request(self.stubs, self.mdinst, "/")
self.assertEqual(response.body, expected)
response = fake_request(self.stubs, self.mdinst, "/foo/../")
self.assertEqual(response.body, expected)
def test_root_metadata_proxy_enabled(self):
self.flags(service_metadata_proxy=True,
group='neutron')
expected = "\n".join(base.VERSIONS) + "\nlatest"
response = fake_request(self.stubs, self.mdinst, "/")
self.assertEqual(response.body, expected)
response = fake_request(self.stubs, self.mdinst, "/foo/../")
self.assertEqual(response.body, expected)
def test_version_root(self):
response = fake_request(self.stubs, self.mdinst, "/2009-04-04")
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("text/plain"))
self.assertEqual(response.body, 'meta-data/\nuser-data')
response = fake_request(self.stubs, self.mdinst, "/9999-99-99")
self.assertEqual(response.status_int, 404)
def test_json_data(self):
response = fake_request(self.stubs, self.mdinst,
"/openstack/latest/meta_data.json")
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("application/json"))
response = fake_request(self.stubs, self.mdinst,
"/openstack/latest/vendor_data.json")
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("application/json"))
def test_user_data_non_existing_fixed_address(self):
self.stubs.Set(network_api.API, 'get_fixed_ip_by_address',
return_non_existing_address)
response = fake_request(None, self.mdinst, "/2009-04-04/user-data",
"127.1.1.1")
self.assertEqual(response.status_int, 404)
def test_fixed_address_none(self):
response = fake_request(None, self.mdinst,
relpath="/2009-04-04/user-data", address=None)
self.assertEqual(response.status_int, 500)
def test_invalid_path_is_404(self):
response = fake_request(self.stubs, self.mdinst,
relpath="/2009-04-04/user-data-invalid")
self.assertEqual(response.status_int, 404)
def test_user_data_with_use_forwarded_header(self):
expected_addr = "192.192.192.2"
def fake_get_metadata(address):
if address == expected_addr:
return self.mdinst
else:
raise Exception("Expected addr of %s, got %s" %
(expected_addr, address))
self.flags(use_forwarded_for=True)
response = fake_request(self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="168.168.168.1",
fake_get_metadata=fake_get_metadata,
headers={'X-Forwarded-For': expected_addr})
self.assertEqual(response.status_int, 200)
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("text/plain"))
self.assertEqual(response.body,
base64.b64decode(self.instance['user_data']))
response = fake_request(self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="168.168.168.1",
fake_get_metadata=fake_get_metadata,
headers=None)
self.assertEqual(response.status_int, 500)
@mock.patch('nova.utils.constant_time_compare')
def test_by_instance_id_uses_constant_time_compare(self, mock_compare):
mock_compare.side_effect = test.TestingException
req = webob.Request.blank('/')
hnd = handler.MetadataRequestHandler()
req.headers['X-Instance-ID'] = 'fake-inst'
req.headers['X-Instance-ID-Signature'] = 'fake-sig'
req.headers['X-Tenant-ID'] = 'fake-proj'
self.assertRaises(test.TestingException,
hnd._handle_instance_id_request, req)
self.assertEqual(1, mock_compare.call_count)
def test_user_data_with_neutron_instance_id(self):
expected_instance_id = 'a-b-c-d'
def fake_get_metadata(instance_id, remote_address):
if remote_address is None:
raise Exception('Expected X-Forwarded-For header')
elif instance_id == expected_instance_id:
return self.mdinst
else:
# raise the exception to aid with 500 response code test
raise Exception("Expected instance_id of %s, got %s" %
(expected_instance_id, instance_id))
signed = hmac.new(
CONF.neutron.metadata_proxy_shared_secret,
expected_instance_id,
hashlib.sha256).hexdigest()
# try a request with service disabled
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
headers={'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 200)
# now enable the service
self.flags(service_metadata_proxy=True,
group='neutron')
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 200)
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("text/plain"))
self.assertEqual(response.body,
base64.b64decode(self.instance['user_data']))
# mismatched signature
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': ''})
self.assertEqual(response.status_int, 403)
# missing X-Tenant-ID from request
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 400)
# mismatched X-Tenant-ID
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'FAKE',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 404)
# without X-Forwarded-For
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 500)
# unexpected Instance-ID
signed = hmac.new(
CONF.neutron.metadata_proxy_shared_secret,
'z-z-z-z',
hashlib.sha256).hexdigest()
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'z-z-z-z',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 500)
def test_get_metadata(self):
def _test_metadata_path(relpath):
# recursively confirm a http 200 from all meta-data elements
# available at relpath.
response = fake_request(self.stubs, self.mdinst,
relpath=relpath)
for item in response.body.split('\n'):
if 'public-keys' in relpath:
# meta-data/public-keys/0=keyname refers to
# meta-data/public-keys/0
item = item.split('=')[0]
if item.endswith('/'):
path = relpath + '/' + item
_test_metadata_path(path)
continue
path = relpath + '/' + item
response = fake_request(self.stubs, self.mdinst, relpath=path)
self.assertEqual(response.status_int, 200, message=path)
_test_metadata_path('/2009-04-04/meta-data')
class MetadataPasswordTestCase(test.TestCase):
def setUp(self):
super(MetadataPasswordTestCase, self).setUp()
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
self.mdinst = fake_InstanceMetadata(self.stubs, self.instance,
address=None, sgroups=None)
self.flags(use_local=True, group='conductor')
def test_get_password(self):
request = webob.Request.blank('')
self.mdinst.password = 'foo'
result = password.handle_password(request, self.mdinst)
self.assertEqual(result, 'foo')
def test_bad_method(self):
request = webob.Request.blank('')
request.method = 'PUT'
self.assertRaises(webob.exc.HTTPBadRequest,
password.handle_password, request, self.mdinst)
@mock.patch('nova.objects.Instance.get_by_uuid')
def _try_set_password(self, get_by_uuid, val='bar'):
request = webob.Request.blank('')
request.method = 'POST'
request.body = val
get_by_uuid.return_value = self.instance
with mock.patch.object(self.instance, 'save') as save:
password.handle_password(request, self.mdinst)
save.assert_called_once_with()
self.assertIn('password_0', self.instance.system_metadata)
def test_set_password(self):
self.mdinst.password = ''
self._try_set_password()
def test_conflict(self):
self.mdinst.password = 'foo'
self.assertRaises(webob.exc.HTTPConflict,
self._try_set_password)
def test_too_large(self):
self.mdinst.password = ''
self.assertRaises(webob.exc.HTTPBadRequest,
self._try_set_password,
val=('a' * (password.MAX_SIZE + 1)))
|
py | b40d25f1f534eef486376f21eef367d12e168ece | # coding=utf-8
# Copyright 2019 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementations of reversible layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import jax
from trax import backend
from trax.layers import base
from trax.layers import combinators as cb
class ReversibleLayer(base.Layer):
"""Reversible Layer."""
def reverse(self, output, weights=(), state=(), new_state=(), **kwargs):
"""Reverse this layer: compute input given output."""
raise NotImplementedError
def reverse_and_grad(self, output, grad, weights=(), state=(), new_state=(),
**kwargs):
"""Backward pass: computes the inverse of a layer and propagates gradients.
While you may choose to only implement reverse, some layers implement this
function directly as computation may be shared between reversing and
computing gradients.
Args:
output: Output activations; can be a (possibly nested) tuple.
grad: gradient signal (cotangent) computed based on subsequent layers.
The structure and shape must match the output.
weights: layer weights
state: start state
new_state: updated state computed by the forward pass
**kwargs: kwargs for the layer
Returns:
A tuple (x, (x_grad, weights_grad)), where x is the reconstructed input,
x_grad is the gradient signal for the input, and weights_grad is the
gradient signal for the weights.
"""
# Note: jax.vjp does not allow us to use **kwargs in the signature here.
def _do_forward(x, weights):
return super(ReversibleLayer, self).forward_with_state(
x, weights=weights, state=state, **kwargs)[0]
reconstructed_x = self.reverse(output, weights, state, new_state, **kwargs)
_, vjpfun = jax.vjp(_do_forward, reconstructed_x, weights)
x_weights_grad = vjpfun(grad)
return reconstructed_x, x_weights_grad
@property
def has_backward(self):
return True
def backward(self, inputs, output, ct, weights, state, new_state, **kwargs):
del inputs
_, inputs_weights_ct = self.reverse_and_grad(output, ct, weights,
state, new_state, **kwargs)
return inputs_weights_ct
class ReversibleSwap(ReversibleLayer, cb.Swap):
"""Swap the first two element on the stack."""
def reverse(self, output, weights=(), state=(), new_state=(), **kwargs):
# Swap is its own inverse, except that reverse doesn't return the state.
return self.forward_with_state(output, weights=weights, state=state,
**kwargs)[0]
class ReversibleSerial(ReversibleLayer, cb.Serial):
"""A reversible version of tl.Serial (requires reversible sub-layers)."""
def __init__(self, *layers):
super(ReversibleSerial, self).__init__(*layers)
# Note that sublayers has already been flattened to remove nested lists.
for i, layer in enumerate(self.sublayers):
if not isinstance(layer, ReversibleLayer):
raise ValueError(
'Sub-layer {} of ReversibleSerial is not reversible: {}'.format(
i, layer))
def reverse(self, output, weights=(), state=(), new_state=(), **kwargs):
rng = kwargs.pop('rng', None)
rngs = (None,) * self._n_layers
if rng is not None:
rngs = backend.random.split(rng, self._n_layers)
layer_val = output
for layer, p, s, ns, rng in reversed(list(zip(
self.sublayers, weights, state, new_state, rngs))):
layer_val = layer.reverse(layer_val, p, s, ns, rng=rng, **kwargs)
return layer_val
def reverse_and_grad(self, output, ct, weights=(), state=(), new_state=(),
**kwargs):
rng = kwargs.pop('rng', None)
rngs = (None,) * self._n_layers
if rng is not None:
rngs = backend.random.split(rng, self._n_layers)
layer_val = output
layer_ct = ct
weights_ct = []
for layer, p, s, ns, rng in reversed(list(zip(
self.sublayers, weights, state, new_state, rngs))):
layer_val, layer_ct = layer.reverse_and_grad(
layer_val, layer_ct, p, s, ns, rng=rng, **kwargs)
layer_ct, p_ct = layer_ct
weights_ct.insert(0, p_ct)
return layer_val, (layer_ct, weights_ct)
|
py | b40d260893b4a4fbf0f0e555d970a2092d5ef59b | # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
from itertools import chain
from typing import cast
from pants.backend.docker.goals.package_image import BuiltDockerImage
from pants.backend.docker.subsystems.docker_options import DockerOptions
from pants.backend.docker.target_types import DockerRegistriesField, DockerSkipPushField
from pants.backend.docker.util_rules.docker_binary import DockerBinary
from pants.core.goals.publish import (
PublishFieldSet,
PublishPackages,
PublishProcesses,
PublishRequest,
)
from pants.engine.process import InteractiveProcess
from pants.engine.rules import collect_rules, rule
logger = logging.getLogger(__name__)
class PublishDockerImageRequest(PublishRequest):
pass
@dataclass(frozen=True)
class PublishDockerImageFieldSet(PublishFieldSet):
publish_request_type = PublishDockerImageRequest
required_fields = (DockerRegistriesField,)
registries: DockerRegistriesField
skip_push: DockerSkipPushField
@rule
async def push_docker_images(
request: PublishDockerImageRequest, docker: DockerBinary, options: DockerOptions
) -> PublishProcesses:
tags = tuple(
chain.from_iterable(
cast(BuiltDockerImage, image).tags
for pkg in request.packages
for image in pkg.artifacts
)
)
if request.field_set.skip_push.value:
return PublishProcesses(
[
PublishPackages(
names=tags,
description=f"(by `{request.field_set.skip_push.alias}` on {request.field_set.address})",
),
]
)
process = docker.push_image(tags)
return PublishProcesses(
[
PublishPackages(
names=tags,
process=InteractiveProcess.from_process(process) if process else None,
),
]
)
def rules():
return (
*collect_rules(),
*PublishDockerImageFieldSet.rules(),
)
|
py | b40d266725fc5bc89314703b97c9f7a082facee6 |
# coding: utf-8
# In[1]:
# -*- coding: utf-8 -*-
#### -*- coding: future_fstrings -*- enables f-string support (as in Python 3.6) under Python 2.7 (not tested here)
# In[2]:
from PNlib.PolyNum import PolyNum
# place folder `PNlib` near to *.ipynb
# Both PN operations and PN functions have a straightforward computer implementation and are analogous to floating-point arithmetic.
# In[3]:
a = PolyNum('(~-3.2~,-5.0~2.1~)') #live demo
b = PolyNum('(~-0.1~,1.0~)')
# In[5]:
# print() ___ R ___ PN ___ # Codefolding extension
print()
print('________ R _________',' ______________ PN ___________________')
print()
print(' a = 5.3 2 | ', 'a =', f'{a:>28}')
print(' b = 2.1 | ', 'b =', f'{b:>28}')
print(' * ------------- | ','* ------------------------------')
print(' 5 3 2 | ', f'{a*b[1]:>32}')
print(' 1 0 6 4 | ', f'{a*b[0]:>27}' )
print(' a * b ========= | ','a * b ==========================')
print(' 1 1.1 7 2 | ', f'{a * b:>32}')
# - - - -
# [<<<... 00. index ...](../index.ipynb) _ | _ [... 02. PN Laplace and Z transforms ...>>>](02.PN Laplace and Z transforms.ipynb)
|
py | b40d277942b9465046bc8ce5766ac4f8365f499f | import os
import re
import sys
import shlex
import warnings
import importlib
from datetime import timedelta
from functools import partial
from types import ModuleType
from typing import Any, Set, Dict, TypeVar, Union, Optional, Iterable, Callable, Type, overload
from .log import logger
from nonebot import permission as perm
from .command import Command, CommandManager, CommandSession
from .notice_request import _bus, EventHandler
from .natural_language import NLProcessor, NLPManager
from .typing import CommandName_T, CommandHandler_T, NLPHandler_T, NoticeHandler_T, Patterns_T, PermChecker_T, RequestHandler_T
class Plugin:
__slots__ = ('module', 'name', 'usage', 'commands', 'nl_processors',
'event_handlers')
def __init__(self,
module: ModuleType,
name: Optional[str] = None,
usage: Optional[Any] = None,
commands: Set[Command] = ...,
nl_processors: Set[NLProcessor] = ...,
event_handlers: Set[EventHandler] = ...):
"""Creates a plugin with no name, no usage, and no handlers."""
self.module = module
self.name = name
self.usage = usage
self.commands: Set[Command] = \
commands if commands is not ... else set()
self.nl_processors: Set[NLProcessor] = \
nl_processors if nl_processors is not ... else set()
self.event_handlers: Set[EventHandler] = \
event_handlers if event_handlers is not ... else set()
class GlobalTemp:
"""INTERNAL API"""
commands: Set[Command] = set()
nl_processors: Set[NLProcessor] = set()
event_handlers: Set[EventHandler] = set()
@classmethod
def clear(cls):
cls.commands.clear()
cls.nl_processors.clear()
cls.event_handlers.clear()
@classmethod
def make_plugin(cls, module: ModuleType):
return Plugin(module=module,
name=getattr(module, '__plugin_name__', None),
usage=getattr(module, '__plugin_usage__', None),
commands={*cls.commands},
nl_processors={*cls.nl_processors},
event_handlers={*cls.event_handlers})
class PluginManager:
_plugins: Dict[str, Plugin] = {}
def __init__(self):
self.cmd_manager = CommandManager()
self.nlp_manager = NLPManager()
@classmethod
def add_plugin(cls, module_path: str, plugin: Plugin) -> None:
"""Register a plugin
Args:
module_path (str): module path
plugin (Plugin): Plugin object
"""
if module_path in cls._plugins:
warnings.warn(f"Plugin {module_path} already exists")
return
cls._plugins[module_path] = plugin
@classmethod
def get_plugin(cls, module_path: str) -> Optional[Plugin]:
"""Get plugin object by plugin module path
Args:
module_path (str): Plugin module path
Returns:
Optional[Plugin]: Plugin object
"""
return cls._plugins.get(module_path, None)
@classmethod
def remove_plugin(cls, module_path: str) -> bool:
"""Remove a plugin by plugin module path
** Warning: this function does not fully unload the plugin! **
** It only unregisters its commands, NL processors and event handlers **
Args:
module_path (str): Plugin module path
Returns:
bool: Success or not
"""
plugin = cls.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not exists")
return False
for command in plugin.commands:
CommandManager.remove_command(command.name)
for nl_processor in plugin.nl_processors:
NLPManager.remove_nl_processor(nl_processor)
for event_handler in plugin.event_handlers:
for event in event_handler.events:
_bus.unsubscribe(event, event_handler.func)
del cls._plugins[module_path]
return True
@classmethod
def switch_plugin_global(cls,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin state globally or simply switch it if `state` is None
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = cls.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for command in plugin.commands:
CommandManager.switch_command_global(command.name, state)
for nl_processor in plugin.nl_processors:
NLPManager.switch_nlprocessor_global(nl_processor, state)
for event_handler in plugin.event_handlers:
for event in event_handler.events:
if event_handler.func in _bus._subscribers[event] and not state:
_bus.unsubscribe(event, event_handler.func)
elif event_handler.func not in _bus._subscribers[
event] and state is not False:
_bus.subscribe(event, event_handler.func)
@classmethod
def switch_command_global(cls,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin command state globally or simply switch it if `state` is None
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = cls.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for command in plugin.commands:
CommandManager.switch_command_global(command.name, state)
@classmethod
def switch_nlprocessor_global(cls,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin nlprocessor state globally or simply switch it if `state` is None
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = cls.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for processor in plugin.nl_processors:
NLPManager.switch_nlprocessor_global(processor, state)
@classmethod
def switch_eventhandler_global(cls,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin event handler state globally or simply switch it if `state` is None
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = cls.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for event_handler in plugin.event_handlers:
for event in event_handler.events:
if event_handler.func in _bus._subscribers[event] and not state:
_bus.unsubscribe(event, event_handler.func)
elif event_handler.func not in _bus._subscribers[
event] and state is not False:
_bus.subscribe(event, event_handler.func)
def switch_plugin(self,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin state or simply switch it if `state` is None
Tips:
This method will only change the state of the plugin's
commands and natural language processors since change
state of the event handler for message is meaningless.
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = self.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for command in plugin.commands:
self.cmd_manager.switch_command(command.name, state)
for nl_processor in plugin.nl_processors:
self.nlp_manager.switch_nlprocessor(nl_processor, state)
def switch_command(self,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin command state or simply switch it if `state` is None
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = self.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for command in plugin.commands:
self.cmd_manager.switch_command(command.name, state)
def switch_nlprocessor(self,
module_path: str,
state: Optional[bool] = None) -> None:
"""Change plugin nlprocessor state or simply switch it if `state` is None
Args:
module_path (str): Plugin module path
state (Optional[bool]): State to change to. Defaults to None.
"""
plugin = self.get_plugin(module_path)
if not plugin:
warnings.warn(f"Plugin {module_path} not found")
return
for processor in plugin.nl_processors:
self.nlp_manager.switch_nlprocessor(processor, state)
def load_plugin(module_path: str) -> Optional[Plugin]:
"""Load a module as a plugin
Args:
module_path (str): path of module to import
Returns:
Optional[Plugin]: Plugin object loaded
"""
Plugin.GlobalTemp.clear()
try:
module = importlib.import_module(module_path)
plugin = Plugin.GlobalTemp.make_plugin(module)
PluginManager.add_plugin(module_path, plugin)
logger.info(f'Succeeded to import "{module_path}"')
return plugin
except Exception as e:
logger.error(f'Failed to import "{module_path}", error: {e}')
logger.exception(e)
return None
def reload_plugin(module_path: str) -> Optional[Plugin]:
result = PluginManager.remove_plugin(module_path)
if not result:
return None
for module in list(
filter(lambda x: x.startswith(module_path), sys.modules.keys())):
del sys.modules[module]
Plugin.GlobalTemp.clear()
try:
module = importlib.import_module(module_path)
plugin = Plugin.GlobalTemp.make_plugin(module)
PluginManager.add_plugin(module_path, plugin)
logger.info(f'Succeeded to reload "{module_path}"')
return plugin
except Exception as e:
logger.error(f'Failed to reload "{module_path}", error: {e}')
logger.exception(e)
return None
def load_plugins(plugin_dir: str, module_prefix: str) -> Set[Plugin]:
"""Find all non-hidden modules or packages in a given directory,
and import them with the given module prefix.
Args:
plugin_dir (str): Plugin directory to search
module_prefix (str): Module prefix used while importing
Returns:
Set[Plugin]: Set of plugin objects successfully loaded
"""
count = set()
for name in os.listdir(plugin_dir):
path = os.path.join(plugin_dir, name)
if os.path.isfile(path) and \
(name.startswith('_') or not name.endswith('.py')):
continue
if os.path.isdir(path) and \
(name.startswith('_') or not os.path.exists(
os.path.join(path, '__init__.py'))):
continue
m = re.match(r'([_A-Z0-9a-z]+)(.py)?', name)
if not m:
continue
result = load_plugin(f'{module_prefix}.{m.group(1)}')
if result:
count.add(result)
return count
def load_builtin_plugins() -> Set[Plugin]:
"""
Load built-in plugins distributed along with "nonebot" package.
"""
plugin_dir = os.path.join(os.path.dirname(__file__), 'plugins')
return load_plugins(plugin_dir, 'nonebot.plugins')
def get_loaded_plugins() -> Set[Plugin]:
"""
Get all plugins loaded.
:return: a set of Plugin objects
"""
return set(PluginManager._plugins.values())
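# Illustrative sketch, not part of the original module: a bot start-up script
# could load a plugin directory with the helpers defined above. The directory
# path and the "awesome_bot" package name are assumptions made for the example.
#
#     from nonebot.plugin import load_plugins, get_loaded_plugins
#
#     load_plugins('/path/to/awesome_bot/plugins', 'awesome_bot.plugins')
#     print(f"{len(get_loaded_plugins())} plugins loaded")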
def on_command_custom(
name: Union[str, CommandName_T],
*,
aliases: Union[Iterable[str], str],
patterns: Patterns_T,
only_to_me: bool,
privileged: bool,
shell_like: bool,
perm_checker: PermChecker_T,
expire_timeout: Optional[timedelta],
run_timeout: Optional[timedelta],
session_class: Optional[Type[CommandSession]]
) -> Callable[[CommandHandler_T], CommandHandler_T]:
"""
INTERNAL API
    The implementation of on_command with a custom permission checker function.
dev: This function may not last long. Kill it when this function is referenced
only once
"""
def deco(func: CommandHandler_T) -> CommandHandler_T:
if not isinstance(name, (str, tuple)):
raise TypeError('the name of a command must be a str or tuple')
if not name:
raise ValueError('the name of a command must not be empty')
if session_class is not None and not issubclass(session_class,
CommandSession):
raise TypeError(
'session_class must be a subclass of CommandSession')
cmd_name = (name,) if isinstance(name, str) else name
cmd = Command(name=cmd_name,
func=func,
only_to_me=only_to_me,
privileged=privileged,
perm_checker_func=perm_checker,
expire_timeout=expire_timeout,
run_timeout=run_timeout,
session_class=session_class)
if shell_like:
async def shell_like_args_parser(session):
session.args['argv'] = shlex.split(session.current_arg)
cmd.args_parser_func = shell_like_args_parser
CommandManager.add_command(cmd_name, cmd)
CommandManager.add_aliases(aliases, cmd)
CommandManager.add_patterns(patterns, cmd)
Plugin.GlobalTemp.commands.add(cmd)
func.args_parser = cmd.args_parser
return func
return deco
def on_command(
name: Union[str, CommandName_T],
*,
aliases: Union[Iterable[str], str] = (),
patterns: Patterns_T = (),
permission: int = perm.EVERYBODY,
only_to_me: bool = True,
privileged: bool = False,
shell_like: bool = False,
expire_timeout: Optional[timedelta] = ...,
run_timeout: Optional[timedelta] = ...,
session_class: Optional[Type[CommandSession]] = None
) -> Callable[[CommandHandler_T], CommandHandler_T]:
"""
Decorator to register a function as a command.
:param name: command name (e.g. 'echo' or ('random', 'number'))
:param aliases: aliases of command name, for convenient access
:param patterns: custom regex pattern for the command.
Please use this carefully. Abuse may cause performance problem.
Also, Please notice that if a message is matched by this method,
it will use the full command as session current_arg.
:param permission: permission required by the command
:param only_to_me: only handle messages to me
:param privileged: can be run even when there is already a session
:param shell_like: use shell-like syntax to split arguments
:param expire_timeout: will override SESSION_EXPIRE_TIMEOUT if provided
:param run_timeout: will override SESSION_RUN_TIMEOUT if provided
:param session_class: session class
"""
perm_checker = partial(perm.check_permission, permission_required=permission)
return on_command_custom(name, aliases=aliases, patterns=patterns,
only_to_me=only_to_me, privileged=privileged,
shell_like=shell_like, perm_checker=perm_checker,
expire_timeout=expire_timeout, run_timeout=run_timeout,
session_class=session_class)
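# Illustrative sketch, not part of the original module: registering a simple
# echo command with the decorator above. The command name, alias and the import
# of CommandSession from the package root are assumptions (they follow upstream
# NoneBot v1 usage).
#
#     from nonebot import on_command, CommandSession
#
#     @on_command('echo', aliases=('say',), only_to_me=False)
#     async def echo(session: CommandSession):
#         await session.send(session.current_arg)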
def on_natural_language_custom(
keywords: Union[Optional[Iterable[str]], str, NLPHandler_T],
*,
only_to_me: bool,
only_short_message: bool,
allow_empty_message: bool,
perm_checker: PermChecker_T
) -> Union[Callable[[NLPHandler_T], NLPHandler_T], NLPHandler_T]:
"""
INTERNAL API
    The implementation of on_natural_language with a custom permission checker function.
dev: This function may not last long. Kill it when this function is referenced
only once
"""
def deco(func: NLPHandler_T) -> NLPHandler_T:
nl_processor = NLProcessor(
func=func,
keywords=keywords, # type: ignore
only_to_me=only_to_me,
only_short_message=only_short_message,
allow_empty_message=allow_empty_message,
perm_checker_func=perm_checker)
NLPManager.add_nl_processor(nl_processor)
Plugin.GlobalTemp.nl_processors.add(nl_processor)
return func
if callable(keywords):
# here "keywords" is the function to be decorated
# applies default args provided by this function
return on_natural_language()(keywords)
else:
if isinstance(keywords, str):
keywords = (keywords,)
return deco
@overload
def on_natural_language(func: NLPHandler_T) -> NLPHandler_T:
"""
Decorator to register a function as a natural language processor with
default kwargs.
"""
@overload
def on_natural_language(
keywords: Optional[Union[Iterable[str], str]] = ...,
*,
permission: int = ...,
only_to_me: bool = ...,
only_short_message: bool = ...,
allow_empty_message: bool = ...
) -> Callable[[NLPHandler_T], NLPHandler_T]:
"""
Decorator to register a function as a natural language processor.
:param keywords: keywords to respond to, if None, respond to all messages
:param permission: permission required by the processor
:param only_to_me: only handle messages to me
:param only_short_message: only handle short messages
:param allow_empty_message: handle empty messages
"""
def on_natural_language(
keywords: Union[Optional[Iterable[str]], str, NLPHandler_T] = None,
*,
permission: int = perm.EVERYBODY,
only_to_me: bool = True,
only_short_message: bool = True,
allow_empty_message: bool = False
):
"""
Implementation of on_natural_language overloads.
"""
perm_checker = partial(perm.check_permission, permission_required=permission)
return on_natural_language_custom(keywords, only_to_me=only_to_me,
only_short_message=only_short_message,
allow_empty_message=allow_empty_message,
perm_checker=perm_checker)
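# Illustrative sketch, not part of the original module: a natural language
# processor that maps weather-related messages to a hypothetical 'weather'
# command. NLPSession and IntentCommand are assumed to be importable from the
# package root, as in upstream NoneBot v1 documentation.
#
#     from nonebot import on_natural_language, NLPSession, IntentCommand
#
#     @on_natural_language(keywords={'weather'}, only_to_me=False)
#     async def _(session: NLPSession):
#         return IntentCommand(90.0, 'weather')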
_Teh = TypeVar('_Teh', NoticeHandler_T, RequestHandler_T)
def _make_event_deco(post_type: str):
def deco_deco(arg: Optional[Union[str, _Teh]] = None,
*events: str) -> Union[Callable[[_Teh], _Teh], _Teh]:
def deco(func: _Teh) -> _Teh:
if isinstance(arg, str):
events_tmp = list(
map(lambda x: f"{post_type}.{x}", [arg, *events])) # if arg is part of events str
for e in events_tmp:
_bus.subscribe(e, func)
handler = EventHandler(events_tmp, func)
else:
_bus.subscribe(post_type, func)
handler = EventHandler([post_type], func)
Plugin.GlobalTemp.event_handlers.add(handler)
return func
if callable(arg):
return deco(arg)
return deco
return deco_deco
@overload
def on_notice(func: NoticeHandler_T) -> NoticeHandler_T: ...
@overload
def on_notice(*events: str) -> Callable[[NoticeHandler_T], NoticeHandler_T]: ...
on_notice = _make_event_deco('notice') # type: ignore[override]
@overload
def on_request(func: RequestHandler_T) -> RequestHandler_T: ...
@overload
def on_request(*events: str) -> Callable[[RequestHandler_T], RequestHandler_T]: ...
on_request = _make_event_deco('request') # type: ignore[override]
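# Illustrative sketch, not part of the original module: subscribing to a
# specific notice event with the decorator above. The 'group_increase' event
# name and the NoticeSession import follow upstream NoneBot v1 conventions and
# are assumptions here.
#
#     from nonebot import on_notice, NoticeSession
#
#     @on_notice('group_increase')
#     async def _(session: NoticeSession):
#         await session.send('Welcome!')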
__all__ = [
'Plugin',
'PluginManager',
'load_plugin',
'reload_plugin',
'load_plugins',
'load_builtin_plugins',
'get_loaded_plugins',
'on_command',
'on_natural_language',
'on_notice',
'on_request',
]
|
py | b40d27df3c591458db3090a06687bee38723041e | # -*- coding: utf-8 -*-
from pprint import pprint
import traceback
from flask import current_app as app
from sandboxes.create_sandbox import create_sandbox
@app.manager.option('-n',
'--name',
help='Sandbox name',
default='ci')
@app.manager.option('-d',
'--delete',
help='Delete database first',
default='true')
def sandbox(name, delete):
try:
with_delete = delete == 'true'
create_sandbox(name, with_delete)
except Exception as e:
print('ERROR: ' + str(e))
traceback.print_tb(e.__traceback__)
pprint(vars(e))
|
py | b40d2822abc84e36a557242a3e5063b41ade015f | """
Script file: sensor.py
Created on: Oct 19, 2021
Last modified on: Nov 8, 2021
Comments:
Support for KCS TraceME N1Cx sensor
"""
import logging
from datetime import datetime, timedelta
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.const import(
ATTR_ATTRIBUTION,
CONF_NAME
)
from .const import (
CONF_DEV_EUI,
CONF_GAS,
CONF_TEMPERATURE,
CONF_HUMIDITY,
CONF_PRESSURE,
CONF_AIR_QUALITY,
CONF_BATTERY,
CONF_ALL,
PLATFORM,
ATTRIBUTION,
SENSOR_TYPE,
ICON,
DEFAULT_NAME,
DEFAULT_DEV_EUI,
DEFAULT_GAS,
DEFAULT_DEVICE_TYPE,
DEFAULT_TEMPERATURE,
DEFAULT_HUMIDITY,
DEFAULT_PRESSURE,
DEFAULT_AIR_QUALITY,
DEFAULT_BATTERY,
DEFAULT_ALL,
ATTR_DEVICE_TYPE,
ATTR_TEMPERATURE,
ATTR_HUMIDITY,
ATTR_PRESSURE,
ATTR_AIR_QUALITY,
ATTR_BATTERY
)
from .kcs_n1cx import KCSTraceMeN1CxDataClient
# set scan interval as 10 mins
SCAN_INTERVAL = timedelta(seconds=600)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities):
"""
Set up KCS TraceME N1Cx sensor
:param hass: hass object
:param entry: config entry
:return: none
"""
# in-line function
async def async_update_data():
"""
Fetch data from KCS TraceME N1Cx API
This is the place to pre-process the data to lookup tables so entities can quickly look up their data
:param: none
:return: json data decoded
"""
return await hass.async_add_executor_job(decode_payload, api, entry)
async def async_initialize():
"""
Initialize objects from KCS TraceME N1Cx API
:param: none
:return: data coordinator, device type
"""
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=PLATFORM,
update_method=async_update_data
)
# fetch initial data so we have data when entities subscribe
sensor_name, device_type = await hass.async_add_executor_job(get_device_info, api, entry)
await coordinator.async_refresh()
return (coordinator, sensor_name, device_type)
# initialize KCS TraceME N1Cx API
dev_eui = None
if entry.data:
dev_eui = entry.data.get(CONF_DEV_EUI, DEFAULT_DEV_EUI)
api = KCSTraceMeN1CxDataClient(dev_eui)
coordinator, sensor_name, device_type = await async_initialize()
# get options
options = None
if entry.data:
options = {
"temperature": entry.data.get(CONF_TEMPERATURE, DEFAULT_TEMPERATURE),
"humidity": entry.data.get(CONF_HUMIDITY, DEFAULT_HUMIDITY),
"pressure": entry.data.get(CONF_PRESSURE, DEFAULT_PRESSURE),
"air_quality": entry.data.get(CONF_AIR_QUALITY, DEFAULT_AIR_QUALITY),
"battery": entry.data.get(CONF_BATTERY, DEFAULT_BATTERY),
"all": entry.data.get(CONF_ALL, DEFAULT_ALL)
}
# add sensor
async_add_entities([KCSTraceMeN1CxSensor(coordinator, sensor_name, device_type, options)], False)
def get_device_info(api, config_entry):
"""
Get sensor information
:param api: KCS TraceME N1Cx client
:param config_entry: config entry
:return: (device name, smarte meter type)
"""
sensor_name = None
# check the input data
if config_entry.data:
sensor_name = config_entry.data.get(CONF_NAME, DEFAULT_NAME)
# get device type
device_type = None
return (sensor_name, device_type)
def decode_payload(api, config_entry):
"""
    Fetch and decode the latest sensor readings from the given API
    :param api: KCS TraceME N1Cx api client
    :param config_entry: config entry
    :return: decoded sensor data, or None if parsing failed
"""
# get sensor readings
data = None
try:
data = api.parse_data()
except ValueError as err:
_LOGGER.warning(f"[API] Error: {str(err)}")
    return data
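# Illustrative note (an assumption inferred from the attribute handling below,
# not from vendor documentation): parse_data() is expected to return a dict
# roughly shaped like
#     {'co2': 412.0, 'temperature': 21.3, 'humidity': 45.2,
#      'pressure': 1013.2, 'air_quality': 'good', 'battery': 3.601}
# with co2 in PPM, temperature in °C, humidity in %, pressure in hPa and
# battery voltage in volts.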
class KCSTraceMeN1CxSensor(Entity):
"""Implementation of a sensor"""
def __init__(self, coordinator, sensor_name, device_type, options):
"""
Initialize sensor class
:param coordinator: data coordinator object
:param sensor_name: device name
:param device_type: device type
:param options: option flags
:return: none
"""
self._name = sensor_name
self._type = SENSOR_TYPE
self._state = None
self._coordinator = coordinator
self._device_type = DEFAULT_DEVICE_TYPE
self._options = options
# parameter validation
if device_type is not None:
self._device_type = device_type
@property
def name(self):
"""
Return the name of the sensor
:param: none
:return: sensor name
"""
return self._name
@property
def unique_id(self):
"""
Return sensor unique id
:param: none
:return: unique id
"""
return self._type
@property
def state(self):
"""
Return the state of the sensor
:param: none
:return: sensor state
"""
return self._state
@property
def icon(self):
"""
Icon for each sensor
:param: none
:return: sensor icon
"""
return ICON
@property
def unit_of_measurement(self):
"""
Return the unit of measurement of this entity, if any
:param: none
:return: data unit
"""
return 'PPM'
@property
def should_poll(self):
"""
Need to poll.
Coordinator notifies entity of updates
:param: none
        :return: true
"""
return True
@property
def device_state_attributes(self):
"""
Return the state attributes
:param: none
:return: state attributes
"""
attributes = {
ATTR_DEVICE_TYPE: self._device_type,
ATTR_ATTRIBUTION: ATTRIBUTION
}
if self._coordinator.data and self._options:
"""
if self._options.get('temperature'):
attributes[ATTR_TEMPERATURE] = f"{self._coordinator.data.get('temperature'):.2f} °C",
if self._options.get('humidity'):
attributes[ATTR_HUMIDITY] = f"{self._coordinator.data.get('humidity'):.2f} %"
if self._options.get('pressure'):
attributes[ATTR_PRESSURE] = f"{self._coordinator.data.get('pressure'):.2f} hPa"
if self._options.get('air_quality'):
attributes[ATTR_AIR_QUALITY] = self._coordinator.data.get('air_quality')
if self._options.get('battery'):
attributes[ATTR_BATTERY] = f"{self._coordinator.data.get('battery'):.3f} V"
"""
attributes[ATTR_TEMPERATURE] = f"{self._coordinator.data.get('temperature'):.2f} °C",
attributes[ATTR_HUMIDITY] = f"{self._coordinator.data.get('humidity'):.2f} %"
attributes[ATTR_PRESSURE] = f"{self._coordinator.data.get('pressure'):.2f} hPa"
attributes[ATTR_AIR_QUALITY] = self._coordinator.data.get('air_quality')
attributes[ATTR_BATTERY] = f"{self._coordinator.data.get('battery'):.3f} V"
return attributes
@property
def available(self):
"""
Return if entity is available
:param: none
:return: true is sensor is available, false otherwise
"""
return self._coordinator.last_update_success
def update_state(self):
"""
Calculate the consumption data
:param: none
:return: none
"""
if self._coordinator.data:
# get consumption value
value = self._coordinator.data.get('co2')
self._state = f"{value:.2f}"
async def async_added_to_hass(self):
"""
When entity is added to hass
:param: none
:return: none
"""
self.async_on_remove(
self._coordinator.async_add_listener(self.async_write_ha_state)
)
self.update_state()
async def async_update(self):
"""
Update the entity
Only used by the generic entity update service
:param: none
:return: none
"""
_LOGGER.info("[ENTITY] Async updated")
await self._coordinator.async_request_refresh()
self.update_state()
|
py | b40d2867144d61fcc6de8b3bbaad39812167f199 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train and eval."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.bin import t2t_trainer
from tensor2tensor.data_generators import problem
import tensorflow.compat.v1 as tf
from state_of_sparsity.sparse_transformer import common_flags
from state_of_sparsity.sparse_transformer.models import sparse_transformer # pylint: disable=unused-import
flags = tf.flags
FLAGS = flags.FLAGS
def main(argv):
argv = common_flags.update_argv(argv)
return t2t_trainer.main(argv)
if __name__ == "__main__":
tf.app.run()
|
py | b40d299b0d0322267860af7810fcf3f5612682e5 | import os
from datetime import datetime
from zipfile import ZipFile
# set file name and time of creation
today = datetime.now()
file_name = 'zipper_' + today.strftime('%Y.%m.%dh%H%M') + '.zip'
dir_name = 'tmp/' # update path
def zipdir(path, zip_file):
    for root, dirs, files in os.walk(path):
        for file in files:
            zip_file.write(os.path.join(root, file))
if __name__ == '__main__':
zipfile = ZipFile(file_name, 'w')
zipdir(dir_name, zipfile)
zipfile.close()
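# Illustrative variant (an assumption about the desired archive layout):
# ZipFile.write stores the path exactly as given, so entries keep the 'tmp/'
# prefix. To store entries relative to the zipped directory instead, an arcname
# can be passed:
#
#     zip_file.write(os.path.join(root, file),
#                    arcname=os.path.relpath(os.path.join(root, file), path))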
|
py | b40d2a20be4a3f7cf1a19c7169c908ce6cfe1cf1 | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
import tokenize
from io import StringIO
from pants.base.build_file_target_factory import BuildFileTargetFactory
from pants.base.parse_context import ParseContext
from pants.engine.legacy.structs import BundleAdaptor, Globs, RGlobs, TargetAdaptor, ZGlobs
from pants.engine.mapper import UnaddressableObjectError
from pants.engine.objects import Serializable
from pants.engine.parser import ParseError, Parser
from pants.util.memo import memoized_property
logger = logging.getLogger(__name__)
class LegacyPythonCallbacksParser(Parser):
"""A parser that parses the given python code into a list of top-level objects.
Only Serializable objects with `name`s will be collected and returned. These objects will be
addressable via their name in the parsed namespace.
This parser attempts to be compatible with existing legacy BUILD files and concepts including
macros and target factories.
"""
def __init__(self, symbol_table, aliases, build_file_imports_behavior):
"""
:param symbol_table: A SymbolTable for this parser, which will be overlaid with the given
additional aliases.
:type symbol_table: :class:`pants.engine.parser.SymbolTable`
:param aliases: Additional BuildFileAliases to register.
:type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
:param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
import statements. Valid values: "allow", "warn", "error".
:type build_file_imports_behavior: string
"""
super().__init__()
self._symbols, self._parse_context = self._generate_symbols(symbol_table, aliases)
self._build_file_imports_behavior = build_file_imports_behavior
@staticmethod
def _generate_symbols(symbol_table, aliases):
symbols = {}
# Compute "per path" symbols. For performance, we use the same ParseContext, which we
# mutate (in a critical section) to set the rel_path appropriately before it's actually used.
# This allows this method to reuse the same symbols for all parses. Meanwhile we set the
# rel_path to None, so that we get a loud error if anything tries to use it before it's set.
# TODO: See https://github.com/pantsbuild/pants/issues/3561
parse_context = ParseContext(rel_path=None, type_aliases=symbols)
class Registrar(BuildFileTargetFactory):
def __init__(self, parse_context, type_alias, object_type):
self._parse_context = parse_context
self._type_alias = type_alias
self._object_type = object_type
self._serializable = Serializable.is_serializable_type(self._object_type)
@memoized_property
def target_types(self):
return [self._object_type]
def __call__(self, *args, **kwargs):
# Target names default to the name of the directory their BUILD file is in
# (as long as it's not the root directory).
if 'name' not in kwargs and issubclass(self._object_type, TargetAdaptor):
dirname = os.path.basename(self._parse_context.rel_path)
if dirname:
kwargs['name'] = dirname
else:
raise UnaddressableObjectError(
'Targets in root-level BUILD files must be named explicitly.')
name = kwargs.get('name')
if name and self._serializable:
kwargs.setdefault('type_alias', self._type_alias)
obj = self._object_type(**kwargs)
self._parse_context._storage.add(obj)
return obj
else:
return self._object_type(*args, **kwargs)
for alias, symbol in symbol_table.table.items():
registrar = Registrar(parse_context, alias, symbol)
symbols[alias] = registrar
symbols[symbol] = registrar
if aliases.objects:
symbols.update(aliases.objects)
for alias, object_factory in aliases.context_aware_object_factories.items():
symbols[alias] = object_factory(parse_context)
for alias, target_macro_factory in aliases.target_macro_factories.items():
underlying_symbol = symbols.get(alias, TargetAdaptor)
symbols[alias] = target_macro_factory.target_macro(parse_context)
for target_type in target_macro_factory.target_types:
symbols[target_type] = Registrar(parse_context, alias, underlying_symbol)
# TODO: Replace builtins for paths with objects that will create wrapped PathGlobs objects.
# The strategy for https://github.com/pantsbuild/pants/issues/3560 should account for
# migrating these additional captured arguments to typed Sources.
class GlobWrapper:
def __init__(self, parse_context, glob_type):
self._parse_context = parse_context
self._glob_type = glob_type
def __call__(self, *args, **kwargs):
return self._glob_type(*args, spec_path=self._parse_context.rel_path, **kwargs)
symbols['globs'] = GlobWrapper(parse_context, Globs)
symbols['rglobs'] = GlobWrapper(parse_context, RGlobs)
symbols['zglobs'] = GlobWrapper(parse_context, ZGlobs)
symbols['bundle'] = BundleAdaptor
return symbols, parse_context
def parse(self, filepath, filecontent):
python = filecontent.decode()
# Mutate the parse context for the new path, then exec, and copy the resulting objects.
# We execute with a (shallow) clone of the symbols as a defense against accidental
# pollution of the namespace via imports or variable definitions. Defending against
# _intentional_ mutation would require a deep clone, which doesn't seem worth the cost at
# this juncture.
self._parse_context._storage.clear(os.path.dirname(filepath))
exec(python, dict(self._symbols))
# Perform this check after successful execution, so we know the python is valid (and should
# tokenize properly!)
# Note that this is incredibly poor sandboxing. There are many ways to get around it.
# But it's sufficient to tell most users who aren't being actively malicious that they're doing
# something wrong, and it has a low performance overhead.
if self._build_file_imports_behavior != 'allow' and 'import' in python:
io_wrapped_python = StringIO(python)
for token in tokenize.generate_tokens(io_wrapped_python.readline):
if token[1] == 'import':
line_being_tokenized = token[4]
if self._build_file_imports_behavior == 'warn':
logger.warning('{} tried to import - import statements should be avoided ({})'.format(
filepath,
line_being_tokenized
))
elif self._build_file_imports_behavior == 'error':
raise ParseError(
'import statements have been banned, but tried to import: {}'.format(
line_being_tokenized
)
)
else:
raise ParseError(
"Didn't know what to do for build_file_imports_behavior value {}".format(
self._build_file_imports_behavior
)
)
return list(self._parse_context._storage.objects)
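# Illustrative example (repo layout assumed for the sake of the example): for a
# BUILD file at src/example/BUILD containing `python_library()`, the Registrar
# above fills in name='example' from the enclosing directory, so the target is
# addressable as src/example:example. Only root-level BUILD files must name
# their targets explicitly.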
|
py | b40d2a57b81062df4624e2a596ac5bbffe73aebe | """
Utilities for interpreting CSS from Stylers for formatting non-HTML outputs.
"""
import re
import warnings
class CSSWarning(UserWarning):
"""
This CSS syntax cannot currently be parsed.
"""
pass
def _side_expander(prop_fmt: str):
def expand(self, prop, value: str):
tokens = value.split()
try:
mapping = self.SIDE_SHORTHANDS[len(tokens)]
except KeyError:
warnings.warn(
f'Could not expand "{prop}: {value}"', CSSWarning,
)
return
for key, idx in zip(self.SIDES, mapping):
yield prop_fmt.format(key), tokens[idx]
return expand
class CSSResolver:
"""
A callable for parsing and resolving CSS to atomic properties.
"""
def __call__(self, declarations_str, inherited=None):
"""
        Resolve the given declarations into atomic properties.
Parameters
----------
declarations_str : str
A list of CSS declarations
inherited : dict, optional
Atomic properties indicating the inherited style context in which
declarations_str is to be resolved. ``inherited`` should already
be resolved, i.e. valid output of this method.
Returns
-------
dict
Atomic CSS 2.2 properties.
Examples
--------
>>> resolve = CSSResolver()
>>> inherited = {'font-family': 'serif', 'font-weight': 'bold'}
>>> out = resolve('''
... border-color: BLUE RED;
... font-size: 1em;
... font-size: 2em;
... font-weight: normal;
... font-weight: inherit;
... ''', inherited)
>>> sorted(out.items()) # doctest: +NORMALIZE_WHITESPACE
[('border-bottom-color', 'blue'),
('border-left-color', 'red'),
('border-right-color', 'red'),
('border-top-color', 'blue'),
('font-family', 'serif'),
('font-size', '24pt'),
('font-weight', 'bold')]
"""
props = dict(self.atomize(self.parse(declarations_str)))
if inherited is None:
inherited = {}
# 1. resolve inherited, initial
for prop, val in inherited.items():
if prop not in props:
props[prop] = val
for prop, val in list(props.items()):
if val == "inherit":
val = inherited.get(prop, "initial")
if val == "initial":
val = None
if val is None:
# we do not define a complete initial stylesheet
del props[prop]
else:
props[prop] = val
# 2. resolve relative font size
if props.get("font-size"):
if "font-size" in inherited:
em_pt = inherited["font-size"]
assert em_pt[-2:] == "pt"
em_pt = float(em_pt[:-2])
else:
em_pt = None
props["font-size"] = self.size_to_pt(
props["font-size"], em_pt, conversions=self.FONT_SIZE_RATIOS
)
font_size = float(props["font-size"][:-2])
else:
font_size = None
# 3. TODO: resolve other font-relative units
for side in self.SIDES:
prop = f"border-{side}-width"
if prop in props:
props[prop] = self.size_to_pt(
props[prop], em_pt=font_size, conversions=self.BORDER_WIDTH_RATIOS
)
for prop in [
f"margin-{side}",
f"padding-{side}",
]:
if prop in props:
# TODO: support %
props[prop] = self.size_to_pt(
props[prop], em_pt=font_size, conversions=self.MARGIN_RATIOS
)
return props
UNIT_RATIOS = {
"rem": ("pt", 12),
"ex": ("em", 0.5),
# 'ch':
"px": ("pt", 0.75),
"pc": ("pt", 12),
"in": ("pt", 72),
"cm": ("in", 1 / 2.54),
"mm": ("in", 1 / 25.4),
"q": ("mm", 0.25),
"!!default": ("em", 0),
}
FONT_SIZE_RATIOS = UNIT_RATIOS.copy()
FONT_SIZE_RATIOS.update(
{
"%": ("em", 0.01),
"xx-small": ("rem", 0.5),
"x-small": ("rem", 0.625),
"small": ("rem", 0.8),
"medium": ("rem", 1),
"large": ("rem", 1.125),
"x-large": ("rem", 1.5),
"xx-large": ("rem", 2),
"smaller": ("em", 1 / 1.2),
"larger": ("em", 1.2),
"!!default": ("em", 1),
}
)
MARGIN_RATIOS = UNIT_RATIOS.copy()
MARGIN_RATIOS.update({"none": ("pt", 0)})
BORDER_WIDTH_RATIOS = UNIT_RATIOS.copy()
BORDER_WIDTH_RATIOS.update(
{
"none": ("pt", 0),
"thick": ("px", 4),
"medium": ("px", 2),
"thin": ("px", 1),
# Default: medium only if solid
}
)
def size_to_pt(self, in_val, em_pt=None, conversions=UNIT_RATIOS):
def _error():
warnings.warn(f"Unhandled size: {repr(in_val)}", CSSWarning)
return self.size_to_pt("1!!default", conversions=conversions)
try:
val, unit = re.match(r"^(\S*?)([a-zA-Z%!].*)", in_val).groups()
except AttributeError:
return _error()
if val == "":
# hack for 'large' etc.
val = 1
else:
try:
val = float(val)
except ValueError:
return _error()
while unit != "pt":
if unit == "em":
if em_pt is None:
unit = "rem"
else:
val *= em_pt
unit = "pt"
continue
try:
unit, mul = conversions[unit]
except KeyError:
return _error()
val *= mul
val = round(val, 5)
if int(val) == val:
size_fmt = f"{int(val):d}pt"
else:
size_fmt = f"{val:f}pt"
return size_fmt
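    # Worked examples (illustrative, derived from UNIT_RATIOS and the em
    # handling above):
    #   size_to_pt("16px")            -> "12pt"  (16 * 0.75)
    #   size_to_pt("1.5em", em_pt=12) -> "18pt"  (1.5 * 12)
    #   size_to_pt("2in")             -> "144pt" (2 * 72)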
def atomize(self, declarations):
for prop, value in declarations:
attr = "expand_" + prop.replace("-", "_")
try:
expand = getattr(self, attr)
except AttributeError:
yield prop, value
else:
for prop, value in expand(prop, value):
yield prop, value
SIDE_SHORTHANDS = {
1: [0, 0, 0, 0],
2: [0, 1, 0, 1],
3: [0, 1, 2, 1],
4: [0, 1, 2, 3],
}
SIDES = ("top", "right", "bottom", "left")
expand_border_color = _side_expander("border-{:s}-color")
expand_border_style = _side_expander("border-{:s}-style")
expand_border_width = _side_expander("border-{:s}-width")
expand_margin = _side_expander("margin-{:s}")
expand_padding = _side_expander("padding-{:s}")
def parse(self, declarations_str: str):
"""
Generates (prop, value) pairs from declarations.
In a future version may generate parsed tokens from tinycss/tinycss2
Parameters
----------
declarations_str : str
"""
for decl in declarations_str.split(";"):
if not decl.strip():
continue
prop, sep, val = decl.partition(":")
prop = prop.strip().lower()
# TODO: don't lowercase case sensitive parts of values (strings)
val = val.strip().lower()
if sep:
yield prop, val
else:
warnings.warn(
f"Ill-formatted attribute: expected a colon in {repr(decl)}",
CSSWarning,
)
|
py | b40d2adc03000675ba476d7f499f0fb53c92e84f | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
from fairseq import search, utils
from fairseq.data import data_utils
from fairseq.models import FairseqIncrementalDecoder
class SequenceGenerator(object):
def __init__(
self,
tgt_dict,
beam_size=1,
max_len_a=0,
max_len_b=200,
min_len=1,
normalize_scores=True,
len_penalty=1.,
unk_penalty=0.,
retain_dropout=False,
temperature=1.,
match_source_len=False,
no_repeat_ngram_size=0,
search_strategy=None,
eos=None
):
"""Generates translations of a given source sentence.
Args:
tgt_dict (~fairseq.data.Dictionary): target dictionary
beam_size (int, optional): beam width (default: 1)
max_len_a/b (int, optional): generate sequences of maximum length
ax + b, where x is the source length
min_len (int, optional): the minimum length of the generated output
(not including end-of-sentence)
normalize_scores (bool, optional): normalize scores by the length
of the output (default: True)
len_penalty (float, optional): length penalty, where <1.0 favors
shorter, >1.0 favors longer sentences (default: 1.0)
unk_penalty (float, optional): unknown word penalty, where <0
produces more unks, >0 produces fewer (default: 0.0)
retain_dropout (bool, optional): use dropout when generating
(default: False)
temperature (float, optional): temperature, where values
>1.0 produce more uniform samples and values <1.0 produce
sharper samples (default: 1.0)
match_source_len (bool, optional): outputs should match the source
length (default: False)
"""
self.pad = tgt_dict.pad()
self.unk = tgt_dict.unk()
self.eos = tgt_dict.eos() if eos is None else eos
self.vocab_size = len(tgt_dict)
self.beam_size = beam_size
# the max beam size is the dictionary size - 1, since we never select pad
self.beam_size = min(beam_size, self.vocab_size - 1)
self.max_len_a = max_len_a
self.max_len_b = max_len_b
self.min_len = min_len
self.normalize_scores = normalize_scores
self.len_penalty = len_penalty
self.unk_penalty = unk_penalty
self.retain_dropout = retain_dropout
self.temperature = temperature
self.match_source_len = match_source_len
self.no_repeat_ngram_size = no_repeat_ngram_size
assert temperature > 0, '--temperature must be greater than 0'
self.search = (
search.BeamSearch(tgt_dict) if search_strategy is None else search_strategy
)
@torch.no_grad()
def generate(self, models, sample, **kwargs):
"""Generate a batch of translations.
Args:
models (List[~fairseq.models.FairseqModel]): ensemble of models
sample (dict): batch
prefix_tokens (torch.LongTensor, optional): force decoder to begin
with these tokens
bos_token (int, optional): beginning of sentence token
(default: self.eos)
"""
model = EnsembleModel(models)
return self._generate(model, sample, **kwargs)
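    # Illustrative note on the return value (see finalize_hypos below): the result
    # is a list with one entry per sentence in the batch, and each entry is a list
    # of up to beam_size hypothesis dicts sorted by descending score, e.g.
    #     finalized[0][0] == {'tokens': <LongTensor>, 'score': <float>,
    #                         'attention': <FloatTensor or None>, 'alignment': None,
    #                         'positional_scores': <FloatTensor>}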
@torch.no_grad()
def _generate(
self,
model,
sample,
prefix_tokens=None,
bos_token=None,
**kwargs
):
if not self.retain_dropout:
model.eval()
# model.forward normally channels prev_output_tokens into the decoder
# separately, but SequenceGenerator directly calls model.encoder
encoder_input = {
k: v for k, v in sample['net_input'].items()
if k != 'prev_output_tokens'
}
src_tokens = encoder_input['src_tokens']
src_lengths = (src_tokens.ne(self.eos) & src_tokens.ne(self.pad)).long().sum(dim=1)
input_size = src_tokens.size()
# batch dimension goes first followed by source lengths
bsz = input_size[0]
src_len = input_size[1]
beam_size = self.beam_size
if self.match_source_len:
max_len = src_lengths.max().item()
else:
max_len = min(
int(self.max_len_a * src_len + self.max_len_b),
# exclude the EOS marker
model.max_decoder_positions() - 1,
)
assert self.min_len <= max_len, 'min_len cannot be larger than max_len, please adjust these!'
# compute the encoder output for each beam
encoder_outs = model.forward_encoder(encoder_input)
new_order = torch.arange(bsz).view(-1, 1).repeat(1, beam_size).view(-1)
new_order = new_order.to(src_tokens.device).long()
encoder_outs = model.reorder_encoder_out(encoder_outs, new_order)
# initialize buffers
scores = src_tokens.new(bsz * beam_size, max_len + 1).float().fill_(0)
scores_buf = scores.clone()
tokens = src_tokens.new(bsz * beam_size, max_len + 2).long().fill_(self.pad)
tokens_buf = tokens.clone()
tokens[:, 0] = self.eos if bos_token is None else bos_token
attn, attn_buf = None, None
# The blacklist indicates candidates that should be ignored.
# For example, suppose we're sampling and have already finalized 2/5
# samples. Then the blacklist would mark 2 positions as being ignored,
# so that we only finalize the remaining 3 samples.
blacklist = src_tokens.new_zeros(bsz, beam_size).eq(-1) # forward and backward-compatible False mask
# list of completed sentences
finalized = [[] for i in range(bsz)]
finished = [False for i in range(bsz)]
num_remaining_sent = bsz
# number of candidate hypos per step
cand_size = 2 * beam_size # 2 x beam size in case half are EOS
# offset arrays for converting between different indexing schemes
bbsz_offsets = (torch.arange(0, bsz) * beam_size).unsqueeze(1).type_as(tokens)
cand_offsets = torch.arange(0, cand_size).type_as(tokens)
# helper function for allocating buffers on the fly
buffers = {}
def buffer(name, type_of=tokens): # noqa
if name not in buffers:
buffers[name] = type_of.new()
return buffers[name]
def is_finished(sent, step, unfin_idx):
"""
            Check whether we've finished generation for a given sentence:
            either beam_size hypotheses have been finalized or the maximum
            generation length has been reached.
"""
assert len(finalized[sent]) <= beam_size
if len(finalized[sent]) == beam_size or step == max_len:
return True
return False
def finalize_hypos(step, bbsz_idx, eos_scores):
"""
Finalize the given hypotheses at this step, while keeping the total
number of finalized hypotheses per sentence <= beam_size.
Note: the input must be in the desired finalization order, so that
hypotheses that appear earlier in the input are preferred to those
that appear later.
Args:
step: current time step
bbsz_idx: A vector of indices in the range [0, bsz*beam_size),
indicating which hypotheses to finalize
eos_scores: A vector of the same size as bbsz_idx containing
scores for each hypothesis
"""
assert bbsz_idx.numel() == eos_scores.numel()
# clone relevant token and attention tensors
tokens_clone = tokens.index_select(0, bbsz_idx)
tokens_clone = tokens_clone[:, 1:step + 2] # skip the first index, which is EOS
assert not tokens_clone.eq(self.eos).any()
tokens_clone[:, step] = self.eos
attn_clone = attn.index_select(0, bbsz_idx)[:, :, 1:step+2] if attn is not None else None
# compute scores per token position
pos_scores = scores.index_select(0, bbsz_idx)[:, :step+1]
pos_scores[:, step] = eos_scores
# convert from cumulative to per-position scores
pos_scores[:, 1:] = pos_scores[:, 1:] - pos_scores[:, :-1]
# normalize sentence-level scores
if self.normalize_scores:
eos_scores /= (step + 1) ** self.len_penalty
cum_unfin = []
prev = 0
for f in finished:
if f:
prev += 1
else:
cum_unfin.append(prev)
sents_seen = set()
for i, (idx, score) in enumerate(zip(bbsz_idx.tolist(), eos_scores.tolist())):
unfin_idx = idx // beam_size
sent = unfin_idx + cum_unfin[unfin_idx]
sents_seen.add((sent, unfin_idx))
if self.match_source_len and step > src_lengths[unfin_idx]:
score = -math.inf
def get_hypo():
if attn_clone is not None:
# remove padding tokens from attn scores
hypo_attn = attn_clone[i]
else:
hypo_attn = None
return {
'tokens': tokens_clone[i],
'score': score,
'attention': hypo_attn, # src_len x tgt_len
'alignment': None,
'positional_scores': pos_scores[i],
}
if len(finalized[sent]) < beam_size:
finalized[sent].append(get_hypo())
newly_finished = []
for sent, unfin_idx in sents_seen:
# check termination conditions for this sentence
if not finished[sent] and is_finished(sent, step, unfin_idx):
finished[sent] = True
newly_finished.append(unfin_idx)
return newly_finished
reorder_state = None
batch_idxs = None
for step in range(max_len + 1): # one extra step for EOS marker
# reorder decoder internal states based on the prev choice of beams
if reorder_state is not None:
if batch_idxs is not None:
# update beam indices to take into account removed sentences
corr = batch_idxs - torch.arange(batch_idxs.numel()).type_as(batch_idxs)
reorder_state.view(-1, beam_size).add_(corr.unsqueeze(-1) * beam_size)
model.reorder_incremental_state(reorder_state)
encoder_outs = model.reorder_encoder_out(encoder_outs, reorder_state)
lprobs, avg_attn_scores = model.forward_decoder(
tokens[:, :step + 1], encoder_outs, temperature=self.temperature,
)
lprobs[lprobs != lprobs] = -math.inf
lprobs[:, self.pad] = -math.inf # never select pad
lprobs[:, self.unk] -= self.unk_penalty # apply unk penalty
# handle max length constraint
if step >= max_len:
lprobs[:, :self.eos] = -math.inf
lprobs[:, self.eos + 1:] = -math.inf
# handle prefix tokens (possibly with different lengths)
if prefix_tokens is not None and step < prefix_tokens.size(1) and step < max_len:
prefix_toks = prefix_tokens[:, step].unsqueeze(-1).repeat(1, beam_size).view(-1)
prefix_lprobs = lprobs.gather(-1, prefix_toks.unsqueeze(-1))
prefix_mask = prefix_toks.ne(self.pad)
lprobs[prefix_mask] = -math.inf
lprobs[prefix_mask] = lprobs[prefix_mask].scatter_(
-1, prefix_toks[prefix_mask].unsqueeze(-1), prefix_lprobs[prefix_mask]
)
# if prefix includes eos, then we should make sure tokens and
# scores are the same across all beams
eos_mask = prefix_toks.eq(self.eos)
if eos_mask.any():
# validate that the first beam matches the prefix
first_beam = tokens[eos_mask].view(-1, beam_size, tokens.size(-1))[:, 0, 1:step + 1]
eos_mask_batch_dim = eos_mask.view(-1, beam_size)[:, 0]
target_prefix = prefix_tokens[eos_mask_batch_dim][:, :step]
assert (first_beam == target_prefix).all()
def replicate_first_beam(tensor, mask):
tensor = tensor.view(-1, beam_size, tensor.size(-1))
tensor[mask] = tensor[mask][:, :1, :]
return tensor.view(-1, tensor.size(-1))
# copy tokens, scores and lprobs from the first beam to all beams
tokens = replicate_first_beam(tokens, eos_mask_batch_dim)
scores = replicate_first_beam(scores, eos_mask_batch_dim)
lprobs = replicate_first_beam(lprobs, eos_mask_batch_dim)
elif step < self.min_len:
# minimum length constraint (does not apply if using prefix_tokens)
lprobs[:, self.eos] = -math.inf
if self.no_repeat_ngram_size > 0:
# for each beam and batch sentence, generate a list of previous ngrams
gen_ngrams = [{} for bbsz_idx in range(bsz * beam_size)]
for bbsz_idx in range(bsz * beam_size):
gen_tokens = tokens[bbsz_idx].tolist()
for ngram in zip(*[gen_tokens[i:] for i in range(self.no_repeat_ngram_size)]):
gen_ngrams[bbsz_idx][tuple(ngram[:-1])] = \
gen_ngrams[bbsz_idx].get(tuple(ngram[:-1]), []) + [ngram[-1]]
# Record attention scores
if type(avg_attn_scores) is list:
avg_attn_scores = avg_attn_scores[0]
if avg_attn_scores is not None:
if attn is None:
attn = scores.new(bsz * beam_size, avg_attn_scores.size(1), max_len + 2)
attn_buf = attn.clone()
attn[:, :, step + 1].copy_(avg_attn_scores)
scores = scores.type_as(lprobs)
scores_buf = scores_buf.type_as(lprobs)
eos_bbsz_idx = buffer('eos_bbsz_idx')
eos_scores = buffer('eos_scores', type_of=scores)
self.search.set_src_lengths(src_lengths)
if self.no_repeat_ngram_size > 0:
def calculate_banned_tokens(bbsz_idx):
# before decoding the next token, prevent decoding of ngrams that have already appeared
ngram_index = tuple(tokens[bbsz_idx, step + 2 - self.no_repeat_ngram_size:step + 1].tolist())
return gen_ngrams[bbsz_idx].get(ngram_index, [])
if step + 2 - self.no_repeat_ngram_size >= 0:
# no banned tokens if we haven't generated no_repeat_ngram_size tokens yet
banned_tokens = [calculate_banned_tokens(bbsz_idx) for bbsz_idx in range(bsz * beam_size)]
else:
banned_tokens = [[] for bbsz_idx in range(bsz * beam_size)]
for bbsz_idx in range(bsz * beam_size):
lprobs[bbsz_idx, banned_tokens[bbsz_idx]] = -math.inf
cand_scores, cand_indices, cand_beams = self.search.step(
step,
lprobs.view(bsz, -1, self.vocab_size),
scores.view(bsz, beam_size, -1)[:, :, :step],
)
# cand_bbsz_idx contains beam indices for the top candidate
# hypotheses, with a range of values: [0, bsz*beam_size),
# and dimensions: [bsz, cand_size]
cand_bbsz_idx = cand_beams.add(bbsz_offsets)
# finalize hypotheses that end in eos, except for blacklisted ones
# or candidates with a score of -inf
eos_mask = cand_indices.eq(self.eos) & cand_scores.ne(-math.inf)
eos_mask[:, :beam_size][blacklist] = 0
# only consider eos when it's among the top beam_size indices
torch.masked_select(
cand_bbsz_idx[:, :beam_size],
mask=eos_mask[:, :beam_size],
out=eos_bbsz_idx,
)
finalized_sents = set()
if eos_bbsz_idx.numel() > 0:
torch.masked_select(
cand_scores[:, :beam_size],
mask=eos_mask[:, :beam_size],
out=eos_scores,
)
finalized_sents = finalize_hypos(step, eos_bbsz_idx, eos_scores)
num_remaining_sent -= len(finalized_sents)
assert num_remaining_sent >= 0
if num_remaining_sent == 0:
break
assert step < max_len
if len(finalized_sents) > 0:
new_bsz = bsz - len(finalized_sents)
# construct batch_idxs which holds indices of batches to keep for the next pass
batch_mask = cand_indices.new_ones(bsz)
batch_mask[cand_indices.new(finalized_sents)] = 0
batch_idxs = batch_mask.nonzero().squeeze(-1)
eos_mask = eos_mask[batch_idxs]
cand_beams = cand_beams[batch_idxs]
bbsz_offsets.resize_(new_bsz, 1)
cand_bbsz_idx = cand_beams.add(bbsz_offsets)
cand_scores = cand_scores[batch_idxs]
cand_indices = cand_indices[batch_idxs]
if prefix_tokens is not None:
prefix_tokens = prefix_tokens[batch_idxs]
src_lengths = src_lengths[batch_idxs]
blacklist = blacklist[batch_idxs]
scores = scores.view(bsz, -1)[batch_idxs].view(new_bsz * beam_size, -1)
scores_buf.resize_as_(scores)
tokens = tokens.view(bsz, -1)[batch_idxs].view(new_bsz * beam_size, -1)
tokens_buf.resize_as_(tokens)
if attn is not None:
attn = attn.view(bsz, -1)[batch_idxs].view(new_bsz * beam_size, attn.size(1), -1)
attn_buf.resize_as_(attn)
bsz = new_bsz
else:
batch_idxs = None
# Set active_mask so that values > cand_size indicate eos or
# blacklisted hypos and values < cand_size indicate candidate
# active hypos. After this, the min values per row are the top
# candidate active hypos.
active_mask = buffer('active_mask')
eos_mask[:, :beam_size] |= blacklist
torch.add(
eos_mask.type_as(cand_offsets) * cand_size,
cand_offsets[:eos_mask.size(1)],
out=active_mask,
)
# get the top beam_size active hypotheses, which are just the hypos
# with the smallest values in active_mask
active_hypos, new_blacklist = buffer('active_hypos'), buffer('new_blacklist')
torch.topk(
active_mask, k=beam_size, dim=1, largest=False,
out=(new_blacklist, active_hypos)
)
# update blacklist to ignore any finalized hypos
blacklist = new_blacklist.ge(cand_size)[:, :beam_size]
assert (~blacklist).any(dim=1).all()
active_bbsz_idx = buffer('active_bbsz_idx')
torch.gather(
cand_bbsz_idx, dim=1, index=active_hypos,
out=active_bbsz_idx,
)
active_scores = torch.gather(
cand_scores, dim=1, index=active_hypos,
out=scores[:, step].view(bsz, beam_size),
)
active_bbsz_idx = active_bbsz_idx.view(-1)
active_scores = active_scores.view(-1)
# copy tokens and scores for active hypotheses
torch.index_select(
tokens[:, :step + 1], dim=0, index=active_bbsz_idx,
out=tokens_buf[:, :step + 1],
)
torch.gather(
cand_indices, dim=1, index=active_hypos,
out=tokens_buf.view(bsz, beam_size, -1)[:, :, step + 1],
)
if step > 0:
torch.index_select(
scores[:, :step], dim=0, index=active_bbsz_idx,
out=scores_buf[:, :step],
)
torch.gather(
cand_scores, dim=1, index=active_hypos,
out=scores_buf.view(bsz, beam_size, -1)[:, :, step],
)
# copy attention for active hypotheses
if attn is not None:
torch.index_select(
attn[:, :, :step + 2], dim=0, index=active_bbsz_idx,
out=attn_buf[:, :, :step + 2],
)
# swap buffers
tokens, tokens_buf = tokens_buf, tokens
scores, scores_buf = scores_buf, scores
if attn is not None:
attn, attn_buf = attn_buf, attn
# reorder incremental state in decoder
reorder_state = active_bbsz_idx
# sort by score descending
for sent in range(len(finalized)):
finalized[sent] = sorted(finalized[sent], key=lambda r: r['score'], reverse=True)
return finalized
class EnsembleModel(torch.nn.Module):
"""A wrapper around an ensemble of models."""
def __init__(self, models):
super().__init__()
self.models = torch.nn.ModuleList(models)
self.incremental_states = None
if all(hasattr(m, 'decoder') and isinstance(m.decoder, FairseqIncrementalDecoder) for m in models):
self.incremental_states = {m: {} for m in models}
def has_encoder(self):
return hasattr(self.models[0], 'encoder')
def max_decoder_positions(self):
return min(m.max_decoder_positions() for m in self.models)
@torch.no_grad()
def forward_encoder(self, encoder_input):
if not self.has_encoder():
return None
return [model.encoder(**encoder_input) for model in self.models]
@torch.no_grad()
def forward_decoder(self, tokens, encoder_outs, temperature=1.):
if len(self.models) == 1:
return self._decode_one(
tokens,
self.models[0],
encoder_outs[0] if self.has_encoder() else None,
self.incremental_states,
log_probs=True,
temperature=temperature,
)
log_probs = []
avg_attn = None
for model, encoder_out in zip(self.models, encoder_outs):
probs, attn = self._decode_one(
tokens,
model,
encoder_out,
self.incremental_states,
log_probs=True,
temperature=temperature,
)
log_probs.append(probs)
if attn is not None:
if avg_attn is None:
avg_attn = attn
else:
avg_attn.add_(attn)
avg_probs = torch.logsumexp(torch.stack(log_probs, dim=0), dim=0) - math.log(len(self.models))
if avg_attn is not None:
avg_attn.div_(len(self.models))
return avg_probs, avg_attn
def _decode_one(
self, tokens, model, encoder_out, incremental_states, log_probs,
temperature=1.,
):
if self.incremental_states is not None:
decoder_out = list(model.forward_decoder(
tokens, encoder_out=encoder_out, incremental_state=self.incremental_states[model],
))
else:
decoder_out = list(model.forward_decoder(tokens, encoder_out=encoder_out))
decoder_out[0] = decoder_out[0][:, -1:, :]
if temperature != 1.:
decoder_out[0].div_(temperature)
attn = decoder_out[1] if len(decoder_out) > 1 else None
if type(attn) is dict:
attn = attn.get('attn', None)
if type(attn) is list:
attn = attn[0]
if attn is not None:
attn = attn[:, -1, :]
probs = model.get_normalized_probs(decoder_out, log_probs=log_probs)
probs = probs[:, -1, :]
return probs, attn
def reorder_encoder_out(self, encoder_outs, new_order):
if not self.has_encoder():
return
return [
model.encoder.reorder_encoder_out(encoder_out, new_order)
for model, encoder_out in zip(self.models, encoder_outs)
]
def reorder_incremental_state(self, new_order):
if self.incremental_states is None:
return
for model in self.models:
model.decoder.reorder_incremental_state(self.incremental_states[model], new_order)
class SequenceGeneratorWithAlignment(SequenceGenerator):
def __init__(self, tgt_dict, left_pad_target=False, **kwargs):
"""Generates translations of a given source sentence.
Produces alignments following "Jointly Learning to Align and
Translate with Transformer Models" (Garg et al., EMNLP 2019).
Args:
left_pad_target (bool, optional): Whether or not the
hypothesis should be left padded or not when they are
teacher forced for generating alignments.
"""
super().__init__(tgt_dict, **kwargs)
self.left_pad_target = left_pad_target
@torch.no_grad()
def generate(self, models, sample, **kwargs):
model = EnsembleModelWithAlignment(models)
finalized = super()._generate(model, sample, **kwargs)
src_tokens = sample['net_input']['src_tokens']
bsz = src_tokens.shape[0]
beam_size = self.beam_size
src_tokens, src_lengths, prev_output_tokens, tgt_tokens = \
self._prepare_batch_for_alignment(sample, finalized)
if any(getattr(m, 'full_context_alignment', False) for m in model.models):
attn = model.forward_align(src_tokens, src_lengths, prev_output_tokens)
else:
attn = [
finalized[i // beam_size][i % beam_size]['attention'].transpose(1, 0)
for i in range(bsz * beam_size)
]
# Process the attn matrix to extract hard alignments.
for i in range(bsz * beam_size):
alignment = utils.extract_hard_alignment(attn[i], src_tokens[i], tgt_tokens[i], self.pad, self.eos)
finalized[i // beam_size][i % beam_size]['alignment'] = alignment
return finalized
def _prepare_batch_for_alignment(self, sample, hypothesis):
src_tokens = sample['net_input']['src_tokens']
bsz = src_tokens.shape[0]
src_tokens = src_tokens[:, None, :].expand(-1, self.beam_size, -1).contiguous().view(bsz * self.beam_size, -1)
src_lengths = sample['net_input']['src_lengths']
src_lengths = src_lengths[:, None].expand(-1, self.beam_size).contiguous().view(bsz * self.beam_size)
prev_output_tokens = data_utils.collate_tokens(
[beam['tokens'] for example in hypothesis for beam in example],
self.pad, self.eos, self.left_pad_target, move_eos_to_beginning=True,
)
tgt_tokens = data_utils.collate_tokens(
[beam['tokens'] for example in hypothesis for beam in example],
self.pad, self.eos, self.left_pad_target, move_eos_to_beginning=False,
)
return src_tokens, src_lengths, prev_output_tokens, tgt_tokens
class EnsembleModelWithAlignment(EnsembleModel):
"""A wrapper around an ensemble of models."""
def __init__(self, models):
super().__init__(models)
def forward_align(self, src_tokens, src_lengths, prev_output_tokens):
avg_attn = None
for model in self.models:
decoder_out = model(src_tokens, src_lengths, prev_output_tokens)
attn = decoder_out[1]['attn']
if avg_attn is None:
avg_attn = attn
else:
avg_attn.add_(attn)
if len(self.models) > 1:
avg_attn.div_(len(self.models))
return avg_attn
def _decode_one(
self, tokens, model, encoder_out, incremental_states, log_probs,
temperature=1.,
):
if self.incremental_states is not None:
decoder_out = list(model.forward_decoder(
tokens,
encoder_out=encoder_out,
incremental_state=self.incremental_states[model],
))
else:
decoder_out = list(model.forward_decoder(tokens, encoder_out=encoder_out))
decoder_out[0] = decoder_out[0][:, -1:, :]
if temperature != 1.:
decoder_out[0].div_(temperature)
attn = decoder_out[1] if len(decoder_out) > 1 else None
if type(attn) is dict:
attn = attn.get('attn', None)
if type(attn) is list:
attn = attn[0]
if attn is not None:
attn = attn[:, -1, :]
probs = model.get_normalized_probs(decoder_out, log_probs=log_probs)
probs = probs[:, -1, :]
return probs, attn
|
py | b40d2b8f0726a49c0b32e495f1473a815f500ec7 | #!/usr/bin/python
import sys, datetime, os, subprocess, io
sys.path.append('database/')
sys.path.append('logic/')
import DiaryDatabaseWrapper, newTag, newAuthor, commonDiaryFunctions
def addTask(argv):
"""
Generate a new task file.
"""
taskTitle, tagListString, templateFilename, authorInitialsList, \
year, month, day = validateInputsAndSetDefaults(argv)
taskDir = createFolders(year, month, day)
taskFilename = createTaskFilename(year, month, day, taskDir, \
authorInitialsList)
createFile(taskDir,taskFilename,taskTitle, tagListString, templateFilename, \
authorInitialsList)
openTask(taskDir + '/' + taskFilename)
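# Illustrative invocation (tag names, title and initials are examples only),
# using the comma-separated tag format expected by checkTags below:
#     python addTask.py "signal-processing,notes" "Beamforming experiments" jd default 2021-05-01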
# Input validation
def validateInputsAndSetDefaults(argv):
"""
Validate the provided input and set the defaults values for
the optional parameters if not specified or empty.
"""
nInputs = len(argv)
# Validate that 2 to 5 input variables are given
if nInputs<2 or nInputs>5:
print("Error: You must specify 2 to 5 input parameters.")
print("addTask.py \"tagA tagB\" \"Task Title\" authorInitials " + \
"template YYYY-MM-DD")
sys.exit(2)
tagListString = argv[0]
taskTitle = argv[1]
# Validate the tag
# The provided tag(s) must be compared to the tag database. If not
# found, it must be generated by calling the newTag function.
tagListString = checkTags(tagListString)[0]
# Set or validate author initials
if nInputs<3 or not argv[2]:
authorInitialsList = ""
else:
# The provided author initials must be compared to the author database.
# If not found, it must be generated by calling the newAuthor function.
authorInitialsList = checkAuthors(argv[2])
# Set or validate the template
if nInputs<4 or not argv[3]:
templateFilename = 'default.tpl.tex'
else:
templateFilename = argv[3] + '.tpl.tex'
templateDir = \
commonDiaryFunctions.unicodeDir(os.path.abspath(__file__)) + \
'/templates'
if not os.path.isfile(templateDir + '/' + templateFilename):
print("Error: The specified template file does not exist in " + \
"the template folder. Please create it.")
# Set or validate the date
if nInputs<5 or not argv[4]:
now = datetime.datetime.now()
year = commonDiaryFunctions.unicodeStr(now.year)
month = commonDiaryFunctions.unicodeStr(now.month).zfill(2)
day = commonDiaryFunctions.unicodeStr(now.day).zfill(2)
else:
try:
datetime.datetime.strptime(argv[4], '%Y-%m-%d')
except ValueError:
            raise ValueError("Incorrect date or date format. " + \
"Should be YYYY-MM-DD")
year, month, day = argv[4].split('-')
return taskTitle, tagListString, templateFilename, authorInitialsList, \
year, month, day
# See if the provided tags are in the diary database
def checkTags(tagListString):
"""
See if the provided tags are in the diary database.
"""
# Create a diary database object.
db = DiaryDatabaseWrapper.DiaryDatabaseWrapper()
# Create a list of the tags
tagList = tagListString.split(',')
newTagList = list()
for tag in tagList:
# Remove leading and trailing spaces
tag = tag.strip()
# If an empty tag has been provided, ignore it
if tag == '':
print("An empty tag has been provided and is ignored.")
else:
# Is the tag in the database
tagRows = db.selectFromTable('tags',('name',),\
'WHERE name=\'' + tag + '\'')
if len(tagRows)==0:
# Ask the user to add it
print("The tag '%s' does not exist in the diary database." % tag)
sys.stdout.write("Do you want to add it (Y/n)? ")
choice = commonDiaryFunctions.getUserInput().lower()
if choice in ('','y','yes'):
tagTitle = ''
sys.stdout.write("Please provide a tag title: ")
while tagTitle=='':
tagTitle = commonDiaryFunctions.getUserInput()
newTag.addTag2database((tag,tagTitle))
newTagList.append(tag)
else:
print("Ignoring the tag '%s'." % tag)
else:
newTagList.append(tag)
db.close()
if len(newTagList)==0:
print("No valid tags have been provided. Aborting.")
sys.exit(2)
return ','.join(newTagList), newTagList
# See if the provided authors are in the author database
def checkAuthors(authorInitialsListString):
"""
See if the provided authors are in the diary database.
"""
# Create a diary database object.
db = DiaryDatabaseWrapper.DiaryDatabaseWrapper()
# Create a list of the tags
authorInitialsList = authorInitialsListString.split(',')
newAuthorInitialsList = list()
for authorInitials in authorInitialsList:
# Remove leading and trailing spaces
authorInitials = authorInitials.strip()
# If an empty author has been provided, ignore it
if authorInitials == '':
print("An empty author initials string has been provided "\
"and is ignored.")
else:
# Is the author in the database
authorInitialsRows = db.selectFromTable('authors',('initials',),\
'WHERE initials=\'' + authorInitials + '\'')
if len(authorInitialsRows)==0:
# Ask the user to add it
                print("The author initials '%s' do not exist in the diary "\
                    "database." % authorInitials)
sys.stdout.write("Do you want to add it (Y/n)? ")
choice = commonDiaryFunctions.getUserInput().lower()
if choice in ('','y','yes'):
authorName = ''
sys.stdout.write("Please provide a name: ")
while authorName=='':
authorName = commonDiaryFunctions.getUserInput()
authorEmail = ''
sys.stdout.write("Please provide an email address: ")
while authorEmail=='':
authorEmail = commonDiaryFunctions.getUserInput()
newAuthor.addAuthor2database((authorInitials,\
authorName,authorEmail))
newAuthorInitialsList.append(authorInitials)
else:
print("Ignoring the author initials '%s'." % authorInitials)
else:
newAuthorInitialsList.append(authorInitials)
db.close()
    if len(newAuthorInitialsList)==0:
        print("No valid author initials have been provided. Aborting.")
        sys.exit(2)
    return newAuthorInitialsList
# Create folders if needed
def createFolders(year, month, day):
"""
Create the year, month, and day folders if needed.
"""
entriesDir = \
commonDiaryFunctions.unicodeDir(os.path.abspath(__file__)) + '/entries'
if not os.path.isdir(entriesDir + '/' + year):
os.makedirs(entriesDir + '/' + year)
if not os.path.isdir(entriesDir + '/' + year + '/' + month):
os.makedirs(entriesDir + '/' + year + '/' + month)
if not os.path.isdir(entriesDir + '/' + year + '/' + month + '/' + day):
os.makedirs(entriesDir + '/' + year + '/' + month + '/' + day)
return entriesDir + '/' + year + '/' + month + '/' + day
# Create the file name
def createTaskFilename(year, month, day, taskDir, authorInitialsList):
"""
Create the file name for the current task.
"""
taskIndex = 0
if len(authorInitialsList) > 0:
baseFilename = year + month + day + '_' + authorInitialsList[0]
else:
baseFilename = year + month + day + '_'
taskFilename = baseFilename + str(taskIndex) + '.tex'
while os.path.isfile(taskDir + '/' + taskFilename):
taskIndex = taskIndex+1
taskFilename = baseFilename + str(taskIndex) + '.tex'
return taskFilename
# Create the file containing the task
def createFile(taskDir,taskFilename,taskTitle, tagListString, templateFilename, \
authorInitialsList):
"""
Create the task file from the provided template.
"""
templatePath = commonDiaryFunctions.unicodeDir(os.path.abspath(__file__)) \
+ '/templates/' + templateFilename
taskPath = taskDir + '/' + taskFilename
# Set the task title, the tags, and possibly also the author
templateFile = io.open(templatePath, 'r', encoding='utf-8')
taskFile = io.open(taskPath,'w', encoding='utf-8')
for line in templateFile:
# the task title
line = line.replace('@taskTitle',taskTitle)
# the task label
root,ext = os.path.splitext(taskFilename)
line = line.replace('@label', root)
# the tags
line = line.replace('@tags', tagListString)
# If not empty, also set the author initials
if authorInitialsList:
line = line.replace('%\\authors{}', \
'\\authors{'+ ','.join(authorInitialsList) +'}')
taskFile.write(line)
templateFile.close()
taskFile.close()
# Open the task in the default latex editor
def openTask(taskPath):
"""
Open the task in the default latex editor.
"""
if sys.platform.startswith('darwin'):
subprocess.call(('open', taskPath))
elif os.name == 'nt':
os.startfile(taskPath)
elif os.name == 'posix':
subprocess.call(('xdg-open', taskPath))
if __name__ == '__main__':
unicodedInputList = \
commonDiaryFunctions.convertTerminalInputs2Unicode(sys.argv[1:])
addTask(unicodedInputList)
|
py | b40d2e2db3f187511785eb8ec634506733633263 | """
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
Unit Tests for linux launcher-wrappers: all are sanity code-path tests, since no interprocess actions should be taken
"""
import os
import pytest
import unittest.mock as mock
import ly_test_tools.launchers
pytestmark = pytest.mark.SUITE_smoke
class TestLinuxLauncher(object):
def test_Construct_TestDoubles_LinuxLauncherCreated(self):
under_test = ly_test_tools.launchers.LinuxLauncher(mock.MagicMock(), ["some_args"])
assert isinstance(under_test, ly_test_tools.launchers.Launcher)
assert isinstance(under_test, ly_test_tools.launchers.LinuxLauncher)
def test_BinaryPath_DummyPath_AddPathToApp(self):
dummy_path = "dummy_workspace_path"
dummy_project = "dummy_project"
mock_workspace = mock.MagicMock()
mock_workspace.paths.build_directory.return_value = dummy_path
mock_workspace.project = dummy_project
launcher = ly_test_tools.launchers.LinuxLauncher(mock_workspace, ["some_args"])
under_test = launcher.binary_path()
expected = os.path.join(dummy_path, f"{dummy_project}.GameLauncher")
assert under_test == expected
@mock.patch('ly_test_tools.launchers.LinuxLauncher.binary_path', mock.MagicMock)
@mock.patch('subprocess.Popen')
def test_Launch_DummyArgs_ArgsPassedToPopen(self, mock_subprocess):
dummy_args = ["some_args"]
launcher = ly_test_tools.launchers.LinuxLauncher(mock.MagicMock(), dummy_args)
launcher.launch()
mock_subprocess.assert_called_once()
name, args, kwargs = mock_subprocess.mock_calls[0]
unpacked_args = args[0] # args is a list inside a tuple
assert len(dummy_args) > 0, "accidentally removed dummy_args"
for expected_arg in dummy_args:
assert expected_arg in unpacked_args
@mock.patch('ly_test_tools.launchers.LinuxLauncher.is_alive')
def test_Kill_MockAliveFalse_SilentSuccess(self, mock_alive):
mock_alive.return_value = False
mock_proc = mock.MagicMock()
launcher = ly_test_tools.launchers.LinuxLauncher(mock.MagicMock(), ["dummy"])
launcher._proc = mock_proc
launcher.stop()
mock_proc.kill.assert_called_once()
mock_alive.assert_called()
|
py | b40d2fa26bcde1c8f02c84c54d5dfa0766a773fd | import os
from datetime import date, time, datetime
from base import PyexcelWriterBase, PyexcelHatWriterBase
from pyexcel_xls import get_data
from pyexcel_libxlsxw import xlsxw as xlsx
from nose.tools import eq_
class TestNativeXLWriter:
def setUp(self):
self.testfile = "xlwriter.xlsx"
def test_write_book(self):
self.content = {
"Sheet1": [[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3]],
"Sheet2": [[4, 4, 4, 4], [5, 5, 5, 5], [6, 6, 6, 6]],
"Sheet3": [[u"X", u"Y", u"Z"], [1, 4, 7], [2, 5, 8], [3, 6, 9]],
}
writer = xlsx.XLSXWriter(self.testfile, "xlsx")
writer.write(self.content)
writer.close()
content = get_data(self.testfile)
for key in content.keys():
content[key] = list(content[key])
eq_(content, self.content)
def test_write_dates(self):
self.content = {
"date": [[date(2020, 10, 11)]],
"time": [[time(11, 22, 11)]],
"datetime": [[datetime(2020, 11, 11, 12, 12, 12)]],
}
writer = xlsx.XLSXWriter(self.testfile, "xlsx")
writer.write(self.content)
writer.close()
content = get_data(self.testfile)
for key in content.keys():
content[key] = list(content[key])
eq_(content, self.content)
def tearDown(self):
if os.path.exists(self.testfile):
os.unlink(self.testfile)
class TestXLSnCSVWriter(PyexcelWriterBase):
def setUp(self):
self.testfile = "test.xlsx"
self.testfile2 = "test.csv"
def tearDown(self):
if os.path.exists(self.testfile):
os.unlink(self.testfile)
if os.path.exists(self.testfile2):
os.unlink(self.testfile2)
class TestXLSHatWriter(PyexcelHatWriterBase):
def setUp(self):
self.testfile = "test.xlsx"
def tearDown(self):
if os.path.exists(self.testfile):
os.unlink(self.testfile)
|
py | b40d31942ef71c900d5fe2df6b3c6d938c7c4c46 | import math
import os
import random
import re
import sys
#
# Complete the 'aVeryBigSum' function below.
# The function is expected to return a LONG_INTEGER.
# The function accepts LONG_INTEGER_ARRAY ar as parameter.
#
def aVeryBigSum(ar):
# Write your code here
suma = 0
for i in ar:
suma+=i
return suma
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'] + 'solucion3.txt', 'w')
    print("Enter a number")
ar_count = int(input().strip())
    print("Enter the list of numbers")
ar = list(map(int, input().rstrip().split()))
result = aVeryBigSum(ar)
fptr.write(str(result) + '\n')
fptr.close() |
py | b40d3217222e32c43b0517ad9b8cf061e0bfd6f3 | #
# Copyright (c) 2013 Pavol Rusnak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import binascii
import hashlib
import hmac
import os
import sys
import unicodedata
from pbkdf2 import PBKDF2
PBKDF2_ROUNDS = 2048
class Mnemonic(object):
def __init__(self, language):
self.radix = 2048
with open('%s/%s.txt' % (self._get_directory(), language), 'r') as f:
self.wordlist = [w.strip() for w in f.readlines()]
if len(self.wordlist) != self.radix:
raise Exception('Wordlist should contain %d words, but it contains %d words.' % (self.radix, len(self.wordlist)))
@classmethod
def _get_directory(cls):
return os.path.join(os.path.dirname(__file__), 'wordlist')
@classmethod
def list_languages(cls):
return [ f.split('.')[0] for f in os.listdir(cls._get_directory()) if f.endswith('.txt') ]
@classmethod
def normalize_string(cls, txt):
if isinstance(txt, str if sys.version < '3' else bytes):
utxt = txt.decode('utf8')
elif isinstance(txt, unicode if sys.version < '3' else str):
utxt = txt
else:
raise Exception("String value expected")
return unicodedata.normalize('NFKD', utxt)
@classmethod
def detect_language(cls, code):
first = code.split(' ')[0]
languages = cls.list_languages()
for lang in languages:
mnemo = cls(lang)
if first in mnemo.wordlist:
return lang
raise Exception("Language not detected")
def generate(self, strength = 128):
if strength % 32 > 0:
raise Exception('Strength should be divisible by 32, but it is not (%d).' % strength)
return self.to_mnemonic(os.urandom(strength // 8))
def to_mnemonic(self, data):
if len(data) % 4 > 0:
raise Exception('Data length in bits should be divisible by 32, but it is not (%d bytes = %d bits).' % (len(data), len(data) * 8))
h = hashlib.sha256(data).hexdigest()
b = bin(int(binascii.hexlify(data), 16))[2:].zfill(len(data) * 8) + \
bin(int(h, 16))[2:].zfill(256)[:len(data) * 8 // 32]
result = []
for i in range(len(b) // 11):
idx = int(b[i * 11:(i + 1) * 11], 2)
result.append(self.wordlist[idx])
return ' '.join(result)
def check(self, mnemonic):
mnemonic = mnemonic.split(' ')
if len(mnemonic) % 3 > 0:
return False
try:
idx = map(lambda x: bin(self.wordlist.index(x))[2:].zfill(11), mnemonic)
b = ''.join(idx)
except:
return False
l = len(b)
d = b[:l // 33 * 32]
h = b[-l // 33:]
nd = binascii.unhexlify(hex(int(d, 2))[2:].rstrip('L').zfill(l // 33 * 8))
nh = bin(int(hashlib.sha256(nd).hexdigest(), 16))[2:].zfill(256)[:l // 33]
return h == nh
@classmethod
def to_seed(cls, mnemonic, passphrase = ''):
mnemonic = cls.normalize_string(mnemonic)
passphrase = cls.normalize_string(passphrase)
return PBKDF2(mnemonic, u'mnemonic' + passphrase, iterations=PBKDF2_ROUNDS, macmodule=hmac, digestmodule=hashlib.sha512).read(64)
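# Illustrative usage (a sketch added for clarity, not part of the original
# module); it only exercises the methods defined above and assumes an
# 'english' wordlist file is present in the wordlist/ directory.
if __name__ == '__main__':
    m = Mnemonic('english')
    words = m.generate(strength=128)   # 128 bits of entropy -> 12 words
    assert m.check(words)
    seed = Mnemonic.to_seed(words, passphrase='example passphrase')
    print(binascii.hexlify(seed))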
|
py | b40d32d2eeb40a3d168d7ef3804587d2efcb1bf7 | #
# Copyright Ericsson AB 2013. All rights reserved
#
# Authors: Ildiko Vancsa <[email protected]>
# Balazs Gibizer <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common functions for MongoDB and DB2 backends
"""
import pymongo
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common import log
from ceilometer.storage import base
from ceilometer.storage import models
from ceilometer.storage.mongo import utils as pymongo_utils
from ceilometer import utils
LOG = log.getLogger(__name__)
COMMON_AVAILABLE_CAPABILITIES = {
'meters': {'query': {'simple': True,
'metadata': True}},
'samples': {'query': {'simple': True,
'metadata': True,
'complex': True}},
'events': {'query': {'simple': True}},
}
AVAILABLE_STORAGE_CAPABILITIES = {
'storage': {'production_ready': True},
}
class Connection(base.Connection):
"""Base Connection class for MongoDB and DB2 drivers."""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
COMMON_AVAILABLE_CAPABILITIES)
STORAGE_CAPABILITIES = utils.update_nested(
base.Connection.STORAGE_CAPABILITIES,
AVAILABLE_STORAGE_CAPABILITIES,
)
def get_meters(self, user=None, project=None, resource=None, source=None,
metaquery=None, pagination=None):
"""Return an iterable of models.Meter instances
:param user: Optional ID for user that owns the resource.
:param project: Optional ID for project that owns the resource.
:param resource: Optional resource filter.
:param source: Optional source filter.
:param metaquery: Optional dict with metadata to match on.
:param pagination: Optional pagination query.
"""
if pagination:
raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
q = {}
if user is not None:
q['user_id'] = user
if project is not None:
q['project_id'] = project
if resource is not None:
q['_id'] = resource
if source is not None:
q['source'] = source
q.update(metaquery)
for r in self.db.resource.find(q):
for r_meter in r['meter']:
yield models.Meter(
name=r_meter['counter_name'],
type=r_meter['counter_type'],
# Return empty string if 'counter_unit' is not valid for
# backward compatibility.
unit=r_meter.get('counter_unit', ''),
resource_id=r['_id'],
project_id=r['project_id'],
source=r['source'],
user_id=r['user_id'],
)
def get_samples(self, sample_filter, limit=None):
"""Return an iterable of model.Sample instances.
:param sample_filter: Filter.
:param limit: Maximum number of results to return.
"""
if limit == 0:
return []
q = pymongo_utils.make_query_from_filter(sample_filter,
require_meter=False)
return self._retrieve_samples(q,
[("timestamp", pymongo.DESCENDING)],
limit)
def record_events(self, event_models):
"""Write the events to database.
Return a list of events of type models.Event.DUPLICATE in case of
trying to write an already existing event to the database, or
        models.Event.UNKNOWN_PROBLEM in case of any failures with recording the
event in the database.
:param event_models: a list of models.Event objects.
"""
problem_events = []
for event_model in event_models:
traits = []
if event_model.traits:
for trait in event_model.traits:
traits.append({'trait_name': trait.name,
'trait_type': trait.dtype,
'trait_value': trait.value})
try:
self.db.event.insert(
{'_id': event_model.message_id,
'event_type': event_model.event_type,
'timestamp': event_model.generated,
'traits': traits})
except pymongo.errors.DuplicateKeyError as ex:
LOG.exception(_("Failed to record duplicated event: %s") % ex)
problem_events.append((models.Event.DUPLICATE,
event_model))
except Exception as ex:
LOG.exception(_("Failed to record event: %s") % ex)
problem_events.append((models.Event.UNKNOWN_PROBLEM,
event_model))
return problem_events
def get_events(self, event_filter):
"""Return an iter of models.Event objects.
:param event_filter: storage.EventFilter object, consists of filters
for events that are stored in database.
"""
q = pymongo_utils.make_events_query_from_filter(event_filter)
for event in self.db.event.find(q):
traits = []
for trait in event['traits']:
traits.append(models.Trait(name=trait['trait_name'],
dtype=int(trait['trait_type']),
value=trait['trait_value']))
yield models.Event(message_id=event['_id'],
event_type=event['event_type'],
generated=event['timestamp'],
traits=traits)
def get_event_types(self):
"""Return all event types as an iter of strings."""
event_types = set()
events = self.db.event.find()
for event in events:
event_type = event['event_type']
if event_type not in event_types:
event_types.add(event_type)
yield event_type
def get_trait_types(self, event_type):
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event.
"""
trait_names = set()
events = self.db.event.find({'event_type': event_type})
for event in events:
for trait in event['traits']:
trait_name = trait['trait_name']
if trait_name not in trait_names:
                    # Yield each distinct trait name only once; a given trait
                    # name is assumed to map to a single trait type.
trait_names.add(trait_name)
yield {'name': trait_name,
'data_type': trait['trait_type']}
def get_traits(self, event_type, trait_name=None):
"""Return all trait instances associated with an event_type.
        If trait_name is specified, only return instances of that trait.
:param event_type: the type of the Event to filter by
:param trait_name: the name of the Trait to filter by
"""
if not trait_name:
events = self.db.event.find({'event_type': event_type})
else:
            # Select events that have both the given event_type and the given
            # trait_name, and keep only the matching trait in each returned event.
events = self.db.event.find({'$and': [{'event_type': event_type},
{'traits.trait_name': trait_name}]},
{'traits': {'$elemMatch':
{'trait_name': trait_name}}
})
for event in events:
for trait in event['traits']:
yield models.Trait(name=trait['trait_name'],
dtype=trait['trait_type'],
value=trait['trait_value'])
def query_samples(self, filter_expr=None, orderby=None, limit=None):
if limit == 0:
return []
query_filter = {}
orderby_filter = [("timestamp", pymongo.DESCENDING)]
transformer = pymongo_utils.QueryTransformer()
if orderby is not None:
orderby_filter = transformer.transform_orderby(orderby)
if filter_expr is not None:
query_filter = transformer.transform_filter(filter_expr)
return self._retrieve_samples(query_filter, orderby_filter, limit)
def _retrieve_samples(self, query, orderby, limit):
if limit is not None:
samples = self.db.meter.find(query,
limit=limit,
sort=orderby)
else:
samples = self.db.meter.find(query,
sort=orderby)
for s in samples:
# Remove the ObjectId generated by the database when
# the sample was inserted. It is an implementation
# detail that should not leak outside of the driver.
del s['_id']
# Backward compatibility for samples without units
s['counter_unit'] = s.get('counter_unit', '')
# Tolerate absence of recorded_at in older datapoints
s['recorded_at'] = s.get('recorded_at')
yield models.Sample(**s)
|
py | b40d333d242e56d9a4ed2c1f620b96a7bca3a86f | """
MIT License
Copyright (c) 2021 Vítor Mussa
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__all__ = ('QualitubeException',)
class QualitubeException(Exception):
    """Qualitube custom exception.""" |
py | b40d33a11d8fd46e2d3bbff47093d2a0bc3556d6 | # -*- coding: utf-8 -*-
"""
Given an array of n positive integers and a positive integer s, find the minimal length of a subarray of which the sum
≥ s. If there isn't one, return 0 instead.
For example, given the array [2,3,1,2,4,3] and s = 7,
the subarray [4,3] has the minimal length under the problem constraint.
"""
__author__ = 'Daniel'
import sys
class Solution:
def minSubArrayLen(self, s, nums):
"""
        Brute force would be O(n^2); instead use prefix sums with two pointers
        (a sliding window that grows on the right and shrinks from the left
        once the window sum reaches s), giving O(n) time.
:type s: int
:type nums: list[int]
:rtype: int
"""
n = len(nums)
f = [0 for _ in xrange(n+1)]
for i in xrange(1, n+1):
f[i] = f[i-1]+nums[i-1]
b, e = 0, 1
mini = sys.maxint
while e <= n:
if f[e]-f[b] >= s:
mini = min(mini, e-b)
b += 1
else:
e += 1
if mini == sys.maxint:
mini = 0
return mini
if __name__ == "__main__":
assert Solution().minSubArrayLen(7, [2, 3, 1, 2, 4, 3]) == 2
|
py | b40d34d70b5d6d0acef59b9fc1e61a1eaa819459 | import os
import json
import pickle
from os.path import join, dirname, abspath, exists
import sys
sys.path.append(dirname(dirname(abspath(__file__))))
import jackal_navi_envs
from torch import nn
import torch
import gym
import numpy as np
import random
import time
from policy import TD3Policy
from tianshou.utils.net.common import Net
from tianshou.exploration import GaussianNoise
from tianshou.utils.net.continuous import Actor, Critic
from tianshou.data import Batch
random.seed(43)
# This is an ordered benchmarking world index by the transveral time of default dwa
benchmarking_train = [54, 94, 156, 68, 52, 101, 40, 135, 51, 42, 75, 67, 18, 53, 87, 36, 28, 61, 233, 25, 35, 20, 34, 79, 108, 46, 65, 90, 6, 73, 70, 10, 29, 167, 15, 31, 77, 116, 241, 155, 194, 99, 56, 149, 38, 261, 239, 234, 60, 173, 247, 178, 291, 16, 9, 21, 169, 257, 148, 296, 151, 259, 102, 145, 130, 205, 121, 105, 43, 242, 213, 171, 62, 202, 293, 224, 225, 152, 111, 55, 125, 200, 161, 1, 136, 106, 286, 139, 244, 230, 222, 238, 170, 267, 26, 132, 124, 23, 59, 3, 97, 119, 89, 12, 164, 39, 236, 263, 81, 188, 84, 11, 268, 192, 122, 22, 253, 219, 216, 137, 85, 195, 206, 212, 4, 274, 91, 248, 44, 131, 203, 63, 80, 37, 110, 50, 74, 120, 128, 249, 30, 14, 103, 49, 154, 82, 2, 143, 158, 147, 235, 83, 157, 142, 187, 185, 288, 45, 140, 271, 160, 146, 109, 223, 126, 98, 252, 134, 272, 115, 71, 117, 255, 141, 174, 33, 245, 92, 295, 281, 186, 260, 7, 166, 196, 66, 113, 153, 227, 107, 199, 298, 278, 114, 72, 165, 228, 176, 24, 162, 198, 180, 285, 232, 243, 207, 190, 262, 275, 172, 179, 269, 127, 86, 183, 273, 287, 215, 266, 95, 5, 299, 279, 13, 250, 96, 197, 177, 58, 289, 211, 220, 182, 282, 210, 280, 251, 283, 217, 276, 292, 221, 204, 191, 181, 209, 297, 264, 231, 254]
# adjust the occurance by the difficulty level
benchmarking_train = 1*benchmarking_train[20:50] + 2*benchmarking_train[50:150] + 4*benchmarking_train[150:200] + 2*benchmarking_train[200:240]
benchmarking_train = benchmarking_train*3
random.shuffle(benchmarking_train)
BASE_PATH = join(os.getenv('HOME'), 'buffer')
def init_actor(id):
assert os.path.exists(BASE_PATH)
actor_path = join(BASE_PATH, 'actor_%s' %(str(id)))
if not exists(actor_path):
os.mkdir(actor_path) # path to store all the trajectories
f = None
while f is None:
try:
f = open(join(BASE_PATH, 'config.json'), 'rb')
except:
time.sleep(2)
config = json.load(f)
num_env = config['training_config']['num_actor']
def count_actor():
files = os.listdir(BASE_PATH)
num_actor = sum([f.startswith("actor_") for f in files])
return num_actor
# wait until most of actors successfully initialized, tolerance 50 envs
#while num_env-count_actor() > 50:
# time.sleep(10)
return config
def load_model(model):
model_path = join(BASE_PATH, 'policy.pth')
state_dict = {}
state_dict_raw = None
while state_dict_raw is None:
try:
state_dict_raw = torch.load(model_path)
except:
time.sleep(0.1)
pass
model.load_state_dict(state_dict_raw)
model = model.float()
# exploration noise std
with open(join(BASE_PATH, 'eps.txt'), 'r') as f:
eps = float(f.readlines()[0])
return model, eps
def write_buffer(traj, ep, id):
with open(join(BASE_PATH, 'actor_%s' %(str(id)), 'traj_%d.pickle' %(ep)), 'wb') as f:
pickle.dump(traj, f)
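# For reference (inferred from the helpers above, not original documentation):
# the shared $HOME/buffer directory is expected to contain
#   config.json                  - experiment configuration read by init_actor
#   policy.pth, eps.txt          - latest policy weights and exploration noise std
#   actor_<id>/traj_<ep>.pickle  - trajectories written back by each actor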
def main(id):
config = init_actor(id)
env_config = config['env_config']
if env_config['world_name'] != "sequential_applr_testbed.world":
env_config['world_name'] = 'Benchmarking/train/world_%d.world' %(benchmarking_train[id])
assert os.path.exists('/jackal_ws/src/jackal_helper/worlds/Benchmarking/train/world_%d.world' %(benchmarking_train[id]))
wrapper_config = config['wrapper_config']
training_config = config['training_config']
wrapper_dict = jackal_navi_envs.jackal_env_wrapper.wrapper_dict
if config['env'] == 'jackal':
env = wrapper_dict[wrapper_config['wrapper']](gym.make('jackal_continuous-v0', **env_config), **wrapper_config['wrapper_args'])
else:
env = gym.make('Pendulum-v0')
state_shape = env.observation_space.shape or env.observation_space.n
action_shape = env.action_space.shape or env.action_space.n
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = Net(training_config['num_layers'], state_shape, device=device, hidden_layer_size=training_config['hidden_size'])
actor = Actor(
net, action_shape,
1, device, hidden_layer_size=training_config['hidden_size']
).to(device)
actor_optim = torch.optim.Adam(actor.parameters(), lr=training_config['actor_lr'])
net = Net(training_config['num_layers'], state_shape,
action_shape, concat=True, device=device, hidden_layer_size=training_config['hidden_size'])
critic1 = Critic(net, device, hidden_layer_size=training_config['hidden_size']).to(device)
critic1_optim = torch.optim.Adam(critic1.parameters(), lr=training_config['critic_lr'])
critic2 = Critic(net, device, hidden_layer_size=training_config['hidden_size']).to(device)
critic2_optim = torch.optim.Adam(critic2.parameters(), lr=training_config['critic_lr'])
policy = TD3Policy(
actor, actor_optim, critic1, critic1_optim, critic2, critic2_optim,
action_range=[env.action_space.low, env.action_space.high],
tau=training_config['tau'], gamma=training_config['gamma'],
exploration_noise=GaussianNoise(sigma=training_config['exploration_noise']),
policy_noise=training_config['policy_noise'],
update_actor_freq=training_config['update_actor_freq'],
noise_clip=training_config['noise_clip'],
reward_normalization=training_config['rew_norm'],
ignore_done=training_config['ignore_done'],
estimation_step=training_config['n_step'])
print(env.action_space.low, env.action_space.high)
ep = 0
while True:
obs = env.reset()
obs_batch = Batch(obs=[obs], info={})
ep += 1
traj = []
done = False
count = 0
policy, eps = load_model(policy)
policy.set_exp_noise(GaussianNoise(sigma=eps))
while not done:
time.sleep(0.01)
p = random.random()
obs = torch.tensor([obs]).float()
actions = policy(obs_batch).act.cpu().detach().numpy()
#actions = np.array([0.5, 1.57, 6, 20, 0.3])
obs_new, rew, done, info = env.step(actions.reshape(-1))
count += 1
traj.append([obs, actions, rew, done, info])
obs_batch = Batch(obs=[obs_new], info={})
obs = obs_new
# print('count: %d, rew: %f' %(count, rew))
write_buffer(traj, ep, id)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description = 'start an actor')
parser.add_argument('--id', dest='actor_id', type = int, default = 1)
id = parser.parse_args().actor_id
main(id)
|
py | b40d3560e759a647689e00d6960faa3f7db3567f | """Utilities for using Manim with IPython (in particular: Jupyter notebooks)"""
import hashlib
import mimetypes
import os
import shutil
from pathlib import Path
from manim import config, tempconfig
from .._config.main_utils import parse_args
try:
from IPython import get_ipython
from IPython.core.magic import (
Magics,
magics_class,
line_cell_magic,
needs_local_scope,
)
from IPython.core.display import display, Image, Video
except ImportError:
pass
else:
@magics_class
class ManimMagic(Magics):
def __init__(self, shell):
super(ManimMagic, self).__init__(shell)
self.rendered_files = dict()
@needs_local_scope
@line_cell_magic
def manim(self, line, cell=None, local_ns=None):
r"""Render Manim scenes contained in IPython cells.
Works as a line or cell magic.
.. note::
This line and cell magic works best when used in a JupyterLab
environment: while all of the functionality is available for
classic Jupyter notebooks as well, it is possible that videos
sometimes don't update on repeated execution of the same cell
if the scene name stays the same.
This problem does not occur when using JupyterLab.
Please refer to `<https://jupyter.org/>`_ for more information about JupyterLab
and Jupyter notebooks.
Usage in line mode::
%manim MyAwesomeScene [CLI options]
Usage in cell mode::
%%manim MyAwesomeScene [CLI options]
                class MyAwesomeScene(Scene):
def construct(self):
...
Run ``%manim -h`` for possible command line interface options.
"""
if cell:
exec(cell, local_ns)
cli_args = ["manim", ""] + line.split()
if len(cli_args) == 2:
# empty line.split(): no commands have been passed, call with -h
cli_args.append("-h")
try:
args = parse_args(cli_args)
except SystemExit:
return # probably manim -h was called, process ended preemptively
with tempconfig(local_ns.get("config", {})):
config.digest_args(args)
exec(f"{config['scene_names'][0]}().render()", local_ns)
local_path = Path(config["output_file"]).relative_to(Path.cwd())
tmpfile = (
Path(config["media_dir"])
/ "jupyter"
/ f"{_video_hash(local_path)}{local_path.suffix}"
)
if local_path in self.rendered_files:
self.rendered_files[local_path].unlink()
self.rendered_files[local_path] = tmpfile
os.makedirs(tmpfile.parent, exist_ok=True)
shutil.copy(local_path, tmpfile)
file_type = mimetypes.guess_type(config["output_file"])[0]
if file_type.startswith("image"):
display(Image(filename=config["output_file"]))
return
# videos need to be embedded when running in google colab
video_embed = "google.colab" in str(get_ipython())
display(
Video(
tmpfile,
html_attributes='controls autoplay loop style="max-width: 100%;"',
embed=video_embed,
)
)
def _video_hash(path):
sha1 = hashlib.sha1()
with open(path, "rb") as f:
while True:
data = f.read(65536)
if not data:
break
sha1.update(data)
return sha1.hexdigest()
|
py | b40d386e6c30dceb040fa35c499326bcd473ecb9 | ENTRY_POINT = 'triangle_area'
#[PROMPT]
def triangle_area(a, h):
"""Given length of a side and high return area for a triangle.
>>> triangle_area(5, 3)
7.5
"""
#[SOLUTION]
return a * h / 2.0
#[CHECK]
METADATA = {}
def check(candidate):
assert candidate(5, 3) == 7.5
assert candidate(2, 2) == 2.0
assert candidate(10, 8) == 40.0
|
py | b40d388f9b1dc9cdb2f8749c7bb583be0d8d6a67 | """
Download notMNIST and generate a pickle file
Author: Rowel Atienza
Project: https://github.com/roatienza/Deep-Learning-Experiments
"""
# On command line: python3 mnist_a2j_2pickle.py
# Prerequisite: tensorflow (see tensorflow.org)
from __future__ import print_function
import numpy as np
import pickle
import os
import sys
import tarfile
import random
import matplotlib.image as img
from os.path import join
from six.moves.urllib.request import urlretrieve
url = 'http://yaroslavvb.com/upload/notMNIST/'
def maybe_download(filename, expected_bytes):
"""Download a file if not present, and make sure it's the right size."""
if not os.path.exists(filename):
print('Downloading ', filename, " ...")
filename, _ = urlretrieve(url + filename, filename)
statinfo = os.stat(filename)
if statinfo.st_size == expected_bytes:
print('Found and verified', filename)
else:
        raise Exception('Failed to verify ' +
filename + '. Can you get to it with a browser?')
else:
print('Found and verified', filename)
return filename
def extract(filename):
root = os.path.splitext(os.path.splitext(filename)[0])[0] # remove .tar.gz
data_folders = []
if os.path.exists(root):
data_folders = [os.path.join(root, d)
for d in sorted(os.listdir(root)) if d != '.DS_Store']
if len(data_folders) == num_classes:
print("Using previously extracted files...")
print(data_folders)
return data_folders
tar = tarfile.open(filename)
print('Extracting data for %s. This may take a while. Please wait.' % root)
sys.stdout.flush()
tar.extractall()
tar.close()
data_folders = [os.path.join(root, d)
for d in sorted(os.listdir(root)) if d != '.DS_Store']
if len(data_folders) != num_classes:
raise Exception(
'Expected %d folders, one per class. Found %d instead.' % (
num_classes, len(data_folders)))
print(data_folders)
return data_folders
def getfiles_fromlist(dirs):
files = []
for dir in dirs:
files.extend([os.path.join(dir,f) for f in sorted(os.listdir(dir)) if f != '.DS_Store'])
return files
def readfile(path):
try:
data = img.imread(path)
return data
except:
print("Error reading: ", path)
return np.array([])
def read_image_files(files):
imagelabels = []
imagedata = []
for file in files:
parent_dir = os.path.dirname(file)
label = (np.arange(num_classes) == (ord(parent_dir[-1])-ord('A')) ).astype(np.float32)
data = readfile(file)
if (data.size > 0):
imagelabels.append(label)
imagedata.append(data)
return np.array(imagedata),np.array(imagelabels)
train_filename = maybe_download('notMNIST_large.tar.gz', 247336696)
test_filename = maybe_download('notMNIST_small.tar.gz', 8458043)
num_classes = 10
train_folders = extract(train_filename)
test_folders = extract(test_filename)
train_files = np.array(getfiles_fromlist(train_folders))
test_files = np.array(getfiles_fromlist(test_folders))
random.shuffle(train_files)
all_dataset, all_labels = read_image_files(train_files)
test_dataset, test_labels = read_image_files(test_files)
image_size = all_dataset.shape[2]
all_dataset = all_dataset.reshape((-1,image_size*image_size)).astype(np.float32)
test_dataset = test_dataset.reshape((-1,image_size*image_size)).astype(np.float32)
data = { "test_labels" : test_labels, "all_labels" : all_labels, "test_dataset" : test_dataset,
"all_dataset" : all_dataset }
pickle_file = open( "mnist_a2j.pickle", "wb" )
pickle.dump( data, pickle_file )
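# The pickle written above can be read back later, e.g. (a sketch for
# illustration, not part of the original script; the keys match the dict above):
#   with open("mnist_a2j.pickle", "rb") as f:
#       data = pickle.load(f)
#   all_dataset, all_labels = data["all_dataset"], data["all_labels"]
#   test_dataset, test_labels = data["test_dataset"], data["test_labels"]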
pickle_file.close() |
py | b40d38d9dd91738784852d4abb6e32e643864297 | """Test for the testing module"""
# Authors: Guillaume Lemaitre <[email protected]>
# Christos Aridas
# License: MIT
from pytest import raises
from imblearn.base import SamplerMixin
from imblearn.utils.testing import all_estimators
from imblearn.utils.testing import warns
def test_all_estimators():
# check if the filtering is working with a list or a single string
type_filter = 'sampler'
all_estimators(type_filter=type_filter)
type_filter = ['sampler']
estimators = all_estimators(type_filter=type_filter)
for estimator in estimators:
# check that all estimators are sampler
assert issubclass(estimator[1], SamplerMixin)
# check that an error is raised when the type is unknown
type_filter = 'rnd'
with raises(ValueError, match="Parameter type_filter must be 'sampler'"):
all_estimators(type_filter=type_filter)
def test_warns():
import warnings
with warns(UserWarning, match=r'must be \d+$'):
warnings.warn("value must be 42", UserWarning)
with raises(AssertionError, match='pattern not found'):
with warns(UserWarning, match=r'must be \d+$'):
warnings.warn("this is not here", UserWarning)
with warns(UserWarning, match=r'aaa'):
warnings.warn("cccccccccc", UserWarning)
warnings.warn("bbbbbbbbbb", UserWarning)
warnings.warn("aaaaaaaaaa", UserWarning)
a, b, c = ('aaa', 'bbbbbbbbbb', 'cccccccccc')
expected_msg = r"'{}' pattern not found in \['{}', '{}'\]".format(a, b, c)
with raises(AssertionError, match=expected_msg):
with warns(UserWarning, match=r'aaa'):
warnings.warn("bbbbbbbbbb", UserWarning)
warnings.warn("cccccccccc", UserWarning)
|
py | b40d39ec70013637f09d96acef866b9812a38697 |
# -*- coding: utf-8 -*-
'''
File name: code\almost_rightangled_triangles_ii\sol_224.py
Author: Vaidic Joshi
Date created: Oct 20, 2018
Python Version: 3.x
'''
# Solution to Project Euler Problem #224 :: Almost right-angled triangles II
#
# For more information see:
# https://projecteuler.net/problem=224
# Problem Statement
'''
Let us call an integer sided triangle with sides a ≤ b ≤ c barely obtuse if the sides satisfy a^2 + b^2 = c^2 - 1.
How many barely obtuse triangles are there with perimeter ≤ 75,000,000?
'''
# Solution
# Solution Approach
'''
'''
|
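# One possible approach (a sketch, not part of the original file): the three
# classic Barning/Hall matrices used for the Pythagorean-triple tree preserve
# the quadratic form a^2 + b^2 - c^2, so they map barely obtuse triples to
# barely obtuse triples. Starting the tree at (2, 2, 3) and pruning on the
# perimeter enumerates solutions; that this tree reaches every solution (each
# non-symmetric one once per ordering of a and b) is an assumption of this sketch.
def count_barely_obtuse(limit=75000000):
    total = 0
    stack = [(2, 2, 3)]
    while stack:
        a, b, c = stack.pop()
        if a + b + c > limit:
            continue  # the three children only grow the perimeter
        if a <= b:
            total += 1  # count each unordered triangle once
        stack.append((a - 2*b + 2*c, 2*a - b + 2*c, 2*a - 2*b + 3*c))
        stack.append((a + 2*b + 2*c, 2*a + b + 2*c, 2*a + 2*b + 3*c))
        stack.append((-a + 2*b + 2*c, -2*a + b + 2*c, -2*a + 2*b + 3*c))
    return total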
py | b40d3a5034827d4cd6a348675226dd252176e46d | # type: ignore
from datetime import datetime
import warnings
from copy import deepcopy
from enum import Enum
from itertools import chain
from typing import List, Optional, Dict, Union, Callable, Type, Any, Generator
from pydantic import BaseModel, conlist, constr
from labelbox.schema.ontology import SchemaId
from labelbox.utils import camel_case
class DataRowMetadataKind(Enum):
number = "CustomMetadataNumber"
datetime = "CustomMetadataDateTime"
enum = "CustomMetadataEnum"
string = "CustomMetadataString"
option = "CustomMetadataEnumOption"
embedding = "CustomMetadataEmbedding"
# Metadata schema
class DataRowMetadataSchema(BaseModel):
uid: SchemaId
name: constr(strip_whitespace=True, min_length=1, max_length=100)
reserved: bool
kind: DataRowMetadataKind
options: Optional[List["DataRowMetadataSchema"]]
parent: Optional[SchemaId]
DataRowMetadataSchema.update_forward_refs()
Embedding: Type[List[float]] = conlist(float, min_items=128, max_items=128)
String: Type[str] = constr(max_length=500)
class _CamelCaseMixin(BaseModel):
class Config:
allow_population_by_field_name = True
alias_generator = camel_case
# Metadata base class
class DataRowMetadataField(_CamelCaseMixin):
schema_id: SchemaId
    # value is of type `Any` so that we do not improperly coerce the value to the wrong type
# Additional validation is performed before upload using the schema information
value: Any
class DataRowMetadata(_CamelCaseMixin):
data_row_id: str
fields: List[DataRowMetadataField]
class DeleteDataRowMetadata(_CamelCaseMixin):
data_row_id: str
fields: List[SchemaId]
class DataRowMetadataBatchResponse(_CamelCaseMixin):
data_row_id: str
error: Optional[str] = None
fields: List[Union[DataRowMetadataField, SchemaId]]
# --- Batch GraphQL Objects ---
# Don't want to crowd the name space with internals
# Bulk upsert values
class _UpsertDataRowMetadataInput(_CamelCaseMixin):
schema_id: str
value: Any
# Batch of upsert values for a datarow
class _UpsertBatchDataRowMetadata(_CamelCaseMixin):
data_row_id: str
fields: List[_UpsertDataRowMetadataInput]
class _DeleteBatchDataRowMetadata(_CamelCaseMixin):
data_row_id: str
schema_ids: List[SchemaId]
_BatchInputs = Union[List[_UpsertBatchDataRowMetadata],
List[_DeleteBatchDataRowMetadata]]
_BatchFunction = Callable[[_BatchInputs], List[DataRowMetadataBatchResponse]]
class DataRowMetadataOntology:
""" Ontology for data row metadata
Metadata provides additional context for a data rows. Metadata is broken into two classes
reserved and custom. Reserved fields are defined by Labelbox and used for creating
specific experiences in the platform.
>>> mdo = client.get_data_row_metadata_ontology()
"""
def __init__(self, client):
self._client = client
self._batch_size = 50 # used for uploads and deletes
self._raw_ontology = self._get_ontology()
self._build_ontology()
def _build_ontology(self):
# all fields
self.fields = self._parse_ontology(self._raw_ontology)
self.fields_by_id = self._make_id_index(self.fields)
# reserved fields
self.reserved_fields: List[DataRowMetadataSchema] = [
f for f in self.fields if f.reserved
]
self.reserved_by_id = self._make_id_index(self.reserved_fields)
self.reserved_by_name: Dict[
str,
DataRowMetadataSchema] = self._make_name_index(self.reserved_fields)
# custom fields
self.custom_fields: List[DataRowMetadataSchema] = [
f for f in self.fields if not f.reserved
]
self.custom_by_id = self._make_id_index(self.custom_fields)
self.custom_by_name: Dict[
str,
DataRowMetadataSchema] = self._make_name_index(self.custom_fields)
@staticmethod
def _make_name_index(fields: List[DataRowMetadataSchema]):
index = {}
for f in fields:
if f.options:
index[f.name] = {}
for o in f.options:
index[f.name][o.name] = o
else:
index[f.name] = f
return index
@staticmethod
def _make_id_index(
fields: List[DataRowMetadataSchema]
) -> Dict[SchemaId, DataRowMetadataSchema]:
index = {}
for f in fields:
index[f.uid] = f
if f.options:
for o in f.options:
index[o.uid] = o
return index
def _get_ontology(self) -> List[Dict[str, Any]]:
query = """query GetMetadataOntologyBetaPyApi {
customMetadataOntology {
id
name
kind
reserved
options {
id
kind
name
reserved
}
}}
"""
return self._client.execute(query)["customMetadataOntology"]
@staticmethod
def _parse_ontology(raw_ontology) -> List[DataRowMetadataSchema]:
fields = []
copy = deepcopy(raw_ontology)
for schema in copy:
schema["uid"] = schema["id"]
options = None
if schema.get("options"):
options = []
for option in schema["options"]:
option["uid"] = option["id"]
options.append(
DataRowMetadataSchema(**{
**option,
**{
"parent": schema["uid"]
}
}))
schema["options"] = options
fields.append(DataRowMetadataSchema(**schema))
return fields
def parse_metadata(
self, unparsed: List[Dict[str,
List[Union[str,
Dict]]]]) -> List[DataRowMetadata]:
""" Parse metadata responses
>>> mdo.parse_metadata([metadata])
Args:
unparsed: An unparsed metadata export
Returns:
metadata: List of `DataRowMetadata`
"""
parsed = []
if isinstance(unparsed, dict):
raise ValueError("Pass a list of dictionaries")
for dr in unparsed:
fields = []
for f in dr["fields"]:
schema = self.fields_by_id[f["schemaId"]]
if schema.kind == DataRowMetadataKind.enum:
continue
elif schema.kind == DataRowMetadataKind.option:
field = DataRowMetadataField(schema_id=schema.parent,
value=schema.uid)
elif schema.kind == DataRowMetadataKind.datetime:
field = DataRowMetadataField(
schema_id=schema.uid,
value=datetime.fromisoformat(f["value"][:-1] +
"+00:00"))
else:
field = DataRowMetadataField(schema_id=schema.uid,
value=f["value"])
fields.append(field)
parsed.append(
DataRowMetadata(data_row_id=dr["dataRowId"], fields=fields))
return parsed
def bulk_upsert(
self, metadata: List[DataRowMetadata]
) -> List[DataRowMetadataBatchResponse]:
"""Upsert datarow metadata
>>> metadata = DataRowMetadata(
>>> data_row_id="datarow-id",
>>> fields=[
>>> DataRowMetadataField(schema_id="schema-id", value="my-message"),
>>> ...
>>> ]
>>> )
>>> mdo.batch_upsert([metadata])
Args:
metadata: List of DataRow Metadata to upsert
Returns:
list of unsuccessful upserts.
An empty list means the upload was successful.
"""
if not len(metadata):
raise ValueError("Empty list passed")
def _batch_upsert(
upserts: List[_UpsertBatchDataRowMetadata]
) -> List[DataRowMetadataBatchResponse]:
query = """mutation UpsertDataRowMetadataBetaPyApi($metadata: [DataRowCustomMetadataBatchUpsertInput!]!) {
upsertDataRowCustomMetadata(data: $metadata){
dataRowId
error
fields {
value
schemaId
}
}
}"""
res = self._client.execute(
query, {"metadata": upserts})['upsertDataRowCustomMetadata']
return [
DataRowMetadataBatchResponse(data_row_id=r['dataRowId'],
error=r['error'],
fields=self.parse_metadata(
[r])[0].fields) for r in res
]
items = []
for m in metadata:
items.append(
_UpsertBatchDataRowMetadata(
data_row_id=m.data_row_id,
fields=list(
chain.from_iterable(
self._parse_upsert(m) for m in m.fields))).dict(
by_alias=True))
res = _batch_operations(_batch_upsert, items, self._batch_size)
return res
def bulk_delete(
self, deletes: List[DeleteDataRowMetadata]
    ) -> List[DataRowMetadataBatchResponse]:
        """ Delete metadata from a datarow by specifying the fields you want to remove
>>> delete = DeleteDataRowMetadata(
>>> data_row_id="datarow-id",
>>> fields=[
>>> "schema-id-1",
>>> "schema-id-2"
>>> ...
>>> ]
>>> )
>>> mdo.batch_delete([metadata])
Args:
deletes: Data row and schema ids to delete
Returns:
list of unsuccessful deletions.
An empty list means all data rows were successfully deleted.
"""
if not len(deletes):
raise ValueError("Empty list passed")
def _batch_delete(
deletes: List[_DeleteBatchDataRowMetadata]
) -> List[DataRowMetadataBatchResponse]:
query = """mutation DeleteDataRowMetadataBetaPyApi($deletes: [DataRowCustomMetadataBatchDeleteInput!]!) {
deleteDataRowCustomMetadata(data: $deletes) {
dataRowId
error
fields {
value
schemaId
}
}
}
"""
res = self._client.execute(
query, {"deletes": deletes})['deleteDataRowCustomMetadata']
failures = []
for dr in res:
dr['fields'] = [f['schemaId'] for f in dr['fields']]
failures.append(DataRowMetadataBatchResponse(**dr))
return failures
items = [self._validate_delete(m) for m in deletes]
return _batch_operations(_batch_delete,
items,
batch_size=self._batch_size)
def bulk_export(self, data_row_ids: List[str]) -> List[DataRowMetadata]:
""" Exports metadata for a list of data rows
>>> mdo.bulk_export([data_row.uid for data_row in data_rows])
Args:
data_row_ids: List of data data rows to fetch metadata for
Returns:
A list of DataRowMetadata.
There will be one DataRowMetadata for each data_row_id passed in.
This is true even if the data row does not have any meta data.
Data rows without metadata will have empty `fields`.
"""
if not len(data_row_ids):
raise ValueError("Empty list passed")
def _bulk_export(_data_row_ids: List[str]) -> List[DataRowMetadata]:
query = """query dataRowCustomMetadataPyApi($dataRowIds: [ID!]!) {
dataRowCustomMetadata(where: {dataRowIds : $dataRowIds}) {
dataRowId
fields {
value
schemaId
}
}
}
"""
return self.parse_metadata(
self._client.execute(
query,
{"dataRowIds": _data_row_ids})['dataRowCustomMetadata'])
return _batch_operations(_bulk_export,
data_row_ids,
batch_size=self._batch_size)
def _parse_upsert(
self, metadatum: DataRowMetadataField
) -> List[_UpsertDataRowMetadataInput]:
"""Format for metadata upserts to GQL"""
if metadatum.schema_id not in self.fields_by_id:
raise ValueError(
f"Schema Id `{metadatum.schema_id}` not found in ontology")
schema = self.fields_by_id[metadatum.schema_id]
if schema.kind == DataRowMetadataKind.datetime:
parsed = _validate_parse_datetime(metadatum)
elif schema.kind == DataRowMetadataKind.string:
parsed = _validate_parse_text(metadatum)
elif schema.kind == DataRowMetadataKind.number:
parsed = _validate_parse_number(metadatum)
elif schema.kind == DataRowMetadataKind.embedding:
parsed = _validate_parse_embedding(metadatum)
elif schema.kind == DataRowMetadataKind.enum:
parsed = _validate_enum_parse(schema, metadatum)
elif schema.kind == DataRowMetadataKind.option:
raise ValueError("An Option id should not be set as the Schema id")
else:
raise ValueError(f"Unknown type: {schema}")
return [_UpsertDataRowMetadataInput(**p) for p in parsed]
def _validate_delete(self, delete: DeleteDataRowMetadata):
if not len(delete.fields):
raise ValueError(f"No fields specified for {delete.data_row_id}")
deletes = set()
for schema_id in delete.fields:
if schema_id not in self.fields_by_id:
raise ValueError(
f"Schema Id `{schema_id}` not found in ontology")
schema = self.fields_by_id[schema_id]
# handle users specifying enums by adding all option enums
if schema.kind == DataRowMetadataKind.enum:
[deletes.add(o.uid) for o in schema.options]
deletes.add(schema.uid)
return _DeleteBatchDataRowMetadata(
data_row_id=delete.data_row_id,
schema_ids=list(delete.fields)).dict(by_alias=True)
def _batch_items(iterable: List[Any], size: int) -> Generator[Any, None, None]:
l = len(iterable)
for ndx in range(0, l, size):
yield iterable[ndx:min(ndx + size, l)]
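# For illustration (not in the original source): _batch_items yields consecutive
# slices of at most `size` items, e.g.
#   list(_batch_items([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]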
def _batch_operations(
batch_function: _BatchFunction,
items: List,
batch_size: int = 100,
):
response = []
for batch in _batch_items(items, batch_size):
response += batch_function(batch)
return response
def _validate_parse_embedding(
field: DataRowMetadataField
) -> List[Dict[str, Union[SchemaId, Embedding]]]:
if isinstance(field.value, list):
if not (Embedding.min_items <= len(field.value) <= Embedding.max_items):
raise ValueError(
"Embedding length invalid. "
"Must have length within the interval "
f"[{Embedding.min_items},{Embedding.max_items}]. Found {len(field.value)}"
)
field.value = [float(x) for x in field.value]
else:
raise ValueError(
f"Expected a list for embedding. Found {type(field.value)}")
return [field.dict(by_alias=True)]
def _validate_parse_number(
field: DataRowMetadataField
) -> List[Dict[str, Union[SchemaId, str, float, int]]]:
field.value = float(field.value)
return [field.dict(by_alias=True)]
def _validate_parse_datetime(
field: DataRowMetadataField) -> List[Dict[str, Union[SchemaId, str]]]:
if isinstance(field.value, str):
if field.value.endswith("Z"):
field.value = field.value[:-1]
field.value = datetime.fromisoformat(field.value)
elif not isinstance(field.value, datetime):
raise TypeError(
f"value for datetime fields must be either a string or datetime object. Found {type(field.value)}"
)
return [{
"schemaId": field.schema_id,
"value": field.value.isoformat() + "Z", # needs to be UTC
}]
def _validate_parse_text(
field: DataRowMetadataField) -> List[Dict[str, Union[SchemaId, str]]]:
if not isinstance(field.value, str):
raise ValueError(
f"Expected a string type for the text field. Found {type(field.value)}"
)
if len(field.value) > String.max_length:
raise ValueError(
f"string fields cannot exceed {String.max_length} characters.")
return [field.dict(by_alias=True)]
def _validate_enum_parse(
schema: DataRowMetadataSchema,
field: DataRowMetadataField) -> List[Dict[str, Union[SchemaId, dict]]]:
if schema.options:
if field.value not in {o.uid for o in schema.options}:
raise ValueError(
f"Option `{field.value}` not found for {field.schema_id}")
else:
raise ValueError("Incorrectly specified enum schema")
return [{
"schemaId": field.schema_id,
"value": {}
}, {
"schemaId": field.value,
"value": {}
}]
|
py | b40d3a93245009e19ab36db374f22b0e29840012 | # NCC: Neural Code Comprehension
# https://github.com/spcl/ncc
# Copyright 2018 ETH Zurich
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ==============================================================================
"""Main inst2vec and ncc workflow"""
import os
import pickle
from inst2vec import inst2vec_datagen as i2v_datagen
from inst2vec import inst2vec_preprocess as i2v_prep
from inst2vec import inst2vec_vocabulary as i2v_vocab
from inst2vec import inst2vec_embedding as i2v_emb
from inst2vec import inst2vec_evaluate as i2v_eval
from inst2vec import inst2vec_appflags
from absl import flags, app
FLAGS = flags.FLAGS
def main(argv):
del argv # unused
data_folder = os.path.join(FLAGS.data_folder, FLAGS.data)
if not os.path.exists(FLAGS.embeddings_file):
if FLAGS.data == "ncc/data" and len(os.listdir(data_folder)) <= 1:
# Generate the data set
print('Folder', data_folder, 'is empty - preparing to download training data')
i2v_datagen.datagen(data_folder)
else:
# Assert the data folder's existence
assert os.path.exists(data_folder), "Folder " + data_folder + " does not exist"
# Build XFGs from raw code
data_folders = i2v_prep.construct_xfg(data_folder)
# Build vocabulary
i2v_vocab.construct_vocabulary(data_folder, data_folders)
# Train embeddings
embedding_matrix, embeddings_file = i2v_emb.train_embeddings(data_folder, data_folders)
else:
print('Loading pre-trained embeddings from', FLAGS.embeddings_file)
with open(FLAGS.embeddings_file, 'rb') as f:
embedding_matrix = pickle.load(f)
embeddings_file = FLAGS.embeddings_file
# Evaluate embeddings (intrinsic evaluation)
i2v_eval.evaluate_embeddings(data_folder, embedding_matrix, embeddings_file)
if __name__ == '__main__':
app.run(main)
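
# Example invocations (illustrative sketch only; the script filename is not known here,
# and the flags below are the ones referenced above via FLAGS, presumably defined in
# inst2vec_appflags):
#   python <this script> --data_folder /path/to/data --data ncc/data
#   python <this script> --embeddings_file /path/to/pretrained_embeddings.p
# The first form builds XFGs, the vocabulary, and trains embeddings; the second skips
# training and only runs the intrinsic evaluation on pre-trained embeddings.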
|
py | b40d3afdb785adbc0a00918872612d02923cf2ae | from typing import List
class Solution:
    # Backtracking: generate all combinations of n pairs of well-formed parentheses (LeetCode 22).
    def XXX(self, n: int) -> List[str]:
ans = []
def dfs(n, path, open, close):
if len(path) == 2 * n:
ans.append(path)
return
if open < n:
dfs(n, path + '(', open + 1, close)
if close < open:
dfs(n, path + ')', open, close + 1)
dfs(n, '', 0, 0)
return ans
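
# Usage sketch (added for illustration; `XXX` is the anonymized method name used above,
# so it is called directly here):
if __name__ == "__main__":
    # For n=3 the backtracking order yields:
    # ['((()))', '(()())', '(())()', '()(())', '()()()']
    print(Solution().XXX(3))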
|
py | b40d3c8465be79a09b9632d48dbbf4fc66e8fcca | from decimal import Decimal
import os
import random
import warnings
from sqlalchemy import __version__
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Numeric
from sqlalchemy import String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
warnings.filterwarnings("ignore", r".*Decimal objects natively") # noqa
# speed up cdecimal if available
try:
import cdecimal
import sys
sys.modules["decimal"] = cdecimal
except ImportError:
pass
Base = declarative_base()
class Employee(Base):
__tablename__ = "employee"
id = Column(Integer, primary_key=True)
name = Column(String(100), nullable=False)
type = Column(String(50), nullable=False)
__mapper_args__ = {"polymorphic_on": type}
class Boss(Employee):
__tablename__ = "boss"
id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
golf_average = Column(Numeric)
__mapper_args__ = {"polymorphic_identity": "boss"}
class Grunt(Employee):
__tablename__ = "grunt"
id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
savings = Column(Numeric)
employer_id = Column(Integer, ForeignKey("boss.id"))
employer = relationship(
"Boss", backref="employees", primaryjoin=Boss.id == employer_id
)
__mapper_args__ = {"polymorphic_identity": "grunt"}
if os.path.exists("orm2010.db"):
os.remove("orm2010.db")
# use a file based database so that cursor.execute() has some
# palpable overhead.
engine = create_engine("sqlite:///orm2010.db")
Base.metadata.create_all(engine)
sess = Session(engine)
def runit(status, factor=1, query_runs=5):
num_bosses = 100 * factor
num_grunts = num_bosses * 100
bosses = [
Boss(name="Boss %d" % i, golf_average=Decimal(random.randint(40, 150)))
for i in range(num_bosses)
]
sess.add_all(bosses)
status("Added %d boss objects" % num_bosses)
grunts = [
Grunt(
name="Grunt %d" % i,
savings=Decimal(random.randint(5000000, 15000000) / 100),
)
for i in range(num_grunts)
]
status("Added %d grunt objects" % num_grunts)
while grunts:
# this doesn't associate grunts with bosses evenly,
# just associates lots of them with a relatively small
# handful of bosses
batch_size = 100
batch_num = (num_grunts - len(grunts)) / batch_size
boss = sess.query(Boss).filter_by(name="Boss %d" % batch_num).first()
for grunt in grunts[0:batch_size]:
grunt.employer = boss
grunts = grunts[batch_size:]
sess.commit()
status("Associated grunts w/ bosses and committed")
# do some heavier reading
for i in range(query_runs):
status("Heavy query run #%d" % (i + 1))
report = []
# load all the Grunts, print a report with their name, stats,
# and their bosses' stats.
for grunt in sess.query(Grunt):
report.append(
(
grunt.name,
grunt.savings,
grunt.employer.name,
grunt.employer.golf_average,
)
)
sess.close() # close out the session
def run_with_profile(runsnake=False, dump=False):
import cProfile
import pstats
filename = "orm2010.profile"
if os.path.exists("orm2010.profile"):
os.remove("orm2010.profile")
def status(msg):
print(msg)
cProfile.runctx("runit(status)", globals(), locals(), filename)
stats = pstats.Stats(filename)
counts_by_methname = dict(
(key[2], stats.stats[key][0]) for key in stats.stats
)
print("SQLA Version: %s" % __version__)
print("Total calls %d" % stats.total_calls)
print("Total cpu seconds: %.2f" % stats.total_tt)
print(
"Total execute calls: %d"
% counts_by_methname[
"<method 'execute' of 'sqlite3.Cursor' " "objects>"
]
)
print(
"Total executemany calls: %d"
% counts_by_methname.get(
"<method 'executemany' of 'sqlite3.Cursor' " "objects>", 0
)
)
if dump:
stats.sort_stats("time", "calls")
stats.print_stats()
if runsnake:
os.system("runsnake %s" % filename)
def run_with_time():
import time
now = time.time()
def status(msg):
print("%d - %s" % (time.time() - now, msg))
runit(status, 10)
print("Total time: %d" % (time.time() - now))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--profile",
action="store_true",
help="run shorter test suite w/ cprofilng",
)
parser.add_argument(
"--dump",
action="store_true",
help="dump full call profile (implies --profile)",
)
parser.add_argument(
"--runsnake",
action="store_true",
help="invoke runsnakerun (implies --profile)",
)
args = parser.parse_args()
args.profile = args.profile or args.dump or args.runsnake
if args.profile:
run_with_profile(runsnake=args.runsnake, dump=args.dump)
else:
run_with_time()
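
# Example invocations (a sketch; the script filename is assumed from the database/profile
# names used above):
#   python orm2010.py              # timed run with factor=10
#   python orm2010.py --profile    # shorter run under cProfile
#   python orm2010.py --dump       # profile and print the full call profile
#   python orm2010.py --runsnake   # profile and open runsnakerun on the result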
|
py | b40d3d18b6e65d29698d86e817949bf2cbf8ec88 | '''
The arithmetic sequence, 1487, 4817, 8147, in which each of the terms increases
by 3330, is unusual in two ways: (i) each of the three terms are prime, and,
(ii) each of the 4-digit numbers are permutations of one another.
There are no arithmetic sequences made up of three 1-, 2-, or 3-digit primes,
exhibiting this property, but there is one other 4-digit increasing sequence.
What 12-digit number do you form by concatenating the three terms in this
sequence? '''
from itertools import permutations
def prime_sieve(up_to):
nums = list(range(up_to))
nums[0] = nums[1] = 0
for num in nums[:int(up_to**.5)+1]:
if num != 0:
multiple = num+num
while multiple < up_to:
nums[multiple] = 0
multiple += num
return (prime for prime in nums if prime != 0)
def find_2_equal_diffs(it):
if len(it) < 3:
return None
sorted_it = sorted(list(it))
for i1, num1 in enumerate(sorted_it):
        for num2 in sorted_it[i1+1:]:
diff = num2-num1
if num2 + diff in it:
return (num1, num2, num2 + diff)
return None
def find_matching_permutations(num, matchset):
perms = set([int(''.join(p)) for p in permutations(str(num))])
return perms.intersection(matchset)
def solve_p049():
primes = [p for p in prime_sieve(10000) if p > 999]
prime_sets = [find_matching_permutations(p, primes) for p in primes]
return set([find_2_equal_diffs(p) for p in prime_sets if find_2_equal_diffs(p)])
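
# Minimal usage sketch (added for illustration): solve_p049() returns the set of
# qualifying 3-term tuples, including the known (1487, 4817, 8147); concatenating the
# terms of the other tuple gives the requested 12-digit number.
if __name__ == '__main__':
    for triple in sorted(solve_p049()):
        print(''.join(str(term) for term in triple))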
|
py | b40d3d2b1ad671e15adc4ff8e057cdcc96184080 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import threading
import os
import traceback
import shutil
import weakref
import csv
from decimal import Decimal
import base64
import binascii
import eth_abi
import queue
from PyQt5.QtCore import Qt
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from .exception_window import Exception_Hook
from electrum import keystore, constants, ecc
from electrum.vipstarcoin import COIN, is_address, TYPE_ADDRESS, TYPE_SCRIPT, is_hash160, eth_abi_encode
from electrum.plugin import run_hook
from electrum.i18n import _
from electrum.util import (bh2u, bfh, format_time, format_satoshis, format_fee_satoshis,PrintError, format_satoshis_plain,
NotEnoughFunds, UserCancelled, UserFacingException, profiler, export_meta, import_meta, open_browser,
InvalidPassword)
from electrum import util, bitcoin, commands, coinchooser
from electrum import paymentrequest
from electrum.transaction import Transaction, opcodes, contract_script, TxOutput, is_opcreate_script
from electrum.address_synchronizer import AddTransactionException
from electrum.wallet import Multisig_Wallet
from electrum.tokens import Token
from electrum.crypto import hash_160
from electrum.i18n import _
try:
from electrum.plot import plot_history
except:
plot_history = None
from .amountedit import AmountEdit, BTCAmountEdit, MyLineEdit
from .qrcodewidget import QRCodeWidget, QRDialog
from .qrtextedit import ShowQRTextEdit, ScanQRTextEdit
from .transaction_dialog import show_transaction
from .fee_slider import FeeSlider
from .util import *
from .token_dialog import TokenAddDialog, TokenInfoDialog, TokenSendDialog
from .smart_contract_dialog import ContractCreateDialog, ContractFuncDialog, ContractEditDialog
class StatusBarButton(QPushButton):
def __init__(self, icon, tooltip, func):
QPushButton.__init__(self, icon, '')
self.setToolTip(tooltip)
self.setFlat(True)
self.setMaximumWidth(25)
self.clicked.connect(self.onPress)
self.func = func
self.setIconSize(QSize(25,25))
def onPress(self, checked=False):
'''Drops the unwanted PyQt5 "checked" argument'''
self.func()
def keyPressEvent(self, e):
if e.key() == Qt.Key_Return:
self.func()
class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
payment_request_ok_signal = pyqtSignal()
payment_request_error_signal = pyqtSignal()
new_fx_quotes_signal = pyqtSignal()
new_fx_history_signal = pyqtSignal()
new_fx_token_signal = pyqtSignal()
network_signal = pyqtSignal(str, object)
alias_received_signal = pyqtSignal()
computing_privkeys_signal = pyqtSignal()
show_privkeys_signal = pyqtSignal()
def __init__(self, gui_object, wallet):
QMainWindow.__init__(self)
self.gui_object = gui_object
self.config = config = gui_object.config
self.setup_exception_hook()
self.network = gui_object.daemon.network
self.wallet = wallet
self.fx = gui_object.daemon.fx
self.invoices = wallet.invoices
self.contacts = wallet.contacts
self.smart_contracts = wallet.smart_contracts
self.tokens = wallet.tokens
self.tray = gui_object.tray
self.app = gui_object.app
self.cleaned_up = False
self.is_max = False
self.payment_request = None
self.checking_accounts = False
self.qr_window = None
self.not_enough_funds = False
self.pluginsdialog = None
self.require_fee_update = False
self.tl_windows = []
self.tx_external_keypairs = {}
self.tx_notification_queue = queue.Queue()
self.tx_notification_last_time = 0
self.create_status_bar()
self.need_update = threading.Event()
self.decimal_point = config.get('decimal_point', 8)
self.num_zeros = int(config.get('num_zeros', 0))
self.completions = QStringListModel()
self.tabs = tabs = QTabWidget(self)
self.send_tab = self.create_send_tab()
self.receive_tab = self.create_receive_tab()
self.addresses_tab = self.create_addresses_tab()
self.utxo_tab = self.create_utxo_tab()
self.console_tab = self.create_console_tab()
self.contacts_tab = self.create_contacts_tab()
self.tokens_tab = self.create_tokens_tab()
self.smart_contract_tab = self.create_smart_contract_tab()
tabs.addTab(self.create_history_tab(), read_QIcon("tab_history.png"), _('History'))
tabs.addTab(self.send_tab, read_QIcon("tab_send.png"), _('Send'))
tabs.addTab(self.receive_tab, read_QIcon("tab_receive.png"), _('Receive'))
tabs.addTab(self.tokens_tab, read_QIcon("tab_contacts.png"), _('Tokens'))
# tabs.addTab(self.contacts_tab, read_QIcon("tab_contacts.png"), _('Contacts'))
def add_optional_tab(tabs, tab, icon, description, name):
tab.tab_icon = icon
tab.tab_description = description
tab.tab_pos = len(tabs)
tab.tab_name = name
if self.config.get('show_{}_tab'.format(name), False):
tabs.addTab(tab, icon, description.replace("&", ""))
add_optional_tab(tabs, self.addresses_tab, read_QIcon("tab_addresses.png"), _("&Addresses"), "addresses")
add_optional_tab(tabs, self.utxo_tab, read_QIcon("tab_coins.png"), _("Co&ins"), "utxo")
add_optional_tab(tabs, self.console_tab, read_QIcon("tab_console.png"), _("Con&sole"), "console")
add_optional_tab(tabs, self.contacts_tab, read_QIcon("tab_contracts.png"), _("Con&tacts"), "contacts")
add_optional_tab(tabs, self.smart_contract_tab, read_QIcon("tab_console.png"), _('Smart Contract'),
'contract')
tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setCentralWidget(tabs)
if self.config.get("is_maximized"):
self.showMaximized()
self.setWindowIcon(read_QIcon("electrum.png"))
self.init_menubar()
wrtabs = weakref.proxy(tabs)
QShortcut(QKeySequence("Ctrl+W"), self, self.close)
QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() - 1)%wrtabs.count()))
QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() + 1)%wrtabs.count()))
for i in range(wrtabs.count()):
QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: wrtabs.setCurrentIndex(i))
self.payment_request_ok_signal.connect(self.payment_request_ok)
self.payment_request_error_signal.connect(self.payment_request_error)
self.history_list.setFocus(True)
# network callbacks
if self.network:
self.network_signal.connect(self.on_network_qt)
interests = ['wallet_updated', 'network_updated', 'blockchain_updated', 'new_transaction', 'status',
'banner', 'verified', 'fee']
# To avoid leaking references to "self" that prevent the
# window from being GC-ed when closed, callbacks should be
# methods of this class only, and specifically not be
# partials, lambdas or methods of subobjects. Hence...
self.network.register_callback(self.on_network, interests)
# set initial message
self.console.showMessage(self.network.banner)
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
self.network.register_callback(self.on_token, ['on_token'])
self.new_fx_quotes_signal.connect(self.on_fx_quotes)
self.new_fx_history_signal.connect(self.on_fx_history)
self.new_fx_token_signal.connect(self.on_fx_token)
# update fee slider in case we missed the callback
self.fee_slider.update()
self.load_wallet(wallet)
self.connect_slots(gui_object.timer)
self.fetch_alias()
def on_history(self, b):
self.new_fx_history_signal.emit()
def setup_exception_hook(self):
Exception_Hook(self)
def on_fx_history(self):
self.history_list.refresh_headers()
self.history_list.update()
self.address_list.update()
def on_quotes(self, b):
self.new_fx_quotes_signal.emit()
def on_fx_quotes(self):
self.update_status()
# Refresh edits with the new rate
edit = self.fiat_send_e if self.fiat_send_e.is_last_edited else self.amount_e
edit.textEdited.emit(edit.text())
edit = self.fiat_receive_e if self.fiat_receive_e.is_last_edited else self.receive_amount_e
edit.textEdited.emit(edit.text())
# History tab needs updating if it used spot
if self.fx.history_used_spot:
self.history_list.update()
def on_token(self, b):
self.new_fx_token_signal.emit()
def on_fx_token(self):
self.token_balance_list.update()
self.token_hist_list.update()
def toggle_tab(self, tab):
show = not self.config.get('show_{}_tab'.format(tab.tab_name), False)
self.config.set_key('show_{}_tab'.format(tab.tab_name), show)
item_text = (_("Hide") if show else _("Show")) + " " + tab.tab_description
tab.menu_action.setText(item_text)
if show:
# Find out where to place the tab
index = len(self.tabs)
for i in range(len(self.tabs)):
try:
if tab.tab_pos < self.tabs.widget(i).tab_pos:
index = i
break
except AttributeError:
pass
self.tabs.insertTab(index, tab, tab.tab_icon, tab.tab_description.replace("&", ""))
else:
i = self.tabs.indexOf(tab)
self.tabs.removeTab(i)
def push_top_level_window(self, window):
'''Used for e.g. tx dialog box to ensure new dialogs are appropriately
parented. This used to be done by explicitly providing the parent
window, but that isn't something hardware wallet prompts know.'''
self.tl_windows.append(window)
def pop_top_level_window(self, window):
self.tl_windows.remove(window)
def top_level_window(self, test_func=None):
'''Do the right thing in the presence of tx dialog windows'''
override = self.tl_windows[-1] if self.tl_windows else None
if override and test_func and not test_func(override):
override = None # only override if ok for test_func
return self.top_level_window_recurse(override, test_func)
def diagnostic_name(self):
return "%s/%s" % (PrintError.diagnostic_name(self),
self.wallet.basename() if self.wallet else "None")
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def on_error(self, exc_info):
e = exc_info[1]
if isinstance(e, UserCancelled):
pass
elif isinstance(e, UserFacingException):
self.show_error(str(e))
else:
try:
traceback.print_exception(*exc_info)
except OSError:
pass # see #4418
self.show_error(str(e))
def on_network(self, event, *args):
if event == 'wallet_updated':
wallet = args[0]
if wallet == self.wallet:
self.need_update.set()
elif event == 'network_updated':
self.gui_object.network_updated_signal_obj.network_updated_signal \
.emit(event, args)
self.network_signal.emit('status', None)
elif event == 'blockchain_updated':
# to update number of confirmations in history
self.need_update.set()
elif event == 'new_transaction':
wallet, tx = args
if wallet == self.wallet:
self.tx_notification_queue.put(tx)
elif event in ['status', 'banner', 'verified', 'fee']:
# Handle in GUI thread
self.network_signal.emit(event, args)
else:
self.print_error("unexpected network message:", event, args)
def on_network_qt(self, event, args=None):
# Handle a network message in the GUI thread
if event == 'status':
self.update_status()
elif event == 'banner':
self.console.showMessage(args[0])
elif event == 'verified':
wallet, tx_hash, tx_mined_status = args
if wallet == self.wallet:
self.history_list.update_item(tx_hash, tx_mined_status)
elif event == 'fee':
if self.config.is_dynfee():
self.fee_slider.update()
self.do_update_fee()
else:
self.print_error("unexpected network_qt signal:", event, args)
def fetch_alias(self):
self.alias_info = None
alias = self.config.get('alias')
if alias:
alias = str(alias)
def f():
self.alias_info = self.contacts.resolve_openalias(alias)
self.alias_received_signal.emit()
t = threading.Thread(target=f)
t.setDaemon(True)
t.start()
def close_wallet(self):
if self.wallet:
self.print_error('close_wallet', self.wallet.storage.path)
run_hook('close_wallet', self.wallet)
@profiler
def load_wallet(self, wallet):
wallet.thread = TaskThread(self, self.on_error)
self.update_recently_visited(wallet.storage.path)
# update(==init) all tabs; expensive for large wallets..
# so delay it somewhat, hence __init__ can finish and the window can appear sooner
QTimer.singleShot(50, self.update_tabs)
self.need_update.set()
# Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized
# update menus
self.seed_menu.setEnabled(self.wallet.has_seed())
self.update_lock_icon()
self.update_buttons_on_seed()
self.update_console()
self.clear_receive_tab()
self.request_list.update()
self.tabs.show()
self.init_geometry()
if self.config.get('hide_gui') and self.gui_object.tray.isVisible():
self.hide()
else:
self.show()
self.watching_only_changed()
run_hook('load_wallet', wallet, self)
def init_geometry(self):
winpos = self.wallet.storage.get("winpos-qt")
try:
screen = self.app.desktop().screenGeometry()
assert screen.contains(QRect(*winpos))
self.setGeometry(*winpos)
except:
self.print_error("using default geometry")
self.setGeometry(100, 100, 780, 400)
def watching_only_changed(self):
title = 'Electrum for VIPSTARCOIN %s - %s' % (self.wallet.electrum_version,
self.wallet.basename())
extra = [self.wallet.storage.get('wallet_type', '?')]
if self.wallet.is_watching_only():
# self.warn_if_watching_only()
extra.append(_('watching only'))
title += ' [%s]'% ', '.join(extra)
if constants.net.TESTNET:
title += ' - {}'.format(_('testnet'))
self.setWindowTitle(title)
self.password_menu.setEnabled(self.wallet.may_have_password())
self.import_privkey_menu.setVisible(self.wallet.can_import_privkey())
self.import_address_menu.setVisible(self.wallet.can_import_address())
self.export_menu.setEnabled(self.wallet.can_export())
def warn_if_watching_only(self):
if self.wallet.is_watching_only():
msg = ' '.join([
_("This wallet is watching-only."),
_("This means you will not be able to spend vipstarcoins with it."),
_("Make sure you own the seed phrase or the private keys, before you request VIPSTARCOINs to be sent to this wallet.")
])
self.show_warning(msg, title=_('Information'))
def open_wallet(self):
try:
wallet_folder = self.get_wallet_folder()
except FileNotFoundError as e:
self.show_error(str(e))
return
filename, __ = QFileDialog.getOpenFileName(self, _("Select your wallet file"), wallet_folder)
if not filename:
return
self.gui_object.new_window(filename)
def backup_wallet(self):
path = self.wallet.storage.path
wallet_folder = os.path.dirname(path)
filename, __ = QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder)
if not filename:
return
new_path = os.path.join(wallet_folder, filename)
if new_path != path:
try:
shutil.copy2(path, new_path)
self.show_message(_("A copy of your wallet file was created in")+" '%s'" % str(new_path), title=_("Wallet backup created"))
except (BaseException,) as reason:
self.show_critical(_("Electrum was unable to copy your wallet file to the specified location.") + "\n" + str(reason), title=_("Unable to create backup"))
def update_recently_visited(self, filename):
recent = self.config.get('recently_open', [])
try:
sorted(recent)
except:
recent = []
if filename in recent:
recent.remove(filename)
recent.insert(0, filename)
recent = [path for path in recent if os.path.exists(path)]
recent = recent[:5]
self.config.set_key('recently_open', recent)
self.recently_visited_menu.clear()
for i, k in enumerate(sorted(recent)):
b = os.path.basename(k)
def loader(k):
return lambda: self.gui_object.new_window(k)
self.recently_visited_menu.addAction(b, loader(k)).setShortcut(QKeySequence("Ctrl+%d"%(i+1)))
self.recently_visited_menu.setEnabled(len(recent))
def get_wallet_folder(self):
return os.path.dirname(os.path.abspath(self.config.get_wallet_path()))
def new_wallet(self):
try:
wallet_folder = self.get_wallet_folder()
except FileNotFoundError as e:
self.show_error(str(e))
return
i = 1
while True:
filename = "wallet_%d" % i
if filename in os.listdir(wallet_folder):
i += 1
else:
break
full_path = os.path.join(wallet_folder, filename)
self.gui_object.start_new_window(full_path, None)
def init_menubar(self):
menubar = QMenuBar()
file_menu = menubar.addMenu(_("&File"))
self.recently_visited_menu = file_menu.addMenu(_("&Recently open"))
file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
file_menu.addSeparator()
file_menu.addAction(_("&Quit"), self.close)
wallet_menu = menubar.addMenu(_("&Wallet"))
wallet_menu.addAction(_("&Information"), self.show_master_public_keys)
wallet_menu.addSeparator()
self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
self.import_privkey_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
self.import_address_menu = wallet_menu.addAction(_("Import addresses"), self.import_addresses)
wallet_menu.addSeparator()
address_menu = wallet_menu.addMenu(_("&Addresses"))
address_menu.addAction(_("&Filter"), lambda: self.address_list.show_toolbar(True))
labels_menu = wallet_menu.addMenu(_("&Labels"))
labels_menu.addAction(_("&Import"), self.do_import_labels)
labels_menu.addAction(_("&Export"), self.do_export_labels)
hist_menu = wallet_menu.addMenu(_("&History"))
hist_menu.addAction(_("&Filter"), lambda: self.history_list.show_toolbar(True))
hist_menu.addAction("Plot", self.plot_history_dialog).setEnabled(plot_history is not None)
hist_menu.addAction("Export", self.export_history_dialog)
contacts_menu = wallet_menu.addMenu(_("Contacts"))
contacts_menu.addAction(_("&New"), self.new_contact_dialog)
contacts_menu.addAction(_("Import"), lambda: self.contact_list.import_contacts())
contacts_menu.addAction(_("Export"), lambda: self.contact_list.export_contacts())
invoices_menu = wallet_menu.addMenu(_("Invoices"))
invoices_menu.addAction(_("Import"), lambda: self.invoice_list.import_invoices())
invoices_menu.addAction(_("Export"), lambda: self.invoice_list.export_invoices())
token_menu = wallet_menu.addMenu(_("Token"))
token_menu.addAction(_("Add Token"), lambda: self.token_add_dialog())
wallet_menu.addSeparator()
wallet_menu.addAction(_("Find"), self.toggle_search).setShortcut(QKeySequence("Ctrl+F"))
def add_toggle_action(view_menu, tab):
is_shown = self.config.get('show_{}_tab'.format(tab.tab_name), False)
item_name = (_("Hide") if is_shown else _("Show")) + " " + tab.tab_description
tab.menu_action = view_menu.addAction(item_name, lambda: self.toggle_tab(tab))
view_menu = menubar.addMenu(_("&View"))
add_toggle_action(view_menu, self.addresses_tab)
add_toggle_action(view_menu, self.utxo_tab)
add_toggle_action(view_menu, self.console_tab)
add_toggle_action(view_menu, self.contacts_tab)
add_toggle_action(view_menu, self.smart_contract_tab)
tools_menu = menubar.addMenu(_("&Tools"))
# Settings / Preferences are all reserved keywords in OSX using this as work around
tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
tools_menu.addAction(_("&Network"), lambda: self.gui_object.show_network_dialog(self))
tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
tools_menu.addSeparator()
tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
tools_menu.addSeparator()
tools_menu.addAction(_("&Pay to many"), self.paytomany)
raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)
self.raw_transaction_menu = raw_transaction_menu
run_hook('init_menubar_tools', self, tools_menu)
help_menu = menubar.addMenu(_("&Help"))
help_menu.addAction(_("&About"), self.show_about)
help_menu.addAction(_("&Official website"),
lambda: open_browser("https://github.com/VIPSTARCOIN-electrum/VIPSTARCOIN-electrum/"))
help_menu.addSeparator()
help_menu.addAction(_("&Documentation"), lambda: open_browser("http://docs.electrum.org/")).setShortcut(
QKeySequence.HelpContents)
help_menu.addAction(_("&Report Bug"), self.show_report_bug)
help_menu.addSeparator()
# help_menu.addAction(_("&Donate to server"), self.donate_to_server)
self.setMenuBar(menubar)
def donate_to_server(self):
d = self.network.get_donation_address()
if d:
host = self.network.get_parameters()[0]
self.pay_to_URI('vipstarcoin:%s?message=donation for %s'%(d, host))
else:
self.show_error(_('No donation address for this server'))
def show_about(self):
QMessageBox.about(self, "VIPSTARCOIN Electrum",
_("Version") +" %s" % (self.wallet.electrum_version) + "\n\n" +
_(
"This software is based on Electrum to support VIPSTARCOIN. VIPSTARCOIN Electrum's focus is speed, with low resource usage and simplifying VIPSTARCOIN. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the Bitcoin system." + "\n\n" +
_("Uses icons from the Icons8 icon pack (icons8.com).")))
def show_report_bug(self):
msg = ' '.join([
_("Please report any bugs as issues on github:<br/>"),
"<a href=\"https://github.com/VIPSTARCOIN-electrum/VIPSTARCOIN-electrum/issues\">https://github.com/VIPSTARCOIN-electrum/VIPSTARCOIN-electrum/issues</a><br></br>",
_("Before reporting a bug, upgrade to the most recent version of Electrum (latest release or git HEAD), and include the version number in your report."),
_("Try to explain not only what the bug is, but how it occurs.")
])
self.show_message(msg, title="VIPSTARCOIN Electrum - " + _("Reporting Bugs"), rich_text=True)
def notify_transactions(self):
if self.tx_notification_queue.qsize() == 0:
return
if not self.wallet.up_to_date:
return # no notifications while syncing
now = time.time()
rate_limit = 20 # seconds
if self.tx_notification_last_time + rate_limit > now:
return
self.tx_notification_last_time = now
self.print_error("Notifying GUI")
txns = []
while True:
try:
txns.append(self.tx_notification_queue.get_nowait())
except queue.Empty:
break
# Combine the transactions if there are at least three
if len(txns) >=3:
total_amount = 0
for tx in txns:
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if is_relevant:
total_amount += v
self.notify(_("{} new transactions received: Total amount received in the new transactions {}")
.format(len(txns), self.format_amount_and_units(total_amount)))
else:
for tx in txns:
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if is_relevant:
self.notify(_("New transaction received: {}").format(self.format_amount_and_units(v)))
def notify(self, message):
if self.tray:
self.tray.showMessage("VIPSTARCOIN Electrum", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', os.path.expanduser('~'))
fileName, __ = QFileDialog.getOpenFileName(self, title, directory, filter)
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', os.path.expanduser('~'))
path = os.path.join( directory, filename )
fileName, __ = QFileDialog.getSaveFileName(self, title, path, filter)
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def connect_slots(self, sender):
sender.timer_signal.connect(self.timer_actions)
def timer_actions(self):
# Note this runs in the GUI thread
if self.need_update.is_set():
self.need_update.clear()
self.update_wallet()
# resolve aliases
self.payto_e.resolve()
# update fee
if self.require_fee_update:
self.do_update_fee()
self.require_fee_update = False
self.notify_transactions()
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, self.num_zeros, self.decimal_point, is_diff=is_diff, whitespaces=whitespaces)
def format_amount_and_units(self, amount):
text = self.format_amount(amount) + ' '+ self.base_unit()
x = self.fx.format_amount_and_units(amount) if self.fx else None
if text and x:
text += ' (%s)'%x
return text
def format_fee_rate(self, fee_rate):
return format_fee_satoshis(fee_rate / 1000, num_zeros=self.num_zeros) + ' sat/byte'
def get_decimal_point(self):
return self.decimal_point
def base_unit(self):
assert self.decimal_point in [0, 2, 5, 8]
if self.decimal_point == 0:
return 'boon'
if self.decimal_point == 2:
return 'uVIPS'
if self.decimal_point == 5:
return 'mVIPS'
if self.decimal_point == 8:
return 'VIPS'
raise Exception('Unknown base unit')
def connect_fields(self, window, btc_e, fiat_e, fee_e):
def edit_changed(edit):
if edit.follows:
return
edit.setStyleSheet(BLACK_FG)
fiat_e.is_last_edited = (edit == fiat_e)
amount = edit.get_amount()
rate = self.fx.exchange_rate() if self.fx else Decimal('NaN')
if not rate or rate.is_nan() or amount is None:
if edit is fiat_e:
btc_e.setText("")
if fee_e:
fee_e.setText("")
else:
fiat_e.setText("")
else:
if edit is fiat_e:
btc_e.follows = True
btc_e.setAmount(int(amount / Decimal(rate) * COIN))
btc_e.setStyleSheet(BLUE_FG)
btc_e.follows = False
if fee_e:
window.update_fee()
else:
fiat_e.follows = True
fiat_e.setText(self.fx.ccy_amount_str(
amount * Decimal(rate) / COIN, False))
fiat_e.setStyleSheet(BLUE_FG)
fiat_e.follows = False
btc_e.follows = False
fiat_e.follows = False
fiat_e.textChanged.connect(partial(edit_changed, fiat_e))
btc_e.textChanged.connect(partial(edit_changed, btc_e))
fiat_e.is_last_edited = False
def update_status(self):
if not self.wallet:
return
if self.network is None or not self.network.is_running():
text = _("Offline")
icon = read_QIcon("status_disconnected.png")
elif self.network.is_connected():
server_height = self.network.get_server_height()
server_lag = self.network.get_local_height() - server_height
# Server height can be 0 after switching to a new server
# until we get a headers subscription request response.
# Display the synchronizing message in that case.
if not self.wallet.up_to_date or server_height == 0:
text = _("Synchronizing...")
icon = read_QIcon("status_waiting.png")
elif server_lag > 1:
                text = _("Server is lagging (%d blocks)") % server_lag
icon = read_QIcon("status_lagging.png")
else:
c, u, x = self.wallet.get_balance()
text = _("Balance") + ": %s " % (self.format_amount_and_units(c))
if u:
text += " [%s unconfirmed]" % (self.format_amount(u, True).strip())
if x:
text += " [%s unmatured]" % (self.format_amount(x, True).strip())
# append fiat balance and price
if self.fx.is_enabled():
text += self.fx.get_fiat_status_text(c + u + x,
self.base_unit(), self.get_decimal_point()) or ''
if not self.network.proxy:
icon = read_QIcon("status_connected.png")
else:
icon = read_QIcon("status_connected_proxy.png")
else:
text = _("Not connected")
icon = read_QIcon("status_disconnected.png")
self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename()))
self.balance_label.setText(text)
self.status_button.setIcon( icon )
def update_wallet(self):
self.update_status()
if self.wallet.up_to_date or not self.network or not self.network.is_connected():
self.update_tabs()
def update_tabs(self):
self.history_list.update()
self.request_list.update()
self.address_list.update()
self.utxo_list.update()
self.contact_list.update()
self.token_balance_list.update()
self.token_hist_list.update()
self.smart_contract_list.update()
self.invoice_list.update()
self.update_completions()
def create_history_tab(self):
from .history_list import HistoryList
self.history_list = l = HistoryList(self)
l.searchable_list = l
return self.create_list_tab(l, l.create_toolbar())
def show_address(self, addr):
from . import address_dialog
d = address_dialog.AddressDialog(self, addr)
d.exec_()
def show_transaction(self, tx, tx_desc = None):
'''tx_desc is set only for txs created in the Send tab'''
show_transaction(tx, self, tx_desc)
def create_receive_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.receive_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
self.receive_address_e = ButtonsLineEdit()
self.receive_address_e.addCopyButton(self.app)
self.receive_address_e.setReadOnly(True)
msg = _(
            'VIPSTARCOIN address where the payment should be received. Note that each payment request uses a different VIPSTARCOIN address.')
self.receive_address_label = HelpLabel(_('Receiving address'), msg)
self.receive_address_e.textChanged.connect(self.update_receive_qr)
self.receive_address_e.setFocusPolicy(Qt.ClickFocus)
grid.addWidget(self.receive_address_label, 0, 0)
grid.addWidget(self.receive_address_e, 0, 1, 1, -1)
self.receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self.receive_message_e, 1, 1, 1, -1)
self.receive_message_e.textChanged.connect(self.update_receive_qr)
self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self.receive_amount_e, 2, 1)
self.receive_amount_e.textChanged.connect(self.update_receive_qr)
self.fiat_receive_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_receive_e.setVisible(False)
grid.addWidget(self.fiat_receive_e, 2, 2, Qt.AlignLeft)
self.connect_fields(self, self.receive_amount_e, self.fiat_receive_e, None)
self.expires_combo = QComboBox()
self.expires_combo.addItems([i[0] for i in expiration_values])
self.expires_combo.setCurrentIndex(3)
self.expires_combo.setFixedWidth(self.receive_amount_e.width())
msg = ' '.join([
_('Expiration date of your request.'),
_('This information is seen by the recipient if you send them a signed payment request.'),
_('Expired requests have to be deleted manually from your list, in order to free the corresponding VIPSTARCOIN addresses.'),
_('The VIPSTARCOIN address never expires and will always be part of this wallet.'),
])
grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
grid.addWidget(self.expires_combo, 3, 1)
self.expires_label = QLineEdit('')
self.expires_label.setReadOnly(1)
self.expires_label.setFocusPolicy(Qt.NoFocus)
self.expires_label.hide()
grid.addWidget(self.expires_label, 3, 1)
self.save_request_button = QPushButton(_('Save'))
self.save_request_button.clicked.connect(self.save_payment_request)
self.new_request_button = QPushButton(_('New'))
self.new_request_button.clicked.connect(self.new_payment_request)
self.receive_qr = QRCodeWidget(fixedSize=200)
self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()
self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
self.receive_buttons = buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.save_request_button)
buttons.addWidget(self.new_request_button)
grid.addLayout(buttons, 4, 1, 1, 2)
self.receive_requests_label = QLabel(_('Requests'))
from .request_list import RequestList
self.request_list = RequestList(self)
# layout
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addStretch()
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addWidget(self.receive_qr)
w = QWidget()
w.searchable_list = self.request_list
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.receive_requests_label)
vbox.addWidget(self.request_list)
vbox.setStretchFactor(self.request_list, 1000)
return w
def delete_payment_request(self, addr):
self.wallet.remove_payment_request(addr, self.config)
self.request_list.update()
self.clear_receive_tab()
def get_request_URI(self, addr):
req = self.wallet.receive_requests[addr]
message = self.wallet.labels.get(addr, '')
amount = req['amount']
extra_query_params = {}
if req.get('time'):
extra_query_params['time'] = str(int(req.get('time')))
if req.get('exp'):
extra_query_params['exp'] = str(int(req.get('exp')))
if req.get('name') and req.get('sig'):
sig = bfh(req.get('sig'))
sig = bitcoin.base_encode(sig, base=58)
extra_query_params['name'] = req['name']
extra_query_params['sig'] = sig
uri = util.create_bip21_uri(addr, amount, message, extra_query_params=extra_query_params)
return str(uri)
def sign_payment_request(self, addr):
alias = self.config.get('alias')
alias_privkey = None
if alias and self.alias_info:
alias_addr, alias_name, validated = self.alias_info
if alias_addr:
if self.wallet.is_mine(alias_addr):
msg = _('This payment request will be signed.') + '\n' + _('Please enter your password')
password = None
if self.wallet.has_keystore_encryption():
password = self.password_dialog(msg)
if not password:
return
try:
self.wallet.sign_payment_request(addr, alias, alias_addr, password)
except Exception as e:
self.show_error(str(e))
return
else:
return
def save_payment_request(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = self.receive_message_e.text()
if not message and not amount:
self.show_error(_('No message or amount'))
return False
i = self.expires_combo.currentIndex()
expiration = list(map(lambda x: x[1], expiration_values))[i]
req = self.wallet.make_payment_request(addr, amount, message, expiration)
try:
self.wallet.add_payment_request(req, self.config)
except Exception as e:
traceback.print_exc(file=sys.stderr)
self.show_error(_('Error adding payment request') + ':\n' + str(e))
else:
self.sign_payment_request(addr)
self.save_request_button.setEnabled(False)
finally:
self.request_list.update()
self.address_list.update()
def view_and_paste(self, title, msg, data):
dialog = WindowModalDialog(self, title)
vbox = QVBoxLayout()
label = QLabel(msg)
label.setWordWrap(True)
vbox.addWidget(label)
pr_e = ShowQRTextEdit(text=data)
vbox.addWidget(pr_e)
vbox.addLayout(Buttons(CopyCloseButton(pr_e.text, self.app, dialog)))
dialog.setLayout(vbox)
dialog.exec_()
def export_payment_request(self, addr):
r = self.wallet.receive_requests.get(addr)
pr = paymentrequest.serialize_request(r).SerializeToString()
name = r['id'] + '.bip70'
fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")
if fileName:
with open(fileName, "wb+") as f:
f.write(util.to_bytes(pr))
self.show_message(_("Request saved successfully"))
self.saved = True
def new_payment_request(self):
addr = self.wallet.get_unused_address()
if addr is None:
if not self.wallet.is_deterministic():
msg = [
_('No more addresses in your wallet.'),
_('You are using a non-deterministic wallet, which cannot create new addresses.'),
_('If you want to create new addresses, use a deterministic wallet instead.')
]
self.show_message(' '.join(msg))
return
if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
return
addr = self.wallet.create_new_address(False)
self.set_receive_address(addr)
self.expires_label.hide()
self.expires_combo.show()
self.new_request_button.setEnabled(False)
self.receive_message_e.setFocus(1)
def set_receive_address(self, addr):
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
def clear_receive_tab(self):
addr = self.wallet.get_receiving_address() or ''
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
self.expires_label.hide()
self.expires_combo.show()
def toggle_qr_window(self):
from . import qrwindow
if not self.qr_window:
self.qr_window = qrwindow.QR_Window(self)
self.qr_window.setVisible(True)
self.qr_window_geometry = self.qr_window.geometry()
else:
if not self.qr_window.isVisible():
self.qr_window.setVisible(True)
self.qr_window.setGeometry(self.qr_window_geometry)
else:
self.qr_window_geometry = self.qr_window.geometry()
self.qr_window.setVisible(False)
self.update_receive_qr()
def show_send_tab(self):
self.tabs.setCurrentIndex(self.tabs.indexOf(self.send_tab))
def show_receive_tab(self):
self.tabs.setCurrentIndex(self.tabs.indexOf(self.receive_tab))
def receive_at(self, addr):
if not bitcoin.is_address(addr):
return
self.show_receive_tab()
self.receive_address_e.setText(addr)
self.new_request_button.setEnabled(True)
def update_receive_qr(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = self.receive_message_e.text()
self.save_request_button.setEnabled((amount is not None) or (message != ""))
uri = util.create_bip21_uri(addr, amount, message)
self.receive_qr.setData(uri)
if self.qr_window and self.qr_window.isVisible():
self.qr_window.set_content(addr, amount, message, uri)
def create_send_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.send_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
from .paytoedit import PayToEdit
self.amount_e = BTCAmountEdit(self.get_decimal_point)
self.payto_e = PayToEdit(self)
msg = _('Recipient of the funds.') + '\n\n' \
+ _(
'You may enter a VIPS address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a VIPSTARCOIN address)')
payto_label = HelpLabel(_('Pay to'), msg)
grid.addWidget(payto_label, 1, 0)
grid.addWidget(self.payto_e, 1, 1, 1, -1)
completer = QCompleter()
completer.setCaseSensitivity(False)
self.payto_e.set_completer(completer)
completer.setModel(self.completions)
msg = _('Description of the transaction (not mandatory).') + '\n\n'\
+ _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
description_label = HelpLabel(_('Description'), msg)
grid.addWidget(description_label, 2, 0)
self.message_e = MyLineEdit()
grid.addWidget(self.message_e, 2, 1, 1, -1)
self.from_label = QLabel(_('From'))
grid.addWidget(self.from_label, 3, 0)
self.from_list = MyTreeWidget(self, self.from_list_menu, ['',''])
self.from_list.setHeaderHidden(True)
self.from_list.setMaximumHeight(80)
grid.addWidget(self.from_list, 3, 1, 1, -1)
self.set_pay_from([])
msg = _('Amount to be sent.') + '\n\n' \
+ _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
+ _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
+ _('Keyboard shortcut: type "!" to send all your coins.')
amount_label = HelpLabel(_('Amount'), msg)
grid.addWidget(amount_label, 4, 0)
grid.addWidget(self.amount_e, 4, 1)
self.fiat_send_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_send_e.setVisible(False)
grid.addWidget(self.fiat_send_e, 4, 2)
self.amount_e.frozen.connect(
lambda: self.fiat_send_e.setFrozen(self.amount_e.isReadOnly()))
self.max_button = EnterButton(_("Max"), self.spend_max)
self.max_button.setFixedWidth(140)
grid.addWidget(self.max_button, 4, 3)
hbox = QHBoxLayout()
hbox.addStretch(1)
grid.addLayout(hbox, 4, 4)
msg = _(
'VIPSTARCOIN transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n' \
+ _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n' \
+ _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
self.fee_e_label = HelpLabel(_('Fee'), msg)
def fee_cb(dyn, pos, fee_rate):
if dyn:
self.config.set_key('fee_level', pos, False)
else:
self.config.set_key('fee_per_kb', fee_rate, False)
self.spend_max() if self.is_max else self.update_fee()
self.fee_slider = FeeSlider(self, self.config, fee_cb)
self.fee_slider.setFixedWidth(140)
self.fee_e = BTCAmountEdit(self.get_decimal_point)
if not self.config.get('show_fee', False):
self.fee_e.setVisible(False)
self.fee_e.textEdited.connect(self.update_fee)
# This is so that when the user blanks the fee and moves on,
# we go back to auto-calculate mode and put a fee back.
self.fee_e.editingFinished.connect(self.update_fee)
self.connect_fields(self, self.amount_e, self.fiat_send_e, self.fee_e)
self.rbf_checkbox = QCheckBox(_('Replaceable'))
msg = [_('If you check this box, your transaction will be marked as non-final,'),
               _('and you will have the possibility, while it is unconfirmed, to replace it with a transaction that pays a higher fee.'),
_('Note that some merchants do not accept non-final transactions until they are confirmed.')]
self.rbf_checkbox.setToolTip('<p>' + ' '.join(msg) + '</p>')
self.rbf_checkbox.setVisible(bool(self.config.get('use_rbf', True)))
grid.addWidget(self.fee_e_label, 5, 0)
grid.addWidget(self.fee_slider, 5, 1)
grid.addWidget(self.fee_e, 5, 2)
grid.addWidget(self.rbf_checkbox, 5, 3)
self.preview_button = EnterButton(_("Preview"), self.do_preview)
self.preview_button.setToolTip(_('Display the details of your transactions before signing it.'))
self.send_button = EnterButton(_("Send"), self.do_send)
self.clear_button = EnterButton(_("Clear"), self.do_clear)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.clear_button)
buttons.addWidget(self.preview_button)
buttons.addWidget(self.send_button)
grid.addLayout(buttons, 6, 1, 1, 3)
self.amount_e.shortcut.connect(self.spend_max)
self.payto_e.textChanged.connect(self.update_fee)
self.amount_e.textEdited.connect(self.update_fee)
def reset_max(t):
self.is_max = False
self.max_button.setEnabled(not bool(t))
self.amount_e.textEdited.connect(reset_max)
self.fiat_send_e.textEdited.connect(reset_max)
def entry_changed():
text = ""
if self.not_enough_funds:
amt_color, fee_color = RED_FG, RED_FG
text = _( "Not enough funds" )
c, u, x = self.wallet.get_frozen_balance()
if c+u+x:
text += ' (' + self.format_amount(c+u+x).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
elif self.fee_e.isModified():
amt_color, fee_color = BLACK_FG, BLACK_FG
elif self.amount_e.isModified():
amt_color, fee_color = BLACK_FG, BLUE_FG
else:
amt_color, fee_color = BLUE_FG, BLUE_FG
self.statusBar().showMessage(text)
self.amount_e.setStyleSheet(amt_color)
self.fee_e.setStyleSheet(fee_color)
self.amount_e.textChanged.connect(entry_changed)
self.fee_e.textChanged.connect(entry_changed)
self.invoices_label = QLabel(_('Invoices'))
from .invoice_list import InvoiceList
self.invoice_list = InvoiceList(self)
vbox0 = QVBoxLayout()
vbox0.addLayout(grid)
hbox = QHBoxLayout()
hbox.addLayout(vbox0)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.invoices_label)
vbox.addWidget(self.invoice_list)
vbox.setStretchFactor(self.invoice_list, 1000)
w.searchable_list = self.invoice_list
run_hook('create_send_tab', grid)
return w
def spend_max(self):
self.is_max = True
self.do_update_fee()
def update_fee(self):
self.require_fee_update = True
def get_payto_or_dummy(self):
r = self.payto_e.get_recipient()
if r:
return r
return (TYPE_ADDRESS, self.wallet.dummy_address())
def do_update_fee(self):
'''Recalculate the fee. If the fee was manually input, retain it, but
still build the TX to see if there are enough funds.
'''
if not self.config.get('offline') and self.config.is_dynfee() and not self.config.has_fee_estimates():
self.statusBar().showMessage(_('Waiting for fee estimates...'))
return False
freeze_fee = (self.fee_e.isModified()
and (self.fee_e.text() or self.fee_e.hasFocus()))
amount = '!' if self.is_max else self.amount_e.get_amount()
if amount is None:
if not freeze_fee:
self.fee_e.setAmount(None)
self.not_enough_funds = False
self.statusBar().showMessage('')
else:
fee = self.fee_e.get_amount() if freeze_fee else None
outputs = self.payto_e.get_outputs(self.is_max)
if not outputs:
_type, addr = self.get_payto_or_dummy()
outputs = [TxOutput(_type, addr, amount)]
try:
is_sweep = bool(self.tx_external_keypairs)
tx = self.wallet.make_unsigned_transaction(self.get_coins(), outputs, self.config, fee,
is_sweep=is_sweep)
self.not_enough_funds = False
except NotEnoughFunds:
self.not_enough_funds = True
if not freeze_fee:
self.fee_e.setAmount(None)
return
except BaseException:
return
if not freeze_fee:
fee = None if self.not_enough_funds else tx.get_fee()
self.fee_e.setAmount(fee)
if self.is_max:
amount = tx.output_value()
self.amount_e.setAmount(amount)
if fee is None:
return
def from_list_delete(self, item):
i = self.from_list.indexOfTopLevelItem(item)
self.pay_from.pop(i)
self.redraw_from_list()
self.update_fee()
def from_list_menu(self, position):
item = self.from_list.itemAt(position)
menu = QMenu()
menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
menu.exec_(self.from_list.viewport().mapToGlobal(position))
def set_pay_from(self, coins):
self.pay_from = list(coins)
self.redraw_from_list()
def redraw_from_list(self):
self.from_list.clear()
self.from_label.setHidden(len(self.pay_from) == 0)
self.from_list.setHidden(len(self.pay_from) == 0)
def format(x):
h = x.get('prevout_hash')
return h[0:10] + '...' + h[-10:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')
for item in self.pay_from:
self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))
def get_contact_payto(self, key):
_type, label = self.contacts.get(key)
return label + ' <' + key + '>' if _type == 'address' else key
def update_completions(self):
l = [self.get_contact_payto(key) for key in self.contacts.keys()]
self.completions.setStringList(l)
def protected(func):
'''Password request wrapper. The password is passed to the function
as the 'password' named argument. "None" indicates either an
unencrypted wallet, or the user cancelled the password request.
An empty input is passed as the empty string.'''
def request_password(self, *args, **kwargs):
parent = self.top_level_window()
password = None
while self.wallet.has_keystore_encryption():
password = self.password_dialog(parent=parent)
if password is None:
# User cancelled password input
return
try:
self.wallet.check_password(password)
break
except Exception as e:
self.show_error(str(e), parent=parent)
continue
kwargs['password'] = password
return func(self, *args, **kwargs)
return request_password
def read_send_tab(self):
if self.payment_request and self.payment_request.has_expired():
self.show_error(_('Payment request has expired'))
return
label = self.message_e.text()
if self.payment_request:
outputs = self.payment_request.get_outputs()
else:
errors = self.payto_e.get_errors()
if errors:
self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
return
outputs = self.payto_e.get_outputs(self.is_max)
if self.payto_e.is_alias and self.payto_e.validated is False:
alias = self.payto_e.toPlainText()
            msg = _('WARNING: the alias "%s" could not be validated via an additional security check, DNSSEC, and thus may not be correct.') % alias + '\n'
msg += _('Do you wish to continue?')
if not self.question(msg):
return
if not outputs:
self.show_error(_('No outputs'))
return
for o in outputs:
if o.address is None:
self.show_error(_('VIPSTARCOIN Address is None'))
return
if o.type == TYPE_ADDRESS and not bitcoin.is_address(o.address):
self.show_error(_('Invalid VIPSTARCOIN Address'))
return
if o.value is None:
self.show_error(_('Invalid Amount'))
return
freeze_fee = self.fee_e.isVisible() and self.fee_e.isModified() and (self.fee_e.text() or self.fee_e.hasFocus())
fee = self.fee_e.get_amount() if freeze_fee else None
coins = self.get_coins()
return outputs, fee, label, coins
def do_preview(self):
self.do_send(preview=True)
def do_send(self, preview=False):
if run_hook('abort_send', self):
return
r = self.read_send_tab()
if not r:
return
outputs, fee, tx_desc, coins = r
try:
is_sweep = bool(self.tx_external_keypairs)
tx = self.wallet.make_unsigned_transaction(coins, outputs, self.config, fee, is_sweep=is_sweep)
except NotEnoughFunds:
self.show_message(_("Insufficient funds"))
return
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
amount = tx.output_value() if self.is_max else sum(map(lambda x:x[2], outputs))
fee = tx.get_fee()
use_rbf = self.rbf_checkbox.isChecked()
tx.set_rbf(use_rbf)
if fee < self.wallet.relayfee() * tx.estimated_size() / 1000:
self.show_error(_("This transaction requires a higher fee, or it will not be propagated by the network"))
return
if preview:
self.show_transaction(tx, tx_desc)
return
# confirmation dialog
msg = [
_("Amount to be sent") + ": " + self.format_amount_and_units(amount),
_("Mining fee") + ": " + self.format_amount_and_units(fee),
]
x_fee = run_hook('get_tx_extra_fee', self.wallet, tx)
if x_fee:
x_fee_address, x_fee_amount = x_fee
msg.append( _("Additional fees") + ": " + self.format_amount_and_units(x_fee_amount) )
confirm_rate = bitcoin.FEERATE_WARNING_HIGH_FEE
if fee > confirm_rate * tx.estimated_size() / 1000:
msg.append(_('Warning') + ': ' + _("The fee for this transaction seems unusually high."))
if self.wallet.has_keystore_encryption():
msg.append("")
msg.append(_("Enter your password to proceed"))
password = self.password_dialog('\n'.join(msg))
if not password:
return
else:
msg.append(_('Proceed?'))
password = None
if not self.question('\n'.join(msg)):
return
def sign_done(success):
if success:
if not tx.is_complete():
self.show_transaction(tx)
self.do_clear()
else:
self.broadcast_transaction(tx, tx_desc)
self.sign_tx_with_password(tx, sign_done, password)
@protected
def sign_tx(self, tx, callback, password):
self.sign_tx_with_password(tx, callback, password)
def sign_tx_with_password(self, tx, callback, password):
'''Sign the transaction in a separate thread. When done, calls
the callback with a success code of True or False.
'''
# call hook to see if plugin needs gui interaction
def on_signed(result):
callback(True)
def on_failed(exc_info):
self.on_error(exc_info)
callback(False)
if self.tx_external_keypairs:
# can sign directly
task = partial(Transaction.sign, tx, self.tx_external_keypairs)
else:
# call hook to see if plugin needs gui interaction
run_hook('sign_tx', self, tx)
task = partial(self.wallet.sign_transaction, tx, password)
WaitingDialog(self, _('Signing transaction...'), task,
on_signed, on_failed)
def broadcast_transaction(self, tx, tx_desc):
def broadcast_thread():
# non-GUI thread
pr = self.payment_request
if pr and pr.has_expired():
self.payment_request = None
return False, _("Payment request has expired")
status, msg = self.network.broadcast_transaction(tx)
if pr and status is True:
self.invoices.set_paid(pr, tx.txid())
self.invoices.save()
self.payment_request = None
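                # BIP70 payment ACK: advertise one of our own receiving addresses as the refund address.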
refund_address = self.wallet.get_receiving_addresses()[0]
ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
if ack_status:
msg = ack_msg
return status, msg
# Capture current TL window; override might be removed on return
parent = self.top_level_window(lambda win: isinstance(win, MessageBoxMixin))
def broadcast_done(result):
# GUI thread
if result:
status, msg = result
if status:
if tx_desc is not None and tx.is_complete():
self.wallet.set_label(tx.txid(), tx_desc)
parent.show_message(_('Payment sent.') + '\n' + msg)
self.invoice_list.update()
self.do_clear()
else:
display_msg = _('The server returned an error when broadcasting the transaction.')
if msg:
display_msg += '\n' + msg
parent.show_error(display_msg)
WaitingDialog(self, _('Broadcasting transaction...'),
broadcast_thread, broadcast_done, self.on_error)
def query_choice(self, msg, choices):
# Needed by QtHandler for hardware wallets
dialog = WindowModalDialog(self.top_level_window())
clayout = ChoicesLayout(msg, choices)
vbox = QVBoxLayout(dialog)
vbox.addLayout(clayout.layout())
vbox.addLayout(Buttons(OkButton(dialog)))
if not dialog.exec_():
return None
return clayout.selected_index()
def lock_amount(self, b):
self.amount_e.setFrozen(b)
self.max_button.setEnabled(not b)
def prepare_for_payment_request(self):
self.show_send_tab()
self.payto_e.is_pr = True
for e in [self.payto_e, self.amount_e, self.message_e]:
e.setFrozen(True)
self.payto_e.setText(_("please wait..."))
return True
def delete_invoice(self, key):
self.invoices.remove(key)
self.invoice_list.update()
def payment_request_ok(self):
pr = self.payment_request
key = self.invoices.add(pr)
status = self.invoices.get_status(key)
self.invoice_list.update()
if status == PR_PAID:
self.show_message("invoice already paid")
self.do_clear()
self.payment_request = None
return
self.payto_e.is_pr = True
if not pr.has_expired():
self.payto_e.setGreen()
else:
self.payto_e.setExpired()
self.payto_e.setText(pr.get_requestor())
self.amount_e.setText(format_satoshis_plain(pr.get_amount(), self.decimal_point))
self.message_e.setText(pr.get_memo())
# signal to set fee
self.amount_e.textEdited.emit("")
def payment_request_error(self):
self.show_message(self.payment_request.error)
self.payment_request = None
self.do_clear()
def on_pr(self, request):
self.payment_request = request
if self.payment_request.verify(self.contacts):
self.payment_request_ok_signal.emit()
else:
self.payment_request_error_signal.emit()
def pay_to_URI(self, URI):
if not URI:
return
try:
out = util.parse_URI(URI, self.on_pr)
except BaseException as e:
self.show_error(_('Invalid vipstarcoin URI:') + '\n' + str(e))
return
self.show_send_tab()
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if r or (name and sig):
self.prepare_for_payment_request()
return
address = out.get('address')
amount = out.get('amount')
label = out.get('label')
message = out.get('message')
# use label as description (not BIP21 compliant)
if label and not message:
message = label
if address:
self.payto_e.setText(address)
if message:
self.message_e.setText(message)
if amount:
self.amount_e.setAmount(amount)
self.amount_e.textEdited.emit("")
def do_clear(self):
self.is_max = False
self.not_enough_funds = False
self.payment_request = None
self.payto_e.is_pr = False
for e in [self.payto_e, self.message_e, self.amount_e, self.fiat_send_e, self.fee_e]:
e.setText('')
e.setFrozen(False)
self.fee_slider.activate()
self.set_pay_from([])
self.rbf_checkbox.setChecked(False)
self.tx_external_keypairs = {}
self.update_status()
run_hook('do_clear', self)
def set_frozen_state(self, addrs, freeze):
self.wallet.set_frozen_state(addrs, freeze)
self.address_list.update()
self.utxo_list.update()
self.update_fee()
def create_list_tab(self, l, toolbar=None):
w = QWidget()
w.searchable_list = l
vbox = QVBoxLayout()
w.setLayout(vbox)
vbox.setContentsMargins(0, 0, 0, 0)
vbox.setSpacing(0)
if toolbar:
toolbar.setContentsMargins(1, 0, 1, 6)
vbox.addLayout(toolbar)
vbox.addWidget(l)
return w
def create_addresses_tab(self):
from .address_list import AddressList
self.address_list = l = AddressList(self)
return self.create_list_tab(l, l.create_toolbar(visible=True))
def create_utxo_tab(self):
from .utxo_list import UTXOList
self.utxo_list = l = UTXOList(self)
return self.create_list_tab(l)
def create_smart_contract_tab(self):
from .smart_contract_list import SmartContractList
self.smart_contract_list = l = SmartContractList(self)
return self.create_list_tab(l)
def create_contacts_tab(self):
from .contact_list import ContactList
self.contact_list = l = ContactList(self)
return self.create_list_tab(l)
def remove_address(self, addr):
if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
self.wallet.delete_address(addr)
self.need_update.set()
self.clear_receive_tab()
def get_coins(self):
if self.pay_from:
return self.pay_from
else:
return self.wallet.get_spendable_coins(None, self.config)
def spend_coins(self, coins):
self.set_pay_from(coins)
self.show_send_tab()
self.update_fee()
def paytomany(self):
self.show_send_tab()
self.payto_e.paytomany()
msg = '\n'.join([
_('Enter a list of outputs in the \'Pay to\' field.'),
_('One output per line.'),
_('Format: address, amount'),
_('You may load a CSV file using the file icon.')
])
self.show_message(msg, title=_('Pay to many'))
def payto_contacts(self, labels):
paytos = [self.get_contact_payto(label) for label in labels]
self.show_send_tab()
if len(paytos) == 1:
self.payto_e.setText(paytos[0])
self.amount_e.setFocus()
else:
text = "\n".join([payto + ", 0" for payto in paytos])
self.payto_e.setText(text)
self.payto_e.setFocus()
def set_contact(self, label, address):
if not is_address(address):
self.show_error(_('Invalid Address'))
self.contact_list.update() # Displays original unchanged value
return False
self.contacts[address] = ('address', label)
self.contact_list.update()
self.history_list.update()
self.update_completions()
return True
def delete_contacts(self, labels):
if not self.question(_("Remove %s from your list of contacts?")
% " + ".join(labels)):
return
for label in labels:
self.contacts.pop(label)
self.history_list.update()
self.contact_list.update()
self.update_completions()
def show_invoice(self, key):
pr = self.invoices.get(key)
if pr is None:
self.show_error('Cannot find payment request in wallet.')
return
pr.verify(self.contacts)
self.show_pr_details(pr)
def show_pr_details(self, pr):
key = pr.get_id()
d = WindowModalDialog(self, _("Invoice"))
vbox = QVBoxLayout(d)
grid = QGridLayout()
grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)
grid.addWidget(QLabel(pr.get_requestor()), 0, 1)
grid.addWidget(QLabel(_("Amount") + ':'), 1, 0)
outputs_str = '\n'.join(map(lambda x: self.format_amount(x[2])+ self.base_unit() + ' @ ' + x[1], pr.get_outputs()))
grid.addWidget(QLabel(outputs_str), 1, 1)
expires = pr.get_expiration_date()
grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)
grid.addWidget(QLabel(pr.get_memo()), 2, 1)
grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)
grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)
if expires:
grid.addWidget(QLabel(_("Expires") + ':'), 4, 0)
grid.addWidget(QLabel(format_time(expires)), 4, 1)
vbox.addLayout(grid)
def do_export():
fn = self.getSaveFileName(_("Save invoice to file"), "*.bip70")
if not fn:
return
with open(fn, 'wb') as f:
                f.write(pr.raw)
            self.show_message(_('Invoice saved as') + ' ' + fn)
exportButton = EnterButton(_('Save'), do_export)
def do_delete():
if self.question(_('Delete invoice?')):
self.invoices.remove(key)
self.history_list.update()
self.invoice_list.update()
d.close()
deleteButton = EnterButton(_('Delete'), do_delete)
vbox.addLayout(Buttons(exportButton, deleteButton, CloseButton(d)))
d.exec_()
def do_pay_invoice(self, key):
pr = self.invoices.get(key)
self.payment_request = pr
self.prepare_for_payment_request()
pr.error = None # this forces verify() to re-run
if pr.verify(self.contacts):
self.payment_request_ok()
else:
self.payment_request_error()
def create_console_tab(self):
from .console import Console
self.console = console = Console()
return console
def update_console(self):
console = self.console
console.history = self.config.get("console-history",[])
console.history_index = len(console.history)
console.updateNamespace({'wallet': self.wallet,
'network': self.network,
'plugins': self.gui_object.plugins,
'window': self})
console.updateNamespace({'util': util, 'bitcoin': bitcoin})
c = commands.Commands(self.config, self.wallet, self.network, lambda: self.console.set_json(True))
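        # Expose each public Commands method in the console namespace, wrapped so it can prompt for a password when required.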
methods = {}
def mkfunc(f, method):
return lambda *args: f(method, args, self.password_dialog)
for m in dir(c):
if m[0] == '_' or m in ['network', 'wallet']: continue
methods[m] = mkfunc(c._run, m)
console.updateNamespace(methods)
def create_status_bar(self):
sb = QStatusBar()
sb.setFixedHeight(35)
qtVersion = qVersion()
self.balance_label = QLabel("Loading wallet...")
self.balance_label.setTextInteractionFlags(Qt.TextSelectableByMouse)
self.balance_label.setStyleSheet("""QLabel { padding: 0 }""")
sb.addWidget(self.balance_label)
self.search_box = QLineEdit()
self.search_box.textChanged.connect(self.do_search)
self.search_box.hide()
sb.addPermanentWidget(self.search_box)
self.lock_icon = QIcon()
self.password_button = StatusBarButton(self.lock_icon, _("Password"), self.change_password_dialog )
sb.addPermanentWidget(self.password_button)
sb.addPermanentWidget(StatusBarButton(read_QIcon("preferences.png"), _("Preferences"), self.settings_dialog ) )
self.seed_button = StatusBarButton(read_QIcon("seed.png"), _("Seed"), self.show_seed_dialog )
sb.addPermanentWidget(self.seed_button)
self.status_button = StatusBarButton(read_QIcon("status_disconnected.png"), _("Network"), lambda: self.gui_object.show_network_dialog(self))
sb.addPermanentWidget(self.status_button)
run_hook('create_status_bar', sb)
self.setStatusBar(sb)
def update_lock_icon(self):
icon = read_QIcon("lock.png") if self.wallet.has_password() else read_QIcon("unlock.png")
self.password_button.setIcon(icon)
def update_buttons_on_seed(self):
self.seed_button.setVisible(self.wallet.has_seed())
self.password_button.setVisible(self.wallet.may_have_password())
self.send_button.setVisible(not self.wallet.is_watching_only())
def change_password_dialog(self):
from electrum.storage import STO_EV_XPUB_PW
if self.wallet.get_available_storage_encryption_version() == STO_EV_XPUB_PW:
from .password_dialog import ChangePasswordDialogForHW
d = ChangePasswordDialogForHW(self, self.wallet)
ok, encrypt_file = d.run()
if not ok:
return
try:
hw_dev_pw = self.wallet.keystore.get_password_for_storage_encryption()
except UserCancelled:
return
except BaseException as e:
traceback.print_exc(file=sys.stderr)
self.show_error(str(e))
return
old_password = hw_dev_pw if self.wallet.has_password() else None
new_password = hw_dev_pw if encrypt_file else None
else:
from .password_dialog import ChangePasswordDialogForSW
d = ChangePasswordDialogForSW(self, self.wallet)
ok, old_password, new_password, encrypt_file = d.run()
if not ok:
return
try:
self.wallet.update_password(old_password, new_password, encrypt_file)
except InvalidPassword as e:
self.show_error(str(e))
return
except (BaseException,) as e:
traceback.print_exc(file=sys.stdout)
self.show_error('{}:{}'.format(_('Failed to update password'), e))
return
msg = _('Password was updated successfully') if self.wallet.has_password() else _(
'Password is disabled, this wallet is not protected')
self.show_message(msg, title=_("Success"))
self.update_lock_icon()
def toggle_search(self):
tab = self.tabs.currentWidget()
# if hasattr(tab, 'searchable_list'):
# tab.searchable_list.toggle_toolbar()
# return
self.search_box.setHidden(not self.search_box.isHidden())
if not self.search_box.isHidden():
self.search_box.setFocus(1)
else:
self.do_search('')
def do_search(self, t):
tab = self.tabs.currentWidget()
if hasattr(tab, 'searchable_list'):
tab.searchable_list.filter(t)
def new_contact_dialog(self):
d = WindowModalDialog(self, _("New Contact"))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('New Contact') + ':'))
grid = QGridLayout()
line1 = QLineEdit()
line1.setFixedWidth(280)
line2 = QLineEdit()
line2.setFixedWidth(280)
grid.addWidget(QLabel(_("Address")), 1, 0)
grid.addWidget(line1, 1, 1)
grid.addWidget(QLabel(_("Name")), 2, 0)
grid.addWidget(line2, 2, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if d.exec_():
self.set_contact(line2.text(), line1.text())
def show_master_public_keys(self):
dialog = WindowModalDialog(self, _("Wallet Information"))
dialog.setMinimumSize(500, 100)
mpk_list = self.wallet.get_master_public_keys()
vbox = QVBoxLayout()
wallet_type = self.wallet.storage.get('wallet_type', '')
if self.wallet.is_watching_only():
wallet_type += ' [{}]'.format(_('watching-only'))
seed_available = _('True') if self.wallet.has_seed() else _('False')
keystore_types = [k.get_type_text() for k in self.wallet.get_keystores()]
grid = QGridLayout()
grid.addWidget(QLabel(_("Wallet type") + ':'), 0, 0)
grid.addWidget(QLabel(wallet_type), 0, 1)
grid.addWidget(QLabel(_("Script type") + ':'), 1, 0)
grid.addWidget(QLabel(self.wallet.txin_type), 1, 1)
grid.addWidget(QLabel(_("Script type")+ ':'), 2, 0)
grid.addWidget(QLabel(self.wallet.txin_type), 2, 1)
grid.addWidget(QLabel(_("Seed available") + ':'), 3, 0)
grid.addWidget(QLabel(str(seed_available)), 3, 1)
if len(keystore_types) <= 1:
grid.addWidget(QLabel(_("Keystore type") + ':'), 4, 0)
ks_type = str(keystore_types[0]) if keystore_types else _('No keystore')
grid.addWidget(QLabel(ks_type), 4, 1)
vbox.addLayout(grid)
if self.wallet.is_deterministic():
mpk_text = ShowQRTextEdit()
mpk_text.setMaximumHeight(150)
mpk_text.addCopyButton(self.app)
def show_mpk(index):
mpk_text.setText(mpk_list[index])
# only show the combobox in case multiple accounts are available
if len(mpk_list) > 1:
def label(key):
if isinstance(self.wallet, Multisig_Wallet):
return _("cosigner") + f' {key+1} ( keystore: {keystore_types[key]} )'
return ''
labels = [label(i) for i in range(len(mpk_list))]
on_click = lambda clayout: show_mpk(clayout.selected_index())
labels_clayout = ChoicesLayout(_("Master Public Keys"), labels, on_click)
vbox.addLayout(labels_clayout.layout())
else:
vbox.addWidget(QLabel(_("Master Public Key")))
show_mpk(0)
vbox.addWidget(mpk_text)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
@protected
def show_seed_dialog(self, password):
if not self.wallet.has_seed():
self.show_message(_('This wallet has no seed'))
return
keystore = self.wallet.get_keystore()
try:
seed = keystore.get_seed(password)
passphrase = keystore.get_passphrase(password)
except BaseException as e:
self.show_error(str(e))
return
from .seed_dialog import SeedDialog
d = SeedDialog(self, seed, passphrase)
d.exec_()
def show_qrcode(self, data, title = _("QR code"), parent=None):
if not data:
return
d = QRDialog(data, parent or self, title)
d.exec_()
@protected
def show_private_key(self, address, password):
if not address:
return
try:
pk, redeem_script = self.wallet.export_private_key(address, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
xtype = bitcoin.deserialize_privkey(pk)[0]
d = WindowModalDialog(self, _("Private key"))
d.setMinimumSize(600, 150)
vbox = QVBoxLayout()
vbox.addWidget(QLabel(_("Address") + ': ' + address))
vbox.addWidget(QLabel(_("Script type") + ': ' + xtype))
vbox.addWidget(QLabel(_("Private key") + ':'))
keys_e = ShowQRTextEdit(text=pk)
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
if redeem_script:
vbox.addWidget(QLabel(_("Redeem Script") + ':'))
rds_e = ShowQRTextEdit(text=redeem_script)
rds_e.addCopyButton(self.app)
vbox.addWidget(rds_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
msg_sign = ("Signing with an address actually means signing with the corresponding "
"private key, and verifying with the corresponding public key. The "
"address you have entered does not have a unique public key, so these "
"operations cannot be performed.")
@protected
def do_sign(self, address, message, signature, password):
address = address.text().strip()
message = message.toPlainText().strip()
if not bitcoin.is_address(address):
self.show_message('Invalid VIPSTARCOIN address.')
return
txin_type = self.wallet.get_txin_type(address)
if txin_type not in ['p2pkh', 'p2wpkh', 'p2wpkh-p2sh']:
self.show_message('Cannot sign messages with this type of address.' + '\n\n' + self.msg_sign)
return
if self.wallet.is_watching_only():
self.show_message(_('This is a watching-only wallet.'))
return
if not self.wallet.is_mine(address):
self.show_message('Address not in wallet.')
return
task = partial(self.wallet.sign_message, address, message, password)
def show_signed_message(sig):
signature.setText(base64.b64encode(sig).decode('ascii'))
self.wallet.thread.add(task, on_success=show_signed_message)
def do_verify(self, address, message, signature):
address = address.text().strip()
message = message.toPlainText().strip().encode('utf-8')
if not bitcoin.is_address(address):
self.show_message('Invalid VIPSTARCOIN address.')
return
try:
# This can throw on invalid base64
sig = base64.b64decode(str(signature.toPlainText()))
verified = ecc.verify_message(address, sig, message)
except Exception as e:
verified = False
if verified:
self.show_message(_("Signature verified"))
else:
self.show_error(_("Wrong signature"))
def sign_verify_message(self, address=''):
d = WindowModalDialog(self, _('Sign/verify Message'))
d.setMinimumSize(610, 290)
layout = QGridLayout(d)
message_e = QTextEdit()
message_e.setAcceptRichText(False)
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
address_e = QLineEdit()
address_e.setText(address)
layout.addWidget(QLabel(_('Address')), 2, 0)
layout.addWidget(address_e, 2, 1)
signature_e = QTextEdit()
signature_e.setAcceptRichText(False)
layout.addWidget(QLabel(_('Signature')), 3, 0)
layout.addWidget(signature_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Sign"))
b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Verify"))
b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
@protected
def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
if self.wallet.is_watching_only():
self.show_message(_('This is a watching-only wallet.'))
return
cyphertext = encrypted_e.toPlainText()
task = partial(self.wallet.decrypt_message, pubkey_e.text(), cyphertext, password)
self.wallet.thread.add(task, on_success=lambda text: message_e.setText(text.decode('utf-8')))
def do_encrypt(self, message_e, pubkey_e, encrypted_e):
message = message_e.toPlainText()
message = message.encode('utf-8')
try:
public_key = ecc.ECPubkey(bfh(pubkey_e.text()))
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(_('Invalid Public key'))
return
encrypted = public_key.encrypt_message(message)
encrypted_e.setText(encrypted.decode('ascii'))
def encrypt_message(self, address=''):
d = WindowModalDialog(self, _('Encrypt/decrypt Message'))
d.setMinimumSize(610, 490)
layout = QGridLayout(d)
message_e = QTextEdit()
message_e.setAcceptRichText(False)
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
pubkey_e = QLineEdit()
if address:
pubkey = self.wallet.get_public_key(address)
pubkey_e.setText(pubkey)
layout.addWidget(QLabel(_('Public key')), 2, 0)
layout.addWidget(pubkey_e, 2, 1)
encrypted_e = QTextEdit()
encrypted_e.setAcceptRichText(False)
layout.addWidget(QLabel(_('Encrypted')), 3, 0)
layout.addWidget(encrypted_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Encrypt"))
b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Decrypt"))
b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
def password_dialog(self, msg=None, parent=None):
from .password_dialog import PasswordDialog
parent = parent or self
d = PasswordDialog(parent, msg)
return d.run()
def tx_from_text(self, txt):
from electrum.transaction import tx_from_str
try:
tx = tx_from_str(txt)
return Transaction(tx)
except BaseException as e:
self.show_critical(_("Electrum was unable to parse your transaction") + ":\n" + str(e))
return
def read_tx_from_qrcode(self):
from electrum import qrscanner
try:
data = qrscanner.scan_barcode(self.config.get_video_device())
except BaseException as e:
self.show_error(str(e))
return
if not data:
return
# if the user scanned a bitcoin URI
if str(data).startswith("vipstarcoin:"):
self.pay_to_URI(data)
return
# else if the user scanned an offline signed tx
# transactions are binary, but qrcode seems to return utf8...
try:
data = bh2u(bitcoin.base_decode(data, length=None, base=43))
except (BaseException,) as e:
self.show_error((_('Could not decode QR code') + ':\n{}').format(e))
return
tx = self.tx_from_text(data)
if not tx:
return
self.show_transaction(tx)
def read_tx_from_file(self):
fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
if not fileName:
return
try:
with open(fileName, "r") as f:
file_content = f.read()
except (ValueError, IOError, os.error) as reason:
self.show_critical(_("Electrum was unable to open your transaction file") + "\n" + str(reason), title=_("Unable to read file or no transaction found"))
return
return self.tx_from_text(file_content)
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
def do_process_from_txid(self):
from electrum import transaction
txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
if ok and txid:
txid = str(txid).strip()
try:
r = self.network.get_transaction(txid)
except BaseException as e:
self.show_message(str(e))
return
tx = transaction.Transaction(r)
self.show_transaction(tx)
@protected
def export_privkeys_dialog(self, password):
if self.wallet.is_watching_only():
self.show_message(_("This is a watching-only wallet"))
return
if isinstance(self.wallet, Multisig_Wallet):
self.show_message(_('WARNING: This is a multi-signature wallet.') + '\n' + _(
'It can not be "backed up" by simply exporting these private keys.'))
d = WindowModalDialog(self, _('Private keys'))
d.setMinimumSize(980, 300)
vbox = QVBoxLayout(d)
msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
_("Exposing a single private key can compromise your entire wallet!"),
_("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
vbox.addWidget(QLabel(msg))
e = QTextEdit()
e.setReadOnly(True)
vbox.addWidget(e)
defaultname = 'electrum-private-keys.csv'
select_msg = _('Select file to export your private keys to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
b = OkButton(d, _('Export'))
b.setEnabled(False)
vbox.addLayout(Buttons(CancelButton(d), b))
private_keys = {}
addresses = self.wallet.get_addresses()
done = False
cancelled = False
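        # Derive the keys on a worker thread; the done/cancelled flags let the dialog stop the worker, and the signals marshal progress back to the GUI thread.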
def privkeys_thread():
for addr in addresses:
time.sleep(0.1)
if done or cancelled:
break
privkey = self.wallet.export_private_key(addr, password)[0]
private_keys[addr] = privkey
self.computing_privkeys_signal.emit()
if not cancelled:
self.computing_privkeys_signal.disconnect()
self.show_privkeys_signal.emit()
def show_privkeys():
s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
e.setText(s)
b.setEnabled(True)
self.show_privkeys_signal.disconnect()
nonlocal done
done = True
def on_dialog_closed(*args):
nonlocal done
nonlocal cancelled
if not done:
cancelled = True
self.computing_privkeys_signal.disconnect()
self.show_privkeys_signal.disconnect()
self.computing_privkeys_signal.connect(
lambda: e.setText("Please wait... %d/%d" % (len(private_keys), len(addresses))))
self.show_privkeys_signal.connect(show_privkeys)
d.finished.connect(on_dialog_closed)
threading.Thread(target=privkeys_thread).start()
if not d.exec_():
done = True
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
except (IOError, os.error) as reason:
txt = "\n".join([
_("VIPSTARCOIN Electrum was unable to produce a private key-export."),
str(reason)
])
self.show_critical(txt, title=_("Unable to create csv"))
except Exception as e:
self.show_message(str(e))
return
self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["address", "private_key"])
for addr, pk in pklist.items():
transaction.writerow(["%34s"%addr,pk])
else:
import json
f.write(json.dumps(pklist, indent=4))
def do_import_labels(self):
def import_labels(path):
def _validate(data):
return data # TODO
def import_labels_assign(data):
for key, value in data.items():
self.wallet.set_label(key, value)
import_meta(path, _validate, import_labels_assign)
def on_import():
self.need_update.set()
import_meta_gui(self, _('labels'), import_labels, on_import)
def do_export_labels(self):
def export_labels(filename):
export_meta(self.wallet.labels, filename)
export_meta_gui(self, _('labels'), export_labels)
def export_history_dialog(self):
d = WindowModalDialog(self, _('Export History'))
d.setMinimumSize(400, 200)
vbox = QVBoxLayout(d)
defaultname = os.path.expanduser('~/electrum-history.csv')
select_msg = _('Select file to export your wallet transactions to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
vbox.addStretch(1)
hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
vbox.addLayout(hbox)
run_hook('export_history_dialog', self, hbox)
self.update()
if not d.exec_():
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_history(self.wallet, filename, csv_button.isChecked())
except (IOError, os.error) as reason:
export_error_label = _("Electrum was unable to produce a transaction export.")
self.show_critical(export_error_label + "\n" + str(reason), title=_("Unable to export history"))
return
self.show_message(_("Your wallet history has been successfully exported."))
def plot_history_dialog(self):
if plot_history is None:
return
wallet = self.wallet
history = wallet.get_history()
if len(history) > 0:
plt = plot_history(self.wallet, history)
plt.show()
def do_export_history(self, wallet, fileName, is_csv):
history = wallet.get_history()
lines = []
for item in history:
tx_hash, tx_mined_status, value, balance = item
if tx_mined_status.height > 0:
if tx_mined_status.timestamp is not None:
time_string = format_time(tx_mined_status.timestamp)
else:
time_string = _("unverified")
else:
time_string = _("unconfirmed")
if value is not None:
value_string = format_satoshis(value, True)
else:
value_string = '--'
if tx_hash:
label = wallet.get_label(tx_hash)
else:
label = ""
if is_csv:
lines.append([tx_hash, label, tx_mined_status.conf, value_string, time_string])
else:
lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string})
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f, lineterminator='\n')
transaction.writerow(["transaction_hash", "label", "confirmations", "value", "timestamp"])
for line in lines:
transaction.writerow(line)
else:
import json
f.write(json.dumps(lines, indent=4))
def sweep_key_dialog(self):
d = WindowModalDialog(self, title=_('Sweep private keys'))
d.setMinimumSize(600, 300)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_("Enter private keys:")))
keys_e = ScanQRTextEdit()
keys_e.setTabChangesFocus(True)
vbox.addWidget(keys_e)
addresses = self.wallet.get_unused_addresses()
if not addresses:
try:
addresses = self.wallet.get_receiving_addresses()
except AttributeError:
addresses = self.wallet.get_addresses()
h, address_e = address_field(addresses)
vbox.addLayout(h)
vbox.addStretch(1)
button = OkButton(d, _('Sweep'))
vbox.addLayout(Buttons(CancelButton(d), button))
button.setEnabled(False)
def get_address():
addr = str(address_e.text()).strip()
if bitcoin.is_address(addr):
return addr
def get_pk():
text = str(keys_e.toPlainText())
return keystore.get_private_keys(text)
f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
on_address = lambda text: address_e.setStyleSheet(BLACK_FG if get_address() else RED_FG)
keys_e.textChanged.connect(f)
address_e.textChanged.connect(f)
address_e.textChanged.connect(on_address)
if not d.exec_():
return
from electrum.wallet import sweep_preparations
try:
self.do_clear()
coins, keypairs = sweep_preparations(get_pk(), self.network)
self.tx_external_keypairs = keypairs
self.spend_coins(coins)
self.payto_e.setText(get_address())
self.spend_max()
self.payto_e.setFrozen(True)
self.amount_e.setFrozen(True)
except BaseException as e:
self.show_message(str(e))
return
self.warn_if_watching_only()
def _do_import(self, title, msg, func):
text = text_dialog(self, title, msg + ' :', _('Import'))
if not text:
return
keys = str(text).split()
good_inputs, bad_inputs = func(keys)
if good_inputs:
msg = '\n'.join(good_inputs[:10])
if len(good_inputs) > 10: msg += '\n...'
self.show_message(_("The following addresses were added")
+ f' ({len(good_inputs)}):\n' + msg)
if bad_inputs:
msg = "\n".join(f"{key[:10]}... ({msg})" for key, msg in bad_inputs[:10])
if len(bad_inputs) > 10: msg += '\n...'
self.show_error(_("The following inputs could not be imported")
+ f' ({len(bad_inputs)}):\n' + msg)
self.address_list.update()
self.history_list.update()
def import_addresses(self):
if not self.wallet.can_import_address():
return
title, msg = _('Import addresses'), _("Enter addresses")
self._do_import(title, msg, self.wallet.import_addresses)
@protected
def do_import_privkey(self, password):
if not self.wallet.can_import_privkey():
return
title, msg = _('Import private keys'), _("Enter private keys")
self._do_import(title, msg, lambda x: self.wallet.import_private_keys(x, password))
def update_fiat(self):
b = self.fx and self.fx.is_enabled()
self.fiat_send_e.setVisible(b)
self.fiat_receive_e.setVisible(b)
self.history_list.refresh_headers()
self.history_list.update()
self.address_list.refresh_headers()
self.address_list.update()
self.update_status()
def settings_dialog(self):
self.need_restart = False
d = WindowModalDialog(self, _('Preferences'))
vbox = QVBoxLayout()
tabs = QTabWidget()
gui_widgets = []
fee_widgets = []
tx_widgets = []
id_widgets = []
# language
lang_help = _('Select which language is used in the GUI (after restart).')
lang_label = HelpLabel(_('Language') + ':', lang_help)
lang_combo = QComboBox()
from electrum.i18n import languages
lang_combo.addItems(list(languages.values()))
lang_keys = list(languages.keys())
lang_cur_setting = self.config.get("language", '')
try:
index = lang_keys.index(lang_cur_setting)
except ValueError: # not in list
index = 0
lang_combo.setCurrentIndex(index)
if not self.config.is_modifiable('language'):
for w in [lang_combo, lang_label]: w.setEnabled(False)
def on_lang(x):
lang_request = list(languages.keys())[lang_combo.currentIndex()]
if lang_request != self.config.get('language'):
self.config.set_key("language", lang_request, True)
self.need_restart = True
lang_combo.currentIndexChanged.connect(on_lang)
gui_widgets.append((lang_label, lang_combo))
nz_help = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
nz_label = HelpLabel(_('Zeros after decimal point') + ':', nz_help)
nz = QSpinBox()
nz.setMinimum(0)
nz.setMaximum(self.decimal_point)
nz.setValue(self.num_zeros)
if not self.config.is_modifiable('num_zeros'):
for w in [nz, nz_label]: w.setEnabled(False)
def on_nz():
value = nz.value()
if self.num_zeros != value:
self.num_zeros = value
self.config.set_key('num_zeros', value, True)
self.history_list.update()
self.address_list.update()
nz.valueChanged.connect(on_nz)
gui_widgets.append((nz_label, nz))
def on_dynfee(x):
self.config.set_key('dynamic_fees', x == Qt.Checked)
self.fee_slider.update()
dynfee_cb = QCheckBox(_('Use dynamic fees'))
dynfee_cb.setChecked(self.config.is_dynfee())
dynfee_cb.setToolTip(_("Use fees recommended by the server."))
fee_widgets.append((dynfee_cb, None))
dynfee_cb.stateChanged.connect(on_dynfee)
feebox_cb = QCheckBox(_('Edit fees manually'))
feebox_cb.setChecked(self.config.get('show_fee', False))
feebox_cb.setToolTip(_("Show fee edit box in send tab."))
def on_feebox(x):
self.config.set_key('show_fee', x == Qt.Checked)
self.fee_e.setVisible(bool(x))
feebox_cb.stateChanged.connect(on_feebox)
fee_widgets.append((feebox_cb, None))
use_rbf_cb = QCheckBox(_('Use Replace-By-Fee'))
use_rbf_cb.setChecked(self.config.get('use_rbf', True))
use_rbf_cb.setToolTip(
_('If you check this box, your transactions will be marked as non-final,') + '\n' + \
_('and you will have the possibility, while they are unconfirmed, to replace them with transactions that pay higher fees.') + '\n' + \
_('Note that some merchants do not accept non-final transactions until they are confirmed.'))
def on_use_rbf(x):
self.config.set_key('use_rbf', x == Qt.Checked)
use_rbf_cb.stateChanged.connect(on_use_rbf)
fee_widgets.append((use_rbf_cb, None))
# rbf_policy = self.config.get('rbf_policy', 1)
# rbf_label = HelpLabel(_('Propose Replace-By-Fee') + ':', '')
# rbf_combo = QComboBox()
# rbf_combo.addItems([_('Always'), _('If the fee is low'), _('Never')])
# rbf_combo.setCurrentIndex(rbf_policy)
# def on_rbf(x):
# self.config.set_key('rbf_policy', x)
# rbf_combo.currentIndexChanged.connect(on_rbf)
# fee_widgets.append((rbf_label, rbf_combo))
msg = _('OpenAlias record, used to receive coins and to sign payment requests.') + '\n\n'\
+ _('The following alias providers are available:') + '\n'\
+ '\n'.join(['https://cryptoname.co/', 'http://xmr.link']) + '\n\n'\
+ 'For more information, see http://openalias.org'
alias_label = HelpLabel(_('OpenAlias') + ':', msg)
alias = self.config.get('alias','')
alias_e = QLineEdit(alias)
def set_alias_color():
if not self.config.get('alias'):
alias_e.setStyleSheet("")
return
if self.alias_info:
alias_addr, alias_name, validated = self.alias_info
alias_e.setStyleSheet(GREEN_BG if validated else RED_BG)
else:
alias_e.setStyleSheet(RED_BG)
def on_alias_edit():
alias_e.setStyleSheet("")
alias = str(alias_e.text())
self.config.set_key('alias', alias, True)
if alias:
self.fetch_alias()
set_alias_color()
self.alias_received_signal.connect(set_alias_color)
alias_e.editingFinished.connect(on_alias_edit)
id_widgets.append((alias_label, alias_e))
# SSL certificate
msg = ' '.join([
_('SSL certificate used to sign payment requests.'),
_('Use setconfig to set ssl_chain and ssl_privkey.'),
])
if self.config.get('ssl_privkey') or self.config.get('ssl_chain'):
try:
SSL_identity = paymentrequest.check_ssl_config(self.config)
SSL_error = None
except BaseException as e:
SSL_identity = "error"
SSL_error = str(e)
else:
SSL_identity = ""
SSL_error = None
SSL_id_label = HelpLabel(_('SSL certificate') + ':', msg)
SSL_id_e = QLineEdit(SSL_identity)
SSL_id_e.setStyleSheet(RED_BG if SSL_error else GREEN_BG if SSL_identity else '')
if SSL_error:
SSL_id_e.setToolTip(SSL_error)
SSL_id_e.setReadOnly(True)
id_widgets.append((SSL_id_label, SSL_id_e))
units = ['VIPS', 'mVIPS', 'uVIPS', 'boon']
msg = _('Base unit of your wallet.')\
+ '\n1VIPS=1000mVIPS.\n' \
            + _('These settings affect the fields in the Send tab') + ' '
unit_label = HelpLabel(_('Base unit') + ':', msg)
unit_combo = QComboBox()
unit_combo.addItems(units)
unit_combo.setCurrentIndex(units.index(self.base_unit()))
def on_unit(x, nz):
unit_result = units[unit_combo.currentIndex()]
if self.base_unit() == unit_result:
return
edits = self.amount_e, self.fee_e, self.receive_amount_e
amounts = [edit.get_amount() for edit in edits]
if unit_result == 'VIPS':
self.decimal_point = 8
elif unit_result == 'mVIPS':
self.decimal_point = 5
elif unit_result == 'uVIPS':
self.decimal_point = 2
elif unit_result == 'boon':
self.decimal_point = 0
else:
raise Exception('Unknown base unit')
self.config.set_key('decimal_point', self.decimal_point, True)
nz.setMaximum(self.decimal_point)
self.history_list.update()
self.request_list.update()
self.address_list.update()
for edit, amount in zip(edits, amounts):
edit.setAmount(amount)
self.update_status()
unit_combo.currentIndexChanged.connect(lambda x: on_unit(x, nz))
gui_widgets.append((unit_label, unit_combo))
block_explorers = sorted(util.block_explorer_info().keys())
msg = _('Choose which online block explorer to use for functions that open a web browser')
block_ex_label = HelpLabel(_('Online Block Explorer') + ':', msg)
block_ex_combo = QComboBox()
block_ex_combo.addItems(block_explorers)
block_ex_combo.setCurrentIndex(block_ex_combo.findText(util.block_explorer(self.config)))
def on_be(x):
be_result = block_explorers[block_ex_combo.currentIndex()]
self.config.set_key('block_explorer', be_result, True)
block_ex_combo.currentIndexChanged.connect(on_be)
gui_widgets.append((block_ex_label, block_ex_combo))
from electrum import qrscanner
system_cameras = qrscanner._find_system_cameras()
qr_combo = QComboBox()
qr_combo.addItem("Default","default")
for camera, device in system_cameras.items():
qr_combo.addItem(camera, device)
#combo.addItem("Manually specify a device", config.get("video_device"))
index = qr_combo.findData(self.config.get("video_device"))
qr_combo.setCurrentIndex(index)
msg = _("Install the zbar package to enable this.")
qr_label = HelpLabel(_('Video Device') + ':', msg)
qr_combo.setEnabled(qrscanner.libzbar is not None)
on_video_device = lambda x: self.config.set_key("video_device", qr_combo.itemData(x), True)
qr_combo.currentIndexChanged.connect(on_video_device)
gui_widgets.append((qr_label, qr_combo))
usechange_cb = QCheckBox(_('Use change addresses'))
usechange_cb.setChecked(self.wallet.use_change)
if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
def on_usechange(x):
usechange_result = x == Qt.Checked
if self.wallet.use_change != usechange_result:
self.wallet.use_change = usechange_result
self.wallet.storage.put('use_change', self.wallet.use_change)
multiple_cb.setEnabled(self.wallet.use_change)
usechange_cb.stateChanged.connect(on_usechange)
usechange_cb.setToolTip(_('Using change addresses makes it more difficult for other people to track your transactions.'))
tx_widgets.append((usechange_cb, None))
def on_multiple(x):
multiple = x == Qt.Checked
if self.wallet.multiple_change != multiple:
self.wallet.multiple_change = multiple
self.wallet.storage.put('multiple_change', multiple)
multiple_change = self.wallet.multiple_change
multiple_cb = QCheckBox(_('Use multiple change addresses'))
multiple_cb.setEnabled(self.wallet.use_change)
multiple_cb.setToolTip('\n'.join([
_('In some cases, use up to 3 change addresses in order to break '
'up large coin amounts and obfuscate the recipient address.'),
            _('This may result in higher transaction fees.')
]))
multiple_cb.setChecked(multiple_change)
multiple_cb.stateChanged.connect(on_multiple)
tx_widgets.append((multiple_cb, None))
def fmt_docs(key, klass):
lines = [ln.lstrip(" ") for ln in klass.__doc__.split("\n")]
return '\n'.join([key, "", " ".join(lines)])
choosers = sorted(coinchooser.COIN_CHOOSERS.keys())
if len(choosers) > 1:
chooser_name = coinchooser.get_name(self.config)
msg = _('Choose coin (UTXO) selection method. The following are available:\n\n')
msg += '\n\n'.join(fmt_docs(*item) for item in coinchooser.COIN_CHOOSERS.items())
chooser_label = HelpLabel(_('Coin selection') + ':', msg)
chooser_combo = QComboBox()
chooser_combo.addItems(choosers)
i = choosers.index(chooser_name) if chooser_name in choosers else 0
chooser_combo.setCurrentIndex(i)
def on_chooser(x):
chooser_name = choosers[chooser_combo.currentIndex()]
self.config.set_key('coin_chooser', chooser_name)
chooser_combo.currentIndexChanged.connect(on_chooser)
tx_widgets.append((chooser_label, chooser_combo))
def on_unconf(x):
self.config.set_key('confirmed_only', bool(x))
conf_only = self.config.get('confirmed_only', False)
unconf_cb = QCheckBox(_('Spend only confirmed coins'))
unconf_cb.setToolTip(_('Spend only confirmed inputs.'))
unconf_cb.setChecked(conf_only)
unconf_cb.stateChanged.connect(on_unconf)
tx_widgets.append((unconf_cb, None))
# Fiat Currency
hist_checkbox = QCheckBox()
fiat_address_checkbox = QCheckBox()
ccy_combo = QComboBox()
ex_combo = QComboBox()
def update_currencies():
if not self.fx: return
currencies = sorted(self.fx.get_currencies(self.fx.get_history_config()))
ccy_combo.clear()
ccy_combo.addItems([_('None')] + currencies)
if self.fx.is_enabled():
ccy_combo.setCurrentIndex(ccy_combo.findText(self.fx.get_currency()))
def update_history_cb():
if not self.fx: return
hist_checkbox.setChecked(self.fx.get_history_config())
hist_checkbox.setEnabled(self.fx.is_enabled())
def update_fiat_address_cb():
if not self.fx: return
fiat_address_checkbox.setChecked(self.fx.get_fiat_address_config())
def update_exchanges():
if not self.fx: return
b = self.fx.is_enabled()
ex_combo.setEnabled(b)
if b:
h = self.fx.get_history_config()
c = self.fx.get_currency()
exchanges = self.fx.get_exchanges_by_ccy(c, h)
else:
exchanges = self.fx.get_exchanges_by_ccy('USD', False)
ex_combo.clear()
ex_combo.addItems(sorted(exchanges))
ex_combo.setCurrentIndex(ex_combo.findText(self.fx.config_exchange()))
def on_currency(hh):
if not self.fx: return
b = bool(ccy_combo.currentIndex())
ccy = str(ccy_combo.currentText()) if b else None
self.fx.set_enabled(b)
if b and ccy != self.fx.ccy:
self.fx.set_currency(ccy)
update_history_cb()
update_exchanges()
self.update_fiat()
def on_exchange(idx):
exchange = str(ex_combo.currentText())
if self.fx and self.fx.is_enabled() and exchange and exchange != self.fx.exchange.name():
self.fx.set_exchange(exchange)
def on_history(checked):
if not self.fx: return
self.fx.set_history_config(checked)
update_exchanges()
self.history_list.refresh_headers()
if self.fx.is_enabled() and checked:
# reset timeout to get historical rates
self.fx.timeout = 0
def on_fiat_address(checked):
if not self.fx: return
self.fx.set_fiat_address_config(checked)
self.address_list.refresh_headers()
self.address_list.update()
update_currencies()
update_history_cb()
update_fiat_address_cb()
update_exchanges()
ccy_combo.currentIndexChanged.connect(on_currency)
hist_checkbox.stateChanged.connect(on_history)
fiat_address_checkbox.stateChanged.connect(on_fiat_address)
ex_combo.currentIndexChanged.connect(on_exchange)
fiat_widgets = []
fiat_widgets.append((QLabel(_('Fiat currency')), ccy_combo))
fiat_widgets.append((QLabel(_('Show history rates')), hist_checkbox))
fiat_widgets.append((QLabel(_('Show Fiat balance for addresses')), fiat_address_checkbox))
fiat_widgets.append((QLabel(_('Source')), ex_combo))
tabs_info = [
(fee_widgets, _('Fees')),
(tx_widgets, _('Transactions')),
(gui_widgets, _('Appearance')),
(fiat_widgets, _('Fiat')),
(id_widgets, _('Identity')),
]
for widgets, name in tabs_info:
tab = QWidget()
grid = QGridLayout(tab)
grid.setColumnStretch(0, 1)
for a,b in widgets:
i = grid.rowCount()
if b:
if a:
grid.addWidget(a, i, 0)
grid.addWidget(b, i, 1)
else:
grid.addWidget(a, i, 0, 1, 2)
tabs.addTab(tab, name)
vbox.addWidget(tabs)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
# run the dialog
d.exec_()
if self.fx:
self.fx.timeout = 0
self.alias_received_signal.disconnect(set_alias_color)
run_hook('close_settings_dialog')
if self.need_restart:
self.show_warning(_('Please restart Electrum to activate the new GUI settings'), title=_('Success'))
def closeEvent(self, event):
# It seems in some rare cases this closeEvent() is called twice
if not self.cleaned_up:
self.cleaned_up = True
self.clean_up()
event.accept()
def clean_up(self):
self.wallet.thread.stop()
if self.network:
self.network.unregister_callback(self.on_network)
self.config.set_key("is_maximized", self.isMaximized())
if not self.isMaximized():
g = self.geometry()
self.wallet.storage.put("winpos-qt", [g.left(),g.top(),
g.width(),g.height()])
self.config.set_key("console-history", self.console.history[-50:],
True)
if self.qr_window:
self.qr_window.close()
self.close_wallet()
self.gui_object.close_window(self)
def plugins_dialog(self):
self.pluginsdialog = d = WindowModalDialog(self, _('Electrum Plugins'))
plugins = self.gui_object.plugins
vbox = QVBoxLayout(d)
# plugins
scroll = QScrollArea()
scroll.setEnabled(True)
scroll.setWidgetResizable(True)
scroll.setMinimumSize(400,250)
vbox.addWidget(scroll)
w = QWidget()
scroll.setWidget(w)
w.setMinimumHeight(plugins.count() * 35)
grid = QGridLayout()
grid.setColumnStretch(0,1)
w.setLayout(grid)
settings_widgets = {}
def enable_settings_widget(p, name, i):
widget = settings_widgets.get(name)
if not widget and p and p.requires_settings():
widget = settings_widgets[name] = p.settings_widget(d)
grid.addWidget(widget, i, 1)
if widget:
widget.setEnabled(bool(p and p.is_enabled()))
def do_toggle(cb, name, i):
p = plugins.toggle(name)
cb.setChecked(bool(p))
enable_settings_widget(p, name, i)
run_hook('init_qt', self.gui_object)
for i, descr in enumerate(plugins.descriptions.values()):
full_name = descr['__name__']
prefix, _separator, name = full_name.rpartition('.')
p = plugins.get(name)
if descr.get('registers_keystore'):
continue
try:
cb = QCheckBox(descr['fullname'])
plugin_is_loaded = p is not None
cb_enabled = (not plugin_is_loaded and plugins.is_available(name, self.wallet)
or plugin_is_loaded and p.can_user_disable())
cb.setEnabled(cb_enabled)
cb.setChecked(plugin_is_loaded and p.is_enabled())
grid.addWidget(cb, i, 0)
enable_settings_widget(p, name, i)
cb.clicked.connect(partial(do_toggle, cb, name, i))
msg = descr['description']
if descr.get('requires'):
msg += '\n\n' + _('Requires') + ':\n' + '\n'.join(map(lambda x: x[1], descr.get('requires')))
grid.addWidget(HelpButton(msg), i, 2)
except Exception:
self.print_msg("error: cannot display plugin", name)
traceback.print_exc(file=sys.stdout)
grid.setRowStretch(len(plugins.descriptions.values()), 1)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
def cpfp(self, parent_tx, new_tx):
total_size = parent_tx.estimated_size() + new_tx.estimated_size()
d = WindowModalDialog(self, _('Child Pays for Parent'))
vbox = QVBoxLayout(d)
msg = (
"A CPFP is a transaction that sends an unconfirmed output back to "
"yourself, with a high fee. The goal is to have miners confirm "
"the parent transaction in order to get the fee attached to the "
"child transaction.")
vbox.addWidget(WWLabel(_(msg)))
msg2 = ("The proposed fee is computed using your "
"fee/kB settings, applied to the total size of both child and "
"parent transactions. After you broadcast a CPFP transaction, "
"it is normal to see a new unconfirmed transaction in your history.")
vbox.addWidget(WWLabel(_(msg2)))
grid = QGridLayout()
grid.addWidget(QLabel(_('Total size') + ':'), 0, 0)
grid.addWidget(QLabel('%d bytes'% total_size), 0, 1)
max_fee = new_tx.output_value()
grid.addWidget(QLabel(_('Input amount') + ':'), 1, 0)
grid.addWidget(QLabel(self.format_amount(max_fee) + ' ' + self.base_unit()), 1, 1)
output_amount = QLabel('')
grid.addWidget(QLabel(_('Output amount') + ':'), 2, 0)
grid.addWidget(output_amount, 2, 1)
fee_e = BTCAmountEdit(self.get_decimal_point)
def f(x):
a = max_fee - fee_e.get_amount()
output_amount.setText((self.format_amount(a) + ' ' + self.base_unit()) if a else '')
fee_e.textChanged.connect(f)
fee = self.config.fee_per_kb() * total_size / 1000
fee_e.setAmount(fee)
        grid.addWidget(QLabel(_('Fee') + ':'), 3, 0)
grid.addWidget(fee_e, 3, 1)
def on_rate(dyn, pos, fee_rate):
fee = fee_rate * total_size / 1000
fee = min(max_fee, fee)
fee_e.setAmount(fee)
fee_slider = FeeSlider(self, self.config, on_rate)
fee_slider.update()
grid.addWidget(fee_slider, 4, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
fee = fee_e.get_amount()
if fee > max_fee:
self.show_error(_('Max fee exceeded'))
return
new_tx = self.wallet.cpfp(parent_tx, fee)
new_tx.set_rbf(True)
self.show_transaction(new_tx)
def bump_fee_dialog(self, tx):
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if fee is None:
self.show_error(_("Can't bump fee: unknown fee for original transaction."))
return
tx_label = self.wallet.get_label(tx.txid())
tx_size = tx.estimated_size()
d = WindowModalDialog(self, _('Bump Fee'))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('Current fee') + ': %s'% self.format_amount(fee) + ' ' + self.base_unit()))
        vbox.addWidget(QLabel(_('New fee') + ':'))
fee_e = BTCAmountEdit(self.get_decimal_point)
fee_e.setAmount(fee * 1.5)
vbox.addWidget(fee_e)
def on_rate(dyn, pos, fee_rate):
fee = fee_rate * tx_size / 1000
fee_e.setAmount(fee)
fee_slider = FeeSlider(self, self.config, on_rate)
vbox.addWidget(fee_slider)
cb = QCheckBox(_('Final'))
vbox.addWidget(cb)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
is_final = cb.isChecked()
new_fee = fee_e.get_amount()
delta = new_fee - fee
if delta < 0:
self.show_error("fee too low")
return
try:
new_tx = self.wallet.bump_fee(tx, delta)
except BaseException as e:
traceback.print_exc(file=sys.stderr)
self.show_error(_('Error bumping fee') + ':\n' + str(e))
return
if is_final:
new_tx.set_rbf(False)
self.show_transaction(new_tx, tx_label)
def save_transaction_into_wallet(self, tx):
win = self.top_level_window()
try:
if not self.wallet.add_transaction(tx.txid(), tx):
win.show_error(_("Transaction could not be saved.") + "\n" +
_("It conflicts with current history."))
return False
except AddTransactionException as e:
win.show_error(e)
return False
else:
self.wallet.save_transactions(write=True)
# need to update at least: history_list, utxo_list, address_list
self.need_update.set()
msg = (_("Transaction added to wallet history.") + '\n\n' +
_("Note: this is an offline transaction, if you want the network "
"to see it, you need to broadcast it."))
win.msg_box(QPixmap(icon_path("offline_tx.png")), None, _('Success'), msg)
return True
def create_tokens_tab(self):
from .token_list import TokenBalanceList, TokenHistoryList
self.token_balance_list = tbl = TokenBalanceList(self)
self.token_hist_list = thl = TokenHistoryList(self)
splitter = QSplitter(self)
splitter.addWidget(tbl)
splitter.addWidget(thl)
splitter.setOrientation(Qt.Vertical)
return splitter
def set_token(self, token: Token):
self.wallet.add_token(token)
self.token_balance_list.update()
self.token_hist_list.update()
def delete_token(self, key: str):
if not self.question(_("Remove {} from your list of tokens?".format(
self.tokens[key].name))):
return False
self.wallet.delete_token(key)
self.token_balance_list.update()
self.token_hist_list.update()
def token_add_dialog(self):
d = TokenAddDialog(self)
d.show()
def token_view_dialog(self, token: Token):
d = TokenInfoDialog(self, token)
d.show()
def token_send_dialog(self, token: Token):
d = TokenSendDialog(self, token)
d.show()
def do_token_pay(self, token, pay_to, amount, gas_limit, gas_price, dialog, preview=False):
try:
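            # Build the ERC20-style call data: 'a9059cbb' is the transfer(address,uint256) selector, followed by the recipient hash160 left-padded to 32 bytes and the amount as a 32-byte big-endian integer.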
datahex = 'a9059cbb{}{:064x}'.format(pay_to.zfill(64), amount)
script = contract_script(gas_limit, gas_price, datahex, token.contract_addr, opcodes.OP_CALL)
outputs = [TxOutput(TYPE_SCRIPT, script, 0), ]
tx_desc = _('pay out {} {}').format(amount / (10 ** token.decimals), token.symbol)
self._smart_contract_broadcast(outputs, tx_desc, gas_limit * gas_price,
token.bind_addr, dialog, None, preview)
except (BaseException,) as e:
traceback.print_exc(file=sys.stderr)
dialog.show_message(str(e))
def _smart_contract_broadcast(self, outputs, desc, gas_fee, sender, dialog, broadcast_done=None, preview=False):
coins = self.get_coins()
try:
tx = self.wallet.make_unsigned_transaction(coins, outputs, self.config, None,
change_addr=sender,
gas_fee=gas_fee,
sender=sender)
except NotEnoughFunds:
dialog.show_message(_("Insufficient funds"))
return
except BaseException as e:
traceback.print_exc(file=sys.stdout)
dialog.show_message(str(e))
return
if preview:
self.show_transaction(tx, desc)
return
amount = sum(map(lambda y: y[2], outputs))
fee = tx.get_fee()
if fee < self.wallet.relayfee() * tx.estimated_size() / 1000:
dialog.show_message(
_("This transaction requires a higher fee, or it will not be propagated by the network"))
return
# confirmation dialog
msg = [
_(desc),
_("Mining fee") + ": " + self.format_amount_and_units(fee - gas_fee),
_("Gas fee") + ": " + self.format_amount_and_units(gas_fee),
]
confirm_rate = bitcoin.FEERATE_WARNING_HIGH_FEE
if fee - gas_fee > confirm_rate * tx.estimated_size() / 1000:
msg.append(_('Warning') + ': ' + _("The fee for this transaction seems unusually high."))
if self.wallet.has_keystore_encryption():
msg.append("")
msg.append(_("Enter your password to proceed"))
password = self.password_dialog('\n'.join(msg))
if not password:
return
else:
msg.append(_('Proceed?'))
password = None
if not self.question('\n'.join(msg)):
return
def sign_done(success):
if success:
if not tx.is_complete():
self.show_transaction(tx)
self.do_clear()
else:
self.broadcast_transaction(tx, desc)
if broadcast_done:
broadcast_done(tx)
self.sign_tx_with_password(tx, sign_done, password)
def set_smart_contract(self, name: str, address: str, interface: list) -> bool:
if not is_hash160(address):
self.show_error(_('Invalid Address'))
self.smart_contract_list.update()
return False
self.smart_contracts[address] = (name, interface)
self.smart_contract_list.update()
return True
def delete_samart_contact(self, address):
if not self.question(_("Remove {} from your list of smart contracts?".format(
self.smart_contracts[address][0]))):
return False
self.smart_contracts.pop(address)
self.smart_contract_list.update()
return True
def call_smart_contract(self, address, abi, args, sender, dialog):
data = eth_abi_encode(abi, args)
try:
result = self.network.call_contract(address, data, sender)
except BaseException as e:
import traceback, sys
traceback.print_exc(file=sys.stderr)
dialog.show_message(str(e))
return
types = list([x['type'] for x in abi.get('outputs', [])])
try:
if isinstance(result, dict):
output = eth_abi.decode_abi(types, binascii.a2b_hex(result['executionResult']['output']))
else:
output = eth_abi.decode_abi(types, binascii.a2b_hex(result))
def decode_x(x):
if isinstance(x, bytes):
try:
return x.decode()
except UnicodeDecodeError:
return str(x)
return str(x)
output = ','.join([decode_x(x) for x in output])
dialog.show_message(output)
except (BaseException,) as e:
import traceback, sys
traceback.print_exc(file=sys.stderr)
print(e)
            dialog.show_message('{}\n{}'.format(e, result))
def sendto_smart_contract(self, address, abi, args, gas_limit, gas_price, amount, sender, dialog, preview):
try:
abi_encoded = eth_abi_encode(abi, args)
script = contract_script(gas_limit, gas_price, abi_encoded, address, opcodes.OP_CALL)
outputs = [TxOutput(TYPE_SCRIPT, script, amount), ]
tx_desc = 'contract sendto {}'.format(self.smart_contracts[address][0])
self._smart_contract_broadcast(outputs, tx_desc, gas_limit * gas_price, sender, dialog, None, preview)
except (BaseException,) as e:
import traceback, sys
traceback.print_exc(file=sys.stderr)
dialog.show_message(str(e))
def create_smart_contract(self, name, bytecode, abi, constructor, args, gas_limit, gas_price, sender, dialog, preview):
def broadcast_done(tx):
if is_opcreate_script(bfh(tx.outputs()[0].address)):
reversed_txid = binascii.a2b_hex(tx.txid())[::-1]
output_index = b'\x00\x00\x00\x00'
contract_addr = bh2u(hash_160(reversed_txid + output_index))
self.set_smart_contract(name, contract_addr, abi)
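# Editor's note (derived from the calls above, not in the original source): the new
# contract address is hash_160(txid in little-endian byte order + 4-byte output index),
# i.e. RIPEMD-160 over SHA-256 of that concatenation; this appears to follow the usual
# Qtum convention for addresses created by OP_CREATE outputs.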
try:
abi_encoded = ''
if constructor:
abi_encoded = eth_abi_encode(constructor, args)
script = contract_script(gas_limit, gas_price, bytecode + abi_encoded, None, opcodes.OP_CREATE)
outputs = [TxOutput(TYPE_SCRIPT, script, 0), ]
self._smart_contract_broadcast(outputs, 'create contract {}'.format(name), gas_limit * gas_price,
sender, dialog, broadcast_done, preview)
except (BaseException,) as e:
import traceback, sys
traceback.print_exc(file=sys.stderr)
dialog.show_message(str(e))
def contract_create_dialog(self):
d = ContractCreateDialog(self)
d.show()
def contract_add_dialog(self):
d = ContractEditDialog(self)
d.show()
def contract_edit_dialog(self, address):
name, interface = self.smart_contracts[address]
contract = {
'name': name,
'interface': interface,
'address': address
}
d = ContractEditDialog(self, contract)
d.show()
def contract_func_dialog(self, address):
name, interface = self.smart_contracts[address]
contract = {
'name': name,
'interface': interface,
'address': address
}
d = ContractFuncDialog(self, contract)
d.show()
|
py | b40d3f051c0136b2a1c479f53be5ece2eb9e1e4f | from __future__ import absolute_import, unicode_literals
import hashlib
import json
import os
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.shortcuts import render
from django.utils.encoding import python_2_unicode_compatible
from django.utils.six import text_type
from modelcluster.contrib.taggit import ClusterTaggableManager
from modelcluster.fields import ParentalKey, ParentalManyToManyField
from modelcluster.models import ClusterableModel
from taggit.managers import TaggableManager
from taggit.models import TaggedItemBase
from wagtail.contrib.settings.models import BaseSetting, register_setting
from wagtail.wagtailadmin.edit_handlers import (
FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel,
TabbedInterface)
from wagtail.wagtailadmin.forms import WagtailAdminPageForm
from wagtail.wagtailadmin.utils import send_mail
from wagtail.wagtailcore.blocks import CharBlock, RichTextBlock
from wagtail.wagtailcore.fields import RichTextField, StreamField
from wagtail.wagtailcore.models import Orderable, Page, PageManager
from wagtail.wagtaildocs.edit_handlers import DocumentChooserPanel
from wagtail.wagtailforms.models import AbstractEmailForm, AbstractFormField, AbstractFormSubmission
from wagtail.wagtailimages.blocks import ImageChooserBlock
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailimages.models import AbstractImage, AbstractRendition, Image
from wagtail.wagtailsearch import index
from wagtail.wagtailsnippets.edit_handlers import SnippetChooserPanel
from wagtail.wagtailsnippets.models import register_snippet
from .forms import ValidatedPageForm
EVENT_AUDIENCE_CHOICES = (
('public', "Public"),
('private', "Private"),
)
COMMON_PANELS = (
FieldPanel('slug'),
FieldPanel('seo_title'),
FieldPanel('show_in_menus'),
FieldPanel('search_description'),
)
# Link fields
class LinkFields(models.Model):
link_external = models.URLField("External link", blank=True)
link_page = models.ForeignKey(
'wagtailcore.Page',
null=True,
blank=True,
related_name='+',
on_delete=models.CASCADE
)
link_document = models.ForeignKey(
'wagtaildocs.Document',
null=True,
blank=True,
related_name='+',
on_delete=models.CASCADE
)
@property
def link(self):
if self.link_page:
return self.link_page.url
elif self.link_document:
return self.link_document.url
else:
return self.link_external
panels = [
FieldPanel('link_external'),
PageChooserPanel('link_page'),
DocumentChooserPanel('link_document'),
]
class Meta:
abstract = True
# Carousel items
class CarouselItem(LinkFields):
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
embed_url = models.URLField("Embed URL", blank=True)
caption = models.CharField(max_length=255, blank=True)
panels = [
ImageChooserPanel('image'),
FieldPanel('embed_url'),
FieldPanel('caption'),
MultiFieldPanel(LinkFields.panels, "Link"),
]
class Meta:
abstract = True
# Related links
class RelatedLink(LinkFields):
title = models.CharField(max_length=255, help_text="Link title")
panels = [
FieldPanel('title'),
MultiFieldPanel(LinkFields.panels, "Link"),
]
class Meta:
abstract = True
# Simple page
class SimplePage(Page):
content = models.TextField()
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('content'),
]
class PageWithOldStyleRouteMethod(Page):
"""
Prior to Wagtail 0.4, the route() method on Page returned an HttpResponse
rather than a Page instance. As subclasses of Page may override route,
we need to continue accepting this convention (albeit as a deprecated API).
"""
content = models.TextField()
template = 'tests/simple_page.html'
def route(self, request, path_components):
return self.serve(request)
# File page
class FilePage(Page):
file_field = models.FileField()
FilePage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('file_field'),
]
# Event page
class EventPageCarouselItem(Orderable, CarouselItem):
page = ParentalKey('tests.EventPage', related_name='carousel_items', on_delete=models.CASCADE)
class EventPageRelatedLink(Orderable, RelatedLink):
page = ParentalKey('tests.EventPage', related_name='related_links', on_delete=models.CASCADE)
class EventPageSpeaker(Orderable, LinkFields):
page = ParentalKey('tests.EventPage', related_name='speakers', on_delete=models.CASCADE)
first_name = models.CharField("Name", max_length=255, blank=True)
last_name = models.CharField("Surname", max_length=255, blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
@property
def name_display(self):
return self.first_name + " " + self.last_name
panels = [
FieldPanel('first_name'),
FieldPanel('last_name'),
ImageChooserPanel('image'),
MultiFieldPanel(LinkFields.panels, "Link"),
]
@python_2_unicode_compatible
class EventCategory(models.Model):
name = models.CharField("Name", max_length=255)
def __str__(self):
return self.name
# Override the standard WagtailAdminPageForm to add validation on start/end dates
# that appears as a non-field error
class EventPageForm(WagtailAdminPageForm):
def clean(self):
cleaned_data = super(EventPageForm, self).clean()
# Make sure that the event starts before it ends
start_date = cleaned_data['date_from']
end_date = cleaned_data['date_to']
if start_date and end_date and start_date > end_date:
raise ValidationError('The end date must be after the start date')
return cleaned_data
class EventPage(Page):
date_from = models.DateField("Start date", null=True)
date_to = models.DateField(
"End date",
null=True,
blank=True,
help_text="Not required if event is on a single day"
)
time_from = models.TimeField("Start time", null=True, blank=True)
time_to = models.TimeField("End time", null=True, blank=True)
audience = models.CharField(max_length=255, choices=EVENT_AUDIENCE_CHOICES)
location = models.CharField(max_length=255)
body = RichTextField(blank=True)
cost = models.CharField(max_length=255)
signup_link = models.URLField(blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
categories = ParentalManyToManyField(EventCategory, blank=True)
search_fields = [
index.SearchField('get_audience_display'),
index.SearchField('location'),
index.SearchField('body'),
]
password_required_template = 'tests/event_page_password_required.html'
base_form_class = EventPageForm
EventPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('date_from'),
FieldPanel('date_to'),
FieldPanel('time_from'),
FieldPanel('time_to'),
FieldPanel('location'),
FieldPanel('audience'),
FieldPanel('cost'),
FieldPanel('signup_link'),
InlinePanel('carousel_items', label="Carousel items"),
FieldPanel('body', classname="full"),
InlinePanel('speakers', label="Speakers"),
InlinePanel('related_links', label="Related links"),
FieldPanel('categories'),
]
EventPage.promote_panels = [
MultiFieldPanel(COMMON_PANELS, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
# Just to be able to test multi table inheritance
class SingleEventPage(EventPage):
excerpt = models.TextField(
max_length=255,
blank=True,
null=True,
help_text="Short text to describe what is this action about"
)
# Give this page model a custom URL routing scheme
def get_url_parts(self):
url_parts = super(SingleEventPage, self).get_url_parts()
if url_parts is None:
return None
else:
site_id, root_url, page_path = url_parts
return (site_id, root_url, page_path + 'pointless-suffix/')
def route(self, request, path_components):
if path_components == ['pointless-suffix']:
# treat this as equivalent to a request for this page
return super(SingleEventPage, self).route(request, [])
else:
# fall back to default routing rules
return super(SingleEventPage, self).route(request, path_components)
SingleEventPage.content_panels = [FieldPanel('excerpt')] + EventPage.content_panels
# Event index (has a separate AJAX template, and a custom template context)
class EventIndex(Page):
intro = RichTextField(blank=True)
ajax_template = 'tests/includes/event_listing.html'
def get_events(self):
return self.get_children().live().type(EventPage)
def get_paginator(self):
return Paginator(self.get_events(), 4)
def get_context(self, request, page=1):
# Pagination
paginator = self.get_paginator()
try:
events = paginator.page(page)
except PageNotAnInteger:
events = paginator.page(1)
except EmptyPage:
events = paginator.page(paginator.num_pages)
# Update context
context = super(EventIndex, self).get_context(request)
context['events'] = events
return context
def route(self, request, path_components):
if self.live and len(path_components) == 1:
try:
return self.serve(request, page=int(path_components[0]))
except (TypeError, ValueError):
pass
return super(EventIndex, self).route(request, path_components)
def get_static_site_paths(self):
# Get page count
page_count = self.get_paginator().num_pages
# Yield a path for each page
for page in range(page_count):
yield '/%d/' % (page + 1)
# Yield from superclass
for path in super(EventIndex, self).get_static_site_paths():
yield path
def get_sitemap_urls(self):
# Add past events url to sitemap
return super(EventIndex, self).get_sitemap_urls() + [
{
'location': self.full_url + 'past/',
'lastmod': self.latest_revision_created_at
}
]
EventIndex.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
]
class FormField(AbstractFormField):
page = ParentalKey('FormPage', related_name='form_fields', on_delete=models.CASCADE)
class FormPage(AbstractEmailForm):
def get_context(self, request):
context = super(FormPage, self).get_context(request)
context['greeting'] = "hello world"
return context
FormPage.content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# FormPage with a non-HTML extension
class JadeFormField(AbstractFormField):
page = ParentalKey('JadeFormPage', related_name='form_fields', on_delete=models.CASCADE)
class JadeFormPage(AbstractEmailForm):
template = "tests/form_page.jade"
JadeFormPage.content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
# FormPage with a custom FormSubmission
class FormPageWithCustomSubmission(AbstractEmailForm):
"""
This form page:
* Has a custom submission model
* Has a custom related_name (see `FormFieldWithCustomSubmission.page`)
* Saves a reference to the submitting user
* Doesn't render the HTML form if a submission for the current user already exists
"""
intro = RichTextField(blank=True)
thank_you_text = RichTextField(blank=True)
def get_context(self, request, *args, **kwargs):
context = super(FormPageWithCustomSubmission, self).get_context(request)
context['greeting'] = "hello world"
return context
def get_form_fields(self):
return self.custom_form_fields.all()
def get_data_fields(self):
data_fields = [
('username', 'Username'),
]
data_fields += super(FormPageWithCustomSubmission, self).get_data_fields()
return data_fields
def get_submission_class(self):
return CustomFormPageSubmission
def process_form_submission(self, form):
self.get_submission_class().objects.create(
form_data=json.dumps(form.cleaned_data, cls=DjangoJSONEncoder),
page=self, user=form.user
)
if self.to_address:
addresses = [x.strip() for x in self.to_address.split(',')]
content = '\n'.join([x[1].label + ': ' + text_type(form.data.get(x[0])) for x in form.fields.items()])
send_mail(self.subject, content, addresses, self.from_address,)
def serve(self, request, *args, **kwargs):
if self.get_submission_class().objects.filter(page=self, user__pk=request.user.pk).exists():
return render(
request,
self.template,
self.get_context(request)
)
return super(FormPageWithCustomSubmission, self).serve(request, *args, **kwargs)
FormPageWithCustomSubmission.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
InlinePanel('custom_form_fields', label="Form fields"),
FieldPanel('thank_you_text', classname="full"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class FormFieldWithCustomSubmission(AbstractFormField):
page = ParentalKey(FormPageWithCustomSubmission, on_delete=models.CASCADE, related_name='custom_form_fields')
class CustomFormPageSubmission(AbstractFormSubmission):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def get_data(self):
form_data = super(CustomFormPageSubmission, self).get_data()
form_data.update({
'username': self.user.username,
})
return form_data
# Snippets
class AdvertPlacement(models.Model):
page = ParentalKey('wagtailcore.Page', related_name='advert_placements', on_delete=models.CASCADE)
advert = models.ForeignKey('tests.Advert', related_name='+', on_delete=models.CASCADE)
colour = models.CharField(max_length=255)
class AdvertTag(TaggedItemBase):
content_object = ParentalKey('Advert', related_name='tagged_items', on_delete=models.CASCADE)
@python_2_unicode_compatible
class Advert(ClusterableModel):
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
tags = TaggableManager(through=AdvertTag, blank=True)
panels = [
FieldPanel('url'),
FieldPanel('text'),
FieldPanel('tags'),
]
def __str__(self):
return self.text
register_snippet(Advert)
@python_2_unicode_compatible
class AdvertWithTabbedInterface(models.Model):
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
something_else = models.CharField(max_length=255)
advert_panels = [
FieldPanel('url'),
FieldPanel('text'),
]
other_panels = [
FieldPanel('something_else'),
]
edit_handler = TabbedInterface([
ObjectList(advert_panels, heading='Advert'),
ObjectList(other_panels, heading='Other'),
])
def __str__(self):
return self.text
class Meta:
ordering = ('text',)
register_snippet(AdvertWithTabbedInterface)
class StandardIndex(Page):
""" Index for the site """
parent_page_types = [Page]
# A custom panel setup where all Promote fields are placed in the Content tab instead;
# we use this to test that the 'promote' tab is left out of the output when empty
StandardIndex.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('seo_title'),
FieldPanel('slug'),
InlinePanel('advert_placements', label="Adverts"),
]
StandardIndex.promote_panels = []
class StandardChild(Page):
pass
# Test overriding edit_handler with a custom one
StandardChild.edit_handler = TabbedInterface([
ObjectList(StandardChild.content_panels, heading='Content'),
ObjectList(StandardChild.promote_panels, heading='Promote'),
ObjectList(StandardChild.settings_panels, heading='Settings', classname='settings'),
ObjectList([], heading='Dinosaurs'),
], base_form_class=WagtailAdminPageForm)
class BusinessIndex(Page):
""" Can be placed anywhere, can only have Business children """
subpage_types = ['tests.BusinessChild', 'tests.BusinessSubIndex']
class BusinessSubIndex(Page):
""" Can be placed under BusinessIndex, and have BusinessChild children """
# BusinessNowherePage is 'incorrectly' added here as a possible child.
# The rules on BusinessNowherePage prevent it from being a child here though.
subpage_types = ['tests.BusinessChild', 'tests.BusinessNowherePage']
parent_page_types = ['tests.BusinessIndex', 'tests.BusinessChild']
class BusinessChild(Page):
""" Can only be placed under Business indexes, no children allowed """
subpage_types = []
parent_page_types = ['tests.BusinessIndex', BusinessSubIndex]
class BusinessNowherePage(Page):
""" Not allowed to be placed anywhere """
parent_page_types = []
class TaggedPageTag(TaggedItemBase):
content_object = ParentalKey('tests.TaggedPage', related_name='tagged_items', on_delete=models.CASCADE)
class TaggedPage(Page):
tags = ClusterTaggableManager(through=TaggedPageTag, blank=True)
TaggedPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('tags'),
]
class SingletonPage(Page):
@classmethod
def can_create_at(cls, parent):
# You can only create one of these!
return super(SingletonPage, cls).can_create_at(parent) \
and not cls.objects.exists()
class PageChooserModel(models.Model):
page = models.ForeignKey('wagtailcore.Page', help_text='help text', on_delete=models.CASCADE)
class EventPageChooserModel(models.Model):
page = models.ForeignKey('tests.EventPage', help_text='more help text', on_delete=models.CASCADE)
class SnippetChooserModel(models.Model):
advert = models.ForeignKey(Advert, help_text='help text', on_delete=models.CASCADE)
panels = [
SnippetChooserPanel('advert'),
]
class CustomImage(AbstractImage):
caption = models.CharField(max_length=255)
not_editable_field = models.CharField(max_length=255)
admin_form_fields = Image.admin_form_fields + (
'caption',
)
class CustomRendition(AbstractRendition):
image = models.ForeignKey(CustomImage, related_name='renditions', on_delete=models.CASCADE)
class Meta:
unique_together = (
('image', 'filter_spec', 'focal_point_key'),
)
class StreamModel(models.Model):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
])
class ExtendedImageChooserBlock(ImageChooserBlock):
"""
Example of Block with custom get_api_representation method.
If the request has an 'extended' query param, it returns a dict of id and title,
otherwise, it returns the default value.
"""
def get_api_representation(self, value, context=None):
image_id = super(ExtendedImageChooserBlock, self).get_api_representation(value, context=context)
if 'request' in context and context['request'].query_params.get('extended', False):
return {
'id': image_id,
'title': value.title
}
return image_id
class StreamPage(Page):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ExtendedImageChooserBlock()),
])
api_fields = ('body',)
content_panels = [
FieldPanel('title'),
StreamFieldPanel('body'),
]
class DefaultStreamPage(Page):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
], default='')
content_panels = [
FieldPanel('title'),
StreamFieldPanel('body'),
]
class MTIBasePage(Page):
is_creatable = False
class Meta:
verbose_name = "MTI Base page"
class MTIChildPage(MTIBasePage):
# Should be creatable by default, no need to set anything
pass
class AbstractPage(Page):
class Meta:
abstract = True
@register_setting
class TestSetting(BaseSetting):
title = models.CharField(max_length=100)
email = models.EmailField(max_length=50)
@register_setting(icon="tag")
class IconSetting(BaseSetting):
pass
class NotYetRegisteredSetting(BaseSetting):
pass
@register_setting
class FileUploadSetting(BaseSetting):
file = models.FileField()
class BlogCategory(models.Model):
name = models.CharField(unique=True, max_length=80)
class BlogCategoryBlogPage(models.Model):
category = models.ForeignKey(BlogCategory, related_name="+", on_delete=models.CASCADE)
page = ParentalKey('ManyToManyBlogPage', related_name='categories', on_delete=models.CASCADE)
panels = [
FieldPanel('category'),
]
class ManyToManyBlogPage(Page):
"""
A page type with two different kinds of M2M relation.
We don't formally support these, but we don't want them to cause
hard breakages either.
"""
body = RichTextField(blank=True)
adverts = models.ManyToManyField(Advert, blank=True)
blog_categories = models.ManyToManyField(
BlogCategory, through=BlogCategoryBlogPage, blank=True)
# make first_published_at editable on this page model
settings_panels = Page.settings_panels + [
FieldPanel('first_published_at'),
]
class OneToOnePage(Page):
"""
A Page containing a O2O relation.
"""
body = RichTextBlock(blank=True)
page_ptr = models.OneToOneField(Page, parent_link=True,
related_name='+', on_delete=models.CASCADE)
class GenericSnippetPage(Page):
"""
A page containing a reference to an arbitrary snippet (or any model for that matter)
linked by a GenericForeignKey
"""
snippet_content_type = models.ForeignKey(ContentType, on_delete=models.SET_NULL, null=True)
snippet_object_id = models.PositiveIntegerField(null=True)
snippet_content_object = GenericForeignKey('snippet_content_type', 'snippet_object_id')
class CustomImageFilePath(AbstractImage):
def get_upload_to(self, filename):
"""Create a path that's file-system friendly.
By hashing the file's contents we guarantee an equal distribution
of files within our root directories. This also gives us a
better chance of uploading images with the same filename, but
different contents - this isn't guaranteed as we're only using
the first three characters of the checksum.
"""
original_filepath = super(CustomImageFilePath, self).get_upload_to(filename)
folder_name, filename = original_filepath.split(os.path.sep)
# Ensure that we consume the entire file; we can't guarantee that
# the stream has not been partially (or entirely) consumed by
# another process
original_position = self.file.tell()
self.file.seek(0)
hash256 = hashlib.sha256()
while True:
data = self.file.read(256)
if not data:
break
hash256.update(data)
checksum = hash256.hexdigest()
self.file.seek(original_position)
return os.path.join(folder_name, checksum[:3], filename)
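# Hedged illustration (hypothetical folder and filename, added by the editor): an image
# whose default path would be "original_images/photo.jpg" is instead stored under a
# checksum-prefixed folder such as "original_images/3a7/photo.jpg", where "3a7" is the
# first three hex digits of the SHA-256 of the file contents.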
class CustomManager(PageManager):
pass
class CustomManagerPage(Page):
objects = CustomManager()
class MyBasePage(Page):
"""
A base Page model, used to set site-wide defaults and overrides.
"""
objects = CustomManager()
class Meta:
abstract = True
class MyCustomPage(MyBasePage):
pass
class ValidatedPage(Page):
foo = models.CharField(max_length=255)
base_form_class = ValidatedPageForm
content_panels = Page.content_panels + [
FieldPanel('foo'),
]
class DefaultRichTextFieldPage(Page):
body = RichTextField()
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class DefaultRichBlockFieldPage(Page):
body = StreamField([
('rich_text', RichTextBlock()),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body')
]
class CustomRichTextFieldPage(Page):
body = RichTextField(editor='custom')
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class CustomRichBlockFieldPage(Page):
body = StreamField([
('rich_text', RichTextBlock(editor='custom')),
])
content_panels = [
FieldPanel('title', classname="full title"),
StreamFieldPanel('body'),
]
# a page that only contains RichTextField within an InlinePanel,
# to test that the inline child's form media gets pulled through
class SectionedRichTextPageSection(Orderable):
page = ParentalKey('tests.SectionedRichTextPage', related_name='sections', on_delete=models.CASCADE)
body = RichTextField()
panels = [
FieldPanel('body')
]
class SectionedRichTextPage(Page):
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('sections')
]
class InlineStreamPageSection(Orderable):
page = ParentalKey('tests.InlineStreamPage', related_name='sections', on_delete=models.CASCADE)
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
])
panels = [
StreamFieldPanel('body')
]
class InlineStreamPage(Page):
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('sections')
]
class UserProfile(models.Model):
# Wagtail's schema must be able to coexist alongside a custom UserProfile model
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
favourite_colour = models.CharField(max_length=255)
class PanelSettings(TestSetting):
panels = [
FieldPanel('title')
]
class TabbedSettings(TestSetting):
edit_handler = TabbedInterface([
ObjectList([
FieldPanel('title')
], heading='First tab'),
ObjectList([
FieldPanel('email')
], heading='Second tab'),
])
class AlwaysShowInMenusPage(Page):
show_in_menus_default = True
|
py | b40d3f73fe8f937ae8b446117acb3e28f37a682a | """
Parsing-related functionality.
"""
from collections import Counter
import datetime
from operator import itemgetter
import os
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any, List, NamedTuple, Tuple
from tqdm import tqdm
import tree_sitter
from .parsers.utils import get_parser
from .slicing import get_dates, checkout_by_date
NODE_TYPES = {"c": ["identifier", "type_identifier"],
"c-sharp": ["identifier", "type_identifier"],
"cpp": ["identifier", "type_identifier"],
"java": ["identifier", "type_identifier"],
"python": ["identifier", "type_identifier"]}
SliceLine = NamedTuple("SliceLine", [("date", str), ("start_index", int), ("end_index", int)])
TokenLine = NamedTuple("TokenLine", [("index", int), ("path", str), ("tokens", str)])
def parse_slice_line(slice_line: str) -> SliceLine:
"""
Transform a line in the Slices file into the SliceLine format.
:param slice_line: a line in the Slices file.
:return: SliceLine object.
"""
line_list = slice_line.rstrip().split(";")
return SliceLine(line_list[0], int(line_list[1]), int(line_list[2]))
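# Illustrative example (invented date and indices): a Slices file line has the shape
# "YYYY-MM-DD;start_index;end_index", so:
# >>> parse_slice_line("2015-01-01;1;42")
# SliceLine(date='2015-01-01', start_index=1, end_index=42)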
def parse_token_line(token_line: str) -> TokenLine:
"""
Transform a line in the Tokens file into the TokenLine format.
:param token_line: a line in the Tokens file.
:return: TokenLine object.
"""
line_list = token_line.rstrip().split(";")
return TokenLine(int(line_list[0]), line_list[1], line_list[2])
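# Illustrative example (the path and tokens are made up): a Tokens file line has the
# shape "index;path;token1:count1,token2:count2", so:
# >>> parse_token_line("3;src/main.py;foo:2,bar:1")
# TokenLine(index=3, path='src/main.py', tokens='foo:2,bar:1')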
def get_extensions(lang: str) -> str:
"""
Returns the extension for a given language. TODO: get rid of this and add enry.
:param lang: language name.
:return: the extension.
"""
extensions = {"cpp": "cpp",
"java": "java",
"python": "py"}
return extensions[lang]
def get_files(directory: str, extension: str) -> List[str]:
"""
Get a list of files with a given extension.
:param directory: path to directory.
:param extension: extension for file filtering -
only files with this extension will be preserved.
:return: list of file paths.
"""
dir_path = Path(directory)
files = [str(path) for path in sorted(dir_path.rglob("*." + extension))]
return files
def read_file(file: str) -> bytes:
"""
Read the contents of the file.
:param file: the path to the file.
:return: bytes with the contents of the file.
"""
with open(file) as fin:
return bytes(fin.read(), "utf-8")
def get_positional_bytes(node: tree_sitter.Node) -> Tuple[int, int]:
"""
Extract start and end byte.
:param node: node on the AST.
:return: (start byte, end byte).
"""
start = node.start_byte
end = node.end_byte
return start, end
def get_identifiers(file: str, lang: str) -> List[Tuple[str, int]]:
"""
Gather a sorted list of identifiers in the file and their count.
:param file: the path to the file.
:param lang: the language of file.
:return: a list of tuples, identifier and count.
"""
content = read_file(file)
tree = get_parser(lang).parse(content)
root = tree.root_node
identifiers = []
def traverse_tree(node: tree_sitter.Node) -> None:
"""
Run down the AST (DFS) from a given node and gather identifiers from its children.
:param node: starting node.
:return: None.
"""
for child in node.children:
if child.type in NODE_TYPES[lang]:
start, end = get_positional_bytes(child)
identifier = content[start:end].decode("utf-8").lower()
if "\n" not in identifier: # Will break output files. TODO: try to recreate bug.
identifiers.append(identifier)
if len(child.children) != 0:
traverse_tree(child)
traverse_tree(root)
sorted_identifiers = sorted(Counter(identifiers).items(), key=itemgetter(1), reverse=True)
return sorted_identifiers
def transform_identifiers(identifiers: List[Tuple[str, int]]) -> List[str]:
"""
Transform the original list of identifiers into the writable form.
:param identifiers: list of tuples, identifier and count.
:return: a list of identifiers in the writable form, "identifier:count".
"""
formatted_identifiers = []
for identifier in identifiers:
if identifier[0].rstrip() != "": # Checking for occurring empty tokens.
formatted_identifiers.append("{identifier}:{count}"
.format(identifier=identifier[0].rstrip(),
count=str(identifier[1]).rstrip()))
return formatted_identifiers
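# Usage sketch (made-up identifiers, added for illustration):
# >>> transform_identifiers([("count", 3), ("i", 1)])
# ['count:3', 'i:1']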
def slice_and_parse(repository: str, output_dir: str, dates: List[datetime.datetime],
lang: str, name: str) -> None:
"""
Split the repository, parse the full files, write the data into a file.
Can be called for parsing full files and for parsing diffs only.
When run several times, overwrites the data.
:param repository: path to the repository to process.
:param output_dir: path to the output directory.
:param dates: a list of dates used for slicing.
:param lang: programming language to use.
:param name: name of the dataset.
:return: None.
"""
print("Creating the temporal slices of the data.")
assert os.path.exists(os.path.abspath(os.path.join(repository, ".git")))
# Create a folder for created files
if not os.path.exists(output_dir):
os.makedirs(output_dir)
dates_indices = {}
count = 0
# Create temporal slices of the project, get a list of files for each slice,
# parse all files, save the tokens
with open(os.path.abspath(os.path.join(output_dir, name + "_tokens.txt")), "w+") as fout:
for date in tqdm(dates):
with TemporaryDirectory() as td:
subdirectory = os.path.abspath(os.path.join(td, date.strftime("%Y-%m-%d")))
checkout_by_date(repository, subdirectory, date)
files = get_files(subdirectory, get_extensions(lang))
start_index = count + 1
for file in files:
if os.path.isfile(file): # TODO: implement a better file-checking mechanism
try:
identifiers = get_identifiers(file, lang)
if len(identifiers) != 0:
count += 1
formatted_identifiers = transform_identifiers(identifiers)
fout.write("{file_index};{file_path};{tokens}\n"
.format(file_index=str(count),
file_path=os.path.relpath(file, os.path.abspath(
os.path.join(output_dir, td))),
tokens=",".join(formatted_identifiers)))
except UnicodeDecodeError:
continue
end_index = count
dates_indices[date.strftime("%Y-%m-%d")] = (start_index, end_index)
# Write the index boundaries of slices into a separate log file
print("Writing the index boundaries of slices into an auxiliary file.")
with open(os.path.abspath(os.path.join(output_dir, name + "_slices.txt")), "w+") as fout:
for date in dates_indices.keys():
if dates_indices[date][1] >= dates_indices[date][0]: # Skips empty slices
fout.write("{date};{start_index};{end_index}\n"
.format(date=date, start_index=str(dates_indices[date][0]),
end_index=str(dates_indices[date][1])))
def split_token_file(slices_file: str, tokens_file: str, output_dir: str) -> None:
"""
Split a single file with tokens into several by the date of the slice. For example, if the
slices in the file are 2015-01-01, 2015-02-01, and 2015-03-01,
it will divide the file into three.
:param slices_file: the path to the file with the indices of the slices.
:param tokens_file: the path to the temporary file with tokens.
:param output_dir: path to the output directory.
:return: None.
"""
print("Splitting the tokens of full files by versions.")
slice_number = 0
date2indices = {}
if not os.path.exists(output_dir):
os.makedirs(output_dir)
# Read the data about the indices boundaries of slices
with open(slices_file) as fin:
for line in fin:
slice_number = slice_number + 1
slice_line = parse_slice_line(line)
date2indices[slice_number] = (slice_line.start_index, slice_line.end_index)
# Write the tokens of each slice into a separate file, numbered incrementally
for date in tqdm(date2indices.keys()):
with open(tokens_file) as fin, \
open(os.path.abspath(os.path.join(output_dir, str(date) + ".txt")), "w+") as fout:
for line in fin:
token_line = parse_token_line(line)
if (token_line.index >= date2indices[date][0]) and (
token_line.index <= date2indices[date][1]):
fout.write(line)
def read_tokens_counter(tokens: str) -> Counter:
"""
Transform a string of tokens 'token1:count1,token2:count2' into a Counter object.
:param tokens: input string of tokens 'token1:count1,token2:count2'.
:return: Counter object of token tuples (token, count).
"""
counter_tokens = Counter()
for token_count in tokens.split(","):
token, count = token_count.split(":")
counter_tokens[token] = int(count)
return counter_tokens
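# Usage sketch (made-up tokens, added for illustration):
# >>> read_tokens_counter("foo:2,bar:1")
# Counter({'foo': 2, 'bar': 1})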
def differentiate_tokens(tokens: List[Tuple[str, int]], sign: str,
new_tokens: List[Any]) -> List[Tuple[str, int]]:
"""
Transform the list of tuples (token, count) into the same list,
but adding the necessary sign before each token (+ or -).
:param tokens: input list of tuples (token, count).
:param sign: sign of token, one of two: + or -.
:param new_tokens: output list to append the results to.
:return: list of differentiated tuples (+/-token, count).
"""
assert sign in ["+", "-"]
for token in tokens:
new_token = sign + token[0]
new_tokens.append([new_token, token[1]])
return new_tokens
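# Illustrative example (made-up token): added tokens get a "+" prefix and deleted tokens
# a "-" prefix so both kinds can share one diff line:
# >>> differentiate_tokens([("foo", 2)], "+", [])
# [['+foo', 2]]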
def calculate_diffs(slices_tokens_dir: str, output_dir: str, name: str,
dates: List[datetime.datetime]) -> None:
"""
Given the temporary token files of individual slices (one file per slice, containing the tokens
of every file in that slice), transform this data into a single tokens file that covers every
slice except the first one. For each slice and each file in it, only the changed tokens are
saved: new tokens as '+token', deleted tokens as '-token'.
:param slices_tokens_dir: the directory with token files split by slices.
:param output_dir: path to the output directory.
:param name: name of the processed dataset.
:param dates: a list of dates used for slicing.
:return: None.
"""
print("Calculating the diffs between versions and transforming the token lists.")
diff_indices = {}
count_index_diff = 0
with open(os.path.abspath(os.path.join(output_dir, name + "_diffs_tokens.txt")), "w+") as fout:
for date in tqdm(range(2, len(dates) + 1)):
start_index_diff = count_index_diff + 1
# Save the tokens of the "previous" slice into memory
previous_version = {}
with open(os.path.abspath(
os.path.join(slices_tokens_dir, str(date - 1) + ".txt"))) as fin:
for line in fin:
token_line = parse_token_line(line)
previous_version[token_line.path] = read_tokens_counter(token_line.tokens)
current_version = []
with open(os.path.abspath(os.path.join(slices_tokens_dir, str(date) + ".txt"))) as fin:
for line in fin:
# Iterate over files in the "current" version
token_line = parse_token_line(line)
current_version.append(token_line.path)
tokens = read_tokens_counter(token_line.tokens)
old_path = token_line.path.replace(dates[date - 1].strftime("%Y-%m-%d"),
dates[date - 2].strftime("%Y-%m-%d"), 1)
# Check if the file with this name existed in the previous version
if old_path in previous_version.keys():
old_tokens = previous_version[old_path]
# Calculate which tokens have been added and removed between versions
created_tokens = sorted((tokens - old_tokens).items(), key=itemgetter(1),
reverse=True)
deleted_tokens = sorted((old_tokens - tokens).items(), key=itemgetter(1),
reverse=True)
new_tokens = []
if len(created_tokens) != 0:
new_tokens = differentiate_tokens(created_tokens, "+", new_tokens)
if len(deleted_tokens) != 0:
new_tokens = differentiate_tokens(deleted_tokens, "-", new_tokens)
# If the file is new, all of its tokens are considered created
else:
tokens = sorted(tokens.items(), key=itemgetter(1), reverse=True)
new_tokens = []
new_tokens = differentiate_tokens(tokens, "+", new_tokens)
if len(new_tokens) != 0:
formatted_new_tokens = transform_identifiers(new_tokens)
count_index_diff = count_index_diff + 1
fout.write("{file_index};{file_path};{tokens}\n"
.format(file_index=str(count_index_diff),
file_path=token_line.path,
tokens=",".join(formatted_new_tokens)))
# Iterate over files in the "previous" version to see which have been deleted
for old_path in previous_version.keys():
new_path = old_path.replace(dates[date - 2].strftime("%Y-%m-%d"),
dates[date - 1].strftime("%Y-%m-%d"), 1)
if new_path not in current_version:
tokens = sorted(previous_version[old_path].items(), key=itemgetter(1),
reverse=True)
new_tokens = []
new_tokens = differentiate_tokens(tokens, "-", new_tokens)
formatted_new_tokens = transform_identifiers(new_tokens)
count_index_diff = count_index_diff + 1
fout.write("{file_index};{file_path};{tokens}\n"
.format(file_index=str(count_index_diff),
file_path=old_path,
tokens=",".join(formatted_new_tokens)))
end_index_diff = count_index_diff
diff_indices[dates[date - 1].strftime("%Y-%m-%d")] = (start_index_diff, end_index_diff)
# Write the index boundaries of slices into a separate log file
print("Writing the index boundaries of slices into an auxiliary file (updated).")
with open(os.path.abspath(os.path.join(output_dir, name + "_diffs_slices.txt")), "w+") as fout:
for date in diff_indices.keys():
if diff_indices[date][1] >= diff_indices[date][0]: # Skips empty slices
fout.write("{date};{start_index};{end_index}\n"
.format(date=date, start_index=str(diff_indices[date][0]),
end_index=str(diff_indices[date][1])))
def uci_format(tokens_file: str, output_dir: str, name: str) -> None:
"""
Transform the file with tokens into the UCI bag-of-words format. The format consists of two
files: the first one lists all the tokens in the dataset alphabetically, and the second one
lists all the triplets document-token-count, ranged first by documents, then by tokens.
:param tokens_file: the path to the temporary file with tokens.
:param output_dir: path to the output directory.
:param name: name of the processed dataset.
:return: None.
"""
print("Transforming the data into the UCI format for topic-modeling.")
n_nnz = 0
set_of_tokens = set()
# Compile a list of all tokens in the dataset for a sorted list
with open(tokens_file) as fin:
for n_documents, line in enumerate(fin, start=1):
token_line = parse_token_line(line)
for token in token_line.tokens.split(","):
n_nnz = n_nnz + 1
set_of_tokens.add(token.split(":")[0])
n_tokens = len(set_of_tokens)
# Sort the list of tokens, transform them to indexes and write to file
sorted_list_of_tokens = sorted(list(set_of_tokens))
sorted_dictionary_of_tokens = {}
with open(os.path.abspath(os.path.join(output_dir, "vocab." + name + ".txt")), "w+") as fout:
for index in range(len(sorted_list_of_tokens)):
sorted_dictionary_of_tokens[sorted_list_of_tokens[index]] = index + 1
fout.write(sorted_list_of_tokens[index] + "\n")
# Compile the second necessary file: NNZ triplets sorted by document
with open(tokens_file) as fin, open(
os.path.abspath(os.path.join(output_dir, "docword." + name + ".txt")), "w+") as fout:
fout.write(str(n_documents) + "\n" + str(n_tokens) + "\n" + str(n_nnz) + "\n")
for line in tqdm(fin):
token_line = parse_token_line(line)
file_tokens = token_line.tokens.split(",")
file_tokens_separated = []
file_tokens_separated_numbered = []
for entry in file_tokens:
file_tokens_separated.append(entry.split(":"))
for entry in file_tokens_separated:
file_tokens_separated_numbered.append(
[sorted_dictionary_of_tokens[entry[0]], int(entry[1])])
file_tokens_separated_numbered = sorted(file_tokens_separated_numbered,
key=itemgetter(0), reverse=False)
for entry in file_tokens_separated_numbered:
fout.write("{doc_id} {token_id} {count}\n".format(doc_id=str(line.split(";")[0]),
token_id=str(entry[0]),
count=str(entry[1])))
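# Editor's sketch of the resulting UCI bag-of-words files (values are invented):
#   vocab.<name>.txt   one token per line, sorted alphabetically
#   docword.<name>.txt three header lines (n_documents, n_tokens, n_nnz) followed by
#                      "doc_id token_id count" triplets, e.g. "1 17 3"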
def slice_and_parse_full_files(repository: str, output_dir: str, n_dates: int,
day_delta: int, lang: str, name: str,
start_date: str = None) -> None:
"""
Split the repository, parse the full files, write the data into a file,
transform into the UCI format.
:param repository: path to the repository to process.
:param output_dir: path to the output directory.
:param n_dates: number of dates.
:param day_delta: the number of days between dates.
:param start_date: the starting (latest) date of the slicing, in the format YYYY-MM-DD,
the default value is the moment of calling.
:param lang: programming language to use.
:param name: name of the dataset.
:return: None.
"""
dates = get_dates(n_dates, day_delta, start_date)
tokens_file = os.path.abspath(os.path.join(output_dir, name + "_tokens.txt"))
slice_and_parse(repository, output_dir, dates, lang, name)
uci_format(tokens_file, output_dir, name)
print("Finished data preprocessing.")
def slice_and_parse_diffs(repository: str, output_dir: str, n_dates: int,
day_delta: int, lang: str, name: str, start_date: str = None) -> None:
"""
Split the repository, parse the full files, extract the diffs,
write the data into a file, transform into the UCI format.
:param repository: path to the repository to process.
:param output_dir: path to the output directory.
:param n_dates: number of dates.
:param day_delta: the number of days between dates.
:param start_date: the starting (latest) date of the slicing, in the format YYYY-MM-DD,
the default value is the moment of calling.
:param lang: programming language to use.
:param name: name of the dataset.
:return: None.
"""
dates = get_dates(n_dates, day_delta, start_date)
slices_file = os.path.abspath(os.path.join(output_dir, name + "_slices.txt"))
tokens_file = os.path.abspath(os.path.join(output_dir, name + "_tokens.txt"))
slices_tokens_dir = os.path.abspath(os.path.join(output_dir, name + "_slices_tokens"))
tokens_file_diffs = os.path.abspath(os.path.join(output_dir, name + "_diffs_tokens.txt"))
slice_and_parse(repository, output_dir, dates, lang, name)
split_token_file(slices_file, tokens_file, slices_tokens_dir)
calculate_diffs(slices_tokens_dir, output_dir, name, dates)
uci_format(tokens_file_diffs, output_dir, name + "_diffs")
print("Finished data preprocessing.")
|
py | b40d40cddc1c0b578cd237d0621db3d646db92a6 | #!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
raise RuntimeError('setuptools is required')
import versioneer
DESCRIPTION = ('Pyvisco is a Python library that supports Prony series ' +
'identification for linear viscoelastic material models.')
LONG_DESCRIPTION = """
Pyvisco is a Python library that supports the identification of Prony series
parameters for linear viscoelastic materials described by a Generalized Maxwell
model. The necessary material model parameters are identified by fitting a Prony
series to the experimental measurement data in either the frequency-domain
(via Dynamic Mechanical Thermal Analysis) or time-domain (via relaxation
measurements). Pyvisco performs the necessary data processing of the
experimental measurements, mathematical operations, and curve-fitting routines
to identify the Prony series parameters. These parameters are used in subsequent
Finite Element simulations involving linear viscoelastic material models that
accurately describe the mechanical behavior of polymeric materials such as
encapsulants and backsheets of PV modules. An optional minimization routine is
included to reduce the number of Prony elements. This routine is helpful in
large Finite Element simulations where reducing the computational complexity of
the linear viscoelastic material models can shorten the simulation time.
Documentation: https://pyvisco.readthedocs.io
Source code: https://github.com/NREL/pyvisco
"""
DISTNAME = 'pyvisco'
AUTHOR = "Martin Springer"
AUTHOR_EMAIL = '[email protected]'
MAINTAINER = "Martin Springer"
MAINTAINER_EMAIL = '[email protected]'
LICENSE = 'BSD-3'
URL = 'https://github.com/NREL/pyvisco'
INSTALL_REQUIRES = [
'jupyter',
'matplotlib',
'numpy',
'pandas',
'scipy',
'ipython',
'ipywidgets',
'ipympl',
'voila',
'xlrd',
'Markdown',
]
DOCS_REQUIRE = [
'sphinx == 4.3.1', 'm2r2',
]
EXTRAS_REQUIRE = {
'doc': DOCS_REQUIRE
}
EXTRAS_REQUIRE['all'] = sorted(set(sum(EXTRAS_REQUIRE.values(), [])))
SETUP_REQUIRES = ['setuptools_scm']
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering'
]
KEYWORDS = [
'curve-fitting',
'material-modelling',
'viscoelasticity',
'prony'
]
PROJECT_URLS = {
"Documentation": "https://pyvisco.readthedocs.io/",
"Source Code": "https://github.com/NREL/pyvisco/",
"Web application" : "https://pyvisco.herokuapp.com/"
}
PACKAGES = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
setup(
name=DISTNAME,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
use_scm_version=True,
packages=PACKAGES,
keywords=KEYWORDS,
install_requires=INSTALL_REQUIRES,
extras_require=EXTRAS_REQUIRE,
#tests_require=TESTS_REQUIRE,
setup_requires=SETUP_REQUIRES,
ext_modules=[],
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author=AUTHOR,
author_email=MAINTAINER_EMAIL,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
license=LICENSE,
classifiers=CLASSIFIERS,
url=URL,
project_urls=PROJECT_URLS,
include_package_data=True,
python_requires='>=3.7',
)
|
py | b40d40f51fecfb2eb1a6d618f7877f550fe47c3c | # Copyright 2021 solo-learn development team.
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies
# or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import argparse
import pytorch_lightning as pl
from solo.args.dataset import (
augmentations_args,
custom_dataset_args,
dataset_args,
linear_augmentations_args,
)
from solo.args.utils import additional_setup_linear, additional_setup_pretrain
from solo.methods import METHODS
from solo.utils.auto_resumer import AutoResumer
from solo.utils.checkpointer import Checkpointer
try:
from solo.utils.auto_umap import AutoUMAP
except ImportError:
_umap_available = False
else:
_umap_available = True
def parse_args_pretrain() -> argparse.Namespace:
"""Parses dataset, augmentation, pytorch lightning, model specific and additional args.
First adds shared args such as dataset, augmentation and pytorch lightning args, then pulls the
model name from the command and proceeds to add model specific args from the desired class. If
checkpoint saving is enabled, it adds checkpointer args. Finally, adds additional non-user given parameters.
Returns:
argparse.Namespace: a namespace containing all args needed for pretraining.
"""
parser = argparse.ArgumentParser()
# add shared arguments
dataset_args(parser)
augmentations_args(parser)
custom_dataset_args(parser)
# add pytorch lightning trainer args
parser = pl.Trainer.add_argparse_args(parser)
# add method-specific arguments
parser.add_argument("--method", type=str)
# THIS LINE IS KEY TO PULL THE MODEL NAME
temp_args, _ = parser.parse_known_args()
# add model specific args
parser = METHODS[temp_args.method].add_model_specific_args(parser)
# add auto checkpoint/umap args
parser.add_argument("--save_checkpoint", action="store_true")
parser.add_argument("--auto_umap", action="store_true")
parser.add_argument("--auto_resume", action="store_true")
temp_args, _ = parser.parse_known_args()
# optionally add checkpointer and AutoUMAP args
if temp_args.save_checkpoint:
parser = Checkpointer.add_checkpointer_args(parser)
if _umap_available and temp_args.auto_umap:
parser = AutoUMAP.add_auto_umap_args(parser)
if temp_args.auto_resume:
parser = AutoResumer.add_autoresumer_args(parser)
# parse args
args = parser.parse_args()
# prepare arguments with additional setup
additional_setup_pretrain(args)
return args
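# Hedged usage sketch (added by the editor; the entry-point script name and method value
# are placeholders, and only flags defined in this function are shown):
#   python <entry_script>.py --method <method_name> --save_checkpoint --auto_resume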
def parse_args_linear() -> argparse.Namespace:
"""Parses feature extractor, dataset, pytorch lightning, linear eval specific and additional args.
First adds an arg for the pretrained feature extractor, then adds dataset, pytorch lightning
and linear eval specific args. If checkpoint saving is enabled, it adds checkpointer args.
Finally, adds additional non-user given parameters.
Returns:
argparse.Namespace: a namespace containing all args needed for linear evaluation.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--pretrained_feature_extractor", type=str)
# add shared arguments
dataset_args(parser)
linear_augmentations_args(parser)
custom_dataset_args(parser)
# add pytorch lightning trainer args
parser = pl.Trainer.add_argparse_args(parser)
# linear model
parser = METHODS["linear"].add_model_specific_args(parser)
# THIS LINE IS KEY TO PULL WANDB AND SAVE_CHECKPOINT
parser.add_argument("--save_checkpoint", action="store_true")
temp_args, _ = parser.parse_known_args()
# optionally add checkpointer
if temp_args.save_checkpoint:
parser = Checkpointer.add_checkpointer_args(parser)
# parse args
args = parser.parse_args()
additional_setup_linear(args)
return args
def parse_args_knn() -> argparse.Namespace:
"""Parses arguments for offline K-NN.
Returns:
argparse.Namespace: a namespace containing all args needed for offline K-NN evaluation.
"""
parser = argparse.ArgumentParser()
# add knn args
parser.add_argument("--pretrained_checkpoint_dir", type=str)
parser.add_argument("--batch_size", type=int, default=16)
parser.add_argument("--num_workers", type=int, default=10)
parser.add_argument("--k", type=int, nargs="+")
parser.add_argument("--temperature", type=float, nargs="+")
parser.add_argument("--distance_function", type=str, nargs="+")
parser.add_argument("--feature_type", type=str, nargs="+")
# add shared arguments
dataset_args(parser)
custom_dataset_args(parser)
# parse args
args = parser.parse_args()
return args
def parse_args_umap() -> argparse.Namespace:
"""Parses arguments for offline UMAP.
Returns:
argparse.Namespace: a namespace containing all args needed for offline UMAP.
"""
parser = argparse.ArgumentParser()
# add knn args
parser.add_argument("--pretrained_checkpoint_dir", type=str)
parser.add_argument("--batch_size", type=int, default=16)
parser.add_argument("--num_workers", type=int, default=10)
# add shared arguments
dataset_args(parser)
custom_dataset_args(parser)
# parse args
args = parser.parse_args()
return args
|
py | b40d434a3c37488095e120706b0dbd71d729ea3c |
import xmltodict as xmld #JMDict parsing
from gzip import GzipFile #JMDict streaming
import pickle #cache serialization
import romkan #kana/romaji conversion
def loopOn(input):
if isinstance(input,list):
for i in input:
yield i;
else: yield input;
class Dictionary:
def __init__(self, jmfile, kjfile):
self.jmfile = jmfile;
self.kjfile = kjfile;
pass;
def Load(self, dictfile):
try:
with open(dictfile,"rb") as f:
print("Loading dictionary...");
(self.jmdict,self.rindex,self.jindex,self.kjdict,self.kindex) = pickle.load(f);
except (OSError,IOError):
print("Parsing JMdict...");
self.jmdict = xmld.parse(GzipFile(self.jmfile));
self.jmdict = self.jmdict["JMdict"]["entry"];
print("Indexing...");
self.rindex = {};
self.jindex = {};
for i,entry in enumerate(self.jmdict):
try:
kele = entry["k_ele"];
for j in loopOn(kele):
r = j["keb"];
a = self.rindex.get(r);
if a is None:
self.rindex[r] = [i];
self.jindex[r] = [i];
else:
a.append(i);
self.jindex[r].append(i);
except KeyError:
pass;
try:
rele = entry["r_ele"];
for j in loopOn(rele):
r = romkan.to_roma(j["reb"]).replace('\'','');
a = self.rindex.get(r);
if a is None:
self.rindex[r] = [i];
else: a.append(i);
except KeyError:
pass;
sense = entry["sense"];
for j in loopOn(sense):
for g in loopOn(j["gloss"]):
t = g["#text"];
a = self.rindex.get(t);
if a is None:
self.rindex[t] = [i];
else: a.append(i);
print("Parsing KANJIDIC2...");
self.kjdict = xmld.parse(GzipFile(self.kjfile));
self.kjdict = self.kjdict["kanjidic2"]["character"];
print("Indexing...");
self.kindex = {};
for i,entry in enumerate(self.kjdict):
lit = entry["literal"];
ron = [];
kun = [];
meaning = [];
try:
rm = entry["reading_meaning"]["rmgroup"];
except KeyError:
continue; #radical: skip for now
try:
for rele in loopOn(rm["reading"]):
if rele["@r_type"] == "ja_on":
ron.append(rele["#text"]);
elif rele["@r_type"] == "ja_kun":
kun.append(rele["#text"]);
except KeyError:
pass;
try:
for mele in loopOn(rm["meaning"]):
if isinstance(mele,str): # meanings in languages other than English are dicts, not strings
meaning.append(mele);
except KeyError:
pass;
self.kindex[lit] = (ron,kun,meaning);
with open(dictfile,"wb") as f:
print("Caching...");
pickle.dump((self.jmdict,self.rindex,self.jindex,self.kjdict,self.kindex),f);
def LoadTags(self, tagfile, tagdef):
self.tagfile = tagfile;
self.tagdef = tagdef;
try:
with open(self.tagfile,"rb") as f:
print("Loading tags...");
self.tagdict = pickle.load(f);
except (OSError,IOError):
self.tagdict = {};
self.tagdict[self.tagdef] = [];
def SaveTags(self):
if len(self.tagdict) > 1 or len(self.tagdict[self.tagdef]) > 0:
with open(self.tagfile,"wb") as f:
pickle.dump(self.tagdict,f);
|
py | b40d43b0c97cd228a29713617873093df574f5a5 | # This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "None"
cfg.parentdir_prefix = "None"
cfg.versionfile_source = "rechunker/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {} # type: ignore
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen(
[c] + args,
cwd=cwd,
env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr else None),
)
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {
"version": dirname[len(parentdir_prefix) :],
"full-revisionid": None,
"dirty": False,
"error": None,
"date": None,
}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print(
"Tried directories %s but none started with prefix %s"
% (str(rootdirs), parentdir_prefix)
)
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r"\d", r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix) :]
if verbose:
print("picking %s" % r)
return {
"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False,
"error": None,
"date": date,
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {
"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False,
"error": "no suitable tags",
"date": None,
}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(
GITS,
[
"describe",
"--tags",
"--dirty",
"--always",
"--long",
"--match",
"%s*" % tag_prefix,
],
cwd=root,
)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[: git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
full_tag,
tag_prefix,
)
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix) :]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
0
].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
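# Illustrative example (hypothetical pieces dict, not part of upstream versioneer):
#   pieces = {"closest-tag": "1.2", "distance": 3, "short": "abc1234", "dirty": True}
#   render_pep440(pieces) -> "1.2+3.gabc1234.dirty"
#   with distance 0 and a clean tree it would render as just "1.2".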
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {
"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None,
}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {
"version": rendered,
"full-revisionid": pieces["long"],
"dirty": pieces["dirty"],
"error": None,
"date": pieces.get("date"),
}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split("/"):
root = os.path.dirname(root)
except NameError:
return {
"version": "0+unknown",
"full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None,
}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {
"version": "0+unknown",
"full-revisionid": None,
"dirty": None,
"error": "unable to compute version",
"date": None,
}
|
py | b40d4400edae8d79c363da92dbcec9b44bd488d0 | # Generated by Django 2.2.16 on 2021-03-22 18:47
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('pipeline', '0133_auto_20210322_1831'),
]
operations = [
migrations.RemoveField(
model_name='opportunity',
name='community_id',
),
migrations.AddField(
model_name='opportunity',
name='nearest_community',
field=models.ForeignKey(blank=True, db_column='nearest_community', null=True, on_delete=django.db.models.deletion.SET_NULL, to='pipeline.CommunityDistance'),
),
]
|
py | b40d44f17c74c658c2cfdc26a42d84691806f679 | """
Utils for ML.
"""
import random
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import settings
def prepare_interaction(df, behaviours=settings.BEHAVIOURS):
"""
- Resample the data.
- Drop rows that are labeled as "Start".
- Convert the "speaker_behaviour" column to hot-ones.
- Filter hot-ones by `behaviours`.
"""
# L stands for millis
df = df.resample(f'{settings.RESAMPLING_PERIOD}L').first().ffill()
df = df[df.speaker_behaviour != 'Start']
hot_ones = pd.get_dummies(df.speaker_behaviour)
hot_ones = [hot_ones[key] for key in hot_ones.columns if key in behaviours]
columns = hot_ones + [df.speaker_eye, df.listener_nod]
return pd.concat(columns, axis=1).astype(bool)
def to_samples(df, n):
"""
Break an interaction into many samples in the form of `np.ndarray`.
Each sample contains features from points in time up until the
corrent one (including), and the `listener_nod` of the current time.
"""
values = df.values
num_of_samples = len(values) - n + 1
X = np.array([_take_X(values, i, n) for i in range(num_of_samples)])
y = values[n - 1:, -1][:, np.newaxis]
return np.hstack([X, y])
def _take_X(values, i, n):
return values[i:i + n, :-1].flatten()
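# Illustrative shape check (hypothetical sizes, not from the original module):
# for a DataFrame with 5 rows and 4 feature columns plus the listener_nod column,
# to_samples(df, n=3) yields an array of shape (3, 13): three windows, each holding
# 3 * 4 flattened feature values followed by the current listener_nod label.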
def prepare_training_data(store, window_size=settings.WINDOW_SIZE):
"""
Create the X and y arrays, ready for ML.
"""
partials = []
for key in store:
df = prepare_interaction(store[key])
samples = to_samples(df, window_size)
partials.append(samples)
data = np.vstack(partials)
X = data[:, :-1]
y = data[:, -1]
return X, y
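# Hypothetical usage sketch (assumes the interactions are stored in a pandas HDFStore):
#
#   with pd.HDFStore("interactions.h5") as store:
#       X, y = prepare_training_data(store)
#
# X holds the flattened feature windows and y the nod labels, ready for a classifier.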
def plot_data(df, *, title=None, figsize=settings.FIGSIZE):
"""
Plot the (already prepared for training) interaction for investigation.
"""
spreaded = df - np.arange(len(df.columns)) * 2
spreaded.plot(figsize=figsize)
# Put a legend to the right of the current axis
plt.gca().legend(loc='center left', bbox_to_anchor=(1, 0.5))
# Hide Y axis labels
plt.yticks([])
if title is not None:
plt.title(title)
|
py | b40d4778207496ddee6d9c670fe2d7c8365bb671 | import discord
from discord.ext import commands
from discord.ext.commands import Bot
import os
import traceback
bot = commands.Bot(command_prefix='/')
token = os.environ['DISCORD_BOT_TOKEN']
@bot.event
async def on_command_error(ctx, error):
orig_error = getattr(error, "original", error)
error_msg = ''.join(traceback.TracebackException.from_exception(orig_error).format())
await ctx.send(error_msg)
@bot.command()
async def csm(ctx):
await ctx.send('【Choco stupid Mountain】 https://clips.twitch.tv/GoodReliableArmadilloDoggo-QAW30SL4Rrgfkdrl')
# embed = discord.Embed(title="choco stupid mountain",description="choco stupid mountain")
# await channel.send(embed=embed)
@bot.command()
async def csmcsm(ctx):
embed = discord.Embed(title="choco stupid mountain",description="choco stupid mountain",color=0xff0000)
await ctx.send(embed=embed)
bot.run(token)
|
py | b40d48049947cc184bd5b4d28939c2620641db3d | import datetime
import pytest
from urllib.parse import quote_plus
import findpapers.searchers.acm_searcher as acm_searcher
from findpapers.models.search import Search
from findpapers.models.publication import Publication
def test_get_search_url(search: Search):
url = acm_searcher._get_search_url(search)
query = search.query.replace(' AND NOT ', ' NOT ')
assert quote_plus(query) in url
assert url.startswith('https://dl.acm.org/action/doSearch?')
def test_mocks():
assert acm_searcher._get_result() is not None
assert acm_searcher._get_paper_page() is not None
assert acm_searcher._get_paper_metadata() is not None
def test_get_paper():
paper_page = acm_searcher._get_paper_page()
paper = acm_searcher._get_paper(paper_page, 'fake-paper-doi', 'fake-url')
assert paper is not None
assert paper.title is not None
assert paper.doi is not None
assert paper.number_of_pages == 2
assert len(paper.authors) == 3
assert paper.publication_date.year == 2020
assert paper.publication is not None
assert paper.publication.title == 'Proceedings of the 7th ACM IKDD CoDS and 25th COMAD'
assert paper.publication.publisher == 'Association for Computing Machinery'
assert paper.publication.isbn == '9781450377386'
@pytest.mark.skip(reason="It needs some revision after some tool's refactoring")
def test_run(search: Search):
search.limit = 14
search.limit_per_database = None
acm_searcher.run(search)
assert len(search.papers) == 14
|
py | b40d48791c0503c97c0b3f2bfae87caad33b5a19 | #!/usr/bin/env python3
from curses import COLOR_BLACK, COLOR_WHITE
import os,sys
from shutil import move
from sre_constants import CALL
from time import sleep
from ev3dev.ev3 import *
from ev3dev2.sensor import *
from ev3dev2.sensor.lego import *
from ev3dev2.motor import *
from ev3dev2.sound import *
from ev3dev2.wheel import *
# state constants
ON = True
OFF = False
def debug_print(*args, **kwargs):
'''Print debug messages to stderr.
This shows up in the output panel in VS Code.
'''
print(*args, **kwargs, file=sys.stderr)
def reset_console():
'''Resets the console to the default state'''
print('\x1Bc', end='')
def set_cursor(state):
'''Turn the cursor on or off'''
if state:
print('\x1B[?25h', end='')
else:
print('\x1B[?25l', end='')
def set_font(name):
'''Sets the console font
A full list of fonts can be found with `ls /usr/share/consolefonts`
'''
os.system('setfont ' + name)
#my functions
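# The helpers below take a distance in inches; dividing by ROBO_SPEED (inches/second)
# converts it into a run time for on_for_seconds. The 69.5/70 speed pair is presumably
# a small correction for veer between the two drive motors (assumed, not documented).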
def move_front(tankpair, distance):
tankpair.on_for_seconds(69.5,70, distance/ROBO_SPEED, False)
def move_back(tankpair, distance):
tankpair.on_for_seconds(-69.5,-70, distance/ROBO_SPEED, False)
def turn(tankpair, direction, gyrosens):
gyrosens.reset()
if direction==-1:
while gyrosens.angle > -90:
tankpair.on_for_seconds(0,50, 0.05, False)
elif direction==1:
while gyrosens.angle < 90:
tankpair.on_for_seconds(0,-50, 0.05, False)
def read_colour(color_sensor):
col = 0
if color_sensor.color == 6:  # ev3dev2 exposes color as a property (6 = white)
col = 0
elif color_sensor.color == 1:  # 1 = black
col = 1
return col
def read_black(color_sensor):
perc = color_sensor.reflected_light_intensity
if perc < 9:
return 5
elif perc < 40:
return 1
else:
return 0
def brake_robot(tankpair):
tankpair.off(brake=True)
# set the console just how we want it
reset_console()
set_cursor(OFF)
set_font('Lat15-Terminus24x12')
print('Hello there!')
tank_pair = MoveTank(OUTPUT_D, OUTPUT_B)
lift = MediumMotor(OUTPUT_C)
gy = GyroSensor(INPUT_3)
colS = ColorSensor(INPUT_2)
sound = Sound()
#ults = UltrasonicSensor(INPUT_1)
# if (ults.MODE_US_DIST_IN < 1):
# brake_robot(tank_pair)
ROBO_SPEED = 9.5 #Inch/second
turn(tank_pair,1, gy)
move_back(tank_pair,3)
sleep(1)
move_front(tank_pair, 6)
sleep(1)
lift.on_for_seconds(-50, 1)
move_back(tank_pair,6)
turn(tank_pair,-1, gy)
move_front(tank_pair, 27)
lift.on_for_seconds(50, 1)
move_back(tank_pair,3)
|
py | b40d48b9a86545e6c95f37533af40eb14d596912 | import unittest
import requests
from releases import rate_limit_reached
class RateLimitReachedTest(unittest.TestCase):
def test_rate_limit_reached_none_input(self):
actual_return = rate_limit_reached(None)
expected_return = False
self.assertEqual(expected_return, actual_return)
def test_rate_limit_reached_irrelevant_url_input(self):
r = requests.get("https://esciencecenter.nl", headers={})
actual_return = rate_limit_reached(r)
expected_return = False
self.assertEqual(expected_return, actual_return)
if __name__ == '__main__':
unittest.main()
|
py | b40d4948e9b0e938026123dd6f7af39b90d550da | import FWCore.ParameterSet.Config as cms
import os
maxevts = 1000
globaltag = 'STARTUP31X_V1::All'
globaltag = 'STARTUP3XY_V9::All'
inputfile = '/store/relval/CMSSW_3_1_1/RelValZMM/GEN-SIM-DIGI-RAW-HLTDEBUG/STARTUP31X_V1-v2/0002/FCBE122E-D66B-DE11-9667-001D09F291D2.root'
process = cms.Process("RPCTechnicalTrigger")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.cerr.enable = False
process.MessageLogger.cout = cms.untracked.PSet(
enable = cms.untracked.bool(True),
threshold = cms.untracked.string('DEBUG'),
INFO = cms.untracked.PSet(
limit = cms.untracked.int32(-1) ) )
#.. Geometry and Global Tags
process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.GlobalTag.globaltag = cms.string( globaltag )
process.load("Configuration.StandardSequences.MagneticField_cff")
#.. if cosmics: reconstruction sequence for Cosmics
####process.load("Configuration.StandardSequences.ReconstructionCosmics_cff")
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(maxevts) )
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring( inputfile ) )
#..............................................................................................................
#.. EventSetup Configuration
#...
useEventSetup = 0
mytag = 'test5'
database = 'sqlite'
if database == 'sqlite':
dbconnection = 'sqlite_file:/afs/cern.ch/user/a/aosorio/public/rpcTechnicalTrigger/myrbconfig.db'
elif database == 'oraclerpc':
dbconnection = 'oracle://devdb10/CMS_RPC_COMMISSIONING'
else:
dbconnection = ''
if useEventSetup >= 1:
from CondCore.DBCommon.CondDBCommon_cfi import *
PoolDBESSource = cms.ESSource("PoolDBESSource",
loadAll = cms.bool(True),
toGet = cms.VPSet(cms.PSet( record = cms.string('RBCBoardSpecsRcd'),
tag = cms.string(mytag+'a')),
cms.PSet( record = cms.string('TTUBoardSpecsRcd'),
tag = cms.string(mytag+'b'))),
DBParameters = cms.PSet( messageLevel = cms.untracked.int32(2),
authenticationPath = cms.untracked.string('')),
messagelevel = cms.untracked.uint32(2),
connect = cms.string(dbconnection) )
CondDBCommon.connect = cms.string( dbconnection )
#..............................................................................................................
process.load("L1Trigger.RPCTechnicalTrigger.rpcTechnicalTrigger_cfi")
process.rpcTechnicalTrigger.RPCDigiLabel = cms.InputTag("simMuonRPCDigis")
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string('rpcttbits.root'),
outputCommands = cms.untracked.vstring('drop *','keep L1GtTechnicalTriggerRecord_*_*_*') )
process.p = cms.Path(process.rpcTechnicalTrigger)
process.e = cms.EndPath(process.out)
|
py | b40d49987fb1b2ad392d00714abc29d3eb4bc950 | import datetime
import unittest
from unittest import mock
from urllib.parse import quote_plus
from django.test import SimpleTestCase
from django.utils.encoding import (
DjangoUnicodeDecodeError, escape_uri_path, filepath_to_uri, force_bytes,
force_str, get_system_encoding, iri_to_uri, smart_bytes, smart_str,
uri_to_iri,
)
from django.utils.functional import SimpleLazyObject
from django.utils.translation import gettext_lazy
class TestEncodingUtils(SimpleTestCase):
def test_force_str_exception(self):
"""
Broken __str__ actually raises an error.
"""
class MyString:
def __str__(self):
return b'\xc3\xb6\xc3\xa4\xc3\xbc'
# str(s) raises a TypeError if the result is not a text type.
with self.assertRaises(TypeError):
force_str(MyString())
def test_force_str_lazy(self):
s = SimpleLazyObject(lambda: 'x')
self.assertIs(type(force_str(s)), str)
def test_force_str_DjangoUnicodeDecodeError(self):
msg = (
"'utf-8' codec can't decode byte 0xff in position 0: invalid "
"start byte. You passed in b'\\xff' (<class 'bytes'>)"
)
with self.assertRaisesMessage(DjangoUnicodeDecodeError, msg):
force_str(b'\xff')
def test_force_bytes_exception(self):
"""
force_bytes knows how to convert to bytes an exception
containing non-ASCII characters in its args.
"""
error_msg = "This is an exception, voilà"
exc = ValueError(error_msg)
self.assertEqual(force_bytes(exc), error_msg.encode())
self.assertEqual(force_bytes(exc, encoding='ascii', errors='ignore'), b'This is an exception, voil')
def test_force_bytes_strings_only(self):
today = datetime.date.today()
self.assertEqual(force_bytes(today, strings_only=True), today)
def test_force_bytes_encoding(self):
error_msg = 'This is an exception, voilà'.encode()
result = force_bytes(error_msg, encoding='ascii', errors='ignore')
self.assertEqual(result, b'This is an exception, voil')
def test_force_bytes_memory_view(self):
data = b'abc'
result = force_bytes(memoryview(data))
# Type check is needed because memoryview(bytes) == bytes.
self.assertIs(type(result), bytes)
self.assertEqual(result, data)
def test_smart_bytes(self):
class Test:
def __str__(self):
return 'ŠĐĆŽćžšđ'
lazy_func = gettext_lazy('x')
self.assertIs(smart_bytes(lazy_func), lazy_func)
self.assertEqual(smart_bytes(Test()), b'\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91')
self.assertEqual(smart_bytes(1), b'1')
self.assertEqual(smart_bytes('foo'), b'foo')
def test_smart_str(self):
class Test:
def __str__(self):
return 'ŠĐĆŽćžšđ'
lazy_func = gettext_lazy('x')
self.assertIs(smart_str(lazy_func), lazy_func)
self.assertEqual(smart_str(Test()), '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
self.assertEqual(smart_str(1), '1')
self.assertEqual(smart_str('foo'), 'foo')
def test_get_default_encoding(self):
with mock.patch('locale.getdefaultlocale', side_effect=Exception):
self.assertEqual(get_system_encoding(), 'ascii')
class TestRFC3987IEncodingUtils(unittest.TestCase):
def test_filepath_to_uri(self):
self.assertEqual(filepath_to_uri(None), None)
self.assertEqual(filepath_to_uri('upload\\чубака.mp4'), 'upload/%D1%87%D1%83%D0%B1%D0%B0%D0%BA%D0%B0.mp4')
def test_iri_to_uri(self):
cases = [
# Valid UTF-8 sequences are encoded.
('red%09rosé#red', 'red%09ros%C3%A9#red'),
('/blog/for/Jürgen Münster/', '/blog/for/J%C3%BCrgen%20M%C3%BCnster/'),
('locations/%s' % quote_plus('Paris & Orléans'), 'locations/Paris+%26+Orl%C3%A9ans'),
# Reserved chars remain unescaped.
('%&', '%&'),
('red&♥ros%#red', 'red&%E2%99%A5ros%#red'),
(gettext_lazy('red&♥ros%#red'), 'red&%E2%99%A5ros%#red'),
]
for iri, uri in cases:
self.assertEqual(iri_to_uri(iri), uri)
# Test idempotency.
self.assertEqual(iri_to_uri(iri_to_uri(iri)), uri)
def test_uri_to_iri(self):
cases = [
(None, None),
# Valid UTF-8 sequences are decoded.
('/%e2%89%Ab%E2%99%a5%E2%89%aB/', '/≫♥≫/'),
('/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93', '/♥♥/?utf8=✓'),
('/%41%5a%6B/', '/AZk/'),
# Reserved and non-URL valid ASCII chars are not decoded.
('/%25%20%02%41%7b/', '/%25%20%02A%7b/'),
# Broken UTF-8 sequences remain escaped.
('/%AAd%AAj%AAa%AAn%AAg%AAo%AA/', '/%AAd%AAj%AAa%AAn%AAg%AAo%AA/'),
('/%E2%99%A5%E2%E2%99%A5/', '/♥%E2♥/'),
('/%E2%99%A5%E2%99%E2%99%A5/', '/♥%E2%99♥/'),
('/%E2%E2%99%A5%E2%99%A5%99/', '/%E2♥♥%99/'),
('/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93', '/♥♥/?utf8=%9C%93✓%9C%93'),
]
for uri, iri in cases:
self.assertEqual(uri_to_iri(uri), iri)
# Test idempotency.
self.assertEqual(uri_to_iri(uri_to_iri(uri)), iri)
def test_complementarity(self):
cases = [
('/blog/for/J%C3%BCrgen%20M%C3%BCnster/', '/blog/for/J\xfcrgen%20M\xfcnster/'),
('%&', '%&'),
('red&%E2%99%A5ros%#red', 'red&♥ros%#red'),
('/%E2%99%A5%E2%99%A5/', '/♥♥/'),
('/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93', '/♥♥/?utf8=✓'),
('/%25%20%02%7b/', '/%25%20%02%7b/'),
('/%AAd%AAj%AAa%AAn%AAg%AAo%AA/', '/%AAd%AAj%AAa%AAn%AAg%AAo%AA/'),
('/%E2%99%A5%E2%E2%99%A5/', '/♥%E2♥/'),
('/%E2%99%A5%E2%99%E2%99%A5/', '/♥%E2%99♥/'),
('/%E2%E2%99%A5%E2%99%A5%99/', '/%E2♥♥%99/'),
('/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93', '/♥♥/?utf8=%9C%93✓%9C%93'),
]
for uri, iri in cases:
self.assertEqual(iri_to_uri(uri_to_iri(uri)), uri)
self.assertEqual(uri_to_iri(iri_to_uri(iri)), iri)
def test_escape_uri_path(self):
self.assertEqual(
escape_uri_path('/;some/=awful/?path/:with/@lots/&of/+awful/chars'),
'/%3Bsome/%3Dawful/%3Fpath/:with/@lots/&of/+awful/chars'
)
self.assertEqual(escape_uri_path('/foo#bar'), '/foo%23bar')
self.assertEqual(escape_uri_path('/foo?bar'), '/foo%3Fbar')
|
py | b40d49d4aa41907ad7a419bb22d6515a8dc24c91 | import torch
import torch.nn as nn
class Net(nn.Module):
def __init__(self, num_channels, base_channels, num_residuals):
super(Net, self).__init__()
self.input_conv = nn.Sequential(nn.Conv2d(num_channels, base_channels, kernel_size=3, stride=1, padding=1, bias=False), nn.ReLU(inplace=True))
self.residual_layers = nn.Sequential(*[nn.Sequential(nn.Conv2d(base_channels, base_channels, kernel_size=3, stride=1, padding=1, bias=False), nn.ReLU(inplace=True)) for _ in range(num_residuals)])
self.output_conv = nn.Conv2d(base_channels, num_channels, kernel_size=3, stride=1, padding=1, bias=False)
def weight_init(self):
# apply recursively so the Conv2d layers nested inside the Sequential blocks are reached
self.apply(weights_init_kaiming)
def forward(self, x):
residual = x
x = self.input_conv(x)
x = self.residual_layers(x)
x = self.output_conv(x)
x = torch.add(x, residual)
return x
def weights_init_kaiming(m):
class_name = m.__class__.__name__
if class_name.find('Linear') != -1:
nn.init.kaiming_normal_(m.weight)
if m.bias is not None:
m.bias.data.zero_()
elif class_name.find('Conv2d') != -1:
nn.init.kaiming_normal_(m.weight)
if m.bias is not None:
m.bias.data.zero_()
elif class_name.find('ConvTranspose2d') != -1:
nn.init.kaiming_normal_(m.weight)
if m.bias is not None:
m.bias.data.zero_()
elif class_name.find('Norm') != -1:
m.weight.data.normal_(1.0, 0.02)
if m.bias is not None:
m.bias.data.zero_()
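# Minimal usage sketch (hypothetical hyperparameters, not from the original repo):
#
#   model = Net(num_channels=3, base_channels=64, num_residuals=8)
#   model.weight_init()
#   out = model(torch.randn(1, 3, 32, 32))  # residual add keeps the input shape: 1x3x32x32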
|
py | b40d49e240fd08330a2f31313051667967749edb | import signal
import sys
from .helpers import *
from unicornherder.herder import Herder, HerderError
if sys.version_info > (3, 0):
builtin_mod = 'builtins'
else:
builtin_mod = '__builtin__'
class TestHerder(object):
def test_init_defaults(self):
h = Herder()
assert_equal(h.unicorn, 'gunicorn')
assert_equal(h.pidfile, 'gunicorn.pid')
assert_equal(h.args, '')
def test_init_unicorn(self):
h = Herder(unicorn='unicorn')
assert_equal(h.unicorn, 'unicorn')
def test_init_gunicorn(self):
h = Herder(unicorn='gunicorn')
assert_equal(h.unicorn, 'gunicorn')
def test_init_unicornbad(self):
assert_raises(HerderError, Herder, unicorn='unicornbad')
@patch('unicornherder.herder.subprocess.Popen')
def test_spawn_returns_true(self, popen_mock):
h = Herder()
h._boot_loop = lambda: True
assert_true(h.spawn())
@patch('unicornherder.herder.subprocess.Popen')
def test_spawn_gunicorn(self, popen_mock):
h = Herder(unicorn='gunicorn')
h._boot_loop = lambda: True
h.spawn()
assert_equal(popen_mock.call_count, 1)
popen_mock.assert_called_once_with(['gunicorn', '-D', '-p', 'gunicorn.pid'])
@patch('unicornherder.herder.subprocess.Popen')
def test_spawn_unicorn(self, popen_mock):
h = Herder(unicorn='unicorn')
h._boot_loop = lambda: True
h.spawn()
assert_equal(popen_mock.call_count, 1)
popen_mock.assert_called_once_with(['unicorn', '-D', '-P', 'unicorn.pid'])
@patch('unicornherder.herder.subprocess.Popen')
@patch('unicornherder.herder.timeout')
def test_spawn_unicorn_timeout(self, timeout_mock, popen_mock):
popen_mock.return_value.pid = -1
timeout_mock.side_effect = fake_timeout_fail
h = Herder()
popen_mock.return_value.poll.return_value = None
ret = h.spawn()
assert_false(ret)
popen_mock.return_value.terminate.assert_called_once_with()
@patch('unicornherder.herder.subprocess.Popen')
@patch('unicornherder.herder.timeout')
def test_configurable_boot_timeout(self, timeout_mock, popen_mock):
popen_mock.return_value.pid = -1
timeout_mock.side_effect = fake_timeout_fail
h = Herder(boot_timeout=45)
popen_mock.return_value.poll.return_value = None
ret = h.spawn()
timeout_mock.assert_called_once_with(45)
assert_false(ret)
popen_mock.return_value.terminate.assert_called_once_with()
@patch('unicornherder.herder.time.sleep')
@patch('unicornherder.herder.psutil.Process')
@patch('%s.open' % builtin_mod)
def test_configurable_overlap(self, open_mock, process_mock, sleep_mock):
h = Herder(overlap=17)
# Set up an initial dummy master process for the herder to kill later
open_mock.return_value.read.return_value = '123\n'
process = MagicMock(pid=123)
process.children.return_value = ["forked process", "worker 1"]
process_mock.return_value = process
h._loop_inner()
# Simulate a reloaded Unicorn
open_mock.return_value.read.return_value = '456\n'
process = MagicMock(pid=456)
process.children.return_value = ["worker 1"]
process_mock.return_value = process
# Simulate SIGHUP, so the Herder thinks it's reloading
h._handle_HUP(signal.SIGHUP, None)
h._loop_inner()
sleep_mock.assert_any_call(17)
@patch('unicornherder.herder.time.sleep')
@patch('unicornherder.herder.psutil.Process')
@patch('%s.open' % builtin_mod)
def test_waits_for_workers(self, open_mock, process_mock, sleep_mock):
open_mock.return_value.read.return_value = '123\n'
h = Herder()
# Set up an initial dummy master process for the herder to kill later
open_mock.return_value.read.return_value = '123\n'
process = MagicMock(pid=123)
process.children.return_value = ["forked process", "worker 1", "worker 2"]
process_mock.return_value = process
h._loop_inner()
# Simulate a reloaded Unicorn
open_mock.return_value.read.return_value = '456\n'
process = MagicMock(pid=456)
# First call returns an empty list, the second returns 2 workers
process.children.side_effect = [[], ["worker 1", "worker 2"]]
process_mock.return_value = process
# Simulate SIGHUP, so the Herder thinks it's reloading
h._handle_HUP(signal.SIGHUP, None)
h._loop_inner()
# while waiting for workers
sleep_mock.assert_any_call(1)
# overlap of both processes
sleep_mock.assert_any_call(15)
@patch('unicornherder.herder.time.sleep')
@patch('unicornherder.herder.timeout')
@patch('unicornherder.herder.psutil.Process')
@patch('%s.open' % builtin_mod)
def test_recovers_from_less_workers(self, open_mock, process_mock, timeout_mock, sleep_mock):
timeout_mock.side_effect = fake_timeout_fail
h = Herder()
# Set up an initial dummy master process for the herder to kill later
open_mock.return_value.read.return_value = '123\n'
old_process = MagicMock(pid=123)
old_process.children.return_value = ["forked process", "worker 1", "worker 2"]
process_mock.return_value = old_process
h._loop_inner()
# Simulate a reloaded Unicorn
open_mock.return_value.read.return_value = '456\n'
new_process = MagicMock(pid=456)
new_process.children.return_value = ["worker 1"]
process_mock.return_value = new_process
# Simulate SIGHUP, so the Herder thinks it's reloading
h._handle_HUP(signal.SIGHUP, None)
h._loop_inner()
old_process.send_signal.assert_called_with(signal.SIGQUIT)
@patch('unicornherder.herder.time.sleep')
@patch('unicornherder.herder.psutil.Process')
@patch('%s.open' % builtin_mod)
def test_loop_valid_pid(self, open_mock, process_mock, sleep_mock):
open_mock.return_value.read.return_value = '123\n'
h = Herder()
ret = h._loop_inner()
assert_equal(ret, True)
process_mock.assert_called_once_with(123)
@patch('unicornherder.herder.time.sleep')
@patch('%s.open' % builtin_mod)
def test_loop_invalid_pid(self, open_mock, sleep_mock):
open_mock.return_value.read.return_value = 'foobar'
h = Herder()
assert_raises(HerderError, h._loop_inner)
@patch('unicornherder.herder.time.sleep')
@patch('%s.open' % builtin_mod)
def test_loop_nonexistent_pidfile(self, open_mock, sleep_mock):
def _fail():
raise IOError()
open_mock.return_value.read.side_effect = _fail
h = Herder()
assert_raises(HerderError, h._loop_inner)
@patch('unicornherder.herder.time.sleep')
@patch('%s.open' % builtin_mod)
def test_loop_nonexistent_pidfile_terminating(self, open_mock, sleep_mock):
def _fail():
raise IOError()
open_mock.return_value.read.side_effect = _fail
h = Herder()
h.terminating = True
assert_equal(h._loop_inner(), False)
@patch('unicornherder.herder.time.sleep')
@patch('unicornherder.herder.psutil.Process')
@patch('%s.open' % builtin_mod)
def test_loop_detects_pidchange(self, open_mock, process_mock, sleep_mock):
proc1 = MagicMock()
proc2 = MagicMock()
proc1.pid = 123
proc2.pid = 456
h = Herder()
open_mock.return_value.read.return_value = '123\n'
process_mock.return_value = proc1
ret = h._loop_inner()
assert_equal(ret, True)
open_mock.return_value.read.return_value = '456\n'
process_mock.return_value = proc2
ret = h._loop_inner()
assert_equal(ret, True)
expected_calls = []
assert_equal(proc1.mock_calls, expected_calls)
@patch('unicornherder.herder.time.sleep')
@patch('unicornherder.herder.psutil.Process')
@patch('%s.open' % builtin_mod)
def test_loop_reload_pidchange_signals(self, open_mock, process_mock, sleep_mock):
proc1 = MagicMock()
proc2 = MagicMock()
proc1.pid = 123
proc2.pid = 456
proc1.children = MagicMock(return_value=[proc2, "worker"])
proc2.children = MagicMock(return_value=["worker 1"])
h = Herder()
open_mock.return_value.read.return_value = '123\n'
process_mock.return_value = proc1
ret = h._loop_inner()
assert_equal(ret, True)
# Simulate SIGHUP
h._handle_HUP(signal.SIGHUP, None)
open_mock.return_value.read.return_value = '456\n'
process_mock.return_value = proc2
ret = h._loop_inner()
assert_equal(ret, True)
expected_calls = [call.send_signal(signal.SIGUSR2),
call.children(),
call.send_signal(signal.SIGWINCH),
call.send_signal(signal.SIGQUIT)]
assert_equal(proc1.mock_calls, expected_calls)
def test_forward_signal(self):
h = Herder()
h.master = MagicMock()
h._handle_signal('INT')(signal.SIGINT, None)
h.master.send_signal.assert_called_once_with(signal.SIGINT)
def test_forward_signal_nomaster(self):
h = Herder()
h._handle_signal('INT')(signal.SIGINT, None)
def test_handle_hup_nomaster(self):
h = Herder()
h._handle_HUP(signal.SIGHUP, None)
|
py | b40d4ad530ba6318d1540e8832ca50c0f059a832 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from azure.mgmt.eventgrid.models import EventSubscription, EventSubscriptionFilter
from c7n_azure.session import Session
from c7n.utils import local_session
class AzureEvents:
"""A mapping of resource types to events.
Provides user friendly event names for common events."""
azure_events = {
'AppServicePlanWrite': {
'resource_provider': 'Microsoft.Web/serverFarms',
'event': 'write'},
'BatchWrite': {
'resource_provider': 'Microsoft.Batch/batchAccounts',
'event': 'write'},
'CdnProfileWrite': {
'resource_provider': 'Microsoft.Cdn/profiles',
'event': 'write'},
'CognitiveServiceWrite': {
'resource_provider': 'Microsoft.CognitiveServices/account',
'event': 'write'},
'ContainerServiceWrite': {
'resource_provider': 'Microsoft.ContainerService/managedClusters',
'event': 'write'},
'CosmosDbWrite': {
'resource_provider': 'Microsoft.DocumentDB/databaseAccounts',
'event': 'write'},
'DataFactoryWrite': {
'resource_provider': 'Microsoft.DataFactory/factories',
'event': 'write'},
'DataLakeWrite': {
'resource_provider': 'Microsoft.DataLakeStore/accounts',
'event': 'write'},
'DiskWrite': {
'resource_provider': 'Microsoft.Compute/disks',
'event': 'write'},
'IotHubWrite': {
'resource_provider': 'Microsoft.Devices/IotHubs',
'event': 'write'},
'KeyVaultWrite': {
'resource_provider': 'Microsoft.KeyVault/vaults',
'event': 'write'},
'LoadBalancerWrite': {
'resource_provider': 'Microsoft.Network/loadBalancers',
'event': 'write'},
'NetworkInterfaceWrite': {
'resource_provider': 'Microsoft.Network/networkInterfaces',
'event': 'write'},
'NetworkSecurityGroupWrite': {
'resource_provider': 'Microsoft.Network/networkSecurityGroups',
'event': 'write'},
'PublicIpWrite': {
'resource_provider': 'Microsoft.Network/publicIPAddresses',
'event': 'write'},
'RedisWrite': {
'resource_provider': 'Microsoft.Cache/Redis',
'event': 'write'},
'ResourceGroupWrite': {
'resource_provider': 'Microsoft.Resources/subscriptions/resourceGroups',
'event': 'write'},
'SqlServerWrite': {
'resource_provider': 'Microsoft.Sql/servers',
'event': 'write'},
'StorageWrite': {
'resource_provider': 'Microsoft.Storage/storageAccounts',
'event': 'write'},
'StorageContainerWrite': {
'resource_provider': 'Microsoft.Storage/storageAccounts/blobServices/containers',
'event': 'write'},
'VmWrite': {
'resource_provider': 'Microsoft.Compute/virtualMachines',
'event': 'write'},
'VmssWrite': {
'resource_provider': 'Microsoft.Compute/virtualMachineScaleSets',
'event': 'write'},
'VnetWrite': {
'resource_provider': 'Microsoft.Network/virtualNetworks',
'event': 'write'},
'WebAppWrite': {
'resource_provider': 'Microsoft.Web/sites',
'event': 'write'}
}
@classmethod
def get(cls, event):
return cls.azure_events.get(event)
@classmethod
def get_event_operations(cls, events):
event_operations = []
for e in events:
if isinstance(e, str):
event = cls.get(e)
event_operations.append('%s/%s' % (event['resource_provider'], event['event']))
else:
event_operations.append('%s/%s' % (e['resourceProvider'], e['event']))
return event_operations
class AzureEventSubscription:
@staticmethod
def create(destination, name, subscription_id, session=None, event_filter=None):
s = session or local_session(Session)
event_filter = event_filter or EventSubscriptionFilter()
event_info = EventSubscription(destination=destination, filter=event_filter)
scope = '/subscriptions/%s' % subscription_id
client = s.client('azure.mgmt.eventgrid.EventGridManagementClient')
event_subscription = \
client.event_subscriptions.begin_create_or_update(scope, name, event_info)
return event_subscription.result()
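# Illustrative mapping (values taken directly from the azure_events table above):
#   AzureEvents.get_event_operations(['VmWrite', 'StorageWrite'])
#   -> ['Microsoft.Compute/virtualMachines/write', 'Microsoft.Storage/storageAccounts/write']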
|
py | b40d4af084707720b0c3e72470e6f8674ff51a04 | import sys
import kungfu.yijinjing.time as kft
from kungfu.wingchun.constants import *
source = Source.XTP
exchange = Exchange.SSE
def test_timer(context, event):
context.log.info('test timer')
def test_time_interval(context, event):
context.log.info('test time interval')
def pre_start(context):
context.log.info("pre run strategy")
context.add_account(source, "15040900", 100000000.0)
context.subscribe(source, ["600000", "601988"], exchange)
def on_quote(context, quote):
context.log.info("position: {}".format(context.ledger.get_position(quote.instrument_id, exchange)))
order_id = context.insert_order(quote.instrument_id, exchange, "15040900", quote.ask_price[0], 200, PriceType.Limit, Side.Buy, Offset.Open)
context.log.info("quote received: [time]{} [instrument_id]{} [last_price]{}".format(kft.strftime(quote.data_time), quote.instrument_id, quote.last_price))
def on_transaction(context, transaction):
context.log.info("{} {}".format(transaction.instrument_id, transaction.exchange_id))
pass
def on_entrust(context, entrust):
context.log.info("{} {}".format(entrust.instrument_id, entrust.exchange_id))
pass
def on_order(context, order):
context.log.info('order received: [instrument_id]{} [volume]{} [price]{}'.format(order.instrument_id, order.volume, order.limit_price))
def on_trade(context, trade):
context.log.info('trade received: {} [trade_id]{} [volume]{} [price]{}'.format(kft.strftime(trade.trade_time), trade.order_id, trade.volume, trade.price))
|
py | b40d4c5a12b6223fbdae5671f80537c0ee2a8d4f | """
Collection of tests for Ivy optimizers
"""
# global
import pytest
import numpy as np
# local
import ivy
import ivy_tests.helpers as helpers
from ivy.core.container import Container
# sgd
@pytest.mark.parametrize(
"bs_ic_oc_target", [
([1, 2], 4, 5, [[0.30230279, 0.65123089, 0.30132881, -0.90954636, 1.08810135]]),
])
@pytest.mark.parametrize(
"with_v", [True, False])
@pytest.mark.parametrize(
"inplace", [True, False])
@pytest.mark.parametrize(
"dtype_str", ['float32'])
def test_sgd_optimizer(bs_ic_oc_target, with_v, inplace, dtype_str, dev_str, compile_graph, call):
# smoke test
if call is helpers.np_call:
# NumPy does not support gradients
pytest.skip()
batch_shape, input_channels, output_channels, target = bs_ic_oc_target
x = ivy.cast(ivy.linspace(ivy.zeros(batch_shape), ivy.ones(batch_shape), input_channels), 'float32')
if with_v:
np.random.seed(0)
wlim = (6 / (output_channels + input_channels)) ** 0.5
w = ivy.variable(ivy.array(np.random.uniform(-wlim, wlim, (output_channels, input_channels)),
'float32', dev_str=dev_str))
b = ivy.variable(ivy.zeros([output_channels], dev_str=dev_str))
v = Container({'w': w, 'b': b})
else:
v = None
linear_layer = ivy.Linear(input_channels, output_channels, dev_str=dev_str, v=v)
def loss_fn(v_):
out = linear_layer(x, v=v_)
return ivy.reduce_mean(out)[0]
# optimizer
optimizer = ivy.SGD(inplace=inplace)
# compile if this mode is set
if compile_graph and call is helpers.torch_call:
# Currently only PyTorch is supported for ivy compilation
optimizer.compile_graph(linear_layer.v, )
# train
loss_tm1 = 1e12
loss = None
grads = None
for i in range(10):
loss, grads = ivy.execute_with_gradients(loss_fn, linear_layer.v)
linear_layer.v = optimizer.step(linear_layer.v, grads)
assert loss < loss_tm1
loss_tm1 = loss
# type test
assert ivy.is_array(loss)
assert isinstance(grads, ivy.Container)
# cardinality test
if call is helpers.mx_call:
# mxnet slicing cannot reduce dimension to zero
assert loss.shape == (1,)
else:
assert loss.shape == ()
# value test
assert ivy.reduce_max(ivy.abs(grads.b)) > 0
assert ivy.reduce_max(ivy.abs(grads.w)) > 0
# compilation test
if call is helpers.torch_call:
# pytest scripting does not support **kwargs
return
if not ivy.wrapped_mode():
helpers.assert_compilable(loss_fn)
# lars
@pytest.mark.parametrize(
"bs_ic_oc_target", [
([1, 2], 4, 5, [[0.30230279, 0.65123089, 0.30132881, -0.90954636, 1.08810135]]),
])
@pytest.mark.parametrize(
"with_v", [True, False])
@pytest.mark.parametrize(
"inplace", [True, False])
@pytest.mark.parametrize(
"dtype_str", ['float32'])
def test_lars_optimizer(bs_ic_oc_target, with_v, inplace, dtype_str, dev_str, compile_graph, call):
# smoke test
if call is helpers.np_call:
# NumPy does not support gradients
pytest.skip()
batch_shape, input_channels, output_channels, target = bs_ic_oc_target
x = ivy.cast(ivy.linspace(ivy.zeros(batch_shape), ivy.ones(batch_shape), input_channels), 'float32')
if with_v:
np.random.seed(0)
wlim = (6 / (output_channels + input_channels)) ** 0.5
w = ivy.variable(ivy.array(np.random.uniform(-wlim, wlim, (output_channels, input_channels)),
'float32', dev_str=dev_str))
b = ivy.variable(ivy.zeros([output_channels], dev_str=dev_str))
v = Container({'w': w, 'b': b})
else:
v = None
linear_layer = ivy.Linear(input_channels, output_channels, dev_str=dev_str, v=v)
def loss_fn(v_):
out = linear_layer(x, v=v_)
return ivy.reduce_mean(out)[0]
# optimizer
optimizer = ivy.LARS(inplace=inplace)
# compile if this mode is set
if compile_graph and call is helpers.torch_call:
# Currently only PyTorch is supported for ivy compilation
optimizer.compile_graph(linear_layer.v)
# train
loss_tm1 = 1e12
loss = None
grads = None
for i in range(10):
loss, grads = ivy.execute_with_gradients(loss_fn, linear_layer.v)
linear_layer.v = optimizer.step(linear_layer.v, grads)
assert loss < loss_tm1
loss_tm1 = loss
# type test
assert ivy.is_array(loss)
assert isinstance(grads, ivy.Container)
# cardinality test
if call is helpers.mx_call:
# mxnet slicing cannot reduce dimension to zero
assert loss.shape == (1,)
else:
assert loss.shape == ()
# value test
assert ivy.reduce_max(ivy.abs(grads.b)) > 0
assert ivy.reduce_max(ivy.abs(grads.w)) > 0
# compilation test
if call is helpers.torch_call:
# pytest scripting does not support **kwargs
return
if not ivy.wrapped_mode():
helpers.assert_compilable(loss_fn)
# adam
@pytest.mark.parametrize(
"bs_ic_oc_target", [
([1, 2], 4, 5, [[0.30230279, 0.65123089, 0.30132881, -0.90954636, 1.08810135]]),
])
@pytest.mark.parametrize(
"with_v", [True, False])
@pytest.mark.parametrize(
"inplace", [True, False])
@pytest.mark.parametrize(
"dtype_str", ['float32'])
def test_adam_optimizer(bs_ic_oc_target, with_v, inplace, dtype_str, dev_str, compile_graph, call):
# smoke test
if call is helpers.np_call:
# NumPy does not support gradients
pytest.skip()
batch_shape, input_channels, output_channels, target = bs_ic_oc_target
x = ivy.cast(ivy.linspace(ivy.zeros(batch_shape), ivy.ones(batch_shape), input_channels), 'float32')
if with_v:
np.random.seed(0)
wlim = (6 / (output_channels + input_channels)) ** 0.5
w = ivy.variable(ivy.array(np.random.uniform(-wlim, wlim, (output_channels, input_channels)),
'float32', dev_str=dev_str))
b = ivy.variable(ivy.zeros([output_channels], dev_str=dev_str))
v = Container({'w': w, 'b': b})
else:
v = None
linear_layer = ivy.Linear(input_channels, output_channels, dev_str=dev_str, v=v)
def loss_fn(v_):
out = linear_layer(x, v=v_)
return ivy.reduce_mean(out)[0]
# optimizer
optimizer = ivy.Adam(dev_str=dev_str, inplace=inplace)
# compile if this mode is set
if compile_graph and call is helpers.torch_call:
# Currently only PyTorch is supported for ivy compilation
optimizer.compile_graph(linear_layer.v)
# train
loss, grads = ivy.execute_with_gradients(loss_fn, linear_layer.v)
linear_layer.v = optimizer.step(linear_layer.v, grads)
loss_tm1 = 1e12
loss = None
grads = None
for i in range(10):
loss, grads = ivy.execute_with_gradients(loss_fn, linear_layer.v)
linear_layer.v = optimizer.step(linear_layer.v, grads)
assert loss < loss_tm1
loss_tm1 = loss
# type test
assert ivy.is_array(loss)
assert isinstance(grads, ivy.Container)
# cardinality test
if call is helpers.mx_call:
# mxnet slicing cannot reduce dimension to zero
assert loss.shape == (1,)
else:
assert loss.shape == ()
# value test
assert ivy.reduce_max(ivy.abs(grads.b)) > 0
assert ivy.reduce_max(ivy.abs(grads.w)) > 0
# compilation test
if call is helpers.torch_call:
# pytest scripting does not support **kwargs
return
if not ivy.wrapped_mode():
helpers.assert_compilable(loss_fn)
# lamb
@pytest.mark.parametrize(
"bs_ic_oc_target", [
([1, 2], 4, 5, [[0.30230279, 0.65123089, 0.30132881, -0.90954636, 1.08810135]]),
])
@pytest.mark.parametrize(
"with_v", [True, False])
@pytest.mark.parametrize(
"inplace", [True, False])
@pytest.mark.parametrize(
"dtype_str", ['float32'])
def test_lamb_optimizer(bs_ic_oc_target, with_v, inplace, dtype_str, dev_str, compile_graph, call):
# smoke test
if call is helpers.np_call:
# NumPy does not support gradients
pytest.skip()
batch_shape, input_channels, output_channels, target = bs_ic_oc_target
x = ivy.cast(ivy.linspace(ivy.zeros(batch_shape), ivy.ones(batch_shape), input_channels), 'float32')
if with_v:
np.random.seed(0)
wlim = (6 / (output_channels + input_channels)) ** 0.5
w = ivy.variable(ivy.array(np.random.uniform(-wlim, wlim, (output_channels, input_channels)),
'float32', dev_str=dev_str))
b = ivy.variable(ivy.zeros([output_channels], dev_str=dev_str))
v = Container({'w': w, 'b': b})
else:
v = None
linear_layer = ivy.Linear(input_channels, output_channels, dev_str=dev_str, v=v)
def loss_fn(v_):
out = linear_layer(x, v=v_)
return ivy.reduce_mean(out)[0]
# optimizer
optimizer = ivy.LAMB(dev_str=dev_str, inplace=inplace)
# compile if this mode is set
if compile_graph and call is helpers.torch_call:
# Currently only PyTorch is supported for ivy compilation
optimizer.compile_graph(linear_layer.v)
# train
loss, grads = ivy.execute_with_gradients(loss_fn, linear_layer.v)
linear_layer.v = optimizer.step(linear_layer.v, grads)
loss_tm1 = 1e12
loss = None
grads = None
for i in range(10):
loss, grads = ivy.execute_with_gradients(loss_fn, linear_layer.v)
linear_layer.v = optimizer.step(linear_layer.v, grads)
assert loss < loss_tm1
loss_tm1 = loss
# type test
assert ivy.is_array(loss)
assert isinstance(grads, ivy.Container)
# cardinality test
if call is helpers.mx_call:
# mxnet slicing cannot reduce dimension to zero
assert loss.shape == (1,)
else:
assert loss.shape == ()
# value test
assert ivy.reduce_max(ivy.abs(grads.b)) > 0
assert ivy.reduce_max(ivy.abs(grads.w)) > 0
# compilation test
if call is helpers.torch_call:
# pytest scripting does not support **kwargs
return
if not ivy.wrapped_mode():
helpers.assert_compilable(loss_fn)
|
bzl | b40d4e08dda53728e702941458fb3ac6917111d4 | # Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"Common web_test attributes"
load("@build_bazel_rules_nodejs//internal:node.bzl", "sources_aspect")
# Attributes shared by any web_test rule (ts_web_test, karma_web_test, protractor_web_test)
COMMON_WEB_TEST_ATTRS = {
"srcs": attr.label_list(
doc = "A list of JavaScript test files",
allow_files = [".js"],
),
"configuration_env_vars": attr.string_list(
doc = """Pass these configuration environment variables to the resulting binary.
Chooses a subset of the configuration environment variables (taken from ctx.var), which also
includes anything specified via the --define flag.
Note, this can lead to different outputs produced by this rule.""",
default = [],
),
"data": attr.label_list(
doc = "Runtime dependencies",
allow_files = True,
),
"deps": attr.label_list(
doc = "Other targets which produce JavaScript such as `ts_library`",
allow_files = True,
aspects = [sources_aspect],
),
}
|
py | b40d4e866552b233a2275199811985647dd40183 | # Copyright 2015 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from nova.api.metadata import base as instance_metadata
from nova.i18n import _LI, _LW
from nova.network import model as network_model
from nova.virt import configdrive
import os
from taskflow import task
from oslo_config import cfg
from oslo_log import log as logging
from pypowervm import const as pvm_const
from pypowervm.tasks import scsi_mapper as tsk_map
from pypowervm.tasks import storage as tsk_stg
from pypowervm import util as pvm_util
from pypowervm.utils import transaction as pvm_tx
from pypowervm.wrappers import base_partition as pvm_bp
from pypowervm.wrappers import managed_system as pvm_ms
from pypowervm.wrappers import storage as pvm_stg
from pypowervm.wrappers import virtual_io_server as pvm_vios
from nova_powervm.virt.powervm import exception as npvmex
from nova_powervm.virt.powervm import vm
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
_LLA_SUBNET = "fe80::/64"
class ConfigDrivePowerVM(object):
_cur_vios_uuid = None
_cur_vios_name = None
_cur_vg_uuid = None
def __init__(self, adapter, host_uuid):
"""Creates the config drive manager for PowerVM.
:param adapter: The pypowervm adapter to communicate with the system.
:param host_uuid: The UUID of the host system.
"""
self.adapter = adapter
self.host_uuid = host_uuid
# The validate will use the cached static variables for the VIOS info.
# Once validate is done, set the class variables to the updated cache.
self._validate_vopt_vg()
self.vios_uuid = ConfigDrivePowerVM._cur_vios_uuid
self.vios_name = ConfigDrivePowerVM._cur_vios_name
self.vg_uuid = ConfigDrivePowerVM._cur_vg_uuid
def _create_cfg_dr_iso(self, instance, injected_files, network_info,
admin_pass=None):
"""Creates an ISO file that contains the injected files. Used for
config drive.
:param instance: The VM instance from OpenStack.
:param injected_files: A list of file paths that will be injected into
the ISO.
:param network_info: The network_info from the nova spawn method.
:param admin_pass: Optional password to inject for the VM.
:return iso_path: The path to the ISO
:return file_name: The file name for the ISO
"""
LOG.info(_LI("Creating config drive for instance: %s"), instance.name)
extra_md = {}
if admin_pass is not None:
extra_md['admin_pass'] = admin_pass
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files,
extra_md=extra_md,
network_info=network_info)
# Make sure the path exists.
im_path = CONF.powervm.image_meta_local_path
if not os.path.exists(im_path):
os.mkdir(im_path)
file_name = pvm_util.sanitize_file_name_for_api(
instance.name, prefix='cfg_', suffix='.iso',
max_len=pvm_const.MaxLen.VOPT_NAME)
iso_path = os.path.join(im_path, file_name)
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
LOG.info(_LI("Config drive ISO being built for instance %(inst)s "
"building to path %(iso_path)s."),
{'inst': instance.name, 'iso_path': iso_path})
cdb.make_drive(iso_path)
return iso_path, file_name
def create_cfg_drv_vopt(self, instance, injected_files, network_info,
lpar_uuid, admin_pass=None, mgmt_cna=None,
stg_ftsk=None):
"""Creates the config drive virtual optical and attach to VM.
:param instance: The VM instance from OpenStack.
:param injected_files: A list of file paths that will be injected into
the ISO.
:param network_info: The network_info from the nova spawn method.
:param lpar_uuid: The UUID of the client LPAR
:param admin_pass: (Optional) password to inject for the VM.
:param mgmt_cna: (Optional) The management (RMC) CNA wrapper.
:param stg_ftsk: (Optional) If provided, the tasks to create and attach
the Media to the VM will be deferred on to the
FeedTask passed in. The execute can be done all in
one method (batched together). If None (the default),
the media will be created and attached immediately.
"""
# If there is a management client network adapter, then we should
# convert that to a VIF and add it to the network info
if mgmt_cna is not None:
network_info = copy.deepcopy(network_info)
network_info.append(self._mgmt_cna_to_vif(mgmt_cna))
iso_path, file_name = self._create_cfg_dr_iso(instance, injected_files,
network_info, admin_pass)
# Upload the media
file_size = os.path.getsize(iso_path)
vopt, f_uuid = self._upload_vopt(iso_path, file_name, file_size)
# Delete the media
os.remove(iso_path)
# Run the attach of the virtual optical
self._attach_vopt(instance, lpar_uuid, vopt, stg_ftsk)
def _attach_vopt(self, instance, lpar_uuid, vopt, stg_ftsk=None):
"""Will attach the vopt to the VIOS.
If the stg_ftsk is provided, adds the mapping to the stg_ftsk, but
won't attach until the stg_ftsk is independently executed.
:param instance: The VM instance from OpenStack.
:param lpar_uuid: The UUID of the client LPAR
:param vopt: The virtual optical device to add.
:param stg_ftsk: (Optional) If provided, the tasks to create the
storage mappings to connect the Media to the VM will
be deferred on to the FeedTask passed in. The execute
can be done all in one method (batched together). If
None (the default), the media will be attached
immediately.
"""
# If no transaction manager, build locally so that we can run
# immediately
if stg_ftsk is None:
wtsk = pvm_tx.WrapperTask('media_attach', pvm_vios.VIOS.getter(
self.adapter, entry_uuid=self.vios_uuid,
xag=[pvm_vios.VIOS.xags.SCSI_MAPPING]))
else:
wtsk = stg_ftsk.wrapper_tasks[self.vios_uuid]
# Define the function to build and add the mapping
def add_func(vios_w):
LOG.info(_LI("Adding cfg drive mapping for instance %(inst)s for "
"Virtual I/O Server %(vios)s"),
{'inst': instance.name, 'vios': vios_w.name})
mapping = tsk_map.build_vscsi_mapping(self.host_uuid, vios_w,
lpar_uuid, vopt)
return tsk_map.add_map(vios_w, mapping)
wtsk.add_functor_subtask(add_func)
# If built locally, then execute
if stg_ftsk is None:
wtsk.execute()
def _mgmt_cna_to_vif(self, cna):
"""Converts the mgmt CNA to VIF format for network injection."""
# See IEFT RFC 4291 appendix A for information on this algorithm
mac = vm.norm_mac(cna.mac)
ipv6_link_local = self._mac_to_link_local(mac)
subnet = network_model.Subnet(
version=6, cidr=_LLA_SUBNET,
ips=[network_model.FixedIP(address=ipv6_link_local)])
network = network_model.Network(id='mgmt', subnets=[subnet],
injected='yes')
return network_model.VIF(id='mgmt_vif', address=mac,
network=network)
@staticmethod
def _mac_to_link_local(mac):
# Convert the address to IPv6. The first step is to separate out the
# mac address
splits = mac.split(':')
# Insert into the middle the key ff:fe
splits.insert(3, 'ff')
splits.insert(4, 'fe')
# Do the bit flip on the first octet.
splits[0] = "%.2x" % (int(splits[0], 16) ^ 0b00000010)
# Convert to the IPv6 link local format. The prefix is fe80::. Join
# the hexes together at every other digit.
ll = ['fe80:']
ll.extend([splits[x] + splits[x + 1]
for x in range(0, len(splits), 2)])
return ':'.join(ll)
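    # Illustrative example of the conversion above (hedged; the sample MAC is
    # made up and the exact input format is whatever vm.norm_mac() returns):
    # '00:11:22:33:44:55' -> insert 'ff:fe' in the middle, flip the
    # universal/local bit of the first octet, and join the pairs to get
    # 'fe80::0211:22ff:fe33:4455'.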
def _upload_vopt(self, iso_path, file_name, file_size):
with open(iso_path, 'rb') as d_stream:
return tsk_stg.upload_vopt(self.adapter, self.vios_uuid, d_stream,
file_name, file_size)
def _validate_vopt_vg(self):
"""Will ensure that the virtual optical media repository exists.
This method will connect to one of the Virtual I/O Servers on the
        system and ensure that there is a volume group in which the (temporary)
        virtual optical media can be stored.
If the volume group on an I/O Server goes down (perhaps due to
maintenance), the system will rescan to determine if there is another
I/O Server that can host the request.
The very first invocation may be expensive. It may also be expensive
        to call if a Virtual I/O Server unexpectedly goes down.
If there are no Virtual I/O Servers that can support the media, then
an exception will be thrown.
"""
# TODO(IBM) Add thread safety here in case two calls into this are
# done at once.
# If our static variables were set, then we should validate that the
# repo is still running. Otherwise, we need to reset the variables
# (as it could be down for maintenance).
if ConfigDrivePowerVM._cur_vg_uuid is not None:
vio_uuid = ConfigDrivePowerVM._cur_vios_uuid
vg_uuid = ConfigDrivePowerVM._cur_vg_uuid
try:
vg_resp = self.adapter.read(pvm_vios.VIOS.schema_type,
vio_uuid, pvm_stg.VG.schema_type,
vg_uuid)
if vg_resp is not None:
return
except Exception:
pass
LOG.info(_LI("An error occurred querying the virtual optical "
"media repository. Attempting to re-establish "
"connection with a virtual optical media repository"))
# If we're hitting this, either it's our first time booting up, or the
# previously used Volume Group went offline (ex. VIOS went down for
# maintenance).
#
# Since it doesn't matter which VIOS we use for the media repo, we
# should query all Virtual I/O Servers and see if an appropriate
# media repository exists.
vios_resp = self.adapter.read(pvm_ms.System.schema_type,
root_id=self.host_uuid,
child_type=pvm_vios.VIOS.schema_type)
vio_wraps = pvm_vios.VIOS.wrap(vios_resp)
# First loop through the VIOSes to see if any have the right VG
found_vg = None
found_vios = None
for vio_wrap in vio_wraps:
# If the RMC state is not active, skip over to ensure we don't
# timeout
if vio_wrap.rmc_state != pvm_bp.RMCState.ACTIVE:
continue
try:
vg_resp = self.adapter.read(pvm_vios.VIOS.schema_type,
root_id=vio_wrap.uuid,
child_type=pvm_stg.VG.schema_type)
vg_wraps = pvm_stg.VG.wrap(vg_resp)
for vg_wrap in vg_wraps:
if vg_wrap.name == CONF.powervm.vopt_media_volume_group:
found_vg = vg_wrap
found_vios = vio_wrap
break
except Exception:
LOG.warn(_LW('Unable to read volume groups for Virtual '
'I/O Server %s'), vio_wrap.name)
pass
# If we didn't find a volume group...raise the exception. It should
# default to being the rootvg, which all VIOSes will have. Otherwise,
# this is user specified, and if it was not found is a proper
# exception path.
if found_vg is None:
raise npvmex.NoMediaRepoVolumeGroupFound(
vol_grp=CONF.powervm.vopt_media_volume_group)
# Ensure that there is a virtual optical media repository within it.
if len(found_vg.vmedia_repos) == 0:
vopt_repo = pvm_stg.VMediaRepos.bld(
self.adapter, 'vopt', str(CONF.powervm.vopt_media_rep_size))
found_vg.vmedia_repos = [vopt_repo]
found_vg = found_vg.update()
# At this point, we know that we've successfully set up the volume
# group. Save to the static class variables.
ConfigDrivePowerVM._cur_vg_uuid = found_vg.uuid
ConfigDrivePowerVM._cur_vios_uuid = found_vios.uuid
ConfigDrivePowerVM._cur_vios_name = found_vios.name
def dlt_vopt(self, lpar_uuid, stg_ftsk=None):
"""Deletes the virtual optical and scsi mappings for a VM.
:param lpar_uuid: The pypowervm UUID of the LPAR to remove.
:param stg_ftsk: (Optional) A FeedTask. If provided, the actions to
modify the storage will be added as batched functions
onto the FeedTask. If not provided (the default) the
operation to delete the vOpt will execute immediately.
"""
# If no transaction manager, build locally so that we can run
# immediately
if stg_ftsk is None:
built_stg_ftsk = True
vio_resp = self.adapter.read(
pvm_vios.VIOS.schema_type, root_id=self.vios_uuid,
xag=[pvm_vios.VIOS.xags.SCSI_MAPPING])
vio_w = pvm_vios.VIOS.wrap(vio_resp)
stg_ftsk = pvm_tx.FeedTask('media_detach', [vio_w])
else:
built_stg_ftsk = False
# Run the remove maps method.
self.add_dlt_vopt_tasks(lpar_uuid, stg_ftsk)
# If built locally, then execute
if built_stg_ftsk:
stg_ftsk.execute()
def add_dlt_vopt_tasks(self, lpar_uuid, stg_ftsk):
"""Deletes the virtual optical and scsi mappings for a VM.
:param lpar_uuid: The pypowervm UUID of the LPAR to remove.
:param stg_ftsk: A FeedTask handling storage I/O. The task to remove
the mappings and media from the VM will be deferred on
to the FeedTask passed in. The execute can be done all
in one method (batched together). No updates are
actually made here; they are simply added to the
FeedTask.
"""
# The function to find the VOpt
match_func = tsk_map.gen_match_func(pvm_stg.VOptMedia)
def rm_vopt_mapping(vios_w):
return tsk_map.remove_maps(vios_w, lpar_uuid,
match_func=match_func)
# Add a function to remove the map
stg_ftsk.wrapper_tasks[self.vios_uuid].add_functor_subtask(
rm_vopt_mapping)
# Find the vOpt device (before the remove is done) so that it can be
# removed.
partition_id = vm.get_vm_id(self.adapter, lpar_uuid)
media_mappings = tsk_map.find_maps(
stg_ftsk.get_wrapper(self.vios_uuid).scsi_mappings,
client_lpar_id=partition_id, match_func=match_func)
media_elems = [x.backing_storage for x in media_mappings]
def rm_vopt():
LOG.info(_LI("Removing virtual optical for VM with UUID %s."),
lpar_uuid)
vg_rsp = self.adapter.read(pvm_vios.VIOS.schema_type,
root_id=self.vios_uuid,
child_type=pvm_stg.VG.schema_type,
child_id=self.vg_uuid)
tsk_stg.rm_vg_storage(pvm_stg.VG.wrap(vg_rsp), vopts=media_elems)
stg_ftsk.add_post_execute(task.FunctorTask(rm_vopt))
|
py | b40d4ed4a6454c5041d658743ea713dad4aa5915 | import logging
from tuya_connector import TuyaOpenAPI, TUYA_LOGGER
ACCESS_ID = "txejpdfda9iwmn5cg2es"
ACCESS_KEY = "46d6072ffd724e0ba5ebeb5cc6b9dce9"
API_ENDPOINT = "https://openapi.tuyaus.com"
# Enable debug log
TUYA_LOGGER.setLevel(logging.DEBUG)
# Init OpenAPI and connect
openapi = TuyaOpenAPI(API_ENDPOINT, ACCESS_ID, ACCESS_KEY)
openapi.connect()
# Set up device_id
DEVICE_ID1 ="ebfc16d57ed374932cjqfk"
# Call APIs from Tuya
# Get the device information
#response = openapi.get("/v1.0/iot-03/devices/{}".format(DEVICE_ID1))
# Get the instruction set of the device
#response = openapi.get("/v1.0/iot-03/devices/{}/functions".format(DEVICE_ID1))
# Send commands
#commands1 = {'commands': [{'code': 'switch_led', 'value': False}]}
#commands1 = {'commands': [{'code': 'bright_value_v2', 'value': 10}]} # Brightness 10-1000
#commands1 = {'commands': [{'code': 'temp_value_v2', 'value': 255}]} # Temp 0-1000
#commands1 = {'commands': [{'code': 'work_mode', 'value': 'colour'}]} # MODE "{\"range\":[\"white\",\"colour\",\"scene\",\"music\"]}"
commands1 = {'commands': [{'code': 'colour_data_v2', 'value': "{\"h\":0,\"s\":0,\"v\":1000}"}]} # color
openapi.post('/v1.0/iot-03/devices/{}/commands'.format(DEVICE_ID1), commands1)
# Get the status of a single device
response = openapi.get("/v1.0/iot-03/devices/{}/status".format(DEVICE_ID1))
#DEVICE_ID2 = "017743508caab5f0973e" #"017743508caab5f385a7" "017743508caab5f0973e"
# Call APIs from Tuya
# Get the device information
#response = openapi.get("/v1.0/iot-03/devices/{}".format(DEVICE_ID2))
# Get the instruction set of the device
#response = openapi.get("/v1.0/iot-03/devices/{}/functions".format(DEVICE_ID2))
# Send commands
#commands2 = {'commands': [{'code': 'switch_1', 'value': False}]}
#openapi.post('/v1.0/iot-03/devices/{}/commands'.format(DEVICE_ID2), commands2)
# Get the status of a single device
#response = openapi.get("/v1.0/iot-03/devices/{}/status".format(DEVICE_ID2)) |
py | b40d4efc86232139570b77dfc35b7acc35895e6f | #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <[email protected]>
# Julien Danjou <[email protected]>
# Tyaptin Ilya <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for ceilometer/publisher/utils.py
"""
from oslo.serialization import jsonutils
from oslotest import base
from ceilometer.publisher import utils
class TestSignature(base.BaseTestCase):
def test_compute_signature_change_key(self):
sig1 = utils.compute_signature({'a': 'A', 'b': 'B'},
'not-so-secret')
sig2 = utils.compute_signature({'A': 'A', 'b': 'B'},
'not-so-secret')
self.assertNotEqual(sig1, sig2)
def test_compute_signature_change_value(self):
sig1 = utils.compute_signature({'a': 'A', 'b': 'B'},
'not-so-secret')
sig2 = utils.compute_signature({'a': 'a', 'b': 'B'},
'not-so-secret')
self.assertNotEqual(sig1, sig2)
def test_compute_signature_same(self):
sig1 = utils.compute_signature({'a': 'A', 'b': 'B'},
'not-so-secret')
sig2 = utils.compute_signature({'a': 'A', 'b': 'B'},
'not-so-secret')
self.assertEqual(sig1, sig2)
def test_compute_signature_signed(self):
data = {'a': 'A', 'b': 'B'}
sig1 = utils.compute_signature(data, 'not-so-secret')
data['message_signature'] = sig1
sig2 = utils.compute_signature(data, 'not-so-secret')
self.assertEqual(sig1, sig2)
def test_compute_signature_use_configured_secret(self):
data = {'a': 'A', 'b': 'B'}
sig1 = utils.compute_signature(data, 'not-so-secret')
sig2 = utils.compute_signature(data, 'different-value')
self.assertNotEqual(sig1, sig2)
def test_verify_signature_signed(self):
data = {'a': 'A', 'b': 'B'}
sig1 = utils.compute_signature(data, 'not-so-secret')
data['message_signature'] = sig1
self.assertTrue(utils.verify_signature(data, 'not-so-secret'))
def test_verify_signature_unsigned(self):
data = {'a': 'A', 'b': 'B'}
self.assertFalse(utils.verify_signature(data, 'not-so-secret'))
def test_verify_signature_incorrect(self):
data = {'a': 'A', 'b': 'B',
'message_signature': 'Not the same'}
self.assertFalse(utils.verify_signature(data, 'not-so-secret'))
def test_verify_signature_invalid_encoding(self):
data = {'a': 'A', 'b': 'B',
'message_signature': ''}
self.assertFalse(utils.verify_signature(data, 'not-so-secret'))
def test_verify_signature_unicode(self):
data = {'a': 'A', 'b': 'B',
'message_signature': u''}
self.assertFalse(utils.verify_signature(data, 'not-so-secret'))
def test_verify_signature_nested(self):
data = {'a': 'A',
'b': 'B',
'nested': {'a': 'A',
'b': 'B',
},
}
data['message_signature'] = utils.compute_signature(
data,
'not-so-secret')
self.assertTrue(utils.verify_signature(data, 'not-so-secret'))
def test_verify_signature_nested_json(self):
data = {'a': 'A',
'b': 'B',
'nested': {'a': 'A',
'b': 'B',
'c': ('c',),
'd': ['d']
},
}
data['message_signature'] = utils.compute_signature(
data,
'not-so-secret')
jsondata = jsonutils.loads(jsonutils.dumps(data))
self.assertTrue(utils.verify_signature(jsondata, 'not-so-secret'))
def test_besteffort_compare_digest(self):
hash1 = "f5ac3fe42b80b80f979825d177191bc5"
hash2 = "f5ac3fe42b80b80f979825d177191bc5"
hash3 = "1dece7821bf3fd70fe1309eaa37d52a2"
hash4 = b"f5ac3fe42b80b80f979825d177191bc5"
hash5 = b"f5ac3fe42b80b80f979825d177191bc5"
hash6 = b"1dece7821bf3fd70fe1309eaa37d52a2"
self.assertTrue(utils.besteffort_compare_digest(hash1, hash2))
self.assertFalse(utils.besteffort_compare_digest(hash1, hash3))
self.assertTrue(utils.besteffort_compare_digest(hash4, hash5))
self.assertFalse(utils.besteffort_compare_digest(hash4, hash6))
|
py | b40d50995ccc12a3a85d9c7cc6ec89ac8922c281 | #!/usr/bin/env python
#
# Copyright 2019 The Nakama Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import os
import sys
import subprocess
import shutil
_7zip_path = 'c:\\Program Files\\7-Zip\\7z.exe'
sdk_path = '../Nakama'
tmp_path = './_tmp'
platforms = ['win64', 'mac', 'ios', 'android', 'linux']
libs_path = os.path.join(sdk_path, 'libs')
shared_libs_path = os.path.join(sdk_path, 'Source', 'Nakama', 'Private', 'shared-libs')
tmp_shared_libs_path = os.path.join(tmp_path, 'shared-libs')
version = 'unknown'
def call(command, shell=False):
print('calling:', str(command))
res = subprocess.call(command, shell=shell)
if res != 0:
sys.exit(-1)
def archive7zip(src_folder, dest_arch, ignore_list=None):
command = [_7zip_path, 'a', '-r', dest_arch, src_folder]
if ignore_list:
for ignore_item in ignore_list:
command.append('-xr!' + ignore_item)
call(command)
def move_folder(src, dest):
if os.path.exists(src):
shutil.move(src, dest)
def remove_file(src):
if os.path.exists(src):
os.remove(src)
def move_platform_to_temp(platform):
src = os.path.join(shared_libs_path, platform)
dest = os.path.join(tmp_shared_libs_path, platform)
move_folder(src, dest)
def move_platform_to_sdk(platform):
src = os.path.join(tmp_shared_libs_path, platform)
dest = os.path.join(shared_libs_path, platform)
move_folder(src, dest)
def move_all_to_temp():
print('moving to', tmp_path)
for platform in platforms:
move_platform_to_temp(platform)
def release_platform(platform):
print('releasing', platform)
move_platform_to_sdk(platform)
ignore_list = ['Binaries', 'Intermediate', '.gitignore']
out_arch = 'nakama-unreal_{version}_{platform}.7z'.format(version=version, platform=platform)
remove_file(out_arch)
archive7zip(sdk_path, out_arch, ignore_list)
def detect_sdk_version():
with open(os.path.join(sdk_path, 'Nakama.uplugin'), 'r') as f:
while True:
line = f.readline()
if not line:
break
line = line.lstrip()
if line.startswith('"VersionName"'):
pos0 = line.find(':') + 1
pos1 = line.find('"', pos0) + 1
pos2 = line.find('"', pos1)
return line[pos1:pos2]
version = detect_sdk_version()
print('releasing sdk version:', version)
c = input('continue? (y/n): ')
if c != '' and c.lower() != 'y':
sys.exit()
shutil.copy('../CHANGELOG.md', sdk_path)
shutil.copy('../README.md', sdk_path)
shutil.copy('../LICENSE', sdk_path)
for platform in platforms:
move_all_to_temp()
release_platform(platform)
for platform in platforms:
move_platform_to_sdk(platform)
print('done.')
|
py | b40d5214038bd978009284ba09881943f71d9a4e | """Parser for a TOML spec file containing definitions of all supported reStructuredText
directives and roles, and what types of data each should expect."""
import dataclasses
import sys
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import (
Any,
Callable,
Dict,
FrozenSet,
List,
Mapping,
Optional,
Sequence,
Set,
TypeVar,
Union,
cast,
)
import docutils.nodes
import docutils.parsers.rst
import docutils.parsers.rst.directives
import toml
from typing_extensions import Protocol
from . import util
from .flutter import check_type, checked
PACKAGE_ROOT = Path(sys.modules["snooty"].__file__).resolve().parent
if PACKAGE_ROOT.is_file():
PACKAGE_ROOT = PACKAGE_ROOT.parent
#: Types of formatting that can be applied to a role.
FormattingType = Enum("FormattingType", ("strong", "monospace", "emphasis"))
#: How the target should be preprocessed.
TargetType = Enum("TargetType", ("plain", "callable", "cmdline_option"))
#: Types of formatting to which date directives must conform.
DateFormattingType = Enum("DateType", ("iso_8601"))
class _Inheritable(Protocol):
inherit: Optional[str]
class _HasNameAndDomain(Protocol):
domain: Optional[str]
name: str
@checked
@dataclass
class DateType:
"""Configuration for a directive that specifies a date"""
date: DateFormattingType = field(default=DateFormattingType.iso_8601)
@checked
@dataclass
class LinkRoleType:
"""Configuration for a role which links to a specific URL template."""
link: str
ensure_trailing_slash: Optional[bool]
format: Set[FormattingType] = field(default_factory=set)
def __post_init__(self) -> None:
if self.link.count("%s") != 1:
raise ValueError(
f"Link definitions in rstspec.toml need to contain a single '%s' placeholder: {self.link}"
)
@checked
@dataclass
class RefRoleType:
"""Configuration for a role which links to an optionally namespaced target."""
domain: Optional[str]
name: str
tag: Optional[str]
format: Set[FormattingType] = field(
default_factory=lambda: {FormattingType.monospace}
)
_T = TypeVar("_T", bound=_Inheritable)
_V = TypeVar("_V")
SPEC_VERSION = 0
StringOrStringlist = Union[List[str], str, None]
PrimitiveType = Enum(
"PrimitiveType",
(
"integer",
"nonnegative_integer",
"path",
"uri",
"string",
"length",
"boolean",
"flag",
"linenos",
),
)
PrimitiveRoleType = Enum("PrimitiveRoleType", ("text", "explicit_title"))
#: Spec definition of a role: this can be either a PrimitiveRoleType, or
#: an object requiring additional configuration.
RoleType = Union[PrimitiveRoleType, LinkRoleType, RefRoleType]
#: docutils option validation function for each of the above primitive types
VALIDATORS: Dict[PrimitiveType, Callable[[Any], Any]] = {
PrimitiveType.integer: int,
PrimitiveType.nonnegative_integer: docutils.parsers.rst.directives.nonnegative_int,
PrimitiveType.path: util.option_string,
PrimitiveType.uri: docutils.parsers.rst.directives.uri,
PrimitiveType.string: util.option_string,
PrimitiveType.length: docutils.parsers.rst.directives.length_or_percentage_or_unitless,
PrimitiveType.boolean: util.option_bool,
PrimitiveType.flag: util.option_flag,
PrimitiveType.linenos: util.option_string,
}
#: Option types can be a primitive type (PrimitiveType), an enum
#: defined in the spec, or a union of those.
ArgumentType = Union[List[Union[PrimitiveType, str]], PrimitiveType, str, None]
class MissingDict(Dict[str, _V]):
pass
class MissingList(List[ArgumentType]):
pass
@checked
@dataclass
class Meta:
"""Meta information about the file as a whole."""
version: int
@checked
@dataclass
class DirectiveOption:
type: ArgumentType
required: bool = field(default=False)
@checked
@dataclass
class TabDefinition:
id: str
title: str
@checked
@dataclass
class Directive:
"""Declaration of a reStructuredText directive (block content)."""
inherit: Optional[str]
help: Optional[str]
example: Optional[str]
content_type: Optional[StringOrStringlist]
argument_type: Union[DirectiveOption, ArgumentType]
required_context: Optional[str]
domain: Optional[str]
deprecated: bool = field(default=False)
options: Dict[str, Union[DirectiveOption, ArgumentType]] = field(
default_factory=MissingDict
)
fields: List[ArgumentType] = field(default_factory=MissingList)
name: str = field(default="")
rstobject: "Optional[RstObject]" = field(default=None)
# Add a required_options attribute for quickly enumerating options that must exist
# This is a little hacky, but is the best we can do in Python 3.7 using dataclasses.
def __post_init__(self) -> None:
self.__required_options = frozenset(
k
for k, v in self.options.items()
if isinstance(v, DirectiveOption) and v.required
)
@property
def required_options(self) -> FrozenSet[str]:
return self.__required_options
@checked
@dataclass
class Role:
"""Declaration of a reStructuredText role (inline content)."""
inherit: Optional[str]
help: Optional[str]
example: Optional[str]
type: Optional[RoleType]
domain: Optional[str]
deprecated: bool = field(default=False)
name: str = field(default="")
rstobject: "Optional[RstObject]" = field(default=None)
# A target consists of the following parts:
# domain (e.g. "std" or "mongodb")
# ":"
# role (e.g. "label" or "authaction")
# prefix (e.g. "bin" or a program name)
# "."
# target
@checked
@dataclass
class RstObject:
"""Declaration of a reStructuredText object, defining both a Directive
as well as a Role that links to that directive."""
inherit: Optional[str]
help: Optional[str]
domain: Optional[str]
prefix: str = field(default="")
type: TargetType = field(default=TargetType.plain)
deprecated: bool = field(default=False)
name: str = field(default="")
fields: List[ArgumentType] = field(default_factory=MissingList)
format: Set[FormattingType] = field(
default_factory=lambda: {FormattingType.monospace}
)
def create_directive(self) -> Directive:
return Directive(
inherit=None,
help=self.help,
example=None,
content_type="block",
argument_type=DirectiveOption(type="string", required=True),
required_context=None,
domain=self.domain,
deprecated=self.deprecated,
options={},
fields=[],
name=self.name,
rstobject=self,
)
def create_role(self) -> Role:
return Role(
inherit=None,
help=self.help,
example=None,
type=RefRoleType(self.domain, self.name, self.prefix, format=self.format),
domain=self.domain,
deprecated=self.deprecated,
name=self.name,
rstobject=self,
)
@checked
@dataclass
class Spec:
"""The spec root."""
meta: Meta
enum: Dict[str, List[str]] = field(default_factory=dict)
directive: Dict[str, Directive] = field(default_factory=dict)
role: Dict[str, Role] = field(default_factory=dict)
rstobject: Dict[str, RstObject] = field(default_factory=dict)
tabs: Dict[str, List[TabDefinition]] = field(default_factory=dict)
@classmethod
def loads(cls, data: str) -> "Spec":
"""Load a spec from a string."""
root = check_type(cls, toml.loads(data))
if root.meta.version != SPEC_VERSION:
raise ValueError(f"Unknown spec version: {root.meta.version}")
# Inform each section element of its name and domain
sections: Sequence[Mapping[str, _HasNameAndDomain]] = (
root.directive,
root.role,
root.rstobject,
)
for section in sections:
for key, value in section.items():
domain, value.name = util.split_domain(key)
if domain:
value.domain = domain
root._resolve_inheritance()
return root
def strip_prefix_from_name(self, rstobject_id: str, title: str) -> str:
rstobject = self.rstobject.get(rstobject_id, None)
if rstobject is None:
return title
candidate = f"{rstobject.prefix}."
if title.startswith(candidate):
return title[len(candidate) :]
return title
def get_validator(
self, option_spec: Union[DirectiveOption, ArgumentType]
) -> Callable[[str], object]:
"""Return a validation function for a given argument type. This function will take in a
string, and either throw an exception or return an output value."""
if isinstance(option_spec, DirectiveOption):
option_spec = option_spec.type
if isinstance(option_spec, list):
child_validators = [self.get_validator(spec) for spec in option_spec]
def validator(argument: str) -> object:
for child_validator in child_validators:
try:
result = child_validator(argument)
except Exception:
continue
else:
return result
# Assertion to quiet mypy's failing type flow analysis
assert isinstance(option_spec, list)
options = ", ".join(str(x) for x in option_spec)
raise ValueError(f"Expected one of {options}; got {argument}")
return validator
elif isinstance(option_spec, PrimitiveType):
return VALIDATORS[option_spec]
elif isinstance(option_spec, str) and option_spec in self.enum:
return lambda argument: docutils.parsers.rst.directives.choice(
argument, self.enum[cast(str, option_spec)]
)
raise ValueError(f'Unknown directive argument type "{option_spec}"')
def _resolve_inheritance(self) -> None:
"""Spec entries can inherit from other entries; resolve this.
Not all fields are implicitly inherited: only fields with a default value
of None or MissingDict are inherited. This means that, for example content_type
is inherited, but as of this writing, deprecated is not."""
self._resolve_category(self.directive)
self._resolve_category(self.role)
self._resolve_category(self.rstobject)
@staticmethod
def _resolve_category(inheritable_index: Dict[str, _T]) -> None:
"""Resolve inheritance within a tree of inheritable dataclasses."""
resolved: Set[str] = set()
pending: Set[str] = set()
def resolve_value(key: str, inheritable: _T) -> _T:
"""Resolve a single inheritable dataclass."""
if key in pending:
raise ValueError(f"Inheritance cycle detected while resolving {key}")
if key in resolved:
return inheritable
if inheritable.inherit is not None:
pending.add(key)
try:
base = resolve_value(
inheritable.inherit, inheritable_index[inheritable.inherit]
)
except KeyError:
msg = f"Cannot inherit from non-existent directive {inheritable.inherit}"
raise ValueError(msg)
inheritable = dataclasses.replace(
base,
**{
k: v
for k, v in dataclasses.asdict(inheritable).items()
if v is not None
and not isinstance(v, (MissingDict, MissingList))
},
)
inheritable_index[key] = inheritable
pending.remove(key)
resolved.add(key)
return inheritable
for key, inheritable in inheritable_index.items():
resolve_value(key, inheritable)
GLOBAL_SPEC_PATH = PACKAGE_ROOT.joinpath("rstspec.toml")
SPEC = Spec.loads(GLOBAL_SPEC_PATH.read_text(encoding="utf-8"))
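# Hedged usage sketch (not part of the original module): the spec is TOML whose
# required [meta] table must declare a version equal to SPEC_VERSION.
#
#     empty_spec = Spec.loads("[meta]\nversion = 0\n")
#     assert empty_spec.meta.version == SPEC_VERSION
#     assert empty_spec.directive == {} and empty_spec.role == {}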
|
py | b40d5262f8f735fcf28ce256afcb13805b599fb0 | """Functions for communicating with the SUP800F GPS module."""
import collections
import functools
import struct
HEADER_FORMAT = ''.join((
'!', # network format (big-endian)
'BB', # start of sequence, A0 A1
'H', # payload length
))
TAIL_FORMAT = ''.join((
'!', # network format (big-endian)
'B', # checksum
'BB', # end of sequence, 0D 0A
))
MODE_FORMAT = ''.join((
'!', # network format (big-endian)
'B', # message id, 9 = configure message type
'B', # none = 0, NMEA = 1, binary = 2
'B', # 0 = SRAM, 1 = SRAM and Flash
))
BINARY_FORMAT = ''.join((
'!', # network format (big-endian)
'xxxx', # The message will have 4 header bytes
'x', # message id
'x', # message sub id
'f', # acceleration X
'f', # acceleration Y
'f', # acceleration Z
'f', # magnetic X
'f', # magnetic Y
'f', # magnetic Z
'I', # pressure
'f', # temperature
'xxx', # and 3 checksum bytes
))
BinaryMessage = collections.namedtuple( # pylint: disable=invalid-name
'BinaryMessage',
' '.join((
'acceleration_g_x', 'acceleration_g_y', 'acceleration_g_z',
'magnetic_flux_ut_x', 'magnetic_flux_ut_y', 'magnetic_flux_ut_z',
'pressure_p',
'temperature_c',
))
)
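# Illustrative note (not in the original source): with network byte order and
# no padding, BINARY_FORMAT packs to a fixed 41 bytes
# (struct.calcsize(BINARY_FORMAT) == 41), matching the full A0 A1 ... 0D 0A
# frame that parse_binary() below expects for a message of type 0xCF.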
def format_message(payload):
"""Formats a message for the SUP800F."""
checksum = functools.reduce(lambda a, b: a ^ b, payload, 0)
return (
struct.pack(HEADER_FORMAT, 0xA0, 0xA1, len(payload))
+ payload
+ struct.pack(TAIL_FORMAT, checksum, 0x0D, 0x0A)
)
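# Hedged, illustrative example (not in the original module): framing the
# NMEA-mode payload 09 01 00.  The XOR checksum is 0x09 ^ 0x01 ^ 0x00 = 0x08,
# so the complete frame is A0 A1 00 03 09 01 00 08 0D 0A:
#
#     frame = format_message(struct.pack(MODE_FORMAT, 9, 1, 0))
#     assert frame == b'\xa0\xa1\x00\x03\x09\x01\x00\x08\x0d\x0a'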
def get_message(ser, timeout_bytes=None):
"""Returns a single message."""
# Keep consuming bytes until we see the header message
if timeout_bytes is None:
timeout_bytes = 10000000
skipped_bytes = 0
while True:
if skipped_bytes > timeout_bytes:
raise ValueError('No binary header found')
part = ser.read(1)
skipped_bytes += 1
if part != b'\xA0':
continue
part = ser.read(1)
skipped_bytes += 1
if part != b'\xA1':
continue
part = ser.read(2)
skipped_bytes += 2
payload_length = struct.unpack('!H', part)[0]
rest = ser.read(payload_length + 3)
skipped_bytes += payload_length + 3
if rest[-2:] != b'\r\n':
print(r"Message didn't end in \r\n")
return b'\xA0\xA1' + struct.pack('!H', payload_length) + rest
def parse_binary(binary_message):
"""Parses a binary message (temperature, accelerometer, magnetometer, and
pressure) from the SUP800F module.
"""
# TODO: I guess the SUP800F also returns navigation data messages? Ignore
# them for now, but this shouldn't be called
if binary_message[4] == 0xA8:
return None
if binary_message[4] != 0xCF:
raise EnvironmentError('Invalid id while parsing binary message')
return BinaryMessage(*struct.unpack(BINARY_FORMAT, binary_message))
def switch_to_nmea_mode(ser):
"""Switches to the NMEA message mode."""
_change_mode(ser, 1)
def switch_to_binary_mode(ser):
"""Switches to the binary message mode."""
_change_mode(ser, 2)
def _change_mode(ser, mode):
"""Change reporting mode between NMEA messages or binary (temperature,
accelerometer and magnetometer) mode.
"""
for _ in range(3):
mode_message = struct.pack(MODE_FORMAT, 9, mode, 0)
ser.write(format_message(mode_message))
ser.flush()
if check_response(ser, limit=10):
return
        # Module-level function: there is no ``self`` here, so report via print.
        print('No response to mode change seen, trying again')
raise EnvironmentError('Mode change to {} denied'.format(mode))
def check_response(ser, limit=None):
"""Checks for an ack/nack response."""
response_format = ''.join((
'!', # network format (big-endian)
'xx', # The message will have 2 header bytes
'H', # payload length
'B', # message id
'B', # ack id
'xxx', # and 3 checksum bytes
))
count = 0
    def check(): # pylint: disable=missing-docstring
        nonlocal count
        if limit is None:
            return True
        else:
            count += 1
            return count <= limit
while check():
data = get_message(ser)
try:
length, message_id, _ack_id = ( # pylint: disable=unused-variable
struct.unpack(response_format, data)
)
except: # pylint: disable=bare-except
continue
if message_id not in (0x83, 0x84):
continue
if message_id == 0x83:
return True
else:
return False
raise EnvironmentError('No response messages seen')
|
py | b40d527a9adecccd4ce310f93e231c2c3a9298ec | from setuptools import setup
setup(name='CTRegisterMicroserviceFlask',
version='0.5.1',
description='Library to interact with the Control-Tower api-gateway (register, do requests to other microservices, etc)',
author='Vizzuality',
author_email='[email protected]',
license='MIT',
packages=['CTRegisterMicroserviceFlask'],
install_requires=[
'flask',
'requests'
],
extras_require={
'dev': [
'pytest==5.2.2',
'pytest-cov==2.8.1',
'pytest-mock==1.11.1',
'codecov==2.0.15',
'requests_mock==1.7.0',
]
},
zip_safe=False)
|
py | b40d531900540323f29b7eb43867288bf34fb8d7 | from .arena import Arena
from .bins_arena import BinsArena
from .empty_arena import EmptyArena
from .pegs_arena import PegsArena
from .table_arena import TableArena
from .height_table_arena import HeightTableArena
from .wipe_force_table_arena import WipeForceTableArena
from .wiping_table_arena import WipingTableArena
from .tactile_table_arena import TactileTableArena
|
py | b40d550abf600e88c4d784720b2e3a914c8f5dbf | """Support for MQTT room presence detection."""
from datetime import timedelta
import json
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.mqtt import CONF_STATE_TOPIC
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
ATTR_DEVICE_ID,
ATTR_ID,
CONF_DEVICE_ID,
CONF_NAME,
CONF_TIMEOUT,
STATE_NOT_HOME,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.util import dt, slugify
_LOGGER = logging.getLogger(__name__)
ATTR_DISTANCE = "distance"
ATTR_ROOM = "room"
CONF_AWAY_TIMEOUT = "away_timeout"
DEFAULT_AWAY_TIMEOUT = 0
DEFAULT_NAME = "Room Sensor"
DEFAULT_TIMEOUT = 5
DEFAULT_TOPIC = "room_presence"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DEVICE_ID): cv.string,
vol.Required(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema)
MQTT_PAYLOAD = vol.Schema(
vol.All(
json.loads,
vol.Schema(
{
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_DISTANCE): vol.Coerce(float),
},
extra=vol.ALLOW_EXTRA,
),
)
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MQTT room Sensor."""
async_add_entities(
[
MQTTRoomSensor(
config.get(CONF_NAME),
config.get(CONF_STATE_TOPIC),
config.get(CONF_DEVICE_ID),
config.get(CONF_TIMEOUT),
config.get(CONF_AWAY_TIMEOUT),
)
]
)
class MQTTRoomSensor(SensorEntity):
"""Representation of a room sensor that is updated via MQTT."""
def __init__(self, name, state_topic, device_id, timeout, consider_home):
"""Initialize the sensor."""
self._state = STATE_NOT_HOME
self._name = name
self._state_topic = f"{state_topic}/+"
self._device_id = slugify(device_id).upper()
self._timeout = timeout
self._consider_home = (
timedelta(seconds=consider_home) if consider_home else None
)
self._distance = None
self._updated = None
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
@callback
def update_state(device_id, room, distance):
"""Update the sensor state."""
self._state = room
self._distance = distance
self._updated = dt.utcnow()
self.async_write_ha_state()
@callback
def message_received(msg):
"""Handle new MQTT messages."""
try:
data = MQTT_PAYLOAD(msg.payload)
except vol.MultipleInvalid as error:
_LOGGER.debug("Skipping update because of malformatted data: %s", error)
return
device = _parse_update_data(msg.topic, data)
if device.get(CONF_DEVICE_ID) == self._device_id:
if self._distance is None or self._updated is None:
update_state(**device)
else:
# update if:
# device is in the same room OR
# device is closer to another room OR
# last update from other room was too long ago
timediff = dt.utcnow() - self._updated
if (
device.get(ATTR_ROOM) == self._state
or device.get(ATTR_DISTANCE) < self._distance
or timediff.total_seconds() >= self._timeout
):
update_state(**device)
return await mqtt.async_subscribe(
self.hass, self._state_topic, message_received, 1
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {ATTR_DISTANCE: self._distance}
@property
def state(self):
"""Return the current room of the entity."""
return self._state
def update(self):
"""Update the state for absent devices."""
if (
self._updated
and self._consider_home
and dt.utcnow() - self._updated > self._consider_home
):
self._state = STATE_NOT_HOME
def _parse_update_data(topic, data):
"""Parse the room presence update."""
parts = topic.split("/")
room = parts[-1]
device_id = slugify(data.get(ATTR_ID)).upper()
distance = data.get("distance")
parsed_data = {ATTR_DEVICE_ID: device_id, ATTR_ROOM: room, ATTR_DISTANCE: distance}
return parsed_data
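# Hedged, illustrative example of the parsing above (topic and payload values
# are made up): a payload {"id": "phone one", "distance": 1.5} received on
# "room_presence/living_room" yields
# {'device_id': 'PHONE_ONE', 'room': 'living_room', 'distance': 1.5}.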
|
py | b40d552455a2e2a49ce2f2068f58f77cd4c74aff | # -*- coding: utf-8 -*-
import re
import tempfile
import libtorrent as lt
from time import sleep
from bs4 import BeautifulSoup
from urllib.parse import unquote
from feedmaker.utils import _get
BASE_URL = 'https://ondebaixa.com/'
def re_search(regex, content):
return re.search(regex, content, re.IGNORECASE)
def mediatypedetect(contentinfo, checktype):
EPISODEREGEX = r'S\d{1,10}(?:\.|)E\d{1,10}'
SEASONREGEX = r'S\d{1,10}|\d\ª\s{1,5}Temporada|season|\d\ª\s|\dA'
mtype = None
if re_search(SEASONREGEX, contentinfo) and not re_search(EPISODEREGEX, contentinfo):
mtype = 'batch'
if re_search(EPISODEREGEX, contentinfo):
mtype = 'episode'
return True if mtype == checktype else None
class DataParser:
def __init__(self, **kwargs):
self.name = "OndeBaixa - Ondebaixa link parser!"
def __str__(self):
return self.name
def gerator(self):
content_type = None
for page in self.getLastPages():
data = self.getSoap(_get(page))
if 'magnet' in str(data):
for link in data.find_all('a'):
mag_url = link.get('href')
if 'magnet' in link.get('href'):
title = self.getTitle(mag_url)
database = None
content_type = None
if title:
content_data = str(data.find_all(
'span', attrs={'class': 'info_dados'}))
if mediatypedetect(title, 'episode'):
database = "shows"
content_type = "episode"
elif mediatypedetect(title, 'batch'):
database = "shows"
content_type = "batch"
elif 'filme' in content_data:
content_type = "movies"
database = "movies"
yield {
"title": title,
"url": mag_url,
"pubdate": None,
"source": BASE_URL,
"database": database,
"subtype": content_type,
}
def getSoap(self, contet):
try:
return BeautifulSoup(contet, 'lxml')
except Exception as e:
raise e
def getLastPages(self):
seen = set()
try:
for num in range(1,8):
link = ''.join([BASE_URL, str(num), '/'])
try:
data = self.getSoap(_get(link))
except Exception as e:
raise e
for a in data.find_all('a'):
_url = a.get('href')
if _url not in seen:
if 'http' in _url:
yield _url
seen.add(_url)
except KeyboardInterrupt:
print('\nStoped by Keyboard')
def getTitle(self, mag_url):
magregex = r'(?:\&dn\=)(.+?)(?:\&tr\=)'
if re.search(magregex, unquote(mag_url)):
return re.findall(magregex, unquote(mag_url))[0]
else:
ses = lt.session()
params = {
'save_path': tempfile.mkdtemp(),
'file_priorities': [0]*5
}
handle = lt.add_magnet_uri(ses, mag_url, params)
wait = 0
while not handle.has_metadata():
if wait >= 10:
break
wait += 1
sleep(1)
try:
return handle.get_torrent_info().name()
except AttributeError:
return None
|
py | b40d556fcfcac85cdf4b0a2a0f4e2e87d274fa01 | """
Module to enhance auto complete of transaction attribues in jupyter shell.
Author: Manikandan Dhamodharan, Morgan Stanley
"""
import re
ILLEGAL_CHARS = re.compile('[^0-9a-zA-Z_]')
ILLEGAL_PREFIX = re.compile('^[^a-zA-Z_]+')
class Txn(object):
"""Enables auto completion of attributes for txn references in jupyter shell """
def __init__(self, pmcNames):
for i, pmc in enumerate(pmcNames):
pmc = ILLEGAL_CHARS.sub('', pmc)
pmc = ILLEGAL_PREFIX.sub('', pmc)
setattr(self, pmc, i)
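# Hedged usage sketch (not part of the original module; the counter names are
# hypothetical).  Illegal characters are stripped and leading non-letters
# removed before each name becomes an attribute holding its index:
#
#     txn = Txn(['cpu time (ms)', '1st metric'])
#     assert txn.cputimems == 0
#     assert txn.stmetric == 1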
|
py | b40d55b528dd7b5e9620b221e461ed13d57813b2 | # Copyright 2016 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api import client as api_client
from nova.tests.unit.image import fake as fake_image
from nova.tests.unit import policy_fixture
class TestServerGet(test.TestCase):
def setUp(self):
super(TestServerGet, self).setUp()
self.useFixture(policy_fixture.RealPolicyFixture())
self.useFixture(nova_fixtures.NeutronFixture(self))
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
# the image fake backend needed for image discovery
image_service = fake_image.stub_out_image_service(self)
self.addCleanup(fake_image.FakeImageService_reset)
# NOTE(mriedem): This image has an invalid architecture metadata value
# and is used for negative testing in the functional stack.
timestamp = datetime.datetime(2011, 1, 1, 1, 2, 3)
image = {'id': 'c456eb30-91d7-4f43-8f46-2efd9eccd744',
'name': 'fake-image-invalid-arch',
'created_at': timestamp,
'updated_at': timestamp,
'deleted_at': None,
'deleted': False,
'status': 'active',
'is_public': False,
'container_format': 'raw',
'disk_format': 'raw',
'size': '25165824',
'properties': {'kernel_id': 'nokernel',
'ramdisk_id': 'nokernel',
'architecture': 'x64'}}
self.image_id = image_service.create(None, image)['id']
self.flavor_id = self.api.get_flavors()[0]['id']
def test_boot_server_with_invalid_image_meta(self):
"""Regression test for bug #1558866.
Glance allows you to provide any architecture value for image meta
properties but nova validates the image metadata against the
nova.compute.arch.ALL values during the conversion to the ImageMeta
object. This test ensures we get a 400 back in that case rather than
a 500.
"""
server = dict(name='server1',
imageRef=self.image_id,
flavorRef=self.flavor_id)
ex = self.assertRaises(api_client.OpenStackApiException,
self.api.post_server, {'server': server})
self.assertEqual(400, ex.response.status_code)
|
py | b40d58296da9f01642af1f6ed8c867749894d087 | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
'''
Named indexer
'''
import unittest
from iptest import IronPythonTestCase, run_test, skipUnlessIronPython
@skipUnlessIronPython()
class IndexerVbTest(IronPythonTestCase):
def setUp(self):
super(IndexerVbTest, self).setUp()
self.add_clr_assemblies("indexerdefinitionsvb", "typesamples")
import System
self.array = System.Array[object]
def test_basic(self):
from Merlin.Testing.Indexer import ClassWithIndexer
for t in [
ClassWithIndexer,
#StructWithIndexer,
]:
x = t()
x.Init()
for y,z in zip(x.PropertyName, range(10)):
self.assertEqual(y,z)
self.assertRaises(TypeError, lambda: x[2])
x.PropertyName[2] = 9
self.assertEqual(x.PropertyName[2], 9)
i = x.PropertyName
i.SetValue(self.array([3]), 10)
self.assertEqual(i.GetValue(self.array([3])), 10)
i = t.PropertyName # bug 363422
#i.SetValue(x, self.array([4]), 11)
#self.assertEqual(i.GetValue(x, self.array([4])), 11)
def test_signature(self):
from Merlin.Testing.Indexer import ClassWithSignature
x = ClassWithSignature()
for y,z in zip(x.PropertyName, range(2, 12)):
self.assertEqual(y,z)
i = x.PropertyName
i[3] = 10
self.assertEqual(10, i[3])
self.assertEqual(10, i[1, 4])
def test_only_optional(self):
from Merlin.Testing.Indexer import ClassWithOnlyOptional
x = ClassWithOnlyOptional()
for y,z in zip(x.PropertyName, range(10)):
self.assertEqual(y,z)
i = x.PropertyName
#i[()] # bug 363440
def test_only_paramarray(self):
from Merlin.Testing.Indexer import ClassWithOnlyParamArray
x = ClassWithOnlyParamArray()
i = x.PropertyName
for y,z in zip(x.PropertyName, range(10)):
self.assertEqual(y,z)
self.assertEqual(i[()], -99)
i[()] = 10
self.assertEqual(i[()], 10)
i[1] = 4
self.assertEqual(i[1], 4)
self.assertEqual(i[1, 4, 5, 7], 4)
def test_static_indexer(self):
from Merlin.Testing.Indexer import ClassWithStaticIndexer
t = ClassWithStaticIndexer
x = t()
i = ClassWithStaticIndexer.PropertyName
i[1] = 10
self.assertEqual(i[100], 111)
i.SetValue(self.array([2]), 20)
self.assertEqual(i.GetValue(self.array([200])), 222)
def test_overloaded_indexer(self):
from Merlin.Testing.Indexer import ClassWithOverloadedIndexers
x = ClassWithOverloadedIndexers()
x.Init()
self.assertEqual(x.PropertyName[6], 6)
#http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=3740
self.assertEqual([y for y in x.PropertyName],
range(10)) #should be [2]?
run_test(__name__)
|
py | b40d59a1e6285f955c98f9b39953215d490ac9f3 | import urllib.request, json
from .models import News_Highlights, News_Highlights_by_source
#this part collects the API key
api_key = None
#this part collects the base url
def config_func(app):
global base_url,api_key
base_url = app.config['NEWS_API_BASE_URL']
api_key =app.config['NEWS_API_KEY']
def get_source_names(search_keyword):
'''
This is a function that collects the news sources from the API
'''
configured_source_url1 = base_url.format(search_keyword,api_key)
with urllib.request.urlopen(configured_source_url1) as url:
collected_sources_data = url.read()
source_names_json = json.loads(collected_sources_data)
list_of_sources = None
if source_names_json['sources']:
successfully_collected_list =source_names_json['sources']
list_of_sources= process_sources(successfully_collected_list)
return list_of_sources
def get_articles(id):
'''
    This is a function that retrieves articles from a particular source based on the selected source
'''
configured_articles_url = 'https://newsapi.org/v1/articles?source={}&apiKey={}'.format(id,api_key)
with urllib.request.urlopen(configured_articles_url) as url:
collected_articles_data = url.read()
source_articles_json = json.loads(collected_articles_data)
list_of_sources = None
if source_articles_json['articles']:
successfully_collected_articles =source_articles_json['articles']
list_of_articles= process_articles(successfully_collected_articles)
return list_of_articles
def process_sources(source_response):
'''
    A function that processes the JSON source results and turns them into News_Highlights objects
'''
populated_source_list =[]
for source in source_response:
source_name = source.get('name')
source_id = source.get('id')
source_url = source.get('url')
source_description = source.get('description')
source_object= News_Highlights(source_name,source_id,source_url,source_description)
populated_source_list.append(source_object)
return populated_source_list
def process_articles(articles_response):
'''
    Function that processes the JSON article results returned by the API
'''
populated_articles_list = []
for article in articles_response:
article_name = article.get('author')
article_description = article.get('description')
article_time = article.get('publishedAt')
article_image = article.get('urlToImage')
article_url = article.get('url')
        article_title = article.get('title')
article_objects = News_Highlights_by_source(article_name,article_description,article_time,article_image,article_url, article_title)
populated_articles_list.append(article_objects)
return populated_articles_list
|
py | b40d5bcde339563e9250648b883a8e11e9d4d24b | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
ren2 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
renWin.AddRenderer(ren2)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# create pipeline
#
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName("" + str(VTK_DATA_ROOT) + "/Data/combxyz.bin")
pl3d.SetQFileName("" + str(VTK_DATA_ROOT) + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(110)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)
probeLine = vtk.vtkLineSource()
probeLine.SetPoint1(1,1,29)
probeLine.SetPoint2(16.5,5,31.7693)
probeLine.SetResolution(500)
probe = vtk.vtkProbeFilter()
probe.SetInputConnection(probeLine.GetOutputPort())
probe.SetSourceData(output)
probe.Update()
probeTube = vtk.vtkTubeFilter()
probeTube.SetInputData(probe.GetPolyDataOutput())
probeTube.SetNumberOfSides(5)
probeTube.SetRadius(.05)
probeMapper = vtk.vtkPolyDataMapper()
probeMapper.SetInputConnection(probeTube.GetOutputPort())
probeMapper.SetScalarRange(output.GetScalarRange())
probeActor = vtk.vtkActor()
probeActor.SetMapper(probeMapper)
displayLine = vtk.vtkLineSource()
displayLine.SetPoint1(0,0,0)
displayLine.SetPoint2(1,0,0)
displayLine.SetResolution(probeLine.GetResolution())
displayMerge = vtk.vtkMergeFilter()
displayMerge.SetGeometryConnection(displayLine.GetOutputPort())
displayMerge.SetScalarsData(probe.GetPolyDataOutput())
displayMerge.Update()
displayWarp = vtk.vtkWarpScalar()
displayWarp.SetInputData(displayMerge.GetPolyDataOutput())
displayWarp.SetNormal(0,1,0)
displayWarp.SetScaleFactor(.000001)
displayWarp.Update()
displayMapper = vtk.vtkPolyDataMapper()
displayMapper.SetInputData(displayWarp.GetPolyDataOutput())
displayMapper.SetScalarRange(output.GetScalarRange())
displayActor = vtk.vtkActor()
displayActor.SetMapper(displayMapper)
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineActor.GetProperty().SetColor(0,0,0)
ren1.AddActor(outlineActor)
ren1.AddActor(probeActor)
ren1.SetBackground(1,1,1)
ren1.SetViewport(0,.25,1,1)
ren2.AddActor(displayActor)
ren2.SetBackground(0,0,0)
ren2.SetViewport(0,0,1,.25)
renWin.SetSize(300,300)
ren1.ResetCamera()
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.95297,50)
cam1.SetFocalPoint(8.88908,0.595038,29.3342)
cam1.SetPosition(9.9,-26,41)
cam1.SetViewUp(0.060772,-0.319905,0.945498)
ren2.ResetCamera()
cam2 = ren2.GetActiveCamera()
cam2.ParallelProjectionOn()
cam2.SetParallelScale(.15)
iren.Initialize()
# render the image
#
# prevent the tk window from showing up then start the event loop
# --- end of script --
|
py | b40d5bee8e900dbc24a46b079720cea40ee58eaa | import urllib.request,json
from .models import NewsSource,NewsArticle
from datetime import date
#getting api key
api_key = None
#getting news base url
base_url = None
#getting articles url
articles_url = None
def configure_request(app):
global api_key,base_url,articles_url
api_key = app.config['NEWS_API_KEY']
base_url = app.config['NEWS_API_BASE_URL']
articles_url = app.config['ARTICLES_BASE_URL']
def get_news_source(category):
'''
Function that gets the json response to our url request
'''
get_news_source_url = base_url.format(category,api_key)
with urllib.request.urlopen(get_news_source_url) as url:
get_news_source_data = url.read()
get_news_source_response = json.loads(get_news_source_data)
news_source_results = None
if get_news_source_response['sources']:
news_source_results_list = get_news_source_response['sources']
news_source_results = process_news_source(news_source_results_list)
return news_source_results
def process_news_source(news_source_list):
'''
Function that processes the news sources results and turns them into a list of objects
Args:
news_source_list: A list of dictionaries that contain sources details
Returns:
news_source_results: A list of sources objects
'''
news_source_results = []
for news_source_item in news_source_list:
id = news_source_item.get('id')
name = news_source_item.get('name')
description = news_source_item.get('description')
url = news_source_item.get('url')
category = news_source_item.get('category')
country = news_source_item.get('country')
news_source_object = NewsSource(id,name,description,url,category,country)
news_source_results.append(news_source_object)
return news_source_results
def get_articles(id):
'''
Function that processes the articles and returns a list of articles objects
'''
get_articles_url = articles_url.format(id,api_key)
with urllib.request.urlopen(get_articles_url) as url:
news_article_results = json.loads(url.read())
news_article_object = None
if news_article_results['articles']:
news_article_object = process_news_article(news_article_results['articles'])
return news_article_object
def process_news_article(news_article_list):
    '''
    Function that processes the article results and turns them into a list of NewsArticle objects
    '''
news_article_object = []
for news_article_item in news_article_list:
id = news_article_item.get('id')
author = news_article_item.get('author')
title = news_article_item.get('title')
description = news_article_item.get('description')
url = news_article_item.get('url')
image = news_article_item.get('urlToImage')
date = news_article_item.get('publishedAt')
if image:
news_article_result = NewsArticle(id,author,title,description,url,image,date)
news_article_object.append(news_article_result)
return news_article_object |
py | b40d5c0a7349632a9dca3073b38d2a34cc835a8f | from typing import List, Sequence
from nmm import Interval, SequenceABC
from ..result import SearchResult
from .fragment import FrameFragment
from .path import FramePath
class FrameSearchResult(SearchResult):
def __init__(self, loglik: float, sequence: SequenceABC, path: FramePath):
self._loglik = loglik
self._fragments: List[FrameFragment] = []
self._intervals: List[Interval] = []
steps = list(path)
for fragi, stepi, homologous in self._create_fragments(path):
substeps = steps[stepi.start : stepi.stop]
fragment_path = FramePath([(s.state, s.seq_len) for s in substeps])
seq = sequence.slice(fragi)
frag = FrameFragment(seq, fragment_path, homologous)
self._fragments.append(frag)
self._intervals.append(fragi)
@property
def fragments(self) -> Sequence[FrameFragment]:
return self._fragments
@property
def intervals(self) -> Sequence[Interval]:
return self._intervals
@property
def loglikelihood(self) -> float:
return self._loglik
# def decode(self) -> CodonSearchResult:
# fragments: List[CodonFragment] = []
# intervals: List[Interval] = []
# start = end = 0
# for i, frag in enumerate(self._fragments):
# codon_frag = frag.decode()
# end += len(codon_frag.sequence)
# fragments.append(codon_frag)
# intervals.append(Interval(start, end))
# start = end
# return CodonSearchResult(self.score, fragments, intervals)
|
py | b40d5cbbb831fc77a904e553aff69420656ae317 | # Recipes urls. Imported with no prefix
from django.urls import include, path, register_converter
import recipes.views as views
import recipes.converters as converters
app_name = "recipes"
register_converter(converters.LetterConverter, "letter")
recipe_urls = [
path("browse",
view=views.RecipeList.as_view(),
name="recipe_list"),
path("add/",
view=views.CreateRecipe.as_view(),
name="recipe_create"),
path("edit/<slug:slug>/",
view=views.UpdateRecipe.as_view(),
name="recipe_update"),
path("delete/<slug:slug>/",
view=views.DeleteRecipe.as_view(),
name="recipe_delete"),
path("save/",
view=views.SaveRecipe.as_view(),
name="save_recipe"),
path("rate/",
view=views.RateRecipe.as_view(),
name="rate_recipe"),
path("search/",
view=views.SearchRecipes.as_view(),
name="search_recipes"),
path("<letter:first_letter>/",
view=views.RecipeByLetterList.as_view(),
name="letter_recipe"),
path("<slug:slug>/",
view=views.RecipeDetail.as_view(),
name="recipe_detail"),
]
tag_urls = [
path("",
view=views.TagList.as_view(),
name="tag_list"),
path("tagrecipe/",
view=views.TagRecipe.as_view(),
name="tag_recipe"),
path("untag/",
view=views.untag,
name="untag"),
path("all/",
view=views.all_tags,
name="get_all_tags"),
path("<slug:slug>",
view=views.TagDetail.as_view(),
name="tag_detail"),
]
urlpatterns = [
path("rotd/",
view=views.RecipeOfTheDay.as_view(),
name="rotd"),
path("home/",
view=views.DashboardView.as_view(),
name="home"),
path("tags/", include(tag_urls)),
path("", include(recipe_urls)),
]
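# Illustrative note (not part of the original file): with app_name = "recipes",
# the named routes above can be reversed elsewhere in the project, e.g.
# reverse("recipes:recipe_detail", kwargs={"slug": "some-recipe"}) in Python code
# or {% url 'recipes:letter_recipe' first_letter='a' %} in a template.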
|
py | b40d5d2c95ebef7a7278d2ce2e6489af48997bb9 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import collections.abc
import logging
import os
import smtplib
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from airflow.configuration import conf
from airflow.exceptions import AirflowConfigException
log = logging.getLogger(__name__)
def send_email(to: Union[List[str], Iterable[str]], subject: str, html_content: str,
files=None, dryrun=False, cc=None, bcc=None,
mime_subtype='mixed', mime_charset='utf-8', **kwargs):
"""Send email using backend specified in EMAIL_BACKEND."""
backend = conf.getimport('email', 'EMAIL_BACKEND')
to_list = get_email_address_list(to)
to_comma_separated = ", ".join(to_list)
return backend(to_comma_separated, subject, html_content, files=files,
dryrun=dryrun, cc=cc, bcc=bcc,
mime_subtype=mime_subtype, mime_charset=mime_charset, **kwargs)
def send_email_smtp(
to: Union[str, Iterable[str]],
subject: str,
html_content: str,
files: Optional[List[str]] = None,
dryrun: bool = False,
cc: Optional[Union[str, Iterable[str]]] = None,
bcc: Optional[Union[str, Iterable[str]]] = None,
mime_subtype: str = 'mixed',
mime_charset: str = 'utf-8',
**kwargs,
):
"""
Send an email with html content
    >>> send_email('[email protected]', 'foo', '<b>Foo</b> bar', ['/dev/null'], dryrun=True)
"""
smtp_mail_from = conf.get('smtp', 'SMTP_MAIL_FROM')
msg, recipients = build_mime_message(
mail_from=smtp_mail_from,
to=to,
subject=subject,
html_content=html_content,
files=files,
cc=cc,
bcc=bcc,
mime_subtype=mime_subtype,
mime_charset=mime_charset,
)
send_mime_email(e_from=smtp_mail_from, e_to=recipients, mime_msg=msg, dryrun=dryrun)
def build_mime_message(
mail_from: str,
to: Union[str, Iterable[str]],
subject: str,
html_content: str,
files: Optional[List[str]] = None,
cc: Optional[Union[str, Iterable[str]]] = None,
bcc: Optional[Union[str, Iterable[str]]] = None,
mime_subtype: str = 'mixed',
mime_charset: str = 'utf-8',
custom_headers: Optional[Dict[str, Any]] = None,
) -> Tuple[MIMEMultipart, List[str]]:
"""
Build a MIME message that can be used to send an email and
returns full list of recipients.
:param mail_from: Email address to set as email's from
:param to: List of email addresses to set as email's to
:param subject: Email's subject
:param html_content: Content of email in HTML format
:param files: List of paths of files to be attached
:param cc: List of email addresses to set as email's CC
:param bcc: List of email addresses to set as email's BCC
:param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
:param mime_charset: Email's charset. Default = UTF-8.
:param custom_headers: Additional headers to add to the MIME message.
No validations are run on these values and they should be able to be encoded.
:return: Email as MIMEMultipart and list of recipients' addresses.
"""
to = get_email_address_list(to)
msg = MIMEMultipart(mime_subtype)
msg['Subject'] = subject
msg['From'] = mail_from
msg['To'] = ", ".join(to)
recipients = to
if cc:
cc = get_email_address_list(cc)
msg['CC'] = ", ".join(cc)
recipients = recipients + cc
if bcc:
# don't add bcc in header
bcc = get_email_address_list(bcc)
recipients = recipients + bcc
msg['Date'] = formatdate(localtime=True)
mime_text = MIMEText(html_content, 'html', mime_charset)
msg.attach(mime_text)
for fname in files or []:
basename = os.path.basename(fname)
with open(fname, "rb") as file:
part = MIMEApplication(
file.read(),
Name=basename
)
part['Content-Disposition'] = f'attachment; filename="{basename}"'
part['Content-ID'] = f'<{basename}>'
msg.attach(part)
if custom_headers:
for header_key, header_value in custom_headers.items():
msg[header_key] = header_value
return msg, recipients
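# Illustrative sketch (not part of the original module): composing a message with
# build_mime_message() and handing it to send_mime_email(); addresses are placeholders.
# msg, recipients = build_mime_message(
#     mail_from='[email protected]',
#     to=['[email protected]'],
#     subject='Job finished',
#     html_content='<b>Done</b>',
# )
# send_mime_email(e_from='[email protected]', e_to=recipients, mime_msg=msg, dryrun=True)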
def send_mime_email(e_from: str, e_to: List[str], mime_msg: MIMEMultipart, dryrun: bool = False) -> None:
"""Send MIME email."""
smtp_host = conf.get('smtp', 'SMTP_HOST')
smtp_port = conf.getint('smtp', 'SMTP_PORT')
smtp_starttls = conf.getboolean('smtp', 'SMTP_STARTTLS')
smtp_ssl = conf.getboolean('smtp', 'SMTP_SSL')
smtp_user = None
smtp_password = None
try:
smtp_user = conf.get('smtp', 'SMTP_USER')
smtp_password = conf.get('smtp', 'SMTP_PASSWORD')
except AirflowConfigException:
log.debug("No user/password found for SMTP, so logging in with no authentication.")
if not dryrun:
conn = smtplib.SMTP_SSL(smtp_host, smtp_port) if smtp_ssl else smtplib.SMTP(smtp_host, smtp_port)
if smtp_starttls:
conn.starttls()
if smtp_user and smtp_password:
conn.login(smtp_user, smtp_password)
log.info("Sent an alert email to %s", e_to)
conn.sendmail(e_from, e_to, mime_msg.as_string())
conn.quit()
def get_email_address_list(addresses: Union[str, Iterable[str]]) -> List[str]:
"""Get list of email addresses."""
if isinstance(addresses, str):
return _get_email_list_from_str(addresses)
elif isinstance(addresses, collections.abc.Iterable):
if not all(isinstance(item, str) for item in addresses):
raise TypeError("The items in your iterable must be strings.")
return list(addresses)
received_type = type(addresses).__name__
raise TypeError("Unexpected argument type: Received '{}'.".format(received_type))
def _get_email_list_from_str(addresses: str) -> List[str]:
delimiters = [",", ";"]
for delimiter in delimiters:
if delimiter in addresses:
return [address.strip() for address in addresses.split(delimiter)]
return [addresses]
|
py | b40d5d2e4cbb4a4806d2f006461597c398844330 | from haystack import indexes
from .models import Cazador
class CazadorIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Cazador
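# Note (Haystack convention, not stated in this file): with use_template=True, the
# document text is rendered from templates/search/indexes/<app_label>/cazador_text.txt.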
|
py | b40d5ed38c890d071d5f07249a05d4d643a60cc9 | from django.contrib import admin
# Register your models here.
from .models import User, Email
admin.site.register(User)
admin.site.register(Email)
|
py | b40d5f0d9d010abda6ec07b10f5fa69fb5da51b4 | import os
import torch
import torch.nn as nn
import torch.utils.data as Data
import torchvision
import matplotlib.pyplot as plt
# Hyper Parameters
EPOCH = 1
BATCH_SIZE = 50
LR = 0.001 # learning rate
DOWNLOAD_MNIST = False
if not(os.path.exists('./mnist/')) or not os.listdir('./mnist/'):
# not mnist dir or mnist is empyt dir
DOWNLOAD_MNIST = True
train_data = torchvision.datasets.MNIST(
root='./mnist/',
train=True, # this is training data
transform=torchvision.transforms.ToTensor(), # Converts a PIL.Image or numpy.ndarray to
# torch.FloatTensor of shape (C x H x W) and normalize in the range [0.0, 1.0]
download=DOWNLOAD_MNIST,
)
# plot one example
print(train_data.train_data.size()) # (60000, 28, 28)
print(train_data.train_labels.size()) # (60000)
plt.imshow(train_data.train_data[0].numpy(), cmap='gray')
plt.title('%i' % train_data.train_labels[0])
plt.show()
# Data Loader for easy mini-batch return in training, the image batch shape will be (50, 1, 28, 28)
train_loader = Data.DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)
# pick 2000 samples to speed up testing
test_data = torchvision.datasets.MNIST(root='./mnist/', train=False)
test_x = torch.unsqueeze(test_data.test_data, dim=1).type(torch.FloatTensor)[:2000]/255. # shape from (2000, 28, 28) to (2000, 1, 28, 28), value in range(0,1)
test_y = test_data.test_labels[:2000]
class CNN(nn.Module):
def __init__(self):
super(CNN, self).__init__()
self.conv1 = nn.Sequential( # input shape (1, 28, 28)
nn.Conv2d(
in_channels=1, # input height
out_channels=16, # n_filters
kernel_size=5, # filter size
stride=1, # filter movement/step
padding=2, # if want same width and length of this image after Conv2d, padding=(kernel_size-1)/2 if stride=1
), # output shape (16, 28, 28)
nn.ReLU(), # activation
nn.MaxPool2d(kernel_size=2), # choose max value in 2x2 area, output shape (16, 14, 14)
)
self.conv2 = nn.Sequential( # input shape (16, 14, 14)
nn.Conv2d(16, 32, 5, 1, 2), # output shape (32, 14, 14)
nn.ReLU(), # activation
nn.MaxPool2d(2), # output shape (32, 7, 7)
)
self.out = nn.Linear(32 * 7 * 7, 10) # fully connected layer, output 10 classes
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = x.view(x.size(0), -1) # flatten the output of conv2 to (batch_size, 32 * 7 * 7)
output = self.out(x)
return output, x # return x for visualization
cnn = CNN()
print(cnn) # net architecture
optimizer = torch.optim.Adam(cnn.parameters(), lr=LR) # optimize all cnn parameters
loss_func = nn.CrossEntropyLoss() # the target label is not one-hotted
# following function (plot_with_labels) is for visualization, can be ignored if not interested
from matplotlib import cm
try: from sklearn.manifold import TSNE; HAS_SK = True
except: HAS_SK = False; print('Please install sklearn for layer visualization')
def plot_with_labels(lowDWeights, labels):
plt.cla()
X, Y = lowDWeights[:, 0], lowDWeights[:, 1]
for x, y, s in zip(X, Y, labels):
c = cm.rainbow(int(255 * s / 9)); plt.text(x, y, s, backgroundcolor=c, fontsize=9)
plt.xlim(X.min(), X.max()); plt.ylim(Y.min(), Y.max()); plt.title('Visualize last layer'); plt.show(); plt.pause(0.01)
plt.ion()
# training and testing
for epoch in range(EPOCH):
for step, (b_x, b_y) in enumerate(train_loader): # gives batch data, normalize x when iterate train_loader
output = cnn(b_x)[0] # cnn output
loss = loss_func(output, b_y) # cross entropy loss
optimizer.zero_grad() # clear gradients for this training step
loss.backward() # backpropagation, compute gradients
optimizer.step() # apply gradients
if step % 50 == 0:
test_output, last_layer = cnn(test_x)
pred_y = torch.max(test_output, 1)[1].data.numpy()
accuracy = float((pred_y == test_y.data.numpy()).astype(int).sum()) / float(test_y.size(0))
print('Epoch: ', epoch, '| train loss: %.4f' % loss.data.numpy(), '| test accuracy: %.2f' % accuracy)
if HAS_SK:
# Visualization of trained flatten layer (T-SNE)
tsne = TSNE(perplexity=30, n_components=2, init='pca', n_iter=5000)
plot_only = 500
low_dim_embs = tsne.fit_transform(last_layer.data.numpy()[:plot_only, :])
labels = test_y.numpy()[:plot_only]
plot_with_labels(low_dim_embs, labels)
plt.ioff()
# print 10 predictions from test data
test_output, _ = cnn(test_x[:10])
pred_y = torch.max(test_output, 1)[1].data.numpy()
print(pred_y, 'prediction number')
print(test_y[:10].numpy(), 'real number') |
py | b40d5f44f6fa6be9832ba16feacd3403d60e20f3 | # not done yet, still improving ...
import xml.etree.ElementTree as ET
tree = ET.parse('xxxconfig.xml')
root = tree.getroot()
print(root)
i = 0
for child in root:
i += 1
print(i, child.tag, child.attrib)
print("\nCustomCrawler below\n")
for CustomCrawler in root.iter('CustomCrawler'):
print(CustomCrawler.attrib)
print("\nSslCertificate\n")
for SslCertificate in root.iter('SslCertificate'):
print(SslCertificate.attrib)
'''
print("\ncategoryinstance\n")
for categoryinstance in root.iter('categoryinstance'):
print(categoryinstance.attrib)
'''
name, bindport, queryport = "Index name", "bindport", "queryport"
print("%40s %20s %20s" %(name, bindport, queryport))
for Index in root.findall('Index'):
name = Index.get('name')
bindport = Index.get('bindport')
queryport = Index.get('queryport')
print("%40s %20s %20s" %(name, bindport, queryport))
print("\nabc map crawler info:")
index_abc_map = root[14]
for child in index_abc_map:
print(child.tag, child.attrib)
print("\nabc map crawler sub level 1 info:")
index_abc_map = root[14][0]
for child in index_abc_map:
print(child.tag, child.attrib)
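# Possible follow-up (assumes the Index element's name attribute is known): the
# hard-coded root[14] lookup could be replaced with an attribute match, e.g.
# abc_map = root.find("Index[@name='abc_map']")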
|
py | b40d5f8d178512a1e988c74d181ab1c135bd52ac | from .env_wrapper import *
from .utils import *
from .maddpg import *
from .networks import *
__all__ = [ 'EnvWrapper', 'RLTrainingLogger',
'MADDPGAgents', 'TrainMADDPG']
|
py | b40d5fdd6a977afa90fb2dfb82ab9d6c79c6592c | from .dataset import Dataset
from .dataset import AuxTables
__all__ = ['Dataset', 'AuxTables']
|
py | b40d6109b93afdfd82a94484f795c13eedd8626f | class Solution(object):
def minPartitions(self, n):
"""
:type n: str
:rtype: int
"""
maxd = 1
for item in n:
if int(item) > maxd:
maxd = int(item)
return maxd |
py | b40d638165274e7221053efe066dbfb4a00aa478 | # Lint as: python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Metrics, plots, and validations writer."""
from __future__ import absolute_import
from __future__ import division
# Standard __future__ imports
from __future__ import print_function
import itertools
import os
from typing import Any, Dict, Iterable, Iterator, List, Optional, Text, Tuple, Union
from absl import logging
import apache_beam as beam
import numpy as np
import pyarrow as pa
import six
import tensorflow as tf
from tensorflow_model_analysis import config
from tensorflow_model_analysis import constants
from tensorflow_model_analysis import math_util
from tensorflow_model_analysis import types
from tensorflow_model_analysis.evaluators import evaluator
from tensorflow_model_analysis.evaluators import metrics_validator
from tensorflow_model_analysis.metrics import metric_specs
from tensorflow_model_analysis.metrics import metric_types
from tensorflow_model_analysis.post_export_metrics import metric_keys
from tensorflow_model_analysis.proto import metrics_for_slice_pb2
from tensorflow_model_analysis.proto import validation_result_pb2
from tensorflow_model_analysis.slicer import slicer_lib as slicer
from tensorflow_model_analysis.writers import writer
_PARQUET_FORMAT = 'parquet'
_TFRECORD_FORMAT = 'tfrecord'
_SUPPORTED_FORMATS = (_PARQUET_FORMAT, _TFRECORD_FORMAT)
_SLICE_KEY_PARQUET_COLUMN_NAME = 'slice_key'
_SERIALIZED_VALUE_PARQUET_COLUMN_NAME = 'serialized_value'
_SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME = 'single_slice_specs'
_SLICE_KEY_ARROW_TYPE = pa.struct([(pa.field(
_SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME,
pa.list_(
pa.struct([
pa.field('column', pa.string()),
pa.field('bytes_value', pa.binary()),
pa.field('float_value', pa.float32()),
pa.field('int64_value', pa.int64())
]))))])
_SLICED_PARQUET_SCHEMA = pa.schema([
pa.field(_SLICE_KEY_PARQUET_COLUMN_NAME, _SLICE_KEY_ARROW_TYPE),
pa.field(_SERIALIZED_VALUE_PARQUET_COLUMN_NAME, pa.binary())
])
_UNSLICED_PARQUET_SCHEMA = pa.schema(
[pa.field(_SERIALIZED_VALUE_PARQUET_COLUMN_NAME, pa.binary())])
_SliceKeyDictPythonType = Dict[Text, List[Dict[Text, Union[bytes, float, int]]]]
def _match_all_files(file_path: Text) -> Text:
"""Return expression to match all files at given path."""
return file_path + '*'
def _parquet_column_iterator(paths: Iterable[str],
column_name: str) -> Iterator[pa.Buffer]:
"""Yields values from a bytes column in a set of parquet file partitions."""
dataset = pa.parquet.ParquetDataset(paths)
table = dataset.read(columns=[column_name])
for record_batch in table.to_batches():
# always read index 0 because we filter to one column
value_array = record_batch.column(0)
for value in value_array:
yield value.as_buffer()
def _raw_value_iterator(
paths: Iterable[Text],
output_file_format: Text) -> Iterator[Union[pa.Buffer, bytes]]:
"""Returns an iterator of raw per-record values from supported file formats.
When reading parquet format files, values from the column with name
_SERIALIZED_VALUE_PARQUET_COLUMN_NAME will be read.
Args:
paths: The paths from which to read records
output_file_format: The format of the files from which to read records.
Returns:
An iterator which yields serialized values.
Raises:
ValueError when the output_file_format is unknown.
"""
if output_file_format == _PARQUET_FORMAT:
return _parquet_column_iterator(paths,
_SERIALIZED_VALUE_PARQUET_COLUMN_NAME)
elif not output_file_format or output_file_format == _TFRECORD_FORMAT:
return itertools.chain(*(tf.compat.v1.python_io.tf_record_iterator(path)
for path in paths))
raise ValueError('Formats "{}" are currently supported but got '
'output_file_format={}'.format(_SUPPORTED_FORMATS,
output_file_format))
def load_and_deserialize_metrics(
output_path: Text,
output_file_format: Text = '',
slice_specs: Optional[Iterable[slicer.SingleSliceSpec]] = None
) -> Iterator[metrics_for_slice_pb2.MetricsForSlice]:
"""Read and deserialize the MetricsForSlice records.
Args:
output_path: Path or pattern to search for metrics files under. If a
directory is passed, files matching 'metrics*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned
metrics. The metrics for a given slice key will be returned if that slice
key matches any of the slice_specs.
Yields:
MetricsForSlice protos found in matching files.
"""
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.METRICS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = pattern + '.' + output_file_format
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
metrics = metrics_for_slice_pb2.MetricsForSlice.FromString(value)
if slice_specs and not slicer.slice_key_matches_slice_specs(
slicer.deserialize_slice_key(metrics.slice_key), slice_specs):
continue
yield metrics
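# Illustrative sketch (not part of the original module): reading back metrics for
# the overall (empty) slice from an eval output directory; the path is a placeholder.
# for metrics_proto in load_and_deserialize_metrics(
#     '/tmp/eval_output', slice_specs=[slicer.SingleSliceSpec()]):
#   print(metrics_proto.slice_key)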
def load_and_deserialize_plots(
output_path: Text,
output_file_format: Text = '',
slice_specs: Optional[Iterable[slicer.SingleSliceSpec]] = None
) -> Iterator[metrics_for_slice_pb2.PlotsForSlice]:
"""Read and deserialize the PlotsForSlice records.
Args:
output_path: Path or pattern to search for plots files under. If a directory
is passed, files matching 'plots*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned plots.
The plots for a given slice key will be returned if that slice key matches
any of the slice_specs.
Yields:
PlotsForSlice protos found in matching files.
"""
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.PLOTS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = pattern + '.' + output_file_format
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
plots = metrics_for_slice_pb2.PlotsForSlice.FromString(value)
if slice_specs and not slicer.slice_key_matches_slice_specs(
slicer.deserialize_slice_key(plots.slice_key), slice_specs):
continue
yield plots
def load_and_deserialize_attributions(
output_path: Text,
output_file_format: Text = '',
slice_specs: Optional[Iterable[slicer.SingleSliceSpec]] = None
) -> Iterator[metrics_for_slice_pb2.AttributionsForSlice]:
"""Read and deserialize the AttributionsForSlice records.
Args:
output_path: Path or pattern to search for attribution files under. If a
directory is passed, files matching 'attributions*' will be searched for.
output_file_format: Optional file extension to filter files by.
slice_specs: A set of SingleSliceSpecs to use for filtering returned
attributions. The attributions for a given slice key will be returned if
that slice key matches any of the slice_specs.
Yields:
AttributionsForSlice protos found in matching files.
"""
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.ATTRIBUTIONS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = pattern + '.' + output_file_format
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
attributions = metrics_for_slice_pb2.AttributionsForSlice.FromString(value)
if slice_specs and not slicer.slice_key_matches_slice_specs(
slicer.deserialize_slice_key(attributions.slice_key), slice_specs):
continue
yield attributions
def load_and_deserialize_validation_result(
output_path: Text,
output_file_format: Text = '') -> validation_result_pb2.ValidationResult:
"""Read and deserialize the ValidationResult record.
Args:
output_path: Path or pattern to search for validation file under. If a
directory is passed, a file matching 'validations*' will be searched for.
output_file_format: Optional file extension to filter file by.
Returns:
ValidationResult proto.
"""
if tf.io.gfile.isdir(output_path):
output_path = os.path.join(output_path, constants.VALIDATIONS_KEY)
pattern = _match_all_files(output_path)
if output_file_format:
pattern = pattern + '.' + output_file_format
validation_records = []
paths = tf.io.gfile.glob(pattern)
for value in _raw_value_iterator(paths, output_file_format):
validation_records.append(
validation_result_pb2.ValidationResult.FromString(value))
assert len(validation_records) == 1
return validation_records[0]
def _convert_to_array_value(
array: np.ndarray) -> metrics_for_slice_pb2.ArrayValue:
"""Converts NumPy array to ArrayValue."""
result = metrics_for_slice_pb2.ArrayValue()
result.shape[:] = array.shape
if array.dtype == 'int32':
result.data_type = metrics_for_slice_pb2.ArrayValue.INT32
result.int32_values[:] = array.flatten()
elif array.dtype == 'int64':
result.data_type = metrics_for_slice_pb2.ArrayValue.INT64
result.int64_values[:] = array.flatten()
elif array.dtype == 'float32':
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT32
result.float32_values[:] = array.flatten()
elif array.dtype == 'float64':
result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT64
result.float64_values[:] = array.flatten()
else:
# For all other types, cast to string and convert to bytes.
result.data_type = metrics_for_slice_pb2.ArrayValue.BYTES
result.bytes_values[:] = [
tf.compat.as_bytes(x) for x in array.astype(six.text_type).flatten()
]
return result
def convert_metric_value_to_proto(
value: types.MetricValueType) -> metrics_for_slice_pb2.MetricValue:
"""Converts a MetricValueType into its proto format."""
if isinstance(value, types.StructuredMetricValue):
return value.to_proto()
result = metrics_for_slice_pb2.MetricValue()
if isinstance(value, six.binary_type):
# Convert textual types to string metrics.
result.bytes_value = value
elif isinstance(value, six.text_type):
# Convert textual types to string metrics.
result.bytes_value = value.encode('utf8')
elif isinstance(value, np.ndarray):
# Convert NumPy arrays to ArrayValue.
result.array_value.CopyFrom(_convert_to_array_value(value))
else:
# We try to convert to float values.
try:
result.double_value.value = float(value)
except (TypeError, ValueError) as e:
result.unknown_type.value = str(value)
result.unknown_type.error = e.message # pytype: disable=attribute-error
return result
def convert_slice_metrics_to_proto(
metrics: Tuple[slicer.SliceKeyOrCrossSliceKeyType, Dict[Any, Any]],
add_metrics_callbacks: List[types.AddMetricsCallbackType]
) -> metrics_for_slice_pb2.MetricsForSlice:
"""Converts the given slice metrics into serialized proto MetricsForSlice.
Args:
metrics: The slice metrics.
add_metrics_callbacks: A list of metric callbacks. This should be the same
list as the one passed to tfma.Evaluate().
Returns:
The MetricsForSlice proto.
Raises:
TypeError: If the type of the feature value in slice key cannot be
recognized.
"""
result = metrics_for_slice_pb2.MetricsForSlice()
slice_key, slice_metrics = metrics
if slicer.is_cross_slice_key(slice_key):
result.cross_slice_key.CopyFrom(slicer.serialize_cross_slice_key(slice_key))
else:
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_metrics = slice_metrics.copy()
if metric_keys.ERROR_METRIC in slice_metrics:
logging.warning('Error for slice: %s with error message: %s ', slice_key,
slice_metrics[metric_keys.ERROR_METRIC])
result.metrics[metric_keys.ERROR_METRIC].debug_message = slice_metrics[
metric_keys.ERROR_METRIC]
return result
# Convert the metrics from add_metrics_callbacks to the structured output if
# defined.
if add_metrics_callbacks and (not any(
isinstance(k, metric_types.MetricKey) for k in slice_metrics.keys())):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_stats_and_pop'):
add_metrics_callback.populate_stats_and_pop(slice_key, slice_metrics,
result.metrics)
for key in sorted(slice_metrics.keys()):
value = slice_metrics[key]
if isinstance(value, types.ValueWithTDistribution):
unsampled_value = value.unsampled_value
_, lower_bound, upper_bound = (
math_util.calculate_confidence_interval(value))
confidence_interval = metrics_for_slice_pb2.ConfidenceInterval(
lower_bound=convert_metric_value_to_proto(lower_bound),
upper_bound=convert_metric_value_to_proto(upper_bound),
standard_error=convert_metric_value_to_proto(
value.sample_standard_deviation),
degrees_of_freedom={'value': value.sample_degrees_of_freedom})
metric_value = convert_metric_value_to_proto(unsampled_value)
# If metric can be stored to double_value metrics, replace it with a
# bounded_value for backwards compatibility.
# TODO(b/188575688): remove this logic to stop populating bounded_value
if metric_value.WhichOneof('type') == 'double_value':
# setting bounded_value clears double_value in the same oneof scope.
metric_value.bounded_value.value.value = unsampled_value
metric_value.bounded_value.lower_bound.value = lower_bound
metric_value.bounded_value.upper_bound.value = upper_bound
metric_value.bounded_value.methodology = (
metrics_for_slice_pb2.BoundedValue.POISSON_BOOTSTRAP)
else:
metric_value = convert_metric_value_to_proto(value)
confidence_interval = None
if isinstance(key, metric_types.MetricKey):
result.metric_keys_and_values.add(
key=key.to_proto(),
value=metric_value,
confidence_interval=confidence_interval)
else:
result.metrics[key].CopyFrom(metric_value)
return result
def convert_slice_plots_to_proto(
plots: Tuple[slicer.SliceKeyOrCrossSliceKeyType, Dict[Any, Any]],
add_metrics_callbacks: List[types.AddMetricsCallbackType]
) -> metrics_for_slice_pb2.PlotsForSlice:
"""Converts the given slice plots into PlotsForSlice proto.
Args:
plots: The slice plots.
add_metrics_callbacks: A list of metric callbacks. This should be the same
list as the one passed to tfma.Evaluate().
Returns:
The PlotsForSlice proto.
"""
result = metrics_for_slice_pb2.PlotsForSlice()
slice_key, slice_plots = plots
if slicer.is_cross_slice_key(slice_key):
result.cross_slice_key.CopyFrom(slicer.serialize_cross_slice_key(slice_key))
else:
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_plots = slice_plots.copy()
if metric_keys.ERROR_METRIC in slice_plots:
logging.warning('Error for slice: %s with error message: %s ', slice_key,
slice_plots[metric_keys.ERROR_METRIC])
error_metric = slice_plots.pop(metric_keys.ERROR_METRIC)
result.plots[metric_keys.ERROR_METRIC].debug_message = error_metric
return result
if add_metrics_callbacks and (not any(
isinstance(k, metric_types.MetricKey) for k in slice_plots.keys())):
for add_metrics_callback in add_metrics_callbacks:
if hasattr(add_metrics_callback, 'populate_plots_and_pop'):
add_metrics_callback.populate_plots_and_pop(slice_plots, result.plots)
plots_by_key = {}
for key in sorted(slice_plots.keys()):
value = slice_plots[key]
# Remove plot name from key (multiple plots are combined into a single
# proto).
if isinstance(key, metric_types.MetricKey):
parent_key = key._replace(name=None)
else:
continue
if parent_key not in plots_by_key:
key_and_value = result.plot_keys_and_values.add()
key_and_value.key.CopyFrom(parent_key.to_proto())
plots_by_key[parent_key] = key_and_value.value
if isinstance(value, metrics_for_slice_pb2.CalibrationHistogramBuckets):
plots_by_key[parent_key].calibration_histogram_buckets.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds):
plots_by_key[parent_key].confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(
value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds):
plots_by_key[
parent_key].multi_class_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
elif isinstance(
value, metrics_for_slice_pb2.MultiLabelConfusionMatrixAtThresholds):
plots_by_key[
parent_key].multi_label_confusion_matrix_at_thresholds.CopyFrom(value)
slice_plots.pop(key)
if slice_plots:
if add_metrics_callbacks is None:
add_metrics_callbacks = []
raise NotImplementedError(
'some plots were not converted or popped. keys: %s. '
'add_metrics_callbacks were: %s' % (
slice_plots.keys(),
[
x.name for x in add_metrics_callbacks # pytype: disable=attribute-error
]))
return result
def convert_slice_attributions_to_proto(
attributions: Tuple[slicer.SliceKeyOrCrossSliceKeyType,
Dict[Any, Dict[Text, Any]]]
) -> metrics_for_slice_pb2.AttributionsForSlice:
"""Converts the given slice attributions into serialized AtributionsForSlice.
Args:
attributions: The slice attributions.
Returns:
The AttributionsForSlice proto.
Raises:
TypeError: If the type of the feature value in slice key cannot be
recognized.
"""
result = metrics_for_slice_pb2.AttributionsForSlice()
slice_key, slice_attributions = attributions
if slicer.is_cross_slice_key(slice_key):
result.cross_slice_key.CopyFrom(slicer.serialize_cross_slice_key(slice_key))
else:
result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
slice_attributions = slice_attributions.copy()
for key in sorted(slice_attributions.keys()):
key_and_value = result.attributions_keys_and_values.add()
key_and_value.key.CopyFrom(key.to_proto())
for feature, value in slice_attributions[key].items():
attribution_value = metrics_for_slice_pb2.MetricValue()
if isinstance(value, six.binary_type):
# Convert textual types to string metrics.
attribution_value.bytes_value = value
elif isinstance(value, six.text_type):
# Convert textual types to string metrics.
attribution_value.bytes_value = value.encode('utf8')
elif isinstance(value, np.ndarray) and value.size != 1:
# Convert NumPy arrays to ArrayValue.
attribution_value.array_value.CopyFrom(_convert_to_array_value(value))
else:
# We try to convert to float values.
try:
attribution_value.double_value.value = float(value)
except (TypeError, ValueError) as e:
attribution_value.unknown_type.value = str(value)
attribution_value.unknown_type.error = e.message # pytype: disable=attribute-error
key_and_value.values[feature].CopyFrom(attribution_value)
return result
def MetricsPlotsAndValidationsWriter( # pylint: disable=invalid-name
output_paths: Dict[Text, Text],
eval_config: config.EvalConfig,
add_metrics_callbacks: Optional[List[types.AddMetricsCallbackType]] = None,
metrics_key: Text = constants.METRICS_KEY,
plots_key: Text = constants.PLOTS_KEY,
attributions_key: Text = constants.ATTRIBUTIONS_KEY,
validations_key: Text = constants.VALIDATIONS_KEY,
output_file_format: Text = '',
rubber_stamp: Optional[bool] = False) -> writer.Writer:
"""Returns metrics and plots writer.
Note, sharding will be enabled by default if a output_file_format is provided.
The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format>
where SSSSS is the shard number and NNNNN is the number of shards.
Args:
output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots',
'validation').
eval_config: Eval config.
add_metrics_callbacks: Optional list of metric callbacks (if used).
metrics_key: Name to use for metrics key in Evaluation output.
plots_key: Name to use for plots key in Evaluation output.
attributions_key: Name to use for attributions key in Evaluation output.
validations_key: Name to use for validations key in Evaluation output.
output_file_format: File format to use when saving files. Currently
'tfrecord' and 'parquet' are supported. If using parquet, the output
metrics and plots files will contain two columns: 'slice_key' and
'serialized_value'. The 'slice_key' column will be a structured column
matching the metrics_for_slice_pb2.SliceKey proto. the 'serialized_value'
column will contain a serialized MetricsForSlice or PlotsForSlice
proto. The validation result file will contain a single column
'serialized_value' which will contain a single serialized ValidationResult
proto.
rubber_stamp: True if this model is being rubber stamped. When a model is
rubber stamped diff thresholds will be ignored if an associated baseline
model is not passed.
"""
return writer.Writer(
stage_name='WriteMetricsAndPlots',
ptransform=_WriteMetricsPlotsAndValidations( # pylint: disable=no-value-for-parameter
output_paths=output_paths,
eval_config=eval_config,
add_metrics_callbacks=add_metrics_callbacks or [],
metrics_key=metrics_key,
plots_key=plots_key,
attributions_key=attributions_key,
validations_key=validations_key,
output_file_format=output_file_format,
rubber_stamp=rubber_stamp))
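# Illustrative sketch (not part of the original module): constructing a writer that
# emits sharded parquet output; the paths and the empty EvalConfig are placeholders.
# example_writer = MetricsPlotsAndValidationsWriter(
#     output_paths={
#         constants.METRICS_KEY: '/tmp/eval_output/metrics',
#         constants.PLOTS_KEY: '/tmp/eval_output/plots',
#         constants.VALIDATIONS_KEY: '/tmp/eval_output/validations',
#     },
#     eval_config=config.EvalConfig(),
#     output_file_format='parquet')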
@beam.typehints.with_input_types(validation_result_pb2.ValidationResult)
@beam.typehints.with_output_types(validation_result_pb2.ValidationResult)
class CombineValidations(beam.CombineFn):
"""Combines the ValidationResults protos.
Combines PCollection of ValidationResults for different metrics and slices.
"""
def __init__(self,
eval_config: config.EvalConfig,
rubber_stamp: bool = False):
self._eval_config = eval_config
self._rubber_stamp = rubber_stamp
def create_accumulator(self) -> None:
return
def add_input(
self, result: 'Optional[validation_result_pb2.ValidationResult]',
new_input: 'Optional[validation_result_pb2.ValidationResult]'
) -> 'Optional[validation_result_pb2.ValidationResult]':
if new_input is None:
return None
if result is None:
result = validation_result_pb2.ValidationResult(validation_ok=True)
result.validation_ok &= new_input.validation_ok
result.metric_validations_per_slice.extend(
new_input.metric_validations_per_slice)
metrics_validator.merge_details(result, new_input)
return result
def merge_accumulators(
self,
accumulators: 'Iterable[Optional[validation_result_pb2.ValidationResult]]'
) -> 'Optional[validation_result_pb2.ValidationResult]':
accumulators = [accumulator for accumulator in accumulators if accumulator]
if not accumulators:
return None
result = validation_result_pb2.ValidationResult(validation_ok=True)
for new_input in accumulators:
result.metric_validations_per_slice.extend(
new_input.metric_validations_per_slice)
metrics_validator.merge_details(result, new_input)
result.validation_ok &= new_input.validation_ok
return result
def extract_output(
self, accumulator: 'Optional[validation_result_pb2.ValidationResult]'
) -> 'Optional[validation_result_pb2.ValidationResult]':
# Verification fails if there is empty input.
if not accumulator:
accumulator = validation_result_pb2.ValidationResult(validation_ok=False)
thresholds = metric_specs.metric_thresholds_from_metrics_specs(
self._eval_config.metrics_specs)
if not thresholds:
# Default is to validation NOT ok when not rubber stamping.
accumulator.validation_ok = self._rubber_stamp
# Default is to missing thresholds when not rubber stamping.
accumulator.missing_thresholds = not self._rubber_stamp
missing = metrics_validator.get_missing_slices(
accumulator.validation_details.slicing_details, self._eval_config)
if missing:
missing_slices = []
missing_cross_slices = []
for m in missing:
if isinstance(m, config.SlicingSpec):
missing_slices.append(m)
elif isinstance(m, config.CrossSlicingSpec):
missing_cross_slices.append(m)
accumulator.validation_ok = False
if missing_slices:
accumulator.missing_slices.extend(missing_slices)
if missing_cross_slices:
accumulator.missing_cross_slices.extend(missing_cross_slices)
if self._rubber_stamp:
accumulator.rubber_stamp = True
return accumulator
@beam.ptransform_fn
# TODO(b/157600974): Add typehint.
@beam.typehints.with_output_types(beam.pvalue.PDone)
def _WriteMetricsPlotsAndValidations( # pylint: disable=invalid-name
evaluation: evaluator.Evaluation,
output_paths: Dict[Text, Text],
eval_config: config.EvalConfig,
add_metrics_callbacks: List[types.AddMetricsCallbackType],
metrics_key: Text,
plots_key: Text,
attributions_key: Text,
validations_key: Text,
output_file_format: Text,
rubber_stamp: bool = False) -> beam.pvalue.PDone:
"""PTransform to write metrics and plots."""
if output_file_format and output_file_format not in _SUPPORTED_FORMATS:
raise ValueError('only "{}" formats are currently supported but got '
'output_file_format={}'.format(_SUPPORTED_FORMATS,
output_file_format))
def convert_slice_key_to_parquet_dict(
slice_key: metrics_for_slice_pb2.SliceKey) -> _SliceKeyDictPythonType:
single_slice_key_dicts = []
for single_slice_key in slice_key.single_slice_keys:
kind = single_slice_key.WhichOneof('kind')
if not kind:
continue
single_slice_key_dicts.append({kind: getattr(single_slice_key, kind)})
return {_SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME: single_slice_key_dicts}
def convert_to_parquet_columns(
value: Union[metrics_for_slice_pb2.MetricsForSlice,
metrics_for_slice_pb2.PlotsForSlice,
metrics_for_slice_pb2.AttributionsForSlice]
) -> Dict[Text, Union[_SliceKeyDictPythonType, bytes]]:
return {
_SLICE_KEY_PARQUET_COLUMN_NAME:
convert_slice_key_to_parquet_dict(value.slice_key),
_SERIALIZED_VALUE_PARQUET_COLUMN_NAME:
value.SerializeToString()
}
if metrics_key in evaluation and constants.METRICS_KEY in output_paths:
metrics = (
evaluation[metrics_key] | 'ConvertSliceMetricsToProto' >> beam.Map(
convert_slice_metrics_to_proto,
add_metrics_callbacks=add_metrics_callbacks))
file_path_prefix = output_paths[constants.METRICS_KEY]
if output_file_format == _PARQUET_FORMAT:
_ = (
metrics
| 'ConvertToParquetColumns' >> beam.Map(convert_to_parquet_columns)
| 'WriteMetricsToParquet' >> beam.io.WriteToParquet(
file_path_prefix=file_path_prefix,
schema=_SLICED_PARQUET_SCHEMA,
file_name_suffix='.' + output_file_format))
elif not output_file_format or output_file_format == _TFRECORD_FORMAT:
_ = metrics | 'WriteMetrics' >> beam.io.WriteToTFRecord(
file_path_prefix=file_path_prefix,
shard_name_template=None if output_file_format else '',
file_name_suffix=('.' +
output_file_format if output_file_format else ''),
coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.MetricsForSlice))
if plots_key in evaluation and constants.PLOTS_KEY in output_paths:
plots = (
evaluation[plots_key] | 'ConvertSlicePlotsToProto' >> beam.Map(
convert_slice_plots_to_proto,
add_metrics_callbacks=add_metrics_callbacks))
file_path_prefix = output_paths[constants.PLOTS_KEY]
if output_file_format == _PARQUET_FORMAT:
_ = (
plots
|
'ConvertPlotsToParquetColumns' >> beam.Map(convert_to_parquet_columns)
| 'WritePlotsToParquet' >> beam.io.WriteToParquet(
file_path_prefix=file_path_prefix,
schema=_SLICED_PARQUET_SCHEMA,
file_name_suffix='.' + output_file_format))
elif not output_file_format or output_file_format == _TFRECORD_FORMAT:
_ = plots | 'WritePlotsToTFRecord' >> beam.io.WriteToTFRecord(
file_path_prefix=file_path_prefix,
shard_name_template=None if output_file_format else '',
file_name_suffix=('.' +
output_file_format if output_file_format else ''),
coder=beam.coders.ProtoCoder(metrics_for_slice_pb2.PlotsForSlice))
if (attributions_key in evaluation and
constants.ATTRIBUTIONS_KEY in output_paths):
attributions = (
evaluation[attributions_key] | 'ConvertSliceAttributionsToProto' >>
beam.Map(convert_slice_attributions_to_proto))
file_path_prefix = output_paths[constants.ATTRIBUTIONS_KEY]
if output_file_format == _PARQUET_FORMAT:
_ = (
attributions
| 'ConvertAttributionsToParquetColumns' >>
beam.Map(convert_to_parquet_columns)
| 'WriteAttributionsToParquet' >> beam.io.WriteToParquet(
file_path_prefix=file_path_prefix,
schema=_SLICED_PARQUET_SCHEMA,
file_name_suffix='.' + output_file_format))
elif not output_file_format or output_file_format == _TFRECORD_FORMAT:
_ = attributions | 'WriteAttributionsToTFRecord' >> beam.io.WriteToTFRecord(
file_path_prefix=file_path_prefix,
shard_name_template=None if output_file_format else '',
file_name_suffix=('.' +
output_file_format if output_file_format else ''),
coder=beam.coders.ProtoCoder(
metrics_for_slice_pb2.AttributionsForSlice))
if (validations_key in evaluation and
constants.VALIDATIONS_KEY in output_paths):
validations = (
evaluation[validations_key]
| 'MergeValidationResults' >> beam.CombineGlobally(
CombineValidations(eval_config, rubber_stamp=rubber_stamp)))
file_path_prefix = output_paths[constants.VALIDATIONS_KEY]
# We only use a single shard here because validations are usually single
# values. Setting the shard_name_template to the empty string forces this.
shard_name_template = ''
if output_file_format == _PARQUET_FORMAT:
_ = (
validations
| 'ConvertValidationsToParquetColumns' >> beam.Map(
lambda v: # pylint: disable=g-long-lambda
{_SERIALIZED_VALUE_PARQUET_COLUMN_NAME: v.SerializeToString()})
| 'WriteValidationsToParquet' >> beam.io.WriteToParquet(
file_path_prefix=file_path_prefix,
shard_name_template=shard_name_template,
schema=_UNSLICED_PARQUET_SCHEMA,
file_name_suffix='.' + output_file_format))
elif not output_file_format or output_file_format == _TFRECORD_FORMAT:
_ = (
validations
| 'WriteValidationsToTFRecord' >> beam.io.WriteToTFRecord(
file_path_prefix=file_path_prefix,
shard_name_template=shard_name_template,
file_name_suffix=('.' + output_file_format
if output_file_format else ''),
coder=beam.coders.ProtoCoder(
validation_result_pb2.ValidationResult)))
return beam.pvalue.PDone(list(evaluation.values())[0].pipeline)
|
py | b40d63de9e65db69ba8cbecae4d59505e70c18e5 | # This sample tests the handling of variadic type variables used
# within Callable types.
# pyright: reportMissingModuleSource=false
from typing import Any, Callable, Generic, Literal, Protocol, Tuple, Union
from typing_extensions import TypeVarTuple, Unpack
_Xs = TypeVarTuple("_Xs")
def func1(func: Callable[[int, Unpack[_Xs]], Any]) -> Callable[[Unpack[_Xs]], int]:
...
def func2(func: Callable[[Unpack[_Xs]], int]) -> Callable[[Unpack[_Xs]], int]:
...
def callback1(a: int) -> int:
...
def callback2(a: str) -> int:
...
def callback3(a: str) -> None:
...
def callback4(a: int, b: complex, c: str) -> int:
...
def callback5(a: int, *args: Unpack[_Xs]) -> Union[Unpack[_Xs]]:
...
def callback6(a: int, *args: Any) -> int:
...
def callback7(a: int, b: str, c: str, d: str, *args: Any) -> int:
...
c1 = func1(callback1)
t_c1: Literal["() -> int"] = reveal_type(c1)
c1_1 = c1()
t_c1_1: Literal["int"] = reveal_type(c1_1)
# This should generate an error.
c2 = func1(callback2)
# This should generate an error.
c3 = func2(callback3)
c4 = func1(callback4)
t_c4: Literal["(_p0: complex, _p1: str) -> int"] = reveal_type(c4)
c4_1 = c4(3j, "hi")
t_c4_1: Literal["int"] = reveal_type(c4_1)
# This should generate an error.
c4_2 = c4(3j)
# This should generate an error.
c4_3 = c4(3j, "hi", 4)
c5 = func1(callback5)
t_c5: Literal["(_p0: *_Xs@callback5) -> int"] = reveal_type(c5)
# This should generate an error.
c6_1 = func1(callback6)
# This should generate an error.
c6_2 = func2(callback6)
# This should generate an error.
c7_1 = func1(callback7)
# This should generate an error.
c7_2 = func2(callback7)
class CallbackA(Protocol[Unpack[_Xs]]):
def __call__(self, a: int, *args: Unpack[_Xs]) -> Any:
...
def func3(func: CallbackA[Unpack[_Xs]]) -> Callable[[Unpack[_Xs]], int]:
...
d1 = func3(callback1)
t_d1: Literal["() -> int"] = reveal_type(d1)
# This should generate an error.
d2 = func3(callback2)
# This should generate an error.
d3 = func3(callback3)
d4 = func3(callback4)
t_d4: Literal["(_p0: complex, _p1: str) -> int"] = reveal_type(d4)
d4_1 = d4(3j, "hi")
t_d4_1: Literal["int"] = reveal_type(d4_1)
# This should generate an error.
d4_2 = d4(3j)
# This should generate an error.
d4_3 = d4(3j, "hi", 4)
def func4(func: Callable[[Unpack[_Xs], int], int]) -> Callable[[Unpack[_Xs]], int]:
...
def callback8(a: int, b: str, c: complex, d: int) -> int:
...
d5_1 = func4(callback1)
t_d5_1: Literal["() -> int"] = reveal_type(d5_1)
# This should generate an error.
d5_2 = func4(callback4)
d5_3 = func4(callback8)
t_d5_3: Literal["(_p0: int, _p1: str, _p2: complex) -> int"] = reveal_type(d5_3)
|
py | b40d6428f2457d9f91ec9ed404abe8fca405c5c5 | """Support for ISY994 covers."""
import logging
from typing import Callable
from homeassistant.components.cover import DOMAIN, CoverDevice
from homeassistant.const import (
STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING, STATE_UNKNOWN)
from homeassistant.helpers.typing import ConfigType
from . import ISY994_NODES, ISY994_PROGRAMS, ISYDevice
_LOGGER = logging.getLogger(__name__)
VALUE_TO_STATE = {
0: STATE_CLOSED,
101: STATE_UNKNOWN,
102: 'stopped',
103: STATE_CLOSING,
104: STATE_OPENING
}
def setup_platform(hass, config: ConfigType,
add_entities: Callable[[list], None], discovery_info=None):
"""Set up the ISY994 cover platform."""
devices = []
for node in hass.data[ISY994_NODES][DOMAIN]:
devices.append(ISYCoverDevice(node))
for name, status, actions in hass.data[ISY994_PROGRAMS][DOMAIN]:
devices.append(ISYCoverProgram(name, status, actions))
add_entities(devices)
class ISYCoverDevice(ISYDevice, CoverDevice):
"""Representation of an ISY994 cover device."""
@property
def current_cover_position(self) -> int:
"""Return the current cover position."""
if self.is_unknown() or self.value is None:
return None
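        # sorted((0, value, 100))[1] clamps the raw node value into the 0-100 range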
return sorted((0, self.value, 100))[1]
@property
def is_closed(self) -> bool:
"""Get whether the ISY994 cover device is closed."""
return self.state == STATE_CLOSED
@property
def state(self) -> str:
"""Get the state of the ISY994 cover device."""
if self.is_unknown():
return None
return VALUE_TO_STATE.get(self.value, STATE_OPEN)
def open_cover(self, **kwargs) -> None:
"""Send the open cover command to the ISY994 cover device."""
if not self._node.on(val=100):
_LOGGER.error("Unable to open the cover")
def close_cover(self, **kwargs) -> None:
"""Send the close cover command to the ISY994 cover device."""
if not self._node.off():
_LOGGER.error("Unable to close the cover")
class ISYCoverProgram(ISYCoverDevice):
"""Representation of an ISY994 cover program."""
def __init__(self, name: str, node: object, actions: object) -> None:
"""Initialize the ISY994 cover program."""
super().__init__(node)
self._name = name
self._actions = actions
@property
def state(self) -> str:
"""Get the state of the ISY994 cover program."""
return STATE_CLOSED if bool(self.value) else STATE_OPEN
def open_cover(self, **kwargs) -> None:
"""Send the open cover command to the ISY994 cover program."""
if not self._actions.runThen():
_LOGGER.error("Unable to open the cover")
def close_cover(self, **kwargs) -> None:
"""Send the close cover command to the ISY994 cover program."""
if not self._actions.runElse():
_LOGGER.error("Unable to close the cover")
|
py | b40d642ee956f4757df19d5795879ca45c2e7237 | #!/usr/bin/env python3
import queue
import time
import os
import atexit
import subprocess
import threading
import errno
from typing import Any, Dict, List, TextIO, Optional
class Runner:
def __init__(self,
log_dir: str,
model: str,
case: str,
verbose: bool):
self.name = ""
self.cmd = ""
self.cwd = ""
self.args: List[str]
self.env: Dict[str, str]
self.model = model
self.case = case
self.log_filename = ""
self.log_fd: TextIO
self.verbose = verbose
self.output_queue: queue.Queue[str] = queue.Queue()
self.start_time = time.time()
self.log_dir = log_dir
self.log_filename = ""
        self.stop_thread: Optional[threading.Event] = None
def set_log_filename(self, log_filename: str) -> None:
self.log_filename = log_filename
def get_log_filename(self) -> str:
return self.log_filename
def start(self) -> None:
if self.verbose:
print("Running: {}".format(" ".join([self.cmd] + self.args)))
atexit.register(self.stop)
if self.verbose:
print("Logging to {}".format(self.log_filename))
self.log_fd = open(self.log_filename, 'w')
self.process = subprocess.Popen(
[self.cmd] + self.args,
cwd=self.cwd,
env=self.env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True
)
self.stop_thread = threading.Event()
self.thread = threading.Thread(target=self.process_output)
self.thread.start()
def process_output(self) -> None:
assert self.process.stdout is not None
while True:
line = self.process.stdout.readline()
if not line and \
                    (self.stop_thread.is_set() or self.poll() is not None):
break
if not line or line == "\n":
continue
self.output_queue.put(line)
self.log_fd.write(line)
self.log_fd.flush()
def poll(self) -> Optional[int]:
return self.process.poll()
def wait(self, timeout_min: float) -> Optional[int]:
try:
return self.process.wait(timeout=timeout_min*60)
except subprocess.TimeoutExpired:
print("Timeout of {} min{} reached, stopping...".
format(timeout_min, "s" if timeout_min > 1 else ""))
self.stop()
print("stopped.")
return errno.ETIMEDOUT
def get_output_line(self) -> Optional[str]:
while True:
try:
return self.output_queue.get(block=True, timeout=0.1)
except queue.Empty:
return None
def stop(self) -> int:
atexit.unregister(self.stop)
if not self.stop_thread:
return 0
returncode = self.process.poll()
if returncode is None:
if self.verbose:
print("Terminating {}".format(self.cmd))
self.process.terminate()
try:
returncode = self.process.wait(timeout=1)
except subprocess.TimeoutExpired:
pass
if returncode is None:
if self.verbose:
print("Killing {}".format(self.cmd))
self.process.kill()
returncode = self.process.poll()
if self.verbose:
print("{} exited with {}".format(
self.cmd, self.process.returncode))
self.stop_thread.set()
self.thread.join()
self.log_fd.close()
return self.process.returncode
def time_elapsed_s(self) -> float:
return time.time() - self.start_time
def add_to_env_if_set(self, var: str) -> None:
if var in os.environ:
self.env[var] = os.environ[var]
class Px4Runner(Runner):
def __init__(self, workspace_dir: str, log_dir: str,
model: str, case: str, speed_factor: float,
debugger: str, verbose: bool):
super().__init__(log_dir, model, case, verbose)
self.name = "px4"
self.cmd = workspace_dir + "/build/px4_sitl_default/bin/px4"
self.cwd = workspace_dir + "/build/px4_sitl_default/tmp/rootfs"
self.args = [
workspace_dir + "/build/px4_sitl_default/etc",
"-s",
"etc/init.d-posix/rcS",
"-t",
workspace_dir + "/test_data",
"-d"
]
self.env = {"PATH": str(os.environ['PATH']),
"PX4_SIM_MODEL": self.model,
"PX4_SIM_SPEED_FACTOR": str(speed_factor)}
self.debugger = debugger
if not self.debugger:
pass
elif self.debugger == "valgrind":
self.args = ["--track-origins=yes", "--leak-check=full", "-v",
self.cmd] + self.args
self.cmd = "valgrind"
elif self.debugger == "callgrind":
self.args = ["--tool=callgrind", "-v", self.cmd] + self.args
self.cmd = "valgrind"
elif self.debugger == "gdb":
self.args = ["--args", self.cmd] + self.args
self.cmd = "gdb"
else:
print("Using custom debugger " + self.debugger)
self.args = [self.cmd] + self.args
self.cmd = self.debugger
class GzserverRunner(Runner):
def __init__(self,
workspace_dir: str,
log_dir: str,
model: str,
case: str,
speed_factor: float,
verbose: bool):
super().__init__(log_dir, model, case, verbose)
self.name = "gzserver"
self.cwd = workspace_dir
self.env = {"PATH": os.environ['PATH'],
"HOME": os.environ['HOME'],
"GAZEBO_PLUGIN_PATH":
workspace_dir + "/build/px4_sitl_default/build_gazebo",
"GAZEBO_MODEL_PATH":
workspace_dir + "/Tools/sitl_gazebo/models",
"PX4_SIM_SPEED_FACTOR": str(speed_factor)}
self.add_to_env_if_set("DISPLAY")
self.add_to_env_if_set("PX4_HOME_LAT")
self.add_to_env_if_set("PX4_HOME_LON")
self.add_to_env_if_set("PX4_HOME_ALT")
self.cmd = "gzserver"
self.args = ["--verbose",
workspace_dir + "/Tools/sitl_gazebo/worlds/" +
"empty.world"]
class GzmodelspawnRunner(Runner):
def __init__(self,
workspace_dir: str,
log_dir: str,
model: str,
case: str,
verbose: bool):
super().__init__(log_dir, model, case, verbose)
self.name = "gzmodelspawn"
self.cwd = workspace_dir
self.env = {"PATH": os.environ['PATH'],
"HOME": os.environ['HOME'],
"GAZEBO_PLUGIN_PATH":
workspace_dir + "/build/px4_sitl_default/build_gazebo",
"GAZEBO_MODEL_PATH":
workspace_dir + "/Tools/sitl_gazebo/models"}
self.add_to_env_if_set("DISPLAY")
self.cmd = "gz"
self.args = ["model", "--spawn-file", workspace_dir +
"/Tools/sitl_gazebo/models/" +
self.model + "/" + self.model + ".sdf",
"--model-name", self.model,
"-x", "1.01", "-y", "0.98", "-z", "0.83"]
class GzclientRunner(Runner):
def __init__(self,
workspace_dir: str,
log_dir: str,
model: str,
case: str,
verbose: bool):
super().__init__(log_dir, model, case, verbose)
self.name = "gzclient"
self.cwd = workspace_dir
self.env = dict(os.environ, **{
"GAZEBO_MODEL_PATH": workspace_dir + "/Tools/sitl_gazebo/models"})
self.add_to_env_if_set("DISPLAY")
self.cmd = "gzclient"
self.args = ["--verbose"]
class TestRunner(Runner):
def __init__(self,
workspace_dir: str,
log_dir: str,
model: str,
case: str,
mavlink_connection: str,
verbose: bool):
super().__init__(log_dir, model, case, verbose)
self.name = "mavsdk_tests"
self.cwd = workspace_dir
self.env = {"PATH": os.environ['PATH']}
self.cmd = workspace_dir + \
"/build/px4_sitl_default/mavsdk_tests/mavsdk_tests"
self.args = ["--url", mavlink_connection, case]
|
py | b40d642f7d380d59121095c624bf432da8349e3c | # Generated by Django 2.0 on 2018-02-08 12:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('frontend', '0021_trialqa'),
]
operations = [
migrations.AddField(
model_name='trial',
name='no_longer_on_website',
field=models.BooleanField(default=False),
),
]
|
py | b40d645721ae8bad44199e94213d6d5d573b53f5 | from .network import MinimalClient
from .core import QuickUMLS
def get_quickumls_client(host='localhost', port=4645):
'''Return a client for a QuickUMLS server running on host at port'''
client = MinimalClient(QuickUMLS, host=host, port=port, buffersize=4096)
return client
|
py | b40d645ef8fe15de0253e11e93b8f47e7df9e956 | # Geotrek documentation build configuration file, created by
# sphinx-quickstart on Wed May 15 09:50:19 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Geotrek'
copyright = '2013-2020, Makina Corpus'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.39'
# The full version, including alpha/beta/rc tags.
release = '2.39.2.dev0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Geotrekdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Geotrek.tex', 'Geotrek Documentation',
'Makina Corpus', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'geotrek', 'Geotrek Documentation',
['Makina Corpus'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Geotrek', 'Geotrek Documentation',
'Makina Corpus', 'Geotrek', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
|
py | b40d65ef42928f5b4ced60ebd3e6bd23921f1ea9 | import datetime
import json
import pdb
import os
import sys
root_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/..")
sys.path.insert(0, root_dir)
import ai2thor.controller
import ai2thor
import random
import copy
import time
MAX_TESTS = 20
MAX_EP_LEN = 100
scene_names = ["FloorPlan{}_physics".format(i + 1) for i in range(30)]
set_of_actions = ["mm", "rr", "ll", "w", "z", "a", "s", "u", "j", "3", "4", "p"]
nan = float("nan")
inf = float("inf")
controller = ai2thor.controller.Controller(
local_build=True,
scene=scene_names[0],
gridSize=0.25,
width=900,
height=900,
agentMode="arm",
fieldOfView=100,
agentControllerType="mid-level",
server_class=ai2thor.fifo_server.FifoServer,
)
ADITIONAL_ARM_ARGS = {
"disableRendering": True,
"restrictMovement": False,
"waitForFixedUpdate": False,
"eventCollisions": True,
"returnToStart": True,
"speed": 1,
"move_constant": 0.05,
}
def get_reachable_positions(controller):
event = controller.step("GetReachablePositions")
reachable_positions = event.metadata["reachablePositions"]
return reachable_positions
def execute_command(controller, command, action_dict_addition):
base_position = get_current_arm_state(controller)
change_height = action_dict_addition["move_constant"]
change_value = change_height
action_details = {}
if command == "w":
base_position["z"] += change_value
elif command == "z":
base_position["z"] -= change_value
elif command == "s":
base_position["x"] += change_value
elif command == "a":
base_position["x"] -= change_value
elif command == "3":
base_position["y"] += change_value
elif command == "4":
base_position["y"] -= change_value
elif command == "u":
base_position["h"] += change_height
elif command == "j":
base_position["h"] -= change_height
elif command == "/":
        action_details = {}
pickupable = controller.last_event.metadata["arm"]["PickupableObjects"]
print(pickupable)
elif command == "d":
event = controller.step(action="DropMidLevelHand", **action_dict_addition)
action_details = dict(action="DropMidLevelHand", **action_dict_addition)
elif command == "mm":
action_dict_addition = copy.copy(action_dict_addition)
if "moveSpeed" in action_dict_addition:
action_dict_addition["speed"] = action_dict_addition["moveSpeed"]
event = controller.step(
action="MoveContinuous",
direction=dict(x=0.0, y=0.0, z=0.2),
**action_dict_addition
)
action_details = dict(
action="MoveContinuous",
direction=dict(x=0.0, y=0.0, z=0.2),
**action_dict_addition
)
elif command == "rr":
action_dict_addition = copy.copy(action_dict_addition)
if "moveSpeed" in action_dict_addition:
action_dict_addition["speed"] = action_dict_addition["moveSpeed"]
event = controller.step(
action="RotateContinuous", degrees=45, **action_dict_addition
)
action_details = dict(
action="RotateContinuous", degrees=45, **action_dict_addition
)
elif command == "ll":
action_dict_addition = copy.copy(action_dict_addition)
event = controller.step(
action="RotateContinuous", degrees=-45, **action_dict_addition
)
action_details = dict(
action="RotateContinuous", degrees=-45, **action_dict_addition
)
elif command == "m":
event = controller.step(action="MoveAhead", **action_dict_addition)
action_details = dict(action="MoveAhead", **action_dict_addition)
elif command == "r":
event = controller.step(
action="RotateRight", degrees=45, **action_dict_addition
)
action_details = dict(action="RotateRight", degrees=45, **action_dict_addition)
elif command == "l":
event = controller.step(action="RotateLeft", degrees=45, **action_dict_addition)
action_details = dict(action="RotateLeft", degrees=45, **action_dict_addition)
elif command == "p":
event = controller.step(action="PickUpMidLevelHand")
action_details = dict(action="PickUpMidLevelHand")
elif command == "q":
action_details = {}
else:
action_details = {}
if command in ["w", "z", "s", "a", "3", "4"]:
event = controller.step(
action="MoveMidLevelArm",
position=dict(
x=base_position["x"], y=base_position["y"], z=base_position["z"]
),
handCameraSpace=False,
**action_dict_addition
)
action_details = dict(
action="MoveMidLevelArm",
position=dict(
x=base_position["x"], y=base_position["y"], z=base_position["z"]
),
handCameraSpace=False,
**action_dict_addition
)
success = event.metadata["lastActionSuccess"]
elif command in ["u", "j"]:
if base_position["h"] > 1:
base_position["h"] = 1
elif base_position["h"] < 0:
base_position["h"] = 0
event = controller.step(
action="MoveArmBase", y=base_position["h"], **action_dict_addition
)
action_details = dict(
action="MoveArmBase", y=base_position["h"], **action_dict_addition
)
success = event.metadata["lastActionSuccess"]
return action_details
def get_current_arm_state(controller):
h_min = 0.450998873
h_max = 1.8009994
event = controller.last_event
joints = event.metadata["arm"]["joints"]
arm = joints[-1]
assert arm["name"] == "robot_arm_4_jnt"
xyz_dict = arm["rootRelativePosition"]
height_arm = joints[0]["position"]["y"]
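    # Normalize the arm base height to [0, 1] using the joint limits h_min/h_max
    # defined above.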
xyz_dict["h"] = (height_arm - h_min) / (h_max - h_min)
# print_error([x['position']['y'] for x in joints])
return xyz_dict
def reset_the_scene_and_get_reachables(scene_name=None):
if scene_name is None:
scene_name = random.choice(scene_names)
controller.reset(scene_name)
return get_reachable_positions(controller)
def two_list_equal(l1, l2):
dict1 = {i: v for (i, v) in enumerate(l1)}
dict2 = {i: v for (i, v) in enumerate(l2)}
return two_dict_equal(dict1, dict2)
def two_dict_equal(dict1, dict2):
assert len(dict1) == len(dict2), print("different len", dict1, dict2)
equal = True
for k in dict1:
val1 = dict1[k]
val2 = dict2[k]
assert type(val1) == type(val2), print("different type", dict1, dict2)
if type(val1) == dict:
equal = two_dict_equal(val1, val2)
elif type(val1) == list:
equal = two_list_equal(val1, val2)
        elif val1 != val1:  # NaN is the only value that compares unequal to itself
equal = val2 != val2
elif type(val1) == float:
equal = abs(val1 - val2) < 0.001
else:
equal = val1 == val2
if not equal:
print("not equal", val1, val2)
return equal
return equal
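# Illustrative behaviour of the comparison helpers above (example added for
# clarity, not in the original script): floats compare equal within an absolute
# tolerance of 0.001 and NaN is treated as equal to NaN, so this returns True:
#
#   two_dict_equal({'a': 1.0004, 'b': float('nan')},
#                  {'a': 1.0, 'b': float('nan')})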
def get_current_full_state(controller):
return {
"agent_position": controller.last_event.metadata["agent"]["position"],
"agent_rotation": controller.last_event.metadata["agent"]["rotation"],
"arm_state": controller.last_event.metadata["arm"]["joints"],
"held_object": controller.last_event.metadata["arm"]["HeldObjects"],
}
def random_tests():
all_timers = []
all_dict = {}
for i in range(MAX_TESTS):
print("test number", i)
reachable_positions = reset_the_scene_and_get_reachables()
initial_location = random.choice(reachable_positions)
initial_rotation = random.choice([i for i in range(0, 360, 45)])
event1 = controller.step(
action="TeleportFull",
x=initial_location["x"],
y=initial_location["y"],
z=initial_location["z"],
rotation=dict(x=0, y=initial_rotation, z=0),
horizon=10,
)
initial_pose = dict(
action="TeleportFull",
x=initial_location["x"],
y=initial_location["y"],
z=initial_location["z"],
rotation=dict(x=0, y=initial_rotation, z=0),
horizon=10,
)
controller.step("PausePhysicsAutoSim")
all_commands = []
before = datetime.datetime.now()
for j in range(MAX_EP_LEN):
command = random.choice(set_of_actions)
execute_command(controller, command, ADITIONAL_ARM_ARGS)
all_commands.append(command)
last_event_success = controller.last_event.metadata["lastActionSuccess"]
pickupable = controller.last_event.metadata["arm"]["PickupableObjects"]
picked_up_before = controller.last_event.metadata["arm"]["HeldObjects"]
if len(pickupable) > 0 and len(picked_up_before) == 0:
cmd = "p"
execute_command(controller, cmd, ADITIONAL_ARM_ARGS)
all_commands.append(cmd)
if controller.last_event.metadata["lastActionSuccess"] is False:
print("Failed to pick up ")
print("scene name", controller.last_event.metadata["sceneName"])
print("initial pose", initial_pose)
print("list of actions", all_commands)
break
after = datetime.datetime.now()
time_diff = after - before
seconds = time_diff.total_seconds()
all_timers.append(len(all_commands) / seconds)
final_state = get_current_full_state(
controller
) # made sure this does not require deep copy
scene_name = controller.last_event.metadata["sceneName"]
# TODO only when pick up has happened
dict_to_add = {
"initial_location": initial_location,
"initial_rotation": initial_rotation,
"all_commands": all_commands,
"final_state": final_state,
"initial_pose": initial_pose,
"scene_name": scene_name,
}
all_dict[len(all_dict)] = dict_to_add
# print('FPS', sum(all_timers) / len(all_timers))
return all_dict
def determinism_test(all_tests):
    # Replay the recorded action sequences (one per saved test episode) and
    # verify that the same final state is reproduced; this is only meaningful
    # for episodes in which an object was picked up.
for k, test_point in all_tests.items():
initial_location = test_point["initial_location"]
initial_rotation = test_point["initial_rotation"]
all_commands = test_point["all_commands"]
final_state = test_point["final_state"]
initial_pose = test_point["initial_pose"]
scene_name = test_point["scene_name"]
controller.reset(scene_name)
event1 = controller.step(
action="TeleportFull",
x=initial_location["x"],
y=initial_location["y"],
z=initial_location["z"],
rotation=dict(x=0, y=initial_rotation, z=0),
horizon=10,
)
controller.step("PausePhysicsAutoSim")
for cmd in all_commands:
execute_command(controller, cmd, ADITIONAL_ARM_ARGS)
last_event_success = controller.last_event.metadata["lastActionSuccess"]
current_state = get_current_full_state(controller)
if not two_dict_equal(final_state, current_state):
print("not deterministic")
print("scene name", controller.last_event.metadata["sceneName"])
print("initial pose", initial_pose)
print("list of actions", all_commands)
pdb.set_trace()
else:
print("test {} passed".format(k))
if __name__ == "__main__":
# all_dict = random_tests()
# with open('determinism_json.json' ,'w') as f:
# json.dump(all_dict, f)
with open("arm_test/determinism_json.json", "r") as f:
all_dict = json.load(f)
determinism_test(all_dict)
|
py | b40d66b0df495961e66271e55537535b8b435e3c | # coding: utf-8
"""*****************************************************************************
* Copyright (C) 2018 Microchip Technology Inc. and its subsidiaries.
*
* Subject to your compliance with these terms, you may use Microchip software
* and any derivatives exclusively with Microchip products. It is your
* responsibility to comply with third party license terms applicable to your
* use of third party software (including open source software) that may
* accompany Microchip software.
*
* THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
* EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
* WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
* INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
* WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
* BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
* FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
* ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
* THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
*****************************************************************************"""
#********************** Static Memory Controller Module ***********************
#------------------------------------------------------------------------------
# Dependency Functions
#------------------------------------------------------------------------------
# Function to convert Bitfield mask string to Integer
def smcConvertMaskToInt( aRegMask ):
""" function to convert bit field mask string to integer -- assumes mask is contiguous bits"""
numBits = 0;
aBinStr = '{0:32b}'.format(int( aRegMask, 16 )).strip().rstrip( "0" )
while len( aBinStr ):
aBinCh = aBinStr[-1]
aBinStr = aBinStr[0:-1]
if aBinCh == '1':
numBits += 1
else:
break
return ((2**numBits) - 1) # return max value field can contain
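# Illustrative examples (added for clarity, not in the original script): the
# helper returns the maximum value a contiguous bit field can hold, regardless
# of where the field sits within the register.
#
#   smcConvertMaskToInt("0x3F")       -> 63   (6-bit field)
#   smcConvertMaskToInt("0x0000FF00") -> 255  (8-bit field shifted left by 8)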
# Dependency function definition to enable visibility based on selection of Page Mode Enable
def smcMemoryPageSizeModeVisible(symbol, event):
id = symbol.getID()[-1]
smcChipSelNum = int(id)
if (event["value"] == True):
smcSym_MODE_PS[smcChipSelNum].setVisible(True)
else :
smcSym_MODE_PS[smcChipSelNum].setVisible(False)
# Dependency functions definitions to enable visibility based on selection of TDF Optimization
def smcTdfCyclesModeVisible(symbol, event):
id = symbol.getID()[-1]
smcChipSelNum = int(id)
if (event["value"] == True):
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setVisible(True)
else :
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setVisible(False)
# Dependency function definition to enable visibility based on selection of Byte Access Type
def smcByteAccessSelModeVisible(symbol, event):
symObj = event["symbol"]
id = symbol.getID()[-1]
smcChipSelNum = int(id)
if (symObj.getSelectedKey() == "SMC_MODE_DBW_16_BIT"):
smcSym_MODE_BAT[smcChipSelNum].setVisible(True)
else :
smcSym_MODE_BAT[smcChipSelNum].setVisible(False)
# Get SMC ID
smcRegModule = ATDF.getNode("/avr-tools-device-file/modules/module@[name=\"SMC\"]")
smcRegModuleID = smcRegModule.getAttribute("id")
#------------------------------------------------------------------------------
# ATDF Read to get SMC Register | Bitfield | Mask | Value Group
#------------------------------------------------------------------------------
# SMC_SETUP Register Bitfield Names and Mask
smcRegBitField_SETUP_NWE_SETUP = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_SETUP"]/bitfield@[name="NWE_SETUP"]')
smcRegBitField_SETUP_NCS_WR_SETUP = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_SETUP"]/bitfield@[name="NCS_WR_SETUP"]')
smcRegBitField_SETUP_NRD_SETUP = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_SETUP"]/bitfield@[name="NRD_SETUP"]')
smcRegBitField_SETUP_NCS_RD_SETUP = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_SETUP"]/bitfield@[name="NCS_RD_SETUP"]')
# SMC_PULSE Register Bitfield Names and Mask
smcRegBitField_PULSE_NWE_PULSE = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_PULSE"]/bitfield@[name="NWE_PULSE"]')
smcRegBitField_PULSE_NCS_WR_PULSE = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_PULSE"]/bitfield@[name="NCS_WR_PULSE"]')
smcRegBitField_PULSE_NRD_PULSE = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_PULSE"]/bitfield@[name="NRD_PULSE"]')
smcRegBitField_PULSE_NCS_RD_PULSE = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_PULSE"]/bitfield@[name="NCS_RD_PULSE"]')
# SMC_CYCLE Register Bitfield Names and Mask
smcRegBitField_CYCLE_NWE_CYCLE = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_CYCLE"]/bitfield@[name="NWE_CYCLE"]')
smcRegBitField_CYCLE_NRD_CYCLE = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_CYCLE"]/bitfield@[name="NRD_CYCLE"]')
# SMC_Mode Register Bitfield Names and Mask
smcRegBitField_MODE_TDF_CYCLES = ATDF.getNode('/avr-tools-device-file/modules/module@[name="SMC"]/register-group@[name="SMC_CS_NUMBER"]/register@[name="SMC_MODE"]/bitfield@[name="TDF_CYCLES"]')
#------------------------------------------------------------------------------
# Global SMC Array sysmbol declaration
#------------------------------------------------------------------------------
smcSym_MODE_BAT = []
smcSym_MODE_PS = []
smcSym_MODE_TDF_CYCLES = []
#------------------------------------------------------------------------------
# Global Variables
#------------------------------------------------------------------------------
# Get the Chip Select Count from ATDF config file
global smcChipSelCount
#------------------------------------------------------------------------------
# Constants
#------------------------------------------------------------------------------
# Min Zero Value
SMC_DEFAULT_MIN_VALUE = 0
# Default value for SMC Setup Register
SMC_SETUP_DEFAULT_VALUE = 16
# Default value for SMC Pulse Register
SMC_PULSE_DEFAULT_VALUE = 16
# Default value for SMC Cycle Register
SMC_CYCLE_DEFAULT_VALUE = 3
# Default value for SMC MODE TDF CYCLES Register
SMC_MODE_TDF_CYCLES_DEFAULT_VALUE = 0
#------------------------------------------------------------------------------
# Instantiate SMC Component
#------------------------------------------------------------------------------
def instantiateComponent(smcComponent):
smcInstanceName = smcComponent.createStringSymbol("SMC_INSTANCE_NAME", None)
smcInstanceName.setVisible(False)
smcInstanceName.setDefaultValue(smcComponent.getID().upper())
print"--------------------------------------------------------------------"
print("************************** Running " + smcInstanceName.getValue() + " ****************************")
print"--------------------------------------------------------------------"
smcRegModule = ATDF.getNode("/avr-tools-device-file/modules/module@[name=\"SMC\"]/register-group@[name=\"SMC\"]/register-group@[name=\"SMC_CS_NUMBER\"]")
smcChipSelCount = int (smcRegModule.getAttribute("count"))
print("Total available SMC Chip Select Count is : " + str(smcChipSelCount))
smcMenu = smcComponent.createMenuSymbol("SMC_MENU", None)
smcMenu.setLabel("SMC Configurations")
# SMC Global features
smcSym_GlobalMenu= smcComponent.createMenuSymbol("SMC_GLOBAL_MENU", smcMenu)
smcSym_GlobalMenu.setLabel("SMC Global Features")
smcSym_WPMR_WPEN = smcComponent.createBooleanSymbol("SMC_WRITE_PROTECTION", smcSym_GlobalMenu)
smcSym_WPMR_WPEN.setLabel("Enable Write Protection")
smcSym_WPMR_WPEN.setDefaultValue(False)
smcSym_Key = smcComponent.createMenuSymbol("SMC_KEY_MENU", smcSym_GlobalMenu)
smcSym_Key.setLabel("Scrambling Key")
smcSym_SMC_KEY1 = smcComponent.createHexSymbol("SMC_KEY1", smcSym_Key)
smcSym_SMC_KEY1.setLabel("Scrambling Key 1")
smcSym_SMC_KEY2 = smcComponent.createHexSymbol("SMC_KEY2", smcSym_Key)
smcSym_SMC_KEY2.setLabel("Scrambling Key 2")
#--------------------------------------------------------------------------
# SMC Chip Select Selection and Settings
#--------------------------------------------------------------------------
smcSym_Chip_Select = smcComponent.createMenuSymbol("SMC_CHIP_SELECT", smcMenu)
smcSym_Chip_Select.setLabel("SMC Chip Select Selection and Settings")
smcSym_CS_COUNT = smcComponent.createIntegerSymbol("SMC_CHIP_SELECT_COUNT", smcSym_Chip_Select)
smcSym_CS_COUNT.setDefaultValue(smcChipSelCount)
smcSym_CS_COUNT.setVisible(False)
for smcChipSelNum in range(0, smcChipSelCount):
smcSym_CS = smcComponent.createBooleanSymbol("SMC_CHIP_SELECT" + str(smcChipSelNum), smcSym_Chip_Select)
smcSym_CS.setLabel("Enable Chip Select "+ str(smcChipSelNum))
smcSym_OCMS_CS_SE = smcComponent.createBooleanSymbol("SMC_MEM_SCRAMBLING_CS" + str(smcChipSelNum), smcSym_CS)
smcSym_OCMS_CS_SE.setLabel("Enable Memory Scrambling")
smcSym_OCMS_CS_SE.setDefaultValue(False)
# SMC Read Setup, Pulse and Cycle Timings
smcSym_READ_TIMING_CS = smcComponent.createMenuSymbol("SMC_SETUP_TIMING_CS" + str(smcChipSelNum), smcSym_CS)
smcSym_READ_TIMING_CS.setLabel("Read Cycle Timings")
# SMC Read Setup Timings
smcSym_SETUP_NRD_CS = smcComponent.createIntegerSymbol("SMC_NRD_SETUP_CS" + str(smcChipSelNum), smcSym_READ_TIMING_CS)
smcSym_SETUP_NRD_CS.setLabel(smcRegBitField_SETUP_NRD_SETUP.getAttribute("caption"))
smcSym_SETUP_NRD_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_SETUP_NRD_CS.setMax(smcConvertMaskToInt(smcRegBitField_SETUP_NRD_SETUP.getAttribute("mask")))
smcSym_SETUP_NRD_CS.setDefaultValue(SMC_SETUP_DEFAULT_VALUE)
smcSym_SETUP_NCS_RD_CS = smcComponent.createIntegerSymbol("SMC_NCS_RD_SETUP_CS" + str(smcChipSelNum), smcSym_READ_TIMING_CS)
smcSym_SETUP_NCS_RD_CS.setLabel(smcRegBitField_SETUP_NCS_RD_SETUP.getAttribute("caption"))
smcSym_SETUP_NCS_RD_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_SETUP_NCS_RD_CS.setMax(smcConvertMaskToInt(smcRegBitField_SETUP_NCS_RD_SETUP.getAttribute("mask")))
smcSym_SETUP_NCS_RD_CS.setDefaultValue(SMC_SETUP_DEFAULT_VALUE)
# SMC Read Pulse Timings
smcSym_PULSE_NRD_CS = smcComponent.createIntegerSymbol("SMC_NRD_PULSE_CS" + str(smcChipSelNum), smcSym_READ_TIMING_CS)
smcSym_PULSE_NRD_CS.setLabel(smcRegBitField_PULSE_NRD_PULSE.getAttribute("caption"))
smcSym_PULSE_NRD_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_PULSE_NRD_CS.setMax(smcConvertMaskToInt(smcRegBitField_PULSE_NRD_PULSE.getAttribute("mask")))
smcSym_PULSE_NRD_CS.setDefaultValue(SMC_PULSE_DEFAULT_VALUE)
smcSym_PULSE_NCS_RD_CS = smcComponent.createIntegerSymbol("SMC_NCS_RD_PULSE_CS" + str(smcChipSelNum),smcSym_READ_TIMING_CS)
smcSym_PULSE_NCS_RD_CS.setLabel(smcRegBitField_PULSE_NCS_RD_PULSE.getAttribute("caption"))
smcSym_PULSE_NCS_RD_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_PULSE_NCS_RD_CS.setMax(smcConvertMaskToInt(smcRegBitField_PULSE_NCS_RD_PULSE.getAttribute("mask")))
smcSym_PULSE_NCS_RD_CS.setDefaultValue(SMC_PULSE_DEFAULT_VALUE)
# SMC Read Cycle Timings
smcSym_SMC_CYCLE_TIMING_NRD_CS = smcComponent.createIntegerSymbol("SMC_NRD_CYCLE_CS" + str(smcChipSelNum), smcSym_READ_TIMING_CS)
smcSym_SMC_CYCLE_TIMING_NRD_CS.setLabel(smcRegBitField_CYCLE_NRD_CYCLE.getAttribute("caption"))
smcSym_SMC_CYCLE_TIMING_NRD_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_SMC_CYCLE_TIMING_NRD_CS.setMax(smcConvertMaskToInt(smcRegBitField_CYCLE_NRD_CYCLE.getAttribute("mask")))
smcSym_SMC_CYCLE_TIMING_NRD_CS.setDefaultValue(SMC_CYCLE_DEFAULT_VALUE)
# SMC Write Setup, Pulse and Cycle Timings
smcSym_WRITE_TIMING_CS = smcComponent.createMenuSymbol("SMC_PULSE_TIMING_CS" + str(smcChipSelNum), smcSym_CS)
smcSym_WRITE_TIMING_CS.setLabel("Write Cycle Timings")
# SMC Write Setup Timings
smcSym_SETUP_NWE_CS = smcComponent.createIntegerSymbol("SMC_NWE_SETUP_CS" + str(smcChipSelNum), smcSym_WRITE_TIMING_CS)
smcSym_SETUP_NWE_CS.setLabel(smcRegBitField_SETUP_NWE_SETUP.getAttribute("caption"))
smcSym_SETUP_NWE_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_SETUP_NWE_CS.setMax(smcConvertMaskToInt(smcRegBitField_SETUP_NWE_SETUP.getAttribute("mask")))
smcSym_SETUP_NWE_CS.setDefaultValue(SMC_SETUP_DEFAULT_VALUE)
smcSym_SETUP_NCS_WR_CS = smcComponent.createIntegerSymbol("SMC_NCS_WR_SETUP_CS" + str(smcChipSelNum), smcSym_WRITE_TIMING_CS)
smcSym_SETUP_NCS_WR_CS.setLabel(smcRegBitField_SETUP_NCS_WR_SETUP.getAttribute("caption"))
smcSym_SETUP_NCS_WR_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_SETUP_NCS_WR_CS.setMax(smcConvertMaskToInt(smcRegBitField_SETUP_NCS_WR_SETUP.getAttribute("mask")))
smcSym_SETUP_NCS_WR_CS.setDefaultValue(SMC_SETUP_DEFAULT_VALUE)
# SMC Write Pulse Timings
smcSym_PULSE_NWE_CS = smcComponent.createIntegerSymbol("SMC_NWE_PULSE_CS" + str(smcChipSelNum), smcSym_WRITE_TIMING_CS)
smcSym_PULSE_NWE_CS.setLabel(smcRegBitField_PULSE_NWE_PULSE.getAttribute("caption"))
smcSym_PULSE_NWE_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_PULSE_NWE_CS.setMax(smcConvertMaskToInt(smcRegBitField_PULSE_NWE_PULSE.getAttribute("mask")))
smcSym_PULSE_NWE_CS.setDefaultValue(SMC_PULSE_DEFAULT_VALUE)
smcSym_PULSE_NCS_WR_CS = smcComponent.createIntegerSymbol("SMC_NCS_WR_PULSE_CS" + str(smcChipSelNum), smcSym_WRITE_TIMING_CS)
smcSym_PULSE_NCS_WR_CS.setLabel(smcRegBitField_PULSE_NCS_WR_PULSE.getAttribute("caption"))
smcSym_PULSE_NCS_WR_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_PULSE_NCS_WR_CS.setMax(smcConvertMaskToInt(smcRegBitField_PULSE_NCS_WR_PULSE.getAttribute("mask")))
smcSym_PULSE_NCS_WR_CS.setDefaultValue(SMC_PULSE_DEFAULT_VALUE)
# SMC Write Cycle Timings
smcSym_CYCLE_TIMING_NWE_CS = smcComponent.createIntegerSymbol("SMC_NWE_CYCLE_CS" + str(smcChipSelNum), smcSym_WRITE_TIMING_CS)
smcSym_CYCLE_TIMING_NWE_CS.setLabel(smcRegBitField_CYCLE_NWE_CYCLE.getAttribute("caption"))
smcSym_CYCLE_TIMING_NWE_CS.setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_CYCLE_TIMING_NWE_CS.setMax(smcConvertMaskToInt(smcRegBitField_CYCLE_NWE_CYCLE.getAttribute("mask")))
smcSym_CYCLE_TIMING_NWE_CS.setDefaultValue(SMC_CYCLE_DEFAULT_VALUE)
# SMC Mode Settings
smcSym_MODE_CS_REGISTER = smcComponent.createMenuSymbol("SMC_MODE_REGISTER_CS" + str(smcChipSelNum), smcSym_CS)
smcSym_MODE_CS_REGISTER.setLabel("Mode Settings")
smcSym_MODE_DBW = smcComponent.createKeyValueSetSymbol("SMC_DATA_BUS_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_DBW.setLabel("External Memory Data Bus Width")
smcSym_MODE_DBW.setOutputMode("Key")
smcSym_MODE_DBW.setDisplayMode("Description")
smcSym_MODE_DBW.addKey("SMC_MODE_DBW_8_BIT", "0", "8-bit Data Bus")
smcSym_MODE_DBW.addKey("SMC_MODE_DBW_16_BIT", "1", "16-bit Data Bus")
smcSym_MODE_DBW.setSelectedKey("SMC_MODE_DBW_16_BIT", 2)
smcSym_MODE_BAT.append(smcChipSelNum)
smcSym_MODE_BAT[smcChipSelNum] = smcComponent.createKeyValueSetSymbol("SMC_BAT_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_BAT[smcChipSelNum].setOutputMode("Key")
smcSym_MODE_BAT[smcChipSelNum].setDisplayMode("Description")
smcSym_MODE_BAT[smcChipSelNum].setLabel("Byte Write or Byte Select Access")
smcSym_MODE_BAT[smcChipSelNum].addKey("SMC_MODE_BAT_BYTE_SELECT", "0", "Byte Select Access Type")
smcSym_MODE_BAT[smcChipSelNum].addKey("SMC_MODE_BAT_BYTE_WRITE", "1", "Byte Write Access Type")
smcSym_MODE_BAT[smcChipSelNum].setSelectedKey("SMC_MODE_BAT_BYTE_SELECT", 2)
smcSym_MODE_BAT[smcChipSelNum].setDependencies(smcByteAccessSelModeVisible, ["SMC_DATA_BUS_CS" + str(smcChipSelNum)])
smcSym_MODE_PMEN = smcComponent.createBooleanSymbol("SMC_PMEN_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_PMEN.setLabel("Enable Page mode")
smcSym_MODE_PMEN.setDefaultValue(False)
smcSym_MODE_PS.append(smcChipSelNum)
smcSym_MODE_PS[smcChipSelNum] = smcComponent.createKeyValueSetSymbol("SMC_PS_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_PS[smcChipSelNum].setOutputMode("Key")
smcSym_MODE_PS[smcChipSelNum].setDisplayMode("Description")
smcSym_MODE_PS[smcChipSelNum].setLabel("External Memory Page Size")
smcSym_MODE_PS[smcChipSelNum].setVisible(False)
smcSym_MODE_PS[smcChipSelNum].addKey("SMC_MODE_PS_4_BYTE", "0", "4-bytes")
smcSym_MODE_PS[smcChipSelNum].addKey("SMC_MODE_PS_8_BYTE", "1", "8-bytes")
smcSym_MODE_PS[smcChipSelNum].addKey("SMC_MODE_PS_16_BYTE", "2", "16-bytes")
smcSym_MODE_PS[smcChipSelNum].addKey("SMC_MODE_PS_32_BYTE", "3", "32-bytes")
smcSym_MODE_PS[smcChipSelNum].setSelectedKey("SMC_MODE_PS_4_BYTE", 2)
smcSym_MODE_PS[smcChipSelNum].setDependencies(smcMemoryPageSizeModeVisible, ["SMC_PMEN_CS" + str(smcChipSelNum)])
smcSym_MODE_TDF = smcComponent.createBooleanSymbol("SMC_TDF_OPTIMIZATION_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_TDF.setLabel("Enable Optimization of Data Float Time")
smcSym_MODE_TDF.setDefaultValue(False)
smcSym_MODE_TDF_CYCLES.append(smcChipSelNum)
smcSym_MODE_TDF_CYCLES[smcChipSelNum] = smcComponent.createIntegerSymbol("SMC_TDF_CYCLES_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setLabel("Data Float Time (no of cycles)")
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setVisible(False)
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setMin(SMC_DEFAULT_MIN_VALUE)
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setMax(smcConvertMaskToInt(smcRegBitField_MODE_TDF_CYCLES.getAttribute("mask")))
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setDefaultValue(SMC_MODE_TDF_CYCLES_DEFAULT_VALUE)
smcSym_MODE_TDF_CYCLES[smcChipSelNum].setDependencies(smcTdfCyclesModeVisible, ["SMC_TDF_OPTIMIZATION_CS" + str(smcChipSelNum)])
smcSym_MODE_EXNW = smcComponent.createKeyValueSetSymbol("SMC_NWAIT_MODE_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_EXNW.setOutputMode("Key")
smcSym_MODE_EXNW.setDisplayMode("Description")
smcSym_MODE_EXNW.setLabel("External Wait Signal (NWAIT)")
smcSym_MODE_EXNW.addKey("SMC_MODE_EXNW_MODE_DISABLED", "0", "Disable")
smcSym_MODE_EXNW.addKey("SMC_MODE_EXNW_MODE_FROZEN", "2", "Frozen Mode")
smcSym_MODE_EXNW.addKey("SMC_MODE_EXNW_MODE_READY", "3", "Ready Mode")
smcSym_MODE_EXNW.setSelectedKey("SMC_MODE_EXNW_MODE_DISABLED", 2)
smcSym_MODE_READ = smcComponent.createBooleanSymbol("SMC_READ_ENABLE_MODE_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_READ.setLabel("Read Operation is controlled by NRD Signal")
smcSym_MODE_READ.setDefaultValue(True)
smcSym_MODE_WRITE = smcComponent.createBooleanSymbol("SMC_WRITE_ENABLE_MODE_CS" + str(smcChipSelNum), smcSym_MODE_CS_REGISTER)
smcSym_MODE_WRITE.setLabel("Write Operation is controlled by NWE Signal")
smcSym_MODE_WRITE.setDefaultValue(True)
############################################################################
#### Dependency ####
############################################################################
# Enable Peripheral Clock in Clock manager
Database.clearSymbolValue("core", smcInstanceName.getValue()+"_CLOCK_ENABLE")
Database.setSymbolValue("core", smcInstanceName.getValue()+"_CLOCK_ENABLE", True, 2)
############################################################################
#### Code Generation ####
############################################################################
configName = Variables.get("__CONFIGURATION_NAME")
smcHeader1File = smcComponent.createFileSymbol("PLIB_SMC_H", None)
smcHeader1File.setSourcePath("../peripheral/smc_6498/templates/plib_smc.h.ftl")
smcHeader1File.setOutputName("plib_"+smcInstanceName.getValue().lower()+".h")
smcHeader1File.setDestPath("/peripheral/smc/")
smcHeader1File.setProjectPath("config/" + configName + "/peripheral/smc/")
smcHeader1File.setType("HEADER")
smcHeader1File.setMarkup(True)
smcSource1File = smcComponent.createFileSymbol("PLIB_SMC_C", None)
smcSource1File.setSourcePath("../peripheral/smc_6498/templates/plib_smc.c.ftl")
smcSource1File.setOutputName("plib_"+smcInstanceName.getValue().lower()+".c")
smcSource1File.setDestPath("/peripheral/smc/")
smcSource1File.setProjectPath("config/" + configName + "/peripheral/smc/")
smcSource1File.setType("SOURCE")
smcSource1File.setMarkup(True)
#Add SMC related code to common files
smcHeader1FileEntry = smcComponent.createFileSymbol("PLIB_SMC_DEFINITIONS_H", None)
smcHeader1FileEntry.setType("STRING")
smcHeader1FileEntry.setOutputName("core.LIST_SYSTEM_DEFINITIONS_H_INCLUDES")
smcHeader1FileEntry.setSourcePath("../peripheral/smc_6498/templates/system/definitions.h.ftl")
smcHeader1FileEntry.setMarkup(True)
smcSystemInitFile = smcComponent.createFileSymbol("PLIB_SMC_INITIALIZE_H", None)
smcSystemInitFile.setType("STRING")
smcSystemInitFile.setOutputName("core.LIST_SYSTEM_INIT_C_SYS_INITIALIZE_PERIPHERALS")
smcSystemInitFile.setSourcePath("../peripheral/smc_6498/templates/system/initialization.c.ftl")
smcSystemInitFile.setMarkup(True)
|
py | b40d6729201d8276cce970dd1393abd73d7e8e33 | """
Test PostgreSQL full text search.
These tests use dialogue from the 1975 film Monty Python and the Holy Grail.
All text copyright Python (Monty) Pictures. Thanks to sacred-texts.com for the
transcript.
"""
from django.db import connection
from django.db.models import F
from django.test import modify_settings, skipUnlessDBFeature
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import Character, Line, LineSavedSearch, Scene
try:
from django.contrib.postgres.search import (
SearchConfig, SearchHeadline, SearchQuery, SearchRank, SearchVector,
)
except ImportError:
pass
class GrailTestData:
@classmethod
def setUpTestData(cls):
cls.robin = Scene.objects.create(scene='Scene 10', setting='The dark forest of Ewing')
cls.minstrel = Character.objects.create(name='Minstrel')
verses = [
(
'Bravely bold Sir Robin, rode forth from Camelot. '
'He was not afraid to die, o Brave Sir Robin. '
'He was not at all afraid to be killed in nasty ways. '
'Brave, brave, brave, brave Sir Robin!'
),
(
'He was not in the least bit scared to be mashed into a pulp, '
'Or to have his eyes gouged out, and his elbows broken. '
'To have his kneecaps split, and his body burned away, '
'And his limbs all hacked and mangled, brave Sir Robin!'
),
(
'His head smashed in and his heart cut out, '
'And his liver removed and his bowels unplugged, '
'And his nostrils ripped and his bottom burned off,'
'And his --'
),
]
cls.verses = [Line.objects.create(
scene=cls.robin,
character=cls.minstrel,
dialogue=verse,
) for verse in verses]
cls.verse0, cls.verse1, cls.verse2 = cls.verses
cls.witch_scene = Scene.objects.create(scene='Scene 5', setting="Sir Bedemir's Castle")
bedemir = Character.objects.create(name='Bedemir')
crowd = Character.objects.create(name='Crowd')
witch = Character.objects.create(name='Witch')
duck = Character.objects.create(name='Duck')
cls.bedemir0 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue='We shall use my larger scales!',
dialogue_config='english',
)
cls.bedemir1 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue='Right, remove the supports!',
dialogue_config='english',
)
cls.duck = Line.objects.create(scene=cls.witch_scene, character=duck, dialogue=None)
cls.crowd = Line.objects.create(scene=cls.witch_scene, character=crowd, dialogue='A witch! A witch!')
cls.witch = Line.objects.create(scene=cls.witch_scene, character=witch, dialogue="It's a fair cop.")
trojan_rabbit = Scene.objects.create(scene='Scene 8', setting="The castle of Our Master Ruiz' de lu la Ramper")
guards = Character.objects.create(name='French Guards')
cls.french = Line.objects.create(
scene=trojan_rabbit,
character=guards,
dialogue='Oh. Un beau cadeau. Oui oui.',
dialogue_config='french',
)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SimpleSearchTest(GrailTestData, PostgreSQLTestCase):
def test_simple(self):
searched = Line.objects.filter(dialogue__search='elbows')
self.assertSequenceEqual(searched, [self.verse1])
def test_non_exact_match(self):
searched = Line.objects.filter(dialogue__search='hearts')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.filter(dialogue__search='heart bowel')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms_with_partial_match(self):
searched = Line.objects.filter(dialogue__search='Robin killed')
self.assertSequenceEqual(searched, [self.verse0])
def test_search_query_config(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery('nostrils', config='simple'),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_search_with_F_expression(self):
# Non-matching query.
LineSavedSearch.objects.create(line=self.verse1, query='hearts')
# Matching query.
match = LineSavedSearch.objects.create(line=self.verse1, query='elbows')
for query_expression in [F('query'), SearchQuery(F('query'))]:
with self.subTest(query_expression):
searched = LineSavedSearch.objects.filter(
line__dialogue__search=query_expression,
)
self.assertSequenceEqual(searched, [match])
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase):
def test_existing_vector(self):
Line.objects.update(dialogue_search_vector=SearchVector('dialogue'))
searched = Line.objects.filter(dialogue_search_vector=SearchQuery('Robin killed'))
self.assertSequenceEqual(searched, [self.verse0])
def test_existing_vector_config_explicit(self):
Line.objects.update(dialogue_search_vector=SearchVector('dialogue'))
searched = Line.objects.filter(dialogue_search_vector=SearchQuery('cadeaux', config='french'))
self.assertSequenceEqual(searched, [self.french])
def test_single_coalesce_expression(self):
searched = Line.objects.annotate(search=SearchVector('dialogue')).filter(search='cadeaux')
self.assertNotIn('COALESCE(COALESCE', str(searched.query))
class SearchConfigTests(PostgreSQLSimpleTestCase):
def test_from_parameter(self):
self.assertIsNone(SearchConfig.from_parameter(None))
self.assertEqual(SearchConfig.from_parameter('foo'), SearchConfig('foo'))
self.assertEqual(SearchConfig.from_parameter(SearchConfig('bar')), SearchConfig('bar'))
class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase):
def test_simple_on_dialogue(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='elbows')
self.assertSequenceEqual(searched, [self.verse1])
def test_simple_on_scene(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='Forest')
self.assertCountEqual(searched, self.verses)
def test_non_exact_match(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='heart')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='heart forest')
self.assertSequenceEqual(searched, [self.verse2])
def test_terms_adjacent(self):
searched = Line.objects.annotate(
search=SearchVector('character__name', 'dialogue'),
).filter(search='minstrel')
self.assertCountEqual(searched, self.verses)
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='minstrelbravely')
self.assertSequenceEqual(searched, [])
def test_search_with_null(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_search_with_non_text(self):
searched = Line.objects.annotate(
search=SearchVector('id'),
).filter(search=str(self.crowd.id))
self.assertSequenceEqual(searched, [self.crowd])
@skipUnlessDBFeature('has_phraseto_tsquery')
def test_phrase_search(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(search=SearchQuery('burned body his away', search_type='phrase'))
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(search=SearchQuery('his body burned away', search_type='phrase'))
self.assertSequenceEqual(searched, [self.verse1])
@skipUnlessDBFeature('has_phraseto_tsquery')
def test_phrase_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
)
searched = line_qs.filter(
search=SearchQuery('cadeau beau un', search_type='phrase', config='french'),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery('un beau cadeau', search_type='phrase', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
def test_raw_search(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(search=SearchQuery('Robin', search_type='raw'))
self.assertCountEqual(searched, [self.verse0, self.verse1])
searched = line_qs.filter(search=SearchQuery("Robin & !'Camelot'", search_type='raw'))
self.assertSequenceEqual(searched, [self.verse1])
def test_raw_search_with_config(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue', config='french'))
searched = line_qs.filter(
search=SearchQuery("'cadeaux' & 'beaux'", search_type='raw', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
@skipUnlessDBFeature('has_websearch_to_tsquery')
def test_web_search(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(
search=SearchQuery(
'"burned body" "split kneecaps"',
search_type='websearch',
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery(
'"body burned" "kneecaps split" -"nostrils"',
search_type='websearch',
),
)
self.assertSequenceEqual(searched, [self.verse1])
searched = line_qs.filter(
search=SearchQuery(
'"Sir Robin" ("kneecaps" OR "Camelot")',
search_type='websearch',
),
)
self.assertSequenceEqual(searched, [self.verse0, self.verse1])
@skipUnlessDBFeature('has_websearch_to_tsquery')
def test_web_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
)
searched = line_qs.filter(
search=SearchQuery('cadeau -beau', search_type='websearch', config='french'),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery('beau cadeau', search_type='websearch', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
def test_bad_search_type(self):
with self.assertRaisesMessage(ValueError, "Unknown search_type argument 'foo'."):
SearchQuery('kneecaps', search_type='foo')
def test_config_query_explicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
).filter(search=SearchQuery('cadeaux', config='french'))
self.assertSequenceEqual(searched, [self.french])
def test_config_query_implicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
).filter(search='cadeaux')
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_explicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config=F('dialogue_config')),
).filter(search=SearchQuery('cadeaux', config=F('dialogue_config')))
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_implicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config=F('dialogue_config')),
).filter(search='cadeaux')
self.assertSequenceEqual(searched, [self.french])
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TestCombinations(GrailTestData, PostgreSQLTestCase):
def test_vector_add(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting') + SearchVector('character__name'),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_vector_add_multi(self):
searched = Line.objects.annotate(
search=(
SearchVector('scene__setting') +
SearchVector('character__name') +
SearchVector('dialogue')
),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_vector_combined_mismatch(self):
msg = (
'SearchVector can only be combined with other SearchVector '
'instances, got NoneType.'
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None + SearchVector('character__name'))
def test_combine_different_vector_configs(self):
searched = Line.objects.annotate(
search=(
SearchVector('dialogue', config='english') +
SearchVector('dialogue', config='french')
),
).filter(
search=SearchQuery('cadeaux', config='french') | SearchQuery('nostrils')
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_query_and(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('bedemir') & SearchQuery('scales'))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('bedemir') & SearchQuery('scales') & SearchQuery('nostrils'))
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('shall') & SearchQuery('use') & SearchQuery('larger'))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_or(self):
searched = Line.objects.filter(dialogue__search=SearchQuery('kneecaps') | SearchQuery('nostrils'))
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_query_multiple_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery('kneecaps') | SearchQuery('nostrils') | SearchQuery('Sir Robin')
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_query_invert(self):
searched = Line.objects.filter(character=self.minstrel, dialogue__search=~SearchQuery('kneecaps'))
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_combine_different_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('cadeau', config='french') |
SearchQuery('nostrils', config='english')
)
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_combined_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('nostrils', config='simple') &
SearchQuery('bowels', config='simple')
),
)
self.assertSequenceEqual(searched, [self.verse2])
@skipUnlessDBFeature('has_phraseto_tsquery')
def test_combine_raw_phrase(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('burn:*', search_type='raw', config='simple') |
SearchQuery('rode forth from Camelot', search_type='phrase')
)
)
self.assertCountEqual(searched, [self.verse0, self.verse1, self.verse2])
def test_query_combined_mismatch(self):
msg = (
'SearchQuery can only be combined with other SearchQuery '
'instances, got NoneType.'
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None | SearchQuery('kneecaps'))
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None & SearchQuery('kneecaps'))
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase):
def test_ranking(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(SearchVector('dialogue'), SearchQuery('brave sir robin')),
).order_by('rank')
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_rank_passing_untyped_args(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank('dialogue', 'brave sir robin'),
).order_by('rank')
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_weights_in_vector(self):
vector = SearchVector('dialogue', weight='A') + SearchVector('character__name', weight='D')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch')),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.crowd, self.witch])
vector = SearchVector('dialogue', weight='D') + SearchVector('character__name', weight='A')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch')),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.witch, self.crowd])
def test_ranked_custom_weights(self):
vector = SearchVector('dialogue', weight='D') + SearchVector('character__name', weight='A')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch'), weights=[1, 0, 0, 0.5]),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.crowd, self.witch])
def test_ranking_chaining(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(SearchVector('dialogue'), SearchQuery('brave sir robin')),
).filter(rank__gt=0.3)
self.assertSequenceEqual(searched, [self.verse0])
class SearchVectorIndexTests(PostgreSQLTestCase):
def test_search_vector_index(self):
"""SearchVector generates IMMUTABLE SQL in order to be indexable."""
# This test should be moved to test_indexes and use a functional
# index instead once support lands (see #26167).
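        # A hypothetical expression-index equivalent once that support exists
        # (Django 3.2+ functional indexes); the index name and its placement in
        # Line.Meta.indexes are illustrative, not part of this test suite:
        #   GinIndex(SearchVector('id', 'dialogue', config='english'),
        #            name='line_search_vector_idx')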
query = Line.objects.all().query
resolved = SearchVector('id', 'dialogue', config='english').resolve_expression(query)
compiler = query.get_compiler(connection.alias)
sql, params = resolved.as_sql(compiler, connection)
# Indexed function must be IMMUTABLE.
with connection.cursor() as cursor:
cursor.execute(
'CREATE INDEX search_vector_index ON %s USING GIN (%s)' % (Line._meta.db_table, sql),
params,
)
class SearchQueryTests(PostgreSQLSimpleTestCase):
def test_str(self):
tests = (
(~SearchQuery('a'), '~SearchQuery(Value(a))'),
(
(SearchQuery('a') | SearchQuery('b')) & (SearchQuery('c') | SearchQuery('d')),
'((SearchQuery(Value(a)) || SearchQuery(Value(b))) && '
'(SearchQuery(Value(c)) || SearchQuery(Value(d))))',
),
(
SearchQuery('a') & (SearchQuery('b') | SearchQuery('c')),
'(SearchQuery(Value(a)) && (SearchQuery(Value(b)) || '
'SearchQuery(Value(c))))',
),
(
(SearchQuery('a') | SearchQuery('b')) & SearchQuery('c'),
'((SearchQuery(Value(a)) || SearchQuery(Value(b))) && '
'SearchQuery(Value(c)))'
),
(
SearchQuery('a') & (SearchQuery('b') & (SearchQuery('c') | SearchQuery('d'))),
'(SearchQuery(Value(a)) && (SearchQuery(Value(b)) && '
'(SearchQuery(Value(c)) || SearchQuery(Value(d)))))',
),
)
for query, expected_str in tests:
with self.subTest(query=query):
self.assertEqual(str(query), expected_str)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase):
def test_headline(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
F('dialogue'),
SearchQuery('brave sir robin'),
config=SearchConfig('english'),
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'<b>Robin</b>. He was not at all afraid to be killed in nasty '
'ways. <b>Brave</b>, <b>brave</b>, <b>brave</b>, <b>brave</b> '
'<b>Sir</b> <b>Robin</b>',
)
def test_headline_untyped_args(self):
searched = Line.objects.annotate(
headline=SearchHeadline('dialogue', 'killed', config='english'),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'Robin. He was not at all afraid to be <b>killed</b> in nasty '
'ways. Brave, brave, brave, brave Sir Robin!',
)
def test_headline_with_config(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('cadeaux', config='french'),
config='french',
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
'Oh. Un beau <b>cadeau</b>. Oui oui.',
)
def test_headline_with_config_from_field(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('cadeaux', config=F('dialogue_config')),
config=F('dialogue_config'),
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
'Oh. Un beau <b>cadeau</b>. Oui oui.',
)
def test_headline_separator_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
'brave sir robin',
start_sel='<span>',
stop_sel='</span>',
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'<span>Robin</span>. He was not at all afraid to be killed in '
'nasty ways. <span>Brave</span>, <span>brave</span>, <span>brave'
'</span>, <span>brave</span> <span>Sir</span> <span>Robin</span>',
)
def test_headline_highlight_all_option(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('brave sir robin', config='english'),
highlight_all=True,
),
).get(pk=self.verse0.pk)
self.assertIn(
'<b>Bravely</b> bold <b>Sir</b> <b>Robin</b>, rode forth from '
'Camelot. He was not afraid to die, o ',
searched.headline,
)
def test_headline_short_word_option(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('brave sir robin', config='english'),
short_word=6,
),
).get(pk=self.verse0.pk)
self.assertIs(searched.headline.endswith(
'<b>Brave</b>, <b>brave</b>, <b>brave</b>, <b>brave</b> <b>Sir</b>'
), True)
def test_headline_fragments_words_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('brave sir robin', config='english'),
fragment_delimiter='...<br>',
max_fragments=4,
max_words=3,
min_words=1,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'<b>Sir</b> <b>Robin</b>, rode...<br>'
'<b>Brave</b> <b>Sir</b> <b>Robin</b>...<br>'
'<b>Brave</b>, <b>brave</b>, <b>brave</b>...<br>'
'<b>brave</b> <b>Sir</b> <b>Robin</b>',
)
|
py | b40d67435fa526c8efae4812048bc9557fbae1f0 | # Futu Algo: Algorithmic High-Frequency Trading Framework
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Written by Bill Chan <[email protected]>, 2021
# Copyright (c) billpwchan - All Rights Reserved
import argparse
import importlib
from multiprocessing import Process
import yaml
from engines import *
from strategies.Strategies import Strategies
from util.global_vars import *
def __daily_update_filters():
filters = list(__init_filter(filter_list=['all']))
stock_filter = StockFilter(stock_filters=filters)
stock_filter.update_filtered_equity_pools()
def daily_update_data(futu_trade, stock_list: list, force_update: bool = False):
# Daily Update Filtered Security
procs = []
proc = Process(target=__daily_update_filters) # instantiating without any argument
procs.append(proc)
proc.start()
# Daily Update Stock Info (Need to Rethink!!!)
# stock_filter.update_stock_info()
# Daily Update HKEX Security List & Subscribed Data
data_engine.HKEXInterface.update_security_list_full()
# Daily Update Owner Plate for all Stocks
full_equity_list = data_engine.HKEXInterface.get_equity_list_full()
futu_trade.update_owner_plate(stock_list=full_equity_list)
# Update basic information for all markets
futu_trade.update_stock_basicinfo()
# Update historical k-line
for stock_code in stock_list:
futu_trade.update_DW_data(stock_code, force_update=force_update, k_type=KLType.K_DAY)
futu_trade.update_DW_data(stock_code, force_update=force_update, k_type=KLType.K_WEEK)
futu_trade.update_1M_data(stock_code, force_update=force_update, default_days=80)
# Daily Update FuTu Historical Data
# futu_trade.store_all_data_database()
# Clean non-trading days data
DataProcessingInterface.clear_empty_data()
for proc in procs:
proc.join()
def __dynamic_instantiation(prefix: str, module_name: str, optional_parameter=None):
filter_module = importlib.import_module(f"{prefix}.{module_name}")
# Assume the class name is identical with the file name except for the underscore _
class_ = getattr(filter_module, module_name.replace("_", ""))
if optional_parameter is not None:
return class_(optional_parameter)
else:
return class_()
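# Illustrative sketch (not part of the original source): given the naming convention noted
# in the comment above, a hypothetical strategies/MACD_Cross.py defining class MACDCross
# would be resolved and instantiated like this:
#   __dynamic_instantiation(prefix="strategies", module_name="MACD_Cross",
#                           optional_parameter=input_data)
# which is roughly equivalent to:
#   from strategies.MACD_Cross import MACDCross
#   MACDCross(input_data)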
def __init_strategy(strategy_name: str, input_data: dict) -> Strategies:
"""
Return a trading strategy instance using a strategy name in string.
:param strategy_name: an available strategy module name in the strategies folder
:param input_data: Initialized input data for the strategy to calculate the technical indicator
:return: a strategy instance
"""
return __dynamic_instantiation(prefix="strategies", module_name=strategy_name, optional_parameter=input_data.copy())
def __init_filter(filter_list: list) -> list:
"""
Return a list of filters instances using a list of filter names.
If 'all' is specified, all available filters will be returned
:param filter_list: a list of filter names (in strings)
:return: a list of filters
"""
if "all" in filter_list:
filter_list = [Path(file_name).name[:-3] for file_name in glob.glob("./filters/*.py") if
"__init__" not in file_name and "Filters" not in file_name]
return [__dynamic_instantiation(prefix="filters", module_name=filter_name) for filter_name in filter_list]
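# Example (hypothetical filter module names): passing ['all'] expands to every module under
# ./filters except __init__.py and Filters.py, so a folder holding Price_Threshold.py and
# Volume_Threshold.py would yield [PriceThreshold(), VolumeThreshold()]; passing an explicit
# list such as ['Volume_Threshold'] instantiates only that single filter.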
def init_backtesting(strategy_name: str):
start_date = datetime(2019, 3, 20).date()
end_date = datetime(2021, 3, 23).date()
stock_list = data_engine.YahooFinanceInterface.get_top_30_hsi_constituents()
bt = Backtesting(stock_list=stock_list, start_date=start_date, end_date=end_date, observation=100)
bt.prepare_input_data_file_custom_M(custom_interval=5)
# bt.prepare_input_data_file_1M()
strategy = __dynamic_instantiation(prefix="strategies", module_name=strategy_name,
optional_parameter=bt.get_backtesting_init_data())
bt.init_strategy(strategy)
bt.calculate_return()
# bt.create_tear_sheet()
def init_day_trading(futu_trade: trading_engine.FutuTrade, stock_list: list, strategy_name: str,
stock_strategy_map: dict, sub_type: SubType = SubType.K_5M):
# Subscribe to the stock list first
if futu_trade.kline_subscribe(stock_list, sub_type=sub_type):
# Subscription Success -> Get Real Time Data
input_data = futu_trade.get_data_realtime(stock_list, sub_type=sub_type, kline_num=100)
# strategy_map = dict object {'HK.00001', MACD_Cross(), 'HK.00002', MACD_Cross()...}
strategy_map = {stock_code: __init_strategy(strategy_name=stock_strategy_map.get(stock_code, strategy_name),
input_data=input_data) for stock_code in stock_list}
while True:
futu_trade.cur_kline_evalaute(stock_list=stock_list, strategy_map=strategy_map, sub_type=sub_type)
else:
exit(1)
def init_stock_filter(filter_list: list) -> list:
filters = __init_filter(filter_list)
stock_filter = StockFilter(stock_filters=filters)
return stock_filter.get_filtered_equity_pools()
def main():
# Initialize Argument Parser
parser = argparse.ArgumentParser()
# Data Related Arguments
parser.add_argument("-u", "--update", help="Daily Update Data (Execute Before Market Starts)",
action="store_true")
parser.add_argument("-fu", "--force_update",
help="Force Update All Data Up to Max. Allowed Years (USE WITH CAUTION)", action="store_true")
parser.add_argument("-d", "--database", help="Store All CSV Data to Database", action="store_true")
# Trading Related Arguments
strategy_list = [Path(file_name).name[:-3] for file_name in glob.glob("./strategies/*.py") if
"__init__" not in file_name and "Strategies" not in file_name]
parser.add_argument("-s", "--strategy", type=str, choices=strategy_list,
help="Execute HFT using Pre-defined Strategy")
# Backtesting Related Arguments
parser.add_argument("-b", "--backtesting", type=str, choices=strategy_list,
help="Backtesting a Pre-defined Strategy")
# Retrieve file names for all strategies as the argument option
filter_list = [Path(file_name).name[:-3] for file_name in glob.glob("./filters/*.py") if
"__init__" not in file_name and "Filters" not in file_name]
parser.add_argument("-f", "--filter", type=str, choices=filter_list, nargs="+",
help="Filter Stock List based on Pre-defined Filters")
parser.add_argument("-en", "--email_name", type=str, help="Name of the applied stock filtering techniques")
# Evaluate Arguments
args = parser.parse_args()
# Initialization Connection
futu_trade = trading_engine.FutuTrade()
email_handler = email_engine.Email()
# Initialize Stock List
stock_list = json.loads(config.get('TradePreference', 'StockList'))
if not stock_list:
# stock_list = data_engine.DatabaseInterface(
# database_path=config.get('Database', 'Database_path')).get_stock_list()
# Directly get list of stock codes from the data folder. Easier.
stock_list = [str(f.path).replace('./data/', '') for f in os.scandir("./data/") if f.is_dir()]
stock_list = stock_list[:-1]
if args.filter:
filtered_stock_list = init_stock_filter(args.filter)
filtered_stock_dict = YahooFinanceInterface.get_stocks_email(filtered_stock_list)
subscription_list = json.loads(config.get('Email', 'SubscriptionList'))
for subscriber in subscription_list:
filter_name = args.email_name if args.email_name else "Default Stock Filter"
email_handler.write_daily_stock_filter_email(subscriber, filter_name, filtered_stock_dict)
if args.update or args.force_update:
# Daily Update Data
daily_update_data(futu_trade=futu_trade, stock_list=stock_list, force_update=args.force_update)
if args.database:
# Update ALl Data to Database
futu_trade.store_all_data_database()
if args.strategy:
# Stock Basket => 4 Parts
# 1. Currently Holding Stocks (i.e., in the trading account with existing position)
# 2. Filtered Stocks (i.e., based on 1D data if -f option is adopted
# 3. StockList in config.ini (i.e., if empty, default use all stocks in the database)
# 4. Top 30 HSI Constituents
if args.filter:
stock_list.extend(filtered_stock_list)
# stock_list.extend(data_engine.YahooFinanceInterface.get_top_30_hsi_constituents())
        # NOTE: stock_strategy_map is referenced here but never defined above; assume an empty
        # per-stock override map so every stock falls back to the CLI-selected strategy.
        stock_strategy_map = {}
        init_day_trading(futu_trade, stock_list, args.strategy, stock_strategy_map)
if args.backtesting:
init_backtesting(args.backtesting)
futu_trade.display_quota()
if __name__ == '__main__':
main()
|