ext (stringclasses, 9 values) | sha (stringlengths, 40-40) | content (stringlengths, 3-1.04M) |
---|---|---|
py | 1a33f65ef0677422f4a4cf1550566c9aca891ee3 | #!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Class for litecoind node under test"""
import contextlib
import decimal
import errno
from enum import Enum
import http.client
import json
import logging
import os
import re
import subprocess
import tempfile
import time
import urllib.parse
from .authproxy import JSONRPCException
from .util import (
append_config,
delete_cookie_file,
get_rpc_proxy,
rpc_url,
wait_until,
p2p_port,
)
# For Python 3.4 compatibility
JSONDecodeError = getattr(json, "JSONDecodeError", ValueError)
BITCOIND_PROC_WAIT_TIMEOUT = 60
class FailedToStartError(Exception):
"""Raised when a node fails to start correctly."""
class ErrorMatch(Enum):
FULL_TEXT = 1
FULL_REGEX = 2
PARTIAL_REGEX = 3
class TestNode():
"""A class for representing a litecoind node under test.
This class contains:
- state about the node (whether it's running, etc)
- a Python subprocess.Popen object representing the running process
- an RPC connection to the node
- one or more P2P connections to the node
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
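# A minimal usage sketch (names and values below are illustrative, not part of this class):
#   node = TestNode(0, datadir, rpchost=None, timewait=60, bitcoind=bitcoind_path,
#                   bitcoin_cli=cli_path, mocktime=0, coverage_dir=None)
#   node.start()
#   node.wait_for_rpc_connection()
#   node.getblockcount()  # unrecognised attribute, dispatched to the RPC proxy (or CLI if use_cli=True)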
def __init__(self, i, datadir, *, rpchost, timewait, bitcoind, bitcoin_cli, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False):
self.index = i
self.datadir = datadir
self.stdout_dir = os.path.join(self.datadir, "stdout")
self.stderr_dir = os.path.join(self.datadir, "stderr")
self.rpchost = rpchost
self.rpc_timeout = timewait
self.binary = bitcoind
self.coverage_dir = coverage_dir
if extra_conf != None:
append_config(datadir, extra_conf)
# Most callers will just need to add extra args to the standard list below.
# For those callers that need more flexibility, they can just set the args property directly.
# Note that common args are set in the config file (see initialize_datadir)
self.extra_args = extra_args
self.args = [
self.binary,
"-datadir=" + self.datadir,
"-logtimemicros",
"-debug",
"-debugexclude=libevent",
"-debugexclude=leveldb",
"-mocktime=" + str(mocktime),
"-uacomment=testnode%d" % i
]
self.cli = TestNodeCLI(bitcoin_cli, self.datadir)
self.use_cli = use_cli
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.url = None
self.log = logging.getLogger('TestFramework.node%d' % i)
self.cleanup_on_exit = True # Whether to kill the node when this object goes away
self.p2ps = []
def get_deterministic_priv_key(self):
"""Return a deterministic priv key in base58, that only depends on the node's index"""
PRIV_KEYS = [
# address, privkey
('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'),
('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'),
('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'),
('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'),
('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'),
('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'),
('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'),
('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'),
('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'),
]
return PRIV_KEYS[self.index]
def _node_msg(self, msg: str) -> str:
"""Return a modified msg that identifies this node by its index as a debugging aid."""
return "[node %d] %s" % (self.index, msg)
def _raise_assertion_error(self, msg: str):
"""Raise an AssertionError with msg modified to identify this node."""
raise AssertionError(self._node_msg(msg))
def __del__(self):
# Ensure that we don't leave any bitcoind processes lying around after
# the test ends
if self.process and self.cleanup_on_exit:
# Should only happen on test failure
# Avoid using logger, as that may have already been shutdown when
# this destructor is called.
print(self._node_msg("Cleaning up leftover process"))
self.process.kill()
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
return getattr(self.cli, name)
else:
assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
return getattr(self.rpc, name)
def start(self, extra_args=None, *, stdout=None, stderr=None, **kwargs):
"""Start the node."""
if extra_args is None:
extra_args = self.extra_args
# Add a new stdout and stderr file each time bitcoind is started
if stderr is None:
stderr = tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False)
if stdout is None:
stdout = tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False)
self.stderr = stderr
self.stdout = stdout
# Delete any existing cookie file -- if such a file exists (eg due to
# unclean shutdown), it will get overwritten anyway by bitcoind, and
# potentially interfere with our attempt to authenticate
delete_cookie_file(self.datadir)
# add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal
subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1")
self.process = subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, **kwargs)
self.running = True
self.log.debug("litecoind started, waiting for RPC to come up")
def wait_for_rpc_connection(self):
"""Sets up an RPC connection to the litecoind process. Returns False if unable to connect."""
# Poll at a rate of four times per second
poll_per_s = 4
for _ in range(poll_per_s * self.rpc_timeout):
if self.process.poll() is not None:
raise FailedToStartError(self._node_msg(
'litecoind exited with status {} during initialization'.format(self.process.returncode)))
try:
self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
self.rpc.getblockcount()
# If the call to getblockcount() succeeds then the RPC connection is up
self.rpc_connected = True
self.url = self.rpc.url
self.log.debug("RPC successfully started")
return
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
raise # unknown JSON RPC exception
except ValueError as e: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
self._raise_assertion_error("Unable to connect to litecoind")
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
return self.cli("-rpcwallet={}".format(wallet_name))
else:
assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
return self.rpc / wallet_path
def stop_node(self, expected_stderr=''):
"""Stop the node."""
if not self.running:
return
self.log.debug("Stopping node")
try:
self.stop()
except http.client.CannotSendRequest:
self.log.exception("Unable to stop node.")
# Check that stderr is as expected
self.stderr.seek(0)
stderr = self.stderr.read().decode('utf-8').strip()
if stderr != expected_stderr:
raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr))
self.stdout.close()
self.stderr.close()
del self.p2ps[:]
def is_node_stopped(self):
"""Checks whether the node has stopped.
Returns True if the node has stopped. False otherwise.
This method is responsible for freeing resources (self.process)."""
if not self.running:
return True
return_code = self.process.poll()
if return_code is None:
return False
# process has stopped. Assert that it didn't return an error code.
assert return_code == 0, self._node_msg(
"Node returned non-zero exit code (%d) when stopping" % return_code)
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.log.debug("Node stopped")
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
wait_until(self.is_node_stopped, timeout=timeout)
@contextlib.contextmanager
def assert_debug_log(self, expected_msgs):
debug_log = os.path.join(self.datadir, 'regtest', 'debug.log')
with open(debug_log, encoding='utf-8') as dl:
dl.seek(0, 2)
prev_size = dl.tell()
try:
yield
finally:
with open(debug_log, encoding='utf-8') as dl:
dl.seek(prev_size)
log = dl.read()
print_log = " - " + "\n - ".join(log.splitlines())
for expected_msg in expected_msgs:
if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None:
self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log))
def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs):
"""Attempt to start the node and expect it to raise an error.
extra_args: extra arguments to pass through to litecoind
expected_msg: regex that stderr should match when litecoind fails
Will throw if litecoind starts without an error.
Will throw if an expected_msg is provided and it does not match litecoind's stderr."""
with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \
tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout:
try:
self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs)
self.wait_for_rpc_connection()
self.stop_node()
self.wait_until_stopped()
except FailedToStartError as e:
self.log.debug('litecoind failed to start: %s', e)
self.running = False
self.process = None
# Check stderr for expected message
if expected_msg is not None:
log_stderr.seek(0)
stderr = log_stderr.read().decode('utf-8').strip()
if match == ErrorMatch.PARTIAL_REGEX:
if re.search(expected_msg, stderr, flags=re.MULTILINE) is None:
self._raise_assertion_error(
'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr))
elif match == ErrorMatch.FULL_REGEX:
if re.fullmatch(expected_msg, stderr) is None:
self._raise_assertion_error(
'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr))
elif match == ErrorMatch.FULL_TEXT:
if expected_msg != stderr:
self._raise_assertion_error(
'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr))
else:
if expected_msg is None:
assert_msg = "litecoind should have exited with an error"
else:
assert_msg = "litecoind should have exited with expected error " + expected_msg
self._raise_assertion_error(assert_msg)
def node_encrypt_wallet(self, passphrase):
""""Encrypts the wallet.
This causes litecoind to shutdown, so this method takes
care of cleaning up resources."""
self.encryptwallet(passphrase)
self.wait_until_stopped()
def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs):
"""Add a p2p connection to the node.
This method adds the p2p connection to the self.p2ps list and also
returns the connection to the caller."""
if 'dstport' not in kwargs:
kwargs['dstport'] = p2p_port(self.index)
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
p2p_conn.peer_connect(**kwargs)()
self.p2ps.append(p2p_conn)
if wait_for_verack:
p2p_conn.wait_for_verack()
return p2p_conn
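# A hedged usage sketch (P2PInterface is assumed to come from the framework's mininode module;
# it is not defined in this file):
#   conn = node.add_p2p_connection(P2PInterface())
#   conn.wait_for_verack()  # already done by default via wait_for_verack=True
#   node.disconnect_p2ps()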
@property
def p2p(self):
"""Return the first p2p connection
Convenience property - most tests only use a single p2p connection to each
node, so this saves having to write node.p2ps[0] many times."""
assert self.p2ps, self._node_msg("No p2p connection")
return self.p2ps[0]
def disconnect_p2ps(self):
"""Close all p2p connections to the node."""
for p in self.p2ps:
p.peer_disconnect()
del self.p2ps[:]
class TestNodeCLIAttr:
def __init__(self, cli, command):
self.cli = cli
self.command = command
def __call__(self, *args, **kwargs):
return self.cli.send_cli(self.command, *args, **kwargs)
def get_request(self, *args, **kwargs):
return lambda: self(*args, **kwargs)
class TestNodeCLI():
"""Interface to litecoin-cli for an individual node"""
def __init__(self, binary, datadir):
self.options = []
self.binary = binary
self.datadir = datadir
self.input = None
self.log = logging.getLogger('TestFramework.bitcoincli')
def __call__(self, *options, input=None):
# TestNodeCLI is callable with bitcoin-cli command-line options
cli = TestNodeCLI(self.binary, self.datadir)
cli.options = [str(o) for o in options]
cli.input = input
return cli
def __getattr__(self, command):
return TestNodeCLIAttr(self, command)
def batch(self, requests):
results = []
for request in requests:
try:
results.append(dict(result=request()))
except JSONRPCException as e:
results.append(dict(error=e))
return results
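# A hedged usage sketch built only from this class: attribute access yields a TestNodeCLIAttr,
# get_request() defers the call, and batch() collects results or errors.
#   cli = TestNodeCLI("litecoin-cli", "/tmp/testdatadir")  # hypothetical binary and datadir
#   reqs = [cli.getblockcount.get_request(), cli.getbestblockhash.get_request()]
#   results = cli.batch(reqs)  # list of dicts keyed by 'result' or 'error'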
def send_cli(self, command=None, *args, **kwargs):
"""Run litecoin-cli command. Deserializes returned string as python object."""
pos_args = [str(arg).lower() if type(arg) is bool else str(arg) for arg in args]
named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()]
assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same litecoin-cli call"
p_args = [self.binary, "-datadir=" + self.datadir] + self.options
if named_args:
p_args += ["-named"]
if command is not None:
p_args += [command]
p_args += pos_args + named_args
self.log.debug("Running litecoin-cli command: %s" % command)
process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
cli_stdout, cli_stderr = process.communicate(input=self.input)
returncode = process.poll()
if returncode:
match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr)
if match:
code, message = match.groups()
raise JSONRPCException(dict(code=int(code), message=message))
# Ignore cli_stdout, raise with cli_stderr
raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr)
try:
return json.loads(cli_stdout, parse_float=decimal.Decimal)
except JSONDecodeError:
return cli_stdout.rstrip("\n")
|
py | 1a33f69c7e08f3e8cb26b9814ab6eeb87d9800be | import sys
import unittest
from argparse import Namespace
from .fixtures import set_up_cluster, set_up_subparser
from kafka.tools.assigner.exceptions import ConfigurationException
from kafka.tools.assigner.actions.clone import ActionClone
from kafka.tools.assigner.models.broker import Broker
class ActionCloneTests(unittest.TestCase):
def setUp(self):
self.cluster = set_up_cluster()
(self.parser, self.subparsers) = set_up_subparser()
self.args = Namespace(exclude_topics=[])
def test_create_class(self):
self.args.brokers = [1]
self.args.to_broker = 2
action = ActionClone(self.args, self.cluster)
assert isinstance(action, ActionClone)
def test_create_class_bad_target(self):
self.args.brokers = [1]
self.args.to_broker = 3
self.assertRaises(ConfigurationException, ActionClone, self.args, self.cluster)
def test_create_class_bad_source(self):
self.args.brokers = [3]
self.args.to_broker = 2
self.assertRaises(ConfigurationException, ActionClone, self.args, self.cluster)
def test_configure_args(self):
ActionClone.configure_args(self.subparsers)
sys.argv = ['kafka-assigner', 'clone', '-b', '1', '-t', '2']
parsed_args = self.parser.parse_args()
assert parsed_args.action == 'clone'
def test_process_cluster_clean_target(self):
self.cluster.add_broker(Broker(3, "brokerhost3.example.com"))
self.args.brokers = [1]
self.args.to_broker = 3
action = ActionClone(self.args, self.cluster)
action.process_cluster()
b1 = self.cluster.brokers[1]
b2 = self.cluster.brokers[2]
b3 = self.cluster.brokers[3]
assert self.cluster.topics['testTopic1'].partitions[0].replicas == [b3, b1, b2]
assert self.cluster.topics['testTopic1'].partitions[1].replicas == [b2, b3, b1]
assert self.cluster.topics['testTopic2'].partitions[0].replicas == [b2, b3, b1]
assert self.cluster.topics['testTopic2'].partitions[1].replicas == [b3, b1, b2]
def test_process_cluster_duplicates(self):
self.args.brokers = [1]
self.args.to_broker = 2
action = ActionClone(self.args, self.cluster)
action.process_cluster()
b1 = self.cluster.brokers[1]
b2 = self.cluster.brokers[2]
assert self.cluster.topics['testTopic1'].partitions[0].replicas == [b2, b1]
assert self.cluster.topics['testTopic1'].partitions[1].replicas == [b2, b1]
assert self.cluster.topics['testTopic2'].partitions[0].replicas == [b2, b1]
assert self.cluster.topics['testTopic2'].partitions[1].replicas == [b2, b1]
def test_process_cluster_no_change(self):
self.cluster.add_broker(Broker(3, "brokerhost3.example.com"))
self.args.brokers = [3]
self.args.to_broker = 1
action = ActionClone(self.args, self.cluster)
action.process_cluster()
b1 = self.cluster.brokers[1]
b2 = self.cluster.brokers[2]
assert self.cluster.topics['testTopic1'].partitions[0].replicas == [b1, b2]
assert self.cluster.topics['testTopic1'].partitions[1].replicas == [b2, b1]
assert self.cluster.topics['testTopic2'].partitions[0].replicas == [b2, b1]
assert self.cluster.topics['testTopic2'].partitions[1].replicas == [b1, b2]
|
py | 1a33f6c43f0f150f8cf2b8a9287810f80c813474 | # Copyright (c) 2021 Ben Maddison. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Number Resource Extension implementations - RFC3779."""
from __future__ import annotations
import logging
import typing
from cryptography import x509
from . import asn1, oid
from ..asn1.mod import IPAddrAndASCertExtn
from ..asn1.types import ASN1Class
from ..resources import (ASIdentifiers, AsResourcesInfo,
IPAddrBlocks, IpResourcesInfo)
log = logging.getLogger(__name__)
class X509CertificateExtension(x509.UnrecognizedExtension):
"""Custom certificate extension with ASN.1 handling."""
@classmethod
def __init_subclass__(cls,
ext_type: typing.Optional[ASN1Class] = None,
**kwargs: typing.Any) -> None:
"""Register the EXTENSION instance for DER encoding/decoding."""
super().__init_subclass__(**kwargs) # type: ignore[call-arg]
if ext_type is not None:
asn1.Certificate.register_ext_type(ext_type)
class IpResources(X509CertificateExtension,
ext_type=IPAddrAndASCertExtn.ext_IPAddrBlocks):
"""IP Address Resources X.509 certificate extension - RFC3779."""
# TODO: IPAddressRange support
def __init__(self, ip_resources: IpResourcesInfo) -> None:
"""Initialise the certificate extension."""
ip_address_blocks_data = IPAddrBlocks(ip_resources).to_der()
super().__init__(oid.IP_RESOURCES_OID, ip_address_blocks_data)
class AsResources(X509CertificateExtension,
ext_type=IPAddrAndASCertExtn.ext_ASIdentifiers):
"""AS Number Resources X.509 certificate extension - RFC3779."""
def __init__(self, as_resources: AsResourcesInfo) -> None:
"""Initialise the certificate extension."""
as_identifiers_data = ASIdentifiers(as_resources).to_der()
super().__init__(oid.AS_RESOURCES_OID, as_identifiers_data)
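# A hedged construction sketch (the exact shapes of IpResourcesInfo/AsResourcesInfo are assumed
# here to be plain lists of prefix strings and AS numbers):
#   ip_ext = IpResources(["192.0.2.0/24"])
#   as_ext = AsResources([64496])
# Both are x509.UnrecognizedExtension instances, so they could be attached to a cryptography
# x509.CertificateBuilder via builder.add_extension(ip_ext, critical=True).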
|
py | 1a33f73b6933fc31367b9d0014b875692ac8a3f1 | from dotenv import load_dotenv
load_dotenv("config.env")
BOT_TOKEN = "1840298314:AAFUMtMNiJpyBBt4tyGfuq_yO3ZXl88jxwk"
API_ID = 5119765
API_HASH = "ab310ff746864c1a33f3c590f1598c06"
USERBOT_PREFIX = "."
PHONE_NUMBER = "+16465640536" # Needed for Userbot
LOG_GROUP_ID = -100125431255
GBAN_LOG_GROUP_ID = -1001263664495
MESSAGE_DUMP_CHAT = -1001263664495
FERNET_ENCRYPTION_KEY = "iKMq0WZMnJKjMQxZWKtv-cplMuF_LoyshXj0XbTGGWM=" # Leave this as it is
WELCOME_DELAY_KICK_SEC = 300
MONGO_DB_URI = "mongodb+srv://Satyal:[email protected]/myFirstDatabase?retryWrites=true&w=majority"
ARQ_API_KEY = "NFXKWF-UYMFGH-OVWYFN-VXDNSM-ARQ"
ARQ_API_URL = "https://thearq.tech"
LOG_MENTIONS = True
RSS_DELAY = 300 # In seconds
PM_PERMIT = False
# Sudo users have full access to everything, don't trust anyone
SUDO_USERS_ID = 1741347822
|
py | 1a33f77573b9cbe673223179fe44bda49e14c245 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This API defines FeatureColumn abstraction."""
# This file was originally under tf/python/feature_column, and was moved to
# Keras package in order to remove the reverse dependency from TF to Keras.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.python.feature_column import feature_column_v2
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variable_scope
class _BaseFeaturesLayer(Layer):
"""Base class for DenseFeatures and SequenceFeatures.
Defines common methods and helpers.
Args:
feature_columns: An iterable containing the FeatureColumns to use as
inputs to your model.
expected_column_type: Expected class for provided feature columns.
trainable: Boolean, whether the layer's variables will be updated via
gradient descent during training.
name: Name to give to the DenseFeatures.
**kwargs: Keyword arguments to construct a layer.
Raises:
ValueError: if an item in `feature_columns` doesn't match
`expected_column_type`.
"""
def __init__(self,
feature_columns,
expected_column_type,
trainable,
name,
partitioner=None,
**kwargs):
super(_BaseFeaturesLayer, self).__init__(
name=name, trainable=trainable, **kwargs)
self._feature_columns = feature_column_v2._normalize_feature_columns( # pylint: disable=protected-access
feature_columns)
self._state_manager = feature_column_v2._StateManagerImpl( # pylint: disable=protected-access
self, self.trainable)
self._partitioner = partitioner
for column in self._feature_columns:
if not isinstance(column, expected_column_type):
raise ValueError(
'Items of feature_columns must be a {}. '
'You can wrap a categorical column with an '
'embedding_column or indicator_column. Given: {}'.format(
expected_column_type, column))
def build(self, _):
for column in self._feature_columns:
with variable_scope.variable_scope(
self.name, partitioner=self._partitioner):
with variable_scope.variable_scope(
_sanitize_column_name_for_variable_scope(column.name)):
column.create_state(self._state_manager)
super(_BaseFeaturesLayer, self).build(None)
def _target_shape(self, input_shape, num_elements):
"""Computes expected output shape of the layer or a column's dense tensor.
Args:
input_shape: Tensor or array with batch shape.
num_elements: Size of the last dimension of the output.
Returns:
Tuple with output shape.
"""
raise NotImplementedError('Calling an abstract method.')
def compute_output_shape(self, input_shape):
total_elements = 0
for column in self._feature_columns:
total_elements += column.variable_shape.num_elements()
return self._target_shape(input_shape, total_elements)
def _process_dense_tensor(self, column, tensor):
"""Reshapes the dense tensor output of a column based on expected shape.
Args:
column: A DenseColumn or SequenceDenseColumn object.
tensor: A dense tensor obtained from the same column.
Returns:
Reshaped dense tensor.
"""
num_elements = column.variable_shape.num_elements()
target_shape = self._target_shape(array_ops.shape(tensor), num_elements)
return array_ops.reshape(tensor, shape=target_shape)
def _verify_and_concat_tensors(self, output_tensors):
"""Verifies and concatenates the dense output of several columns."""
_verify_static_batch_size_equality(output_tensors, self._feature_columns)
return array_ops.concat(output_tensors, -1)
def get_config(self):
# Import here to avoid circular imports.
from tensorflow.python.feature_column import serialization # pylint: disable=g-import-not-at-top
column_configs = [serialization.serialize_feature_column(fc)
for fc in self._feature_columns]
config = {'feature_columns': column_configs}
config['partitioner'] = generic_utils.serialize_keras_object(
self._partitioner)
base_config = super( # pylint: disable=bad-super-call
_BaseFeaturesLayer, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
# Import here to avoid circular imports.
from tensorflow.python.feature_column import serialization # pylint: disable=g-import-not-at-top
config_cp = config.copy()
columns_by_name = {}
config_cp['feature_columns'] = [serialization.deserialize_feature_column(
c, custom_objects, columns_by_name) for c in config['feature_columns']]
config_cp['partitioner'] = generic_utils.deserialize_keras_object(
config['partitioner'], custom_objects)
return cls(**config_cp)
def _sanitize_column_name_for_variable_scope(name):
"""Sanitizes user-provided feature names for use as variable scopes."""
invalid_char = re.compile('[^A-Za-z0-9_.\\-]')
return invalid_char.sub('_', name)
def _verify_static_batch_size_equality(tensors, columns):
"""Verify equality between static batch sizes.
Args:
tensors: iterable of input tensors.
columns: Corresponding feature columns.
Raises:
ValueError: in case of mismatched batch sizes.
"""
expected_batch_size = None
for i in range(0, len(tensors)):
# batch_size is a Dimension object.
batch_size = tensor_shape.Dimension(tensor_shape.dimension_value(
tensors[i].shape[0]))
if batch_size.value is not None:
if expected_batch_size is None:
batch_size_column_index = i
expected_batch_size = batch_size
elif not expected_batch_size.is_compatible_with(batch_size):
raise ValueError(
'Batch size (first dimension) of each feature must be same. '
'Batch size of columns ({}, {}): ({}, {})'.format(
columns[batch_size_column_index].name, columns[i].name,
expected_batch_size, batch_size))
|
py | 1a33f8fec9014411714eb5b5fa9bd6853d092098 | import PILasOPENCV as Image
import PILasOPENCV as ImageDraw
import PILasOPENCV as ImageFont
# from PIL import ImageFont, ImageDraw, Image
import numpy as np
import cv2
image = cv2.imread("lena.jpg")
# Convert to PIL Image
cv2_im_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
pil_im = Image.fromarray(cv2_im_rgb)
draw = ImageDraw.Draw(pil_im)
# Choose a font
font = ImageFont.truetype("Roboto-Regular.ttf", 40)
# Draw the text
draw.text((0, 0), "Your Text Here", font=font)
draw.line((0,0,250,250), (0,0,255))
print(id(draw._img_instance))
print(id(pil_im._instance))
# Save the image
cv2_im_processed = pil_im.getim()
cv2.imshow("cv2_im_processed", cv2_im_processed)
cv2.waitKey() |
py | 1a33fa70afd66bbd2ff3bca0f42e10a799844055 | # -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.cdn.v20180606 import models
class CdnClient(AbstractClient):
_apiVersion = '2018-06-06'
_endpoint = 'cdn.tencentcloudapi.com'
def AddCdnDomain(self, request):
"""AddCdnDomain 用于新增内容分发网络加速域名。
:param request: Request instance for AddCdnDomain.
:type request: :class:`tencentcloud.cdn.v20180606.models.AddCdnDomainRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.AddCdnDomainResponse`
"""
try:
params = request._serialize()
body = self.call("AddCdnDomain", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.AddCdnDomainResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
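# A hedged call sketch (credential setup omitted; parameter values are illustrative only):
#   client = CdnClient(credential, "ap-guangzhou")
#   req = models.AddCdnDomainRequest()
#   req.from_json_string('{"Domain": "www.example.com", "ServiceType": "web"}')
#   resp = client.AddCdnDomain(req)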
def CreateClsLogTopic(self, request):
"""CreatClsLogTopic 用于创建日志主题。注意:一个日志集下至多可创建10个日志主题。
:param request: Request instance for CreateClsLogTopic.
:type request: :class:`tencentcloud.cdn.v20180606.models.CreateClsLogTopicRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.CreateClsLogTopicResponse`
"""
try:
params = request._serialize()
body = self.call("CreateClsLogTopic", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateClsLogTopicResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteCdnDomain(self, request):
"""DeleteCdnDomain 用于删除指定加速域名
:param request: Request instance for DeleteCdnDomain.
:type request: :class:`tencentcloud.cdn.v20180606.models.DeleteCdnDomainRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DeleteCdnDomainResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteCdnDomain", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteCdnDomainResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteClsLogTopic(self, request):
"""DeleteClsLogTopic 用于删除日志主题。注意:删除后,所有该日志主题下绑定域名的日志将不再继续投递至该主题,已经投递的日志将会被全部清空。生效时间约 5~15 分钟。
:param request: Request instance for DeleteClsLogTopic.
:type request: :class:`tencentcloud.cdn.v20180606.models.DeleteClsLogTopicRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DeleteClsLogTopicResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteClsLogTopic", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteClsLogTopicResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBillingData(self, request):
"""DescribeBillingData 用于查询实际计费数据明细。
:param request: Request instance for DescribeBillingData.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeBillingDataRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeBillingDataResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBillingData", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBillingDataResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeCdnData(self, request):
"""DescribeCdnData 用于查询 CDN 实时访问监控数据,支持以下指标查询:
+ 流量(单位为 byte)
+ 带宽(单位为 bps)
+ 请求数(单位为 次)
+ 流量命中率(单位为 %,小数点后保留两位)
+ 状态码 2xx 汇总及各 2 开头状态码明细(单位为 个)
+ 状态码 3xx 汇总及各 3 开头状态码明细(单位为 个)
+ 状态码 4xx 汇总及各 4 开头状态码明细(单位为 个)
+ 状态码 5xx 汇总及各 5 开头状态码明细(单位为 个)
:param request: Request instance for DescribeCdnData.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeCdnDataRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeCdnDataResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeCdnData", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeCdnDataResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeCdnDomainLogs(self, request):
"""DescribeCdnDomainLogs 用于查询访问日志下载地址,仅支持 30 天以内的境内、境外访问日志下载链接查询。
:param request: Request instance for DescribeCdnDomainLogs.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeCdnDomainLogsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeCdnDomainLogsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeCdnDomainLogs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeCdnDomainLogsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeCdnIp(self, request):
"""DescribeCdnIp 用于查询 CDN IP 归属。
:param request: Request instance for DescribeCdnIp.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeCdnIpRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeCdnIpResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeCdnIp", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeCdnIpResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeCertDomains(self, request):
"""校验证书并提取SSL证书中包含的域名,返回CDN已接入的域名列表,及已配置证书的域名列表
:param request: Request instance for DescribeCertDomains.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeCertDomainsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeCertDomainsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeCertDomains", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeCertDomainsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDomains(self, request):
"""DescribeDomains 用于查询内容分发网络加速域名(含境内、境外)基本配置信息,包括项目ID、服务状态,业务类型、创建时间、更新时间等信息。
:param request: Request instance for DescribeDomains.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeDomainsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeDomainsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDomains", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDomainsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDomainsConfig(self, request):
"""DescribeDomainsConfig 用于查询内容分发网络加速域名(含境内、境外)的所有配置信息。
:param request: Request instance for DescribeDomainsConfig.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeDomainsConfigRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeDomainsConfigResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDomainsConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDomainsConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeIpStatus(self, request):
"""DescribeIpStatus 用于查询域名所在加速平台的边缘节点、回源节点明细
注意事项:接口尚未全量开放,未在内测名单中的账号不支持调用
:param request: Request instance for DescribeIpStatus.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeIpStatusRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeIpStatusResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeIpStatus", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeIpStatusResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeIpVisit(self, request):
"""DescribeIpVisit 用于查询 5 分钟活跃用户数,及日活跃用户数明细
+ 5 分钟活跃用户数:根据日志中客户端 IP,5 分钟粒度去重统计
+ 日活跃用户数:根据日志中客户端 IP,按天粒度去重统计
:param request: Request instance for DescribeIpVisit.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeIpVisitRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeIpVisitResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeIpVisit", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeIpVisitResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeMapInfo(self, request):
"""DescribeMapInfo 用于查询省份对应的 ID,运营商对应的 ID 信息。
:param request: Request instance for DescribeMapInfo.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeMapInfoRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeMapInfoResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeMapInfo", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeMapInfoResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeOriginData(self, request):
"""DescribeOriginData 用于查询 CDN 实时回源监控数据,支持以下指标查询:
+ 回源流量(单位为 byte)
+ 回源带宽(单位为 bps)
+ 回源请求数(单位为 次)
+ 回源失败请求数(单位为 次)
+ 回源失败率(单位为 %,小数点后保留两位)
+ 回源状态码 2xx 汇总及各 2 开头回源状态码明细(单位为 个)
+ 回源状态码 3xx 汇总及各 3 开头回源状态码明细(单位为 个)
+ 回源状态码 4xx 汇总及各 4 开头回源状态码明细(单位为 个)
+ 回源状态码 5xx 汇总及各 5 开头回源状态码明细(单位为 个)
:param request: Request instance for DescribeOriginData.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeOriginDataRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeOriginDataResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeOriginData", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeOriginDataResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribePayType(self, request):
"""DescribePayType 用于查询用户的计费类型,计费周期等信息。
:param request: Request instance for DescribePayType.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribePayTypeRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribePayTypeResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePayType", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePayTypeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribePurgeQuota(self, request):
"""DescribePurgeQuota 用于查询账户刷新配额和每日可用量。
:param request: Request instance for DescribePurgeQuota.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribePurgeQuotaRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribePurgeQuotaResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePurgeQuota", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePurgeQuotaResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribePurgeTasks(self, request):
"""DescribePurgeTasks 用于查询提交的 URL 刷新、目录刷新记录及执行进度,通过 PurgePathCache 与 PurgeUrlsCache 接口提交的任务均可通过此接口进行查询。
:param request: Request instance for DescribePurgeTasks.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribePurgeTasksRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribePurgeTasksResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePurgeTasks", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePurgeTasksResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribePushQuota(self, request):
"""DescribePushQuota 用于查询预热配额和每日可用量。
:param request: Request instance for DescribePushQuota.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribePushQuotaRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribePushQuotaResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePushQuota", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePushQuotaResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribePushTasks(self, request):
"""DescribePushTasks 用于查询预热任务提交历史记录及执行进度。
接口灰度中,暂未全量开放,敬请期待。
:param request: Request instance for DescribePushTasks.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribePushTasksRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribePushTasksResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePushTasks", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePushTasksResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeTrafficPackages(self, request):
"""DescribeTrafficPackages 用于查询境内 CDN 流量包详情。
:param request: Request instance for DescribeTrafficPackages.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeTrafficPackagesRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeTrafficPackagesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeTrafficPackages", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeTrafficPackagesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeUrlViolations(self, request):
"""DescribeUrlViolations 用于查询被 CDN 系统扫描到的域名违规 URL 列表及当前状态。
对应内容分发网络控制台【图片鉴黄】页面。
:param request: Request instance for DescribeUrlViolations.
:type request: :class:`tencentcloud.cdn.v20180606.models.DescribeUrlViolationsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DescribeUrlViolationsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUrlViolations", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUrlViolationsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DisableCaches(self, request):
"""DisableCaches 用于禁用 CDN 上指定 URL 的访问,禁用完成后,全网访问会直接返回 403。(接口尚在内测中,暂未全量开放使用)
:param request: Request instance for DisableCaches.
:type request: :class:`tencentcloud.cdn.v20180606.models.DisableCachesRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DisableCachesResponse`
"""
try:
params = request._serialize()
body = self.call("DisableCaches", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DisableCachesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DisableClsLogTopic(self, request):
"""DisableClsLogTopic 用于停止日志主题投递。注意:停止后,所有绑定该日志主题域名的日志将不再继续投递至该主题,已经投递的日志将会继续保留。生效时间约 5~15 分钟。
:param request: Request instance for DisableClsLogTopic.
:type request: :class:`tencentcloud.cdn.v20180606.models.DisableClsLogTopicRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.DisableClsLogTopicResponse`
"""
try:
params = request._serialize()
body = self.call("DisableClsLogTopic", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DisableClsLogTopicResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def EnableCaches(self, request):
"""EnableCaches 用于解禁手工封禁的 URL,解禁成功后,全网生效时间约 5~10 分钟。(接口尚在内测中,暂未全量开放使用)
:param request: Request instance for EnableCaches.
:type request: :class:`tencentcloud.cdn.v20180606.models.EnableCachesRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.EnableCachesResponse`
"""
try:
params = request._serialize()
body = self.call("EnableCaches", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.EnableCachesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def EnableClsLogTopic(self, request):
"""EnableClsLogTopic 用于启动日志主题投递。注意:启动后,所有绑定该日志主题域名的日志将继续投递至该主题。生效时间约 5~15 分钟。
:param request: Request instance for EnableClsLogTopic.
:type request: :class:`tencentcloud.cdn.v20180606.models.EnableClsLogTopicRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.EnableClsLogTopicResponse`
"""
try:
params = request._serialize()
body = self.call("EnableClsLogTopic", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.EnableClsLogTopicResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def GetDisableRecords(self, request):
"""GetDisableRecords 用于查询资源禁用历史,及 URL 当前状态。(接口尚在内测中,暂未全量开放使用)
:param request: Request instance for GetDisableRecords.
:type request: :class:`tencentcloud.cdn.v20180606.models.GetDisableRecordsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.GetDisableRecordsResponse`
"""
try:
params = request._serialize()
body = self.call("GetDisableRecords", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.GetDisableRecordsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ListClsLogTopics(self, request):
"""ListClsLogTopics 用于显示日志主题列表。注意:一个日志集下至多含10个日志主题。
:param request: Request instance for ListClsLogTopics.
:type request: :class:`tencentcloud.cdn.v20180606.models.ListClsLogTopicsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.ListClsLogTopicsResponse`
"""
try:
params = request._serialize()
body = self.call("ListClsLogTopics", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ListClsLogTopicsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ListClsTopicDomains(self, request):
"""ListClsTopicDomains 用于获取某日志主题下绑定的域名列表。
:param request: Request instance for ListClsTopicDomains.
:type request: :class:`tencentcloud.cdn.v20180606.models.ListClsTopicDomainsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.ListClsTopicDomainsResponse`
"""
try:
params = request._serialize()
body = self.call("ListClsTopicDomains", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ListClsTopicDomainsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ListTopData(self, request):
"""ListTopData 通过入参 Metric 和 Filter 组合不同,可以查询以下排序数据:
+ 依据总流量、总请求数对访问 URL 排序,从大至小返回 TOP 1000 URL
+ 依据总流量、总请求数对客户端省份排序,从大至小返回省份列表
+ 依据总流量、总请求数对客户端运营商排序,从大至小返回运营商列表
+ 依据总流量、峰值带宽、总请求数、平均命中率、2XX/3XX/4XX/5XX 状态码对域名排序,从大至小返回域名列表
+ 依据总回源流量、回源峰值带宽、总回源请求数、平均回源失败率、2XX/3XX/4XX/5XX 回源状态码对域名排序,从大至小返回域名列表
注意:仅支持 90 天内数据查询
:param request: Request instance for ListTopData.
:type request: :class:`tencentcloud.cdn.v20180606.models.ListTopDataRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.ListTopDataResponse`
"""
try:
params = request._serialize()
body = self.call("ListTopData", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ListTopDataResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ManageClsTopicDomains(self, request):
"""ManageClsTopicDomains 用于管理某日志主题下绑定的域名列表。
:param request: Request instance for ManageClsTopicDomains.
:type request: :class:`tencentcloud.cdn.v20180606.models.ManageClsTopicDomainsRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.ManageClsTopicDomainsResponse`
"""
try:
params = request._serialize()
body = self.call("ManageClsTopicDomains", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ManageClsTopicDomainsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def PurgePathCache(self, request):
"""PurgePathCache 用于批量提交目录刷新,根据域名的加速区域进行对应区域的刷新。
默认情况下境内、境外加速区域每日目录刷新额度为各 100 条,每次最多可提交 20 条。
:param request: Request instance for PurgePathCache.
:type request: :class:`tencentcloud.cdn.v20180606.models.PurgePathCacheRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.PurgePathCacheResponse`
"""
try:
params = request._serialize()
body = self.call("PurgePathCache", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.PurgePathCacheResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def PurgeUrlsCache(self, request):
"""PurgeUrlsCache 用于批量提交 URL 进行刷新,根据 URL 中域名的当前加速区域进行对应区域的刷新。
默认情况下境内、境外加速区域每日 URL 刷新额度各为 10000 条,每次最多可提交 1000 条。
:param request: Request instance for PurgeUrlsCache.
:type request: :class:`tencentcloud.cdn.v20180606.models.PurgeUrlsCacheRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.PurgeUrlsCacheResponse`
"""
try:
params = request._serialize()
body = self.call("PurgeUrlsCache", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.PurgeUrlsCacheResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def PushUrlsCache(self, request):
"""PushUrlsCache 用于将指定 URL 资源列表加载至 CDN 节点,支持指定加速区域预热。
默认情况下境内、境外每日预热 URL 限额为各 1000 条,每次最多可提交 20 条。
接口灰度中,暂未全量开放,敬请期待。
:param request: Request instance for PushUrlsCache.
:type request: :class:`tencentcloud.cdn.v20180606.models.PushUrlsCacheRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.PushUrlsCacheResponse`
"""
try:
params = request._serialize()
body = self.call("PushUrlsCache", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.PushUrlsCacheResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def SearchClsLog(self, request):
"""SearchClsLog 用于 CLS 日志检索。支持检索今天,24小时(可选近7中的某一天),近7天的日志数据。
:param request: Request instance for SearchClsLog.
:type request: :class:`tencentcloud.cdn.v20180606.models.SearchClsLogRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.SearchClsLogResponse`
"""
try:
params = request._serialize()
body = self.call("SearchClsLog", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.SearchClsLogResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def StartCdnDomain(self, request):
"""StartCdnDomain 用于启用已停用域名的加速服务
:param request: Request instance for StartCdnDomain.
:type request: :class:`tencentcloud.cdn.v20180606.models.StartCdnDomainRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.StartCdnDomainResponse`
"""
try:
params = request._serialize()
body = self.call("StartCdnDomain", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StartCdnDomainResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def StopCdnDomain(self, request):
"""StopCdnDomain 用于停止域名的加速服务。
注意:停止加速服务后,访问至加速节点的请求将会直接返回 404。为避免对您的业务造成影响,请在停止加速服务前将解析切走。
:param request: Request instance for StopCdnDomain.
:type request: :class:`tencentcloud.cdn.v20180606.models.StopCdnDomainRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.StopCdnDomainResponse`
"""
try:
params = request._serialize()
body = self.call("StopCdnDomain", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StopCdnDomainResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateDomainConfig(self, request):
"""UpdateDomainConfig 用于修改内容分发网络加速域名配置信息
注意:如果需要更新复杂类型的配置项,必须传递整个对象的所有属性,未传递的属性将使用默认值,建议通过查询接口获取配置属性后,直接修改后传递给本接口。Https配置由于证书的特殊性,更新时不用传递证书和密钥字段。
:param request: Request instance for UpdateDomainConfig.
:type request: :class:`tencentcloud.cdn.v20180606.models.UpdateDomainConfigRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.UpdateDomainConfigResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateDomainConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateDomainConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdatePayType(self, request):
"""本接口(UpdatePayType)用于修改账号计费类型,暂不支持月结用户或子账号修改。
:param request: Request instance for UpdatePayType.
:type request: :class:`tencentcloud.cdn.v20180606.models.UpdatePayTypeRequest`
:rtype: :class:`tencentcloud.cdn.v20180606.models.UpdatePayTypeResponse`
"""
try:
params = request._serialize()
body = self.call("UpdatePayType", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdatePayTypeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
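# --- Illustrative usage sketch (added; not part of the original SDK file) ---
# Shows how one of the methods above might be invoked. It assumes the client
# class defined earlier in this module is named CdnClient and reuses the
# module-level ``models`` import seen in the methods above; the credential
# values, region, and URL are placeholders.
if __name__ == "__main__":
    from tencentcloud.common import credential
    cred = credential.Credential("YOUR_SECRET_ID", "YOUR_SECRET_KEY")
    client = CdnClient(cred, "ap-guangzhou")
    req = models.PurgeUrlsCacheRequest()
    req.Urls = ["https://www.example.com/index.html"]
    resp = client.PurgeUrlsCache(req)
    print(resp.to_json_string())
|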
py | 1a33facdd8bc8a65d225f9787008df558357acf9 | from pylps.core import *
initialise(max_time=10) # Assume all time variables created here
create_fluents('fire', 'water', 'p')
create_actions('eliminate', 'escape', 'refill', 'ignite(_)',
'delay', 'delay_more')
create_events('deal_with_fire')
create_variables('X')
create_facts('flammable(_)')
observe(ignite('sofa').frm(1, 2))
observe(ignite('bed').frm(4, 5))
observe(refill.frm(7, 8))
initially(water)
flammable('sofa')
flammable('bed')
reactive_rule(fire.at(T1)).then(
deal_with_fire.frm(T2, T3))
goal(deal_with_fire.frm(T1, T2)).requires(
eliminate.frm(T1, T2),
delay.frm(T1, T2),
delay_more.frm(T1, T2))
ignite(X).initiates(fire).iff(flammable(X))
eliminate.terminates(fire)
eliminate.terminates(water)
eliminate.initiates(p)
refill.initiates(water)
false_if(eliminate, fire, ~water)
false_if(delay, p)
execute(debug=False)
show_kb_log()
'''
maxTime(10).
fluents fire, water, p.
actions eliminate, ignite(_), escape, refill, delay, delay_more.
observe ignite(sofa) from 1 to 2.
observe ignite(bed) from 4 to 5.
observe refill from 7 to 8.
initially water.
flammable(sofa).
flammable(bed).
if fire at T1
then deal_with_fire from T2 to T3.
deal_with_fire from T1 to T2
if eliminate from T1 to T2, delay from T1 to T2, delay_more from T1 to T2.
ignite(Object) initiates fire if flammable(Object).
eliminate terminates fire.
eliminate terminates water.
eliminate initiates p.
refill initiates water.
false eliminate, fire, not water.
false delay, p.
'''
|
py | 1a33fb3e61af0dffbef5192d28a516274549239c | """Example on regression using YearPredictionMSD."""
import time
import torch
import numbers
import torch.nn as nn
from torch.nn import functional as F
from sklearn.preprocessing import scale
from sklearn.datasets import load_svmlight_file
from torch.utils.data import TensorDataset, DataLoader
from torchensemble.fusion import FusionRegressor
from torchensemble.voting import VotingRegressor
from torchensemble.bagging import BaggingRegressor
from torchensemble.gradient_boosting import GradientBoostingRegressor
from torchensemble.snapshot_ensemble import SnapshotEnsembleRegressor
from torchensemble.utils.logging import set_logger
def load_data(batch_size):
# The dataset can be downloaded from:
# https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/regression.html#YearPredictionMSD
if not isinstance(batch_size, numbers.Integral):
msg = "`batch_size` should be an integer, but got {} instead."
raise ValueError(msg.format(batch_size))
# MODIFY THE PATH IF YOU WANT
train_path = "../../Dataset/LIBSVM/yearpredictionmsd_training"
test_path = "../../Dataset/LIBSVM/yearpredictionmsd_testing"
train = load_svmlight_file(train_path)
test = load_svmlight_file(test_path)
# Numpy array -> Tensor
X_train, X_test = (
torch.FloatTensor(train[0].toarray()),
torch.FloatTensor(test[0].toarray()),
)
y_train, y_test = (
torch.FloatTensor(scale(train[1]).reshape(-1, 1)),
torch.FloatTensor(scale(test[1]).reshape(-1, 1)),
)
# Tensor -> Data loader
train_data = TensorDataset(X_train, y_train)
train_loader = DataLoader(train_data, batch_size=batch_size, shuffle=True)
test_data = TensorDataset(X_test, y_test)
test_loader = DataLoader(test_data, batch_size=batch_size, shuffle=True)
return train_loader, test_loader
def display_records(records, logger):
msg = (
"{:<28} | Testing MSE: {:.2f} | Training Time: {:.2f} s |"
" Evaluating Time: {:.2f} s"
)
print("\n")
for method, training_time, evaluating_time, mse in records:
logger.info(msg.format(method, mse, training_time, evaluating_time))
class MLP(nn.Module):
def __init__(self):
super(MLP, self).__init__()
self.linear1 = nn.Linear(90, 128)
self.linear2 = nn.Linear(128, 128)
self.linear3 = nn.Linear(128, 1)
def forward(self, x):
x = x.view(x.size()[0], -1)
x = F.relu(self.linear1(x))
x = F.relu(self.linear2(x))
x = self.linear3(x)
return x
if __name__ == "__main__":
# Hyper-parameters
n_estimators = 10
lr = 1e-3
weight_decay = 5e-4
epochs = 50
# Utils
batch_size = 512
records = []
torch.manual_seed(0)
# Load data
train_loader, test_loader = load_data(batch_size)
print("Finish loading data...\n")
logger = set_logger("regression_YearPredictionMSD_mlp")
# FusionRegressor
model = FusionRegressor(
estimator=MLP,
n_estimators=n_estimators,
cuda=True
)
# Set the optimizer
model.set_optimizer("Adam", lr=lr, weight_decay=weight_decay)
tic = time.time()
model.fit(train_loader, epochs=epochs)
toc = time.time()
training_time = toc - tic
tic = time.time()
testing_mse = model.predict(test_loader)
toc = time.time()
evaluating_time = toc - tic
records.append(("FusionRegressor", training_time, evaluating_time,
testing_mse))
# VotingRegressor
model = VotingRegressor(
estimator=MLP,
n_estimators=n_estimators,
cuda=True
)
# Set the optimizer
model.set_optimizer("Adam", lr=lr, weight_decay=weight_decay)
tic = time.time()
model.fit(train_loader, epochs=epochs)
toc = time.time()
training_time = toc - tic
tic = time.time()
testing_mse = model.predict(test_loader)
toc = time.time()
evaluating_time = toc - tic
records.append(("VotingRegressor", training_time, evaluating_time,
testing_mse))
# BaggingRegressor
model = BaggingRegressor(
estimator=MLP,
n_estimators=n_estimators,
cuda=True
)
# Set the optimizer
model.set_optimizer("Adam", lr=lr, weight_decay=weight_decay)
tic = time.time()
model.fit(train_loader, epochs=epochs)
toc = time.time()
training_time = toc - tic
tic = time.time()
testing_mse = model.predict(test_loader)
toc = time.time()
evaluating_time = toc - tic
records.append(("BaggingRegressor", training_time, evaluating_time,
testing_mse))
# GradientBoostingRegressor
model = GradientBoostingRegressor(
estimator=MLP,
n_estimators=n_estimators,
cuda=True
)
# Set the optimizer
model.set_optimizer("Adam", lr=lr, weight_decay=weight_decay)
tic = time.time()
model.fit(train_loader, epochs=epochs)
toc = time.time()
training_time = toc - tic
tic = time.time()
testing_mse = model.predict(test_loader)
toc = time.time()
evaluating_time = toc - tic
records.append(("GradientBoostingRegressor", training_time,
evaluating_time, testing_mse))
# SnapshotEnsembleRegressor
model = SnapshotEnsembleRegressor(
estimator=MLP,
n_estimators=n_estimators,
cuda=True
)
# Set the optimizer
model.set_optimizer("Adam", lr=lr, weight_decay=weight_decay)
tic = time.time()
model.fit(train_loader, epochs=epochs)
toc = time.time()
training_time = toc - tic
tic = time.time()
testing_mse = model.predict(test_loader)
toc = time.time()
evaluating_time = toc - tic
records.append(("SnapshotEnsembleRegressor", training_time,
evaluating_time, testing_mse))
# Print results on different ensemble methods
display_records(records, logger)
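# Example of a summary line produced by display_records above
# (the numbers are illustrative placeholders, not real results):
# FusionRegressor              | Testing MSE: 0.85 | Training Time: 123.45 s | Evaluating Time: 6.78 s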
|
py | 1a33fb4991b1ef23f4587d4b512e522ecb417afd | from io import BytesIO
from unittest import TestCase
import json
import requests
from ecc import PrivateKey
from helper import (
decode_base58,
hash256,
encode_varint,
int_to_little_endian,
little_endian_to_int,
read_varint,
SIGHASH_ALL,
)
from script import p2pkh_script, Script
class TxFetcher:
cache = {}
@classmethod
def get_url(cls, testnet=False):
if testnet:
return 'http://blockstream.info/testnet/api'
else:
return 'http://blockstream.info/api'
@classmethod
def fetch(cls, tx_id, testnet=False, fresh=False):
if fresh or (tx_id not in cls.cache):
url = f'{cls.get_url(testnet)}/tx/{tx_id}/hex'
response = requests.get(url)
try:
raw = bytes.fromhex(response.text.strip())
except ValueError:
raise ValueError(f'unexpected response: {response.text}')
if raw[4] == 0:
raw = raw[:4] + raw[6:]
tx = Tx.parse(BytesIO(raw), testnet=testnet)
tx.locktime = little_endian_to_int(raw[-4:])
else:
tx = Tx.parse(BytesIO(raw), testnet=testnet)
# make sure the tx we got matches to the hash we requested
if tx.id() != tx_id:
raise ValueError(f'not the same id: {tx.id()} vs {tx_id}')
cls.cache[tx_id] = tx
cls.cache[tx_id].testnet = testnet
return cls.cache[tx_id]
@classmethod
def load_cache(cls, filename):
disk_cache = json.loads(open(filename, 'r').read())
for k, raw_hex in disk_cache.items():
cls.cache[k] = Tx.parse(BytesIO(bytes.fromhex(raw_hex)))
@classmethod
def dump_cache(cls, filename):
with open(filename, 'w') as f:
to_dump = {k: tx.serialize().hex() for k, tx in cls.cache.items()}
s = json.dumps(to_dump, sort_keys=True, indent=4)
f.write(s)
class Tx:
def __init__(self, version, tx_ins, tx_outs, locktime, testnet=False):
self.version = version
self.tx_ins = tx_ins
self.tx_outs = tx_outs
self.locktime = locktime
self.testnet = testnet
def __repr__(self):
tx_ins = ' '.join([f'{tx_in}' for tx_in in self.tx_ins])
tx_outs = ' '.join([f'{tx_out}' for tx_out in self.tx_outs])
return f'tx: {self.hash().hex()}\nversion: {self.version}\ntx_ins:\n{tx_ins}\ntx_outs:\n{tx_outs}\nlocktime: {self.locktime}\n'
def id(self):
'''Human-readable hexadecimal of the transaction hash'''
return self.hash().hex()
def hash(self):
'''Binary hash of the legacy serialization'''
return hash256(self.serialize())[::-1]
@classmethod
def parse(cls, s, testnet=False):
'''Takes a byte stream and parses the transaction at the start
return a Tx object
'''
# s.read(n) will return n bytes
# version has 4 bytes, little-endian, interpret as int
version = little_endian_to_int(s.read(4))
# num_inputs is a varint, use read_varint(s)
num_inputs = read_varint(s)
# each input needs parsing
inputs = []
for _ in range(num_inputs):
inputs.append(TxIn.parse(s))
# num_outputs is a varint, use read_varint(s)
num_outputs = read_varint(s)
# each output needs parsing
outputs = []
for _ in range(num_outputs):
outputs.append(TxOut.parse(s))
# locktime is 4 bytes, little-endian
locktime = little_endian_to_int(s.read(4))
# return an instance of the class (cls(...))
return cls(version, inputs, outputs, locktime, testnet=testnet)
def serialize(self):
'''Returns the byte serialization of the transaction'''
# serialize version (4 bytes, little endian)
result = int_to_little_endian(self.version, 4)
# encode_varint on the number of inputs
result += encode_varint(len(self.tx_ins))
# iterate inputs
for tx_in in self.tx_ins:
# serialize each input
result += tx_in.serialize()
# encode_varint on the number of outputs
result += encode_varint(len(self.tx_outs))
# iterate outputs
for tx_out in self.tx_outs:
# serialize each output
result += tx_out.serialize()
# serialize locktime (4 bytes, little endian)
result += int_to_little_endian(self.locktime, 4)
return result
def fee(self):
'''Returns the fee of this transaction in satoshi'''
# initialize input sum and output sum
input_sum, output_sum = 0, 0
# iterate through inputs
for tx_in in self.tx_ins:
# for each input get the value and add to input sum
input_sum += tx_in.value(self.testnet)
# iterate through outputs
for tx_out in self.tx_outs:
# for each output get the amount and add to output sum
output_sum += tx_out.amount
# return input sum - output sum
return input_sum - output_sum
def sig_hash(self, input_index):
'''Returns the integer representation of the hash that needs to get
signed for index input_index'''
# create the serialization per spec
# start with version: int_to_little_endian in 4 bytes
s = int_to_little_endian(self.version, 4)
# next, how many inputs there are: encode_varint
s += encode_varint(len(self.tx_ins))
# loop through each input: for i, tx_in in enumerate(self.tx_ins)
for i, tx_in in enumerate(self.tx_ins):
# if the input index is the one we're signing
if i == input_index:
# the previous tx's ScriptPubkey is the ScriptSig
script_sig = tx_in.script_pubkey(self.testnet)
# Otherwise, the ScriptSig is empty
else:
script_sig = None
# create a new TxIn with the same parameters
# as tx_in, but change the script_sig
new_tx_in = TxIn(
prev_tx=tx_in.prev_tx,
prev_index=tx_in.prev_index,
script_sig=script_sig,
sequence=tx_in.sequence,
)
# add the serialization of the new TxIn
s += new_tx_in.serialize()
# add how many outputs there are using encode_varint
s += encode_varint(len(self.tx_outs))
# add the serialization of each output
for tx_out in self.tx_outs:
s += tx_out.serialize()
# add the locktime using int_to_little_endian in 4 bytes
s += int_to_little_endian(self.locktime, 4)
# add SIGHASH_ALL using int_to_little_endian in 4 bytes
s += int_to_little_endian(SIGHASH_ALL, 4)
# hash256 the serialization
h256 = hash256(s)
# convert the result to an integer using int.from_bytes(x, 'big')
return int.from_bytes(h256, 'big')
def verify_input(self, input_index):
'''Returns whether the input has a valid signature'''
# get the relevant input
tx_in = self.tx_ins[input_index]
# get the sig_hash (z)
z = self.sig_hash(input_index)
# combine the scripts
combined_script = tx_in.script_sig + tx_in.script_pubkey(self.testnet)
# evaluate the combined script
return combined_script.evaluate(z)
def verify(self):
'''Verify this transaction'''
if self.fee() < 0:
return False
for i in range(len(self.tx_ins)):
if not self.verify_input(i):
return False
return True
def sign_input(self, input_index, private_key):
'''Signs the input using the private key'''
# get the sig_hash (z)
z = self.sig_hash(input_index)
# get der signature of z from private key
der = private_key.sign(z).der()
# append the SIGHASH_ALL to der (use SIGHASH_ALL.to_bytes(1, 'big'))
sig = der + SIGHASH_ALL.to_bytes(1, 'big')
# calculate the sec
sec = private_key.point.sec()
# initialize a new script with [sig, sec] as the elements
script_sig = Script([sig, sec])
# change input's script_sig to new script
self.tx_ins[input_index].script_sig = script_sig
# return whether sig is valid using self.verify_input
return self.verify_input(input_index)
class TxIn:
def __init__(self, prev_tx, prev_index, script_sig=None, sequence=0xffffffff):
self.prev_tx = prev_tx
self.prev_index = prev_index
if script_sig is None:
self.script_sig = Script()
else:
self.script_sig = script_sig
self.sequence = sequence
def __repr__(self):
return f'{self.prev_tx.hex()}:{self.prev_index}'
@classmethod
def parse(cls, s):
'''Takes a byte stream and parses the tx_input at the start
return a TxIn object
'''
# s.read(n) will return n bytes
# prev_tx is 32 bytes, little endian
prev_tx = s.read(32)[::-1]
# prev_index is 4 bytes, little endian, interpret as int
prev_index = little_endian_to_int(s.read(4))
# script_sig is a variable field (length followed by the data)
# you can use Script.parse to get the actual script
script_sig = Script.parse(s)
# sequence is 4 bytes, little-endian, interpret as int
sequence = little_endian_to_int(s.read(4))
# return an instance of the class (cls(...))
return cls(prev_tx, prev_index, script_sig, sequence)
def serialize(self):
'''Returns the byte serialization of the transaction input'''
# serialize prev_tx, little endian
result = self.prev_tx[::-1]
# serialize prev_index, 4 bytes, little endian
result += int_to_little_endian(self.prev_index, 4)
# serialize the script_sig
result += self.script_sig.serialize()
# serialize sequence, 4 bytes, little endian
result += int_to_little_endian(self.sequence, 4)
return result
def fetch_tx(self, testnet=False):
return TxFetcher.fetch(self.prev_tx.hex(), testnet=testnet)
def value(self, testnet=False):
'''Get the outpoint value by looking up the tx hash
Returns the amount in satoshi
'''
# use self.fetch_tx to get the transaction
tx = self.fetch_tx(testnet=testnet)
# get the output at self.prev_index
# return the amount property
return tx.tx_outs[self.prev_index].amount
def script_pubkey(self, testnet=False):
'''Get the scriptPubKey by looking up the tx hash
Returns a Script object
'''
# use self.fetch_tx to get the transaction
tx = self.fetch_tx(testnet=testnet)
# get the output at self.prev_index
# return the script_pubkey property
return tx.tx_outs[self.prev_index].script_pubkey
class TxOut:
def __init__(self, amount, script_pubkey):
self.amount = amount
self.script_pubkey = script_pubkey
def __repr__(self):
return f'{self.amount}:{self.script_pubkey}'
@classmethod
def parse(cls, s):
'''Takes a byte stream and parses the tx_output at the start
return a TxOut object
'''
# s.read(n) will return n bytes
# amount is 8 bytes, little endian, interpret as int
amount = little_endian_to_int(s.read(8))
# script_pubkey is a variable field (length followed by the data)
# you can use Script.parse to get the actual script
script_pubkey = Script.parse(s)
# return an instance of the class (cls(...))
return cls(amount, script_pubkey)
def serialize(self):
'''Returns the byte serialization of the transaction output'''
# serialize amount, 8 bytes, little endian
result = int_to_little_endian(self.amount, 8)
# serialize the script_pubkey
result += self.script_pubkey.serialize()
return result
class TxTest(TestCase):
cache_file = 'tx.cache'
@classmethod
def setUpClass(cls):
# fill with cache so we don't have to be online to run these tests
TxFetcher.load_cache(cls.cache_file)
def test_parse_version(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(tx.version, 1)
def test_parse_inputs(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(len(tx.tx_ins), 1)
want = bytes.fromhex('d1c789a9c60383bf715f3f6ad9d14b91fe55f3deb369fe5d9280cb1a01793f81')
self.assertEqual(tx.tx_ins[0].prev_tx, want)
self.assertEqual(tx.tx_ins[0].prev_index, 0)
want = bytes.fromhex('6b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278a')
self.assertEqual(tx.tx_ins[0].script_sig.serialize(), want)
self.assertEqual(tx.tx_ins[0].sequence, 0xfffffffe)
def test_parse_outputs(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(len(tx.tx_outs), 2)
want = 32454049
self.assertEqual(tx.tx_outs[0].amount, want)
want = bytes.fromhex('1976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac')
self.assertEqual(tx.tx_outs[0].script_pubkey.serialize(), want)
want = 10011545
self.assertEqual(tx.tx_outs[1].amount, want)
want = bytes.fromhex('1976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac')
self.assertEqual(tx.tx_outs[1].script_pubkey.serialize(), want)
def test_parse_locktime(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(tx.locktime, 410393)
def test_serialize(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(tx.serialize(), raw_tx)
def test_input_value(self):
tx_hash = 'd1c789a9c60383bf715f3f6ad9d14b91fe55f3deb369fe5d9280cb1a01793f81'
index = 0
want = 42505594
tx_in = TxIn(bytes.fromhex(tx_hash), index)
self.assertEqual(tx_in.value(), want)
def test_input_pubkey(self):
tx_hash = 'd1c789a9c60383bf715f3f6ad9d14b91fe55f3deb369fe5d9280cb1a01793f81'
index = 0
tx_in = TxIn(bytes.fromhex(tx_hash), index)
want = bytes.fromhex('1976a914a802fc56c704ce87c42d7c92eb75e7896bdc41ae88ac')
self.assertEqual(tx_in.script_pubkey().serialize(), want)
def test_fee(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(tx.fee(), 40000)
raw_tx = bytes.fromhex('010000000456919960ac691763688d3d3bcea9ad6ecaf875df5339e148a1fc61c6ed7a069e010000006a47304402204585bcdef85e6b1c6af5c2669d4830ff86e42dd205c0e089bc2a821657e951c002201024a10366077f87d6bce1f7100ad8cfa8a064b39d4e8fe4ea13a7b71aa8180f012102f0da57e85eec2934a82a585ea337ce2f4998b50ae699dd79f5880e253dafafb7feffffffeb8f51f4038dc17e6313cf831d4f02281c2a468bde0fafd37f1bf882729e7fd3000000006a47304402207899531a52d59a6de200179928ca900254a36b8dff8bb75f5f5d71b1cdc26125022008b422690b8461cb52c3cc30330b23d574351872b7c361e9aae3649071c1a7160121035d5c93d9ac96881f19ba1f686f15f009ded7c62efe85a872e6a19b43c15a2937feffffff567bf40595119d1bb8a3037c356efd56170b64cbcc160fb028fa10704b45d775000000006a47304402204c7c7818424c7f7911da6cddc59655a70af1cb5eaf17c69dadbfc74ffa0b662f02207599e08bc8023693ad4e9527dc42c34210f7a7d1d1ddfc8492b654a11e7620a0012102158b46fbdff65d0172b7989aec8850aa0dae49abfb84c81ae6e5b251a58ace5cfeffffffd63a5e6c16e620f86f375925b21cabaf736c779f88fd04dcad51d26690f7f345010000006a47304402200633ea0d3314bea0d95b3cd8dadb2ef79ea8331ffe1e61f762c0f6daea0fabde022029f23b3e9c30f080446150b23852028751635dcee2be669c2a1686a4b5edf304012103ffd6f4a67e94aba353a00882e563ff2722eb4cff0ad6006e86ee20dfe7520d55feffffff0251430f00000000001976a914ab0c0b2e98b1ab6dbf67d4750b0a56244948a87988ac005a6202000000001976a9143c82d7df364eb6c75be8c80df2b3eda8db57397088ac46430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
self.assertEqual(tx.fee(), 140500)
def test_sig_hash(self):
raw_tx = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
stream = BytesIO(raw_tx)
tx = Tx.parse(stream)
want = int('27e0c5994dec7824e56dec6b2fcb342eb7cdb0d0957c2fce9882f715e85d81a6', 16)
self.assertEqual(tx.sig_hash(0), want)
def test_verify_p2pkh(self):
tx = TxFetcher.fetch('452c629d67e41baec3ac6f04fe744b4b9617f8f859c63b3002f8684e7a4fee03')
self.assertTrue(tx.verify())
tx = TxFetcher.fetch('5418099cc755cb9dd3ebc6cf1a7888ad53a1a3beb5a025bce89eb1bf7f1650a2', testnet=True)
self.assertTrue(tx.verify())
def test_sign_input(self):
private_key = PrivateKey(secret=8675309)
tx_ins = []
prev_tx = bytes.fromhex('0025bc3c0fa8b7eb55b9437fdbd016870d18e0df0ace7bc9864efc38414147c8')
tx_ins.append(TxIn(prev_tx, 0))
tx_outs = []
h160 = decode_base58('mzx5YhAH9kNHtcN481u6WkjeHjYtVeKVh2')
tx_outs.append(TxOut(amount=int(0.99 * 100000000), script_pubkey=p2pkh_script(h160)))
h160 = decode_base58('mnrVtF8DWjMu839VW3rBfgYaAfKk8983Xf')
tx_outs.append(TxOut(amount=int(0.1 * 100000000), script_pubkey=p2pkh_script(h160)))
tx = Tx(1, tx_ins, tx_outs, 0, testnet=True)
self.assertTrue(tx.sign_input(0, private_key))
|
py | 1a33fb537f4cf962fb3e5e83be860359a040f21d | import asyncio
import os
import sys
import traceback
import disnake
from disnake.ext import commands
if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
def fancy_traceback(exc: Exception) -> str:
"""May not fit the message content limit"""
text = "".join(traceback.format_exception(type(exc), exc, exc.__traceback__))
return f"```py\n{text[-4086:]}\n```"
class TestBot(commands.Bot):
def __init__(self):
super().__init__(
command_prefix="..",
intents=disnake.Intents.all(),
help_command=None, # type: ignore
sync_commands_debug=True,
sync_permissions=True,
test_guilds=[
570841314200125460,
768247229840359465,
808030843078836254,
723976264511389746,
],
)
def load_all_extensions(self, folder: str) -> None:
py_path = f"test_bot.{folder}"
folder = f"test_bot/{folder}"
for name in os.listdir(folder):
if name.endswith(".py") and os.path.isfile(f"{folder}/{name}"):
self.load_extension(f"{py_path}.{name[:-3]}")
async def on_ready(self):
# fmt: off
print(
f"\n"
f"The bot is ready.\n"
f"User: {self.user}\n"
f"ID: {self.user.id}\n"
)
# fmt: on
async def on_command_error(self, ctx: commands.Context, error: commands.CommandError) -> None:
embed = disnake.Embed(
title=f"Command `{ctx.command}` failed due to `{error}`",
description=fancy_traceback(error),
color=disnake.Color.red(),
)
await ctx.send(embed=embed)
async def on_slash_command_error(
self,
inter: disnake.AppCmdInter,
error: commands.CommandError,
) -> None:
embed = disnake.Embed(
title=f"Slash command `{inter.data.name}` failed due to `{error}`",
description=fancy_traceback(error),
color=disnake.Color.red(),
)
if inter.response._responded:
send = inter.channel.send
else:
send = inter.response.send_message
await send(embed=embed)
async def on_user_command_error(
self,
inter: disnake.AppCmdInter,
error: commands.CommandError,
) -> None:
embed = disnake.Embed(
title=f"User command `{inter.data.name}` failed due to `{error}`",
description=fancy_traceback(error),
color=disnake.Color.red(),
)
if inter.response._responded:
send = inter.channel.send
else:
send = inter.response.send_message
await send(embed=embed)
async def on_message_command_error(
self,
inter: disnake.AppCmdInter,
error: commands.CommandError,
) -> None:
embed = disnake.Embed(
title=f"Message command `{inter.data.name}` failed due to `{error}`",
description=fancy_traceback(error),
color=disnake.Color.red(),
)
if inter.response._responded:
send = inter.channel.send
else:
send = inter.response.send_message
await send(embed=embed)
print(f"disnake: {disnake.__version__}\n")
bot = TestBot()
bot.load_all_extensions("cogs")
bot.run(os.environ.get("BOT_TOKEN"))
|
py | 1a33fbcec39a6a15a05174eb7dcd11cfcb1e9be7 | import numpy as np
from sklearn.ensemble import RandomForestClassifier as SKRandomForestClassifier
from sklearn.feature_selection import SelectFromModel as SkSelect
from skopt.space import Real
from .feature_selector import FeatureSelector
class RFClassifierSelectFromModel(FeatureSelector):
"""Selects top features based on importance weights using a Random Forest classifier."""
name = 'RF Classifier Select From Model'
hyperparameter_ranges = {
"percent_features": Real(.01, 1),
"threshold": ['mean', -np.inf]
}
def __init__(self, number_features=None, n_estimators=10, max_depth=None,
percent_features=0.5, threshold=-np.inf, n_jobs=-1, random_seed=0, **kwargs):
parameters = {"number_features": number_features,
"n_estimators": n_estimators,
"max_depth": max_depth,
"percent_features": percent_features,
"threshold": threshold,
"n_jobs": n_jobs}
parameters.update(kwargs)
estimator = SKRandomForestClassifier(random_state=random_seed,
n_estimators=n_estimators,
max_depth=max_depth,
n_jobs=n_jobs)
max_features = max(1, int(percent_features * number_features)) if number_features else None
feature_selection = SkSelect(estimator=estimator,
max_features=max_features,
threshold=threshold,
**kwargs)
super().__init__(parameters=parameters,
component_obj=feature_selection,
random_seed=random_seed)
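# --- Illustrative usage sketch (added; not part of the original module) ---
# Assumes the FeatureSelector base class exposes the usual fit/transform API;
# the toy data below is made up for the example.
#
#   import pandas as pd
#   X = pd.DataFrame({"a": [0, 1, 0, 1], "b": [1, 1, 0, 0], "c": [3, 2, 1, 0]})
#   y = pd.Series([0, 1, 0, 1])
#   selector = RFClassifierSelectFromModel(number_features=X.shape[1], percent_features=0.5)
#   X_selected = selector.fit_transform(X, y)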
|
py | 1a33fc063754fd622385c1442d57869184fbce12 | import random
class Rect:
"""Define a pure and simple rectangle."""
# Class methods
@classmethod
def cross(cls, r1, r2):
"""Determine the rectangle resulting of the intersection of two rectangles."""
if r1.xmax < r2.xmin or r1.xmin > r2.xmax:
return
if r1.ymax < r2.ymin or r1.ymin > r2.ymax:
return
xmin = max(r1.xmin, r2.xmin)
ymin = max(r1.ymin, r2.ymin)
xmax = min(r1.xmax, r2.xmax)
ymax = min(r1.ymax, r2.ymax)
return Rect.createFromCorners(xmin, ymin, xmax, ymax)
@classmethod
def random(cls, borns=[-1, 1], borns_size=[0, 1]):
"""Create a random rect."""
x = random.uniform(*borns)
y = random.uniform(*borns)
sx = random.uniform(*borns_size)
sy = random.uniform(*borns_size)
return cls(x, y, sx, sy)
@classmethod
def createFromCorners(cls, *corners):
"""Create a rectangle."""
x, y, xm, ym = corners
w = xm - x
h = ym - y
return cls(x + w / 2, y + h / 2, w, h)
@classmethod
def createFromCoordinates(cls, *coordinates):
"""Create a rect using the coordinates."""
return cls(*coordinates)
@classmethod
def createFromRect(cls, *rect):
"""Create a rect from an unpacked pygame.rect"""
l, r, w, h = rect
return cls(l + w / 2, r + h / 2, w, h)
def __init__(self, x, y, w, h):
"""Create a rectangle using its x, y, width, and height, the
x and y components correspond to the center of the rectangle."""
self.components = [x, y, w, h]
def __getitem__(self, index):
return self.components[index]
def __setitem__(self, key, value):
self.components[key] = value
x = property(
lambda cls: cls.__getitem__(0),
lambda cls, value: cls.__setitem__(0, value),
doc="x component of the center",
)
y = property(
lambda cls: cls.__getitem__(1),
lambda cls, value: cls.__setitem__(1, value),
doc="y component of the center",
)
def getSize(self):
return [self.w, self.h]
def setSize(self, size):
self.w, self.h = size
size = property(getSize, setSize)
def getPosition(self):
return [self.x, self.y]
def setPosition(self, position):
self.x, self.y = position
center = position = property(getPosition, setPosition)
def __str__(self, n=2):
"""Return the string representation of a rect."""
r = self.__round__(n)
return (
"Rect(x="
+ str(r.x)
+ ",y="
+ str(r.y)
+ ",w="
+ str(r.w)
+ ",h="
+ str(r.h)
+ ")"
)
def __round__(self, n=2):
"""Round the components of the rect."""
x = round(self.x, n)
y = round(self.y, n)
w = round(self.w, n)
h = round(self.h, n)
return Rect(x, y, w, h)
def __contains__(self, position):
"""Determine if a position is in the rectangle."""
x, y = position
return (self.xmin <= x <= self.xmax) and (self.ymin <= y <= self.ymax)
def resize(self, n):
"""Allow the user to resize the rectangle."""
self.w *= n
self.h *= n
def __iter__(self):
self.iterator = 0
return self
def __next__(self):
if self.iterator < 4:
self.iterator += 1
return self.components[self.iterator - 1]
else:
raise StopIteration
# properties
# corners
def getCorners(self):
"""Return the corners of the rect."""
return [
self.x - self.w / 2,
self.y - self.h / 2,
self.x + self.w / 2,
self.y + self.h / 2,
]
def setCorners(self, corners):
"""Set the corners of the rect."""
x1, y1, x2, y2 = corners
self.w = x2 - x1
self.h = y2 - y1
self.x = x1 + self.w / 2
self.y = y1 + self.h / 2
# coordinates
def getCoordinates(self):
"""Return the coordinates of the rect."""
return [self.x, self.y, self.w, self.h]
def setCoordinates(self, coordinates):
"""Set the coordinates of the rect."""
self.position = coordinates[:2]
self.size = coordinates[2:]
# rect
def getRect(self):
"""Return the rect of the rectangle."""
return Rect.getRectFromCoordinates(self.getCoordinates())
def setRect(self, rect):
"""Set the rect of the rectangle."""
self.setCoordinates(Rect.getCoordinatesFromRect(rect))
# sx component
def getWidth(self):
"""Return the width."""
return self.components[2]
def setWidth(self, w):
"""Set the width."""
self.components[2] = w
# sy component
def getHeight(self):
"""Return the height."""
return self.components[3]
def setHeight(self, h):
"""Set the height."""
self.components[3] = h
# xmin component
def getXmin(self):
"""Return the minimum of the x component."""
return self.x - self.w / 2
def setXmin(self, xmin):
"""Set the minimum of the x component."""
self.x = xmin + self.w / 2
# ymin component
def getYmin(self):
"""Return the minimum of the y component."""
return self.y - self.h / 2
def setYmin(self, ymin):
"""Set the minimum of the y component."""
self.y = ymin + self.h / 2
# xmax component
def getXmax(self):
"""Return the maximum of the x component."""
return self.x + self.w / 2
def setXmax(self, xmax):
"""Set the maximum of the x component."""
self.x = xmax - self.w / 2
# ymax component
def getYmax(self):
"""Return the maximum of the y component."""
return self.y + self.h / 2
def setYmax(self, ymax):
"""Set the maximum of the y component."""
self.y = ymax - self.h / 2
corners = property(getCorners, setCorners, doc="Corners")
coordinates = property(getCoordinates, setCoordinates, doc="Center+Size")
w = sx = width = property(getWidth, setWidth, doc="Width")
h = sy = height = property(getHeight, setHeight, doc="Height")
xmin = x1 = left = l = property(getXmin, setXmin, doc="Left")
xmax = x2 = right = r = property(getXmax, setXmax, doc="Right")
ymin = y1 = bottom = b = property(getYmin, setYmin, doc="Bottom")
ymax = y2 = top = t = property(getYmax, setYmax, doc="Top")
# Static methods
@staticmethod
def getCornersFromCoordinates(coordinates):
"""Return the corners (top_left_corner,bottom_right_corner) using the coordinates (position+size)."""
"""[x,y,sx,sy] -> [mx,my,Mx,My]"""
x, y, sx, sy = coordinates
mx, my = x - sx / 2, y - sy / 2
Mx, My = x + sx / 2, y + sy / 2
return [mx, my, Mx, My]
@staticmethod
def getCoordinatesFromCorners(corners):
"""Return the coordinates (position+size) using the corners (top_left_corner,bottom_right_corner)."""
"""[mx,my,Mx,My] -> [x,y,sx,sy]"""
mx, my, Mx, My = corners
sx, sy = Mx - mx, My - my
x, y = mx + sx / 2, my + sy / 2
return [x, y, sx, sy]
@staticmethod
def getCoordinatesFromRect(rect):
"""Return the coordinates (position,size) using the rect (top_left_corner,size)."""
"""[x,y,sx,sy] -> [mx,my,sx,sy]"""
mx, my, sx, sy = rect
x, y = mx + sx / 2, my + sy / 2
return [x, y, sx, sy]
@staticmethod
def getRectFromCoordinates(coordinates):
"""Return the rect (top_left_corner,size) using the coordinates (position,size)."""
"""[mx,my,sx,sy] -> [x,y,sx,sy]"""
x, y, sx, sy = coordinates
mx, my = x - sx / 2, y - sy / 2
return [mx, my, sx, sy]
@staticmethod
def getRectFromCorners(corners):
"""Return the rect (top_left_corner,size) using the corners (top_left_corner,bottom_right_corner)."""
"""[mx,my,Mx,My] -> [mx,my,sx,sy]"""
mx, my, Mx, My = corners
sx, sy = Mx - mx, My - my
return [mx, my, sx, sy]
@staticmethod
def getCornersFromRect(rect):
"""Return the (top_left_corner,bottom_right_corner) using the corners rect (top_left_corner,size)."""
"""[mx,my,Mx,My] -> [mx,my,sx,sy]"""
mx, my, sx, sy = rect
Mx, My = mx + sx, my + sy
return [mx, my, Mx, My]
if __name__ == "__main__":
r1 = Rect.random()
r2 = Rect.random()
r1.x -= 1
print(r1, r2)
print(r1.corners)
print(r1.coordinates)
print(r1.x, r1.y)
print(r1.sx, r1.sy)
print(r1.width, r1.height)
r = Rect.cross(r1, r2)
print(*r1)
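    # Added illustration: round-trip between the coordinate conventions above.
    # A 4x2 rectangle centred at (1, 2) has corners (-1, 1) and (3, 3).
    coords = [1, 2, 4, 2]
    corners = Rect.getCornersFromCoordinates(coords)
    print(corners)  # [-1.0, 1.0, 3.0, 3.0]
    print(Rect.getCoordinatesFromCorners(corners))  # [1.0, 2.0, 4.0, 2.0]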
|
py | 1a33fc08e815b6345fd97407e588ffa6612e2f0b | """CGIWrapper package
A Webware for Python plugin. See Docs/index.html.
"""
def InstallInWebKit(appServer):
pass
|
py | 1a33fda49f418aedf37563ac2fb7eb9aa8c9075e | import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.express as px
import pandas as pd
df = pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/gapminderDataFiveYear.csv')
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
server = app.server
app.layout = html.Div([
dcc.Graph(id='graph-with-slider'),
dcc.Slider(
id='year-slider',
min=df['year'].min(),
max=df['year'].max(),
value=df['year'].min(),
marks={str(year): str(year) for year in df['year'].unique()},
step=None
)
])
@app.callback(
Output('graph-with-slider', 'figure'),
Input('year-slider', 'value'))
def update_figure(selected_year):
filtered_df = df[df.year == selected_year]
fig = px.scatter(filtered_df, x="gdpPercap", y="lifeExp",
size="pop", color="continent", hover_name="country",
log_x=False, size_max=55)
fig.update_xaxes(range=[-5000, 60000])
fig.update_yaxes(range=[20, 100])
fig.update_layout(transition_duration=500)
return fig
if __name__ == '__main__':
app.run_server(debug=True)
|
py | 1a33fe0b12617b3469fb15d08c95a0de04f92732 | """
Chombo frontend tests
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yt.testing import \
requires_file, \
assert_equal, \
units_override_check
from yt.utilities.answer_testing.framework import \
requires_ds, \
small_patch_amr, \
data_dir_load
from yt.frontends.chombo.api import \
ChomboDataset, \
Orion2Dataset, \
PlutoDataset
_fields = ("density", "velocity_magnitude", # "velocity_divergence",
"magnetic_field_x")
gc = "GaussianCloud/data.0077.3d.hdf5"
@requires_ds(gc)
def test_gc():
ds = data_dir_load(gc)
yield assert_equal, str(ds), "data.0077.3d.hdf5"
for test in small_patch_amr(gc, _fields):
test_gc.__name__ = test.description
yield test
tb = "TurbBoxLowRes/data.0005.3d.hdf5"
@requires_ds(tb)
def test_tb():
ds = data_dir_load(tb)
yield assert_equal, str(ds), "data.0005.3d.hdf5"
for test in small_patch_amr(tb, _fields):
test_tb.__name__ = test.description
yield test
iso = "IsothermalSphere/data.0000.3d.hdf5"
@requires_ds(iso)
def test_iso():
ds = data_dir_load(iso)
yield assert_equal, str(ds), "data.0000.3d.hdf5"
for test in small_patch_amr(iso, _fields):
test_iso.__name__ = test.description
yield test
_zp_fields = ("rhs", "phi")
zp = "ZeldovichPancake/plt32.2d.hdf5"
@requires_ds(zp)
def test_zp():
ds = data_dir_load(zp)
yield assert_equal, str(ds), "plt32.2d.hdf5"
for test in small_patch_amr(zp, _zp_fields, input_center="c",
input_weight="rhs"):
test_zp.__name__ = test.description
yield test
kho = "KelvinHelmholtz/data.0004.hdf5"
@requires_ds(kho)
def test_kho():
ds = data_dir_load(kho)
yield assert_equal, str(ds), "data.0004.hdf5"
for test in small_patch_amr(kho, _fields):
test_kho.__name__ = test.description
yield test
@requires_file(zp)
def test_ChomboDataset():
assert isinstance(data_dir_load(zp), ChomboDataset)
@requires_file(gc)
def test_Orion2Dataset():
assert isinstance(data_dir_load(gc), Orion2Dataset)
@requires_file(kho)
def test_PlutoDataset():
assert isinstance(data_dir_load(kho), PlutoDataset)
@requires_file(zp)
def test_units_override_zp():
for test in units_override_check(zp):
yield test
@requires_file(gc)
def test_units_override_gc():
for test in units_override_check(gc):
yield test
@requires_file(kho)
def test_units_override_kho():
for test in units_override_check(kho):
yield test
|
py | 1a33ff36d9270c450299204b7462564fe73ee346 | # -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class CoreDefinitionCore(Property):
"""
AWS Object Type = "AWS::Greengrass::CoreDefinition.Core"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html
Property Document:
- ``rp_CertificateArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-certificatearn
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-id
- ``rp_ThingArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-thingarn
- ``p_SyncShadow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-syncshadow
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::CoreDefinition.Core"
rp_CertificateArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CertificateArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-certificatearn"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-id"""
rp_ThingArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ThingArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-thingarn"""
p_SyncShadow: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "SyncShadow"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-core.html#cfn-greengrass-coredefinition-core-syncshadow"""
@attr.s
class LoggerDefinitionVersionLogger(Property):
"""
AWS Object Type = "AWS::Greengrass::LoggerDefinitionVersion.Logger"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html
Property Document:
- ``rp_Component``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-component
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-id
- ``rp_Level``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-level
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-type
- ``p_Space``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-space
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::LoggerDefinitionVersion.Logger"
rp_Component: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Component"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-component"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-id"""
rp_Level: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Level"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-level"""
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-type"""
p_Space: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Space"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinitionversion-logger.html#cfn-greengrass-loggerdefinitionversion-logger-space"""
@attr.s
class ResourceDefinitionSecretsManagerSecretResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.SecretsManagerSecretResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html
Property Document:
- ``rp_ARN``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinition-secretsmanagersecretresourcedata-arn
- ``p_AdditionalStagingLabelsToDownload``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinition-secretsmanagersecretresourcedata-additionalstaginglabelstodownload
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.SecretsManagerSecretResourceData"
rp_ARN: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ARN"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinition-secretsmanagersecretresourcedata-arn"""
p_AdditionalStagingLabelsToDownload: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "AdditionalStagingLabelsToDownload"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinition-secretsmanagersecretresourcedata-additionalstaginglabelstodownload"""
@attr.s
class ResourceDefinitionResourceDownloadOwnerSetting(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.ResourceDownloadOwnerSetting"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html
Property Document:
- ``rp_GroupOwner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinition-resourcedownloadownersetting-groupowner
- ``rp_GroupPermission``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinition-resourcedownloadownersetting-grouppermission
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.ResourceDownloadOwnerSetting"
rp_GroupOwner: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinition-resourcedownloadownersetting-groupowner"""
rp_GroupPermission: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "GroupPermission"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinition-resourcedownloadownersetting-grouppermission"""
@attr.s
class LoggerDefinitionLogger(Property):
"""
AWS Object Type = "AWS::Greengrass::LoggerDefinition.Logger"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html
Property Document:
- ``rp_Component``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-component
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-id
- ``rp_Level``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-level
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-type
- ``p_Space``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-space
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::LoggerDefinition.Logger"
rp_Component: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Component"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-component"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-id"""
rp_Level: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Level"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-level"""
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-type"""
p_Space: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Space"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-logger.html#cfn-greengrass-loggerdefinition-logger-space"""
@attr.s
class ConnectorDefinitionConnector(Property):
"""
AWS Object Type = "AWS::Greengrass::ConnectorDefinition.Connector"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html
Property Document:
- ``rp_ConnectorArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-connectorarn
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-id
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-parameters
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ConnectorDefinition.Connector"
rp_ConnectorArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ConnectorArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-connectorarn"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-id"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connector.html#cfn-greengrass-connectordefinition-connector-parameters"""
@attr.s
class ResourceDefinitionVersionSecretsManagerSecretResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.SecretsManagerSecretResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html
Property Document:
- ``rp_ARN``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata-arn
- ``p_AdditionalStagingLabelsToDownload``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata-additionalstaginglabelstodownload
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.SecretsManagerSecretResourceData"
rp_ARN: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ARN"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata-arn"""
p_AdditionalStagingLabelsToDownload: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "AdditionalStagingLabelsToDownload"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata.html#cfn-greengrass-resourcedefinitionversion-secretsmanagersecretresourcedata-additionalstaginglabelstodownload"""
@attr.s
class SubscriptionDefinitionVersionSubscription(Property):
"""
AWS Object Type = "AWS::Greengrass::SubscriptionDefinitionVersion.Subscription"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html
Property Document:
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-id
- ``rp_Source``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-source
- ``rp_Subject``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-subject
- ``rp_Target``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-target
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::SubscriptionDefinitionVersion.Subscription"
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-id"""
rp_Source: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Source"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-source"""
rp_Subject: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Subject"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-subject"""
rp_Target: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Target"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinitionversion-subscription.html#cfn-greengrass-subscriptiondefinitionversion-subscription-target"""
@attr.s
class ResourceDefinitionSageMakerMachineLearningModelResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.SageMakerMachineLearningModelResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html
Property Document:
- ``rp_DestinationPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-destinationpath
- ``rp_SageMakerJobArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-sagemakerjobarn
- ``p_OwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-ownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.SageMakerMachineLearningModelResourceData"
rp_DestinationPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DestinationPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-destinationpath"""
rp_SageMakerJobArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SageMakerJobArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-sagemakerjobarn"""
p_OwnerSetting: typing.Union['ResourceDefinitionResourceDownloadOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionResourceDownloadOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionResourceDownloadOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "OwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-sagemakermachinelearningmodelresourcedata-ownersetting"""
@attr.s
class CoreDefinitionVersionCore(Property):
"""
AWS Object Type = "AWS::Greengrass::CoreDefinitionVersion.Core"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html
Property Document:
- ``rp_CertificateArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-certificatearn
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-id
- ``rp_ThingArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-thingarn
- ``p_SyncShadow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-syncshadow
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::CoreDefinitionVersion.Core"
rp_CertificateArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CertificateArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-certificatearn"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-id"""
rp_ThingArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ThingArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-thingarn"""
p_SyncShadow: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "SyncShadow"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinitionversion-core.html#cfn-greengrass-coredefinitionversion-core-syncshadow"""
@attr.s
class FunctionDefinitionVersionRunAs(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.RunAs"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html
Property Document:
- ``p_Gid``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html#cfn-greengrass-functiondefinitionversion-runas-gid
- ``p_Uid``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html#cfn-greengrass-functiondefinitionversion-runas-uid
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.RunAs"
p_Gid: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Gid"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html#cfn-greengrass-functiondefinitionversion-runas-gid"""
p_Uid: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Uid"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-runas.html#cfn-greengrass-functiondefinitionversion-runas-uid"""
@attr.s
class ResourceDefinitionVersionResourceDownloadOwnerSetting(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.ResourceDownloadOwnerSetting"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html
Property Document:
- ``rp_GroupOwner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinitionversion-resourcedownloadownersetting-groupowner
- ``rp_GroupPermission``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinitionversion-resourcedownloadownersetting-grouppermission
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.ResourceDownloadOwnerSetting"
rp_GroupOwner: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinitionversion-resourcedownloadownersetting-groupowner"""
rp_GroupPermission: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "GroupPermission"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedownloadownersetting.html#cfn-greengrass-resourcedefinitionversion-resourcedownloadownersetting-grouppermission"""
@attr.s
class FunctionDefinitionRunAs(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.RunAs"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html
Property Document:
- ``p_Gid``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html#cfn-greengrass-functiondefinition-runas-gid
- ``p_Uid``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html#cfn-greengrass-functiondefinition-runas-uid
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.RunAs"
p_Gid: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Gid"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html#cfn-greengrass-functiondefinition-runas-gid"""
p_Uid: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Uid"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-runas.html#cfn-greengrass-functiondefinition-runas-uid"""
@attr.s
class DeviceDefinitionVersionDevice(Property):
"""
AWS Object Type = "AWS::Greengrass::DeviceDefinitionVersion.Device"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html
Property Document:
- ``rp_CertificateArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-certificatearn
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-id
- ``rp_ThingArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-thingarn
- ``p_SyncShadow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-syncshadow
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::DeviceDefinitionVersion.Device"
rp_CertificateArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CertificateArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-certificatearn"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-id"""
rp_ThingArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ThingArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-thingarn"""
p_SyncShadow: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "SyncShadow"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinitionversion-device.html#cfn-greengrass-devicedefinitionversion-device-syncshadow"""
@attr.s
class ResourceDefinitionGroupOwnerSetting(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.GroupOwnerSetting"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html
Property Document:
- ``rp_AutoAddGroupOwner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html#cfn-greengrass-resourcedefinition-groupownersetting-autoaddgroupowner
- ``p_GroupOwner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html#cfn-greengrass-resourcedefinition-groupownersetting-groupowner
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.GroupOwnerSetting"
rp_AutoAddGroupOwner: bool = attr.ib(
default=None,
validator=attr.validators.instance_of(bool),
metadata={AttrMeta.PROPERTY_NAME: "AutoAddGroupOwner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html#cfn-greengrass-resourcedefinition-groupownersetting-autoaddgroupowner"""
p_GroupOwner: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-groupownersetting.html#cfn-greengrass-resourcedefinition-groupownersetting-groupowner"""
@attr.s
class GroupGroupVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::Group.GroupVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html
Property Document:
- ``p_ConnectorDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-connectordefinitionversionarn
- ``p_CoreDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-coredefinitionversionarn
- ``p_DeviceDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-devicedefinitionversionarn
- ``p_FunctionDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-functiondefinitionversionarn
- ``p_LoggerDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-loggerdefinitionversionarn
- ``p_ResourceDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-resourcedefinitionversionarn
- ``p_SubscriptionDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-subscriptiondefinitionversionarn
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::Group.GroupVersion"
p_ConnectorDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectorDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-connectordefinitionversionarn"""
p_CoreDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CoreDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-coredefinitionversionarn"""
p_DeviceDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeviceDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-devicedefinitionversionarn"""
p_FunctionDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "FunctionDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-functiondefinitionversionarn"""
p_LoggerDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "LoggerDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-loggerdefinitionversionarn"""
p_ResourceDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ResourceDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-resourcedefinitionversionarn"""
p_SubscriptionDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SubscriptionDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-group-groupversion.html#cfn-greengrass-group-groupversion-subscriptiondefinitionversionarn"""
@attr.s
class ResourceDefinitionLocalDeviceResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.LocalDeviceResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html
Property Document:
- ``rp_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html#cfn-greengrass-resourcedefinition-localdeviceresourcedata-sourcepath
- ``p_GroupOwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html#cfn-greengrass-resourcedefinition-localdeviceresourcedata-groupownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.LocalDeviceResourceData"
rp_SourcePath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SourcePath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html#cfn-greengrass-resourcedefinition-localdeviceresourcedata-sourcepath"""
p_GroupOwnerSetting: typing.Union['ResourceDefinitionGroupOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionGroupOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionGroupOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localdeviceresourcedata.html#cfn-greengrass-resourcedefinition-localdeviceresourcedata-groupownersetting"""
@attr.s
class DeviceDefinitionDevice(Property):
"""
AWS Object Type = "AWS::Greengrass::DeviceDefinition.Device"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html
Property Document:
- ``rp_CertificateArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-certificatearn
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-id
- ``rp_ThingArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-thingarn
- ``p_SyncShadow``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-syncshadow
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::DeviceDefinition.Device"
rp_CertificateArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CertificateArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-certificatearn"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-id"""
rp_ThingArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ThingArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-thingarn"""
p_SyncShadow: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "SyncShadow"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-device.html#cfn-greengrass-devicedefinition-device-syncshadow"""
@attr.s
class DeviceDefinitionDeviceDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::DeviceDefinition.DeviceDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-devicedefinitionversion.html
Property Document:
- ``rp_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-devicedefinitionversion.html#cfn-greengrass-devicedefinition-devicedefinitionversion-devices
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::DeviceDefinition.DeviceDefinitionVersion"
rp_Devices: typing.List[typing.Union['DeviceDefinitionDevice', dict]] = attr.ib(
default=None,
converter=DeviceDefinitionDevice.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(DeviceDefinitionDevice), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Devices"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-devicedefinition-devicedefinitionversion.html#cfn-greengrass-devicedefinition-devicedefinitionversion-devices"""
@attr.s
class SubscriptionDefinitionSubscription(Property):
"""
AWS Object Type = "AWS::Greengrass::SubscriptionDefinition.Subscription"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html
Property Document:
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-id
- ``rp_Source``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-source
- ``rp_Subject``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-subject
- ``rp_Target``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-target
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::SubscriptionDefinition.Subscription"
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-id"""
rp_Source: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Source"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-source"""
rp_Subject: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Subject"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-subject"""
rp_Target: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Target"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscription.html#cfn-greengrass-subscriptiondefinition-subscription-target"""
@attr.s
class CoreDefinitionCoreDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::CoreDefinition.CoreDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-coredefinitionversion.html
Property Document:
- ``rp_Cores``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-coredefinitionversion.html#cfn-greengrass-coredefinition-coredefinitionversion-cores
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::CoreDefinition.CoreDefinitionVersion"
rp_Cores: typing.List[typing.Union['CoreDefinitionCore', dict]] = attr.ib(
default=None,
converter=CoreDefinitionCore.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(CoreDefinitionCore), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Cores"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-coredefinition-coredefinitionversion.html#cfn-greengrass-coredefinition-coredefinitionversion-cores"""
@attr.s
class ResourceDefinitionVersionS3MachineLearningModelResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.S3MachineLearningModelResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html
Property Document:
- ``rp_DestinationPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-destinationpath
- ``rp_S3Uri``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-s3uri
- ``p_OwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-ownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.S3MachineLearningModelResourceData"
rp_DestinationPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DestinationPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-destinationpath"""
rp_S3Uri: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "S3Uri"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-s3uri"""
p_OwnerSetting: typing.Union['ResourceDefinitionVersionResourceDownloadOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionResourceDownloadOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionResourceDownloadOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "OwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-s3machinelearningmodelresourcedata-ownersetting"""
@attr.s
class ResourceDefinitionLocalVolumeResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.LocalVolumeResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html
Property Document:
- ``rp_DestinationPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-destinationpath
- ``rp_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-sourcepath
- ``p_GroupOwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-groupownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.LocalVolumeResourceData"
rp_DestinationPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DestinationPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-destinationpath"""
rp_SourcePath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SourcePath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-sourcepath"""
p_GroupOwnerSetting: typing.Union['ResourceDefinitionGroupOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionGroupOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionGroupOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-localvolumeresourcedata.html#cfn-greengrass-resourcedefinition-localvolumeresourcedata-groupownersetting"""
@attr.s
class FunctionDefinitionResourceAccessPolicy(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.ResourceAccessPolicy"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html
Property Document:
- ``rp_ResourceId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html#cfn-greengrass-functiondefinition-resourceaccesspolicy-resourceid
- ``p_Permission``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html#cfn-greengrass-functiondefinition-resourceaccesspolicy-permission
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.ResourceAccessPolicy"
rp_ResourceId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ResourceId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html#cfn-greengrass-functiondefinition-resourceaccesspolicy-resourceid"""
p_Permission: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Permission"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-resourceaccesspolicy.html#cfn-greengrass-functiondefinition-resourceaccesspolicy-permission"""
@attr.s
class ResourceDefinitionVersionGroupOwnerSetting(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.GroupOwnerSetting"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html
Property Document:
- ``rp_AutoAddGroupOwner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html#cfn-greengrass-resourcedefinitionversion-groupownersetting-autoaddgroupowner
- ``p_GroupOwner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html#cfn-greengrass-resourcedefinitionversion-groupownersetting-groupowner
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.GroupOwnerSetting"
rp_AutoAddGroupOwner: bool = attr.ib(
default=None,
validator=attr.validators.instance_of(bool),
metadata={AttrMeta.PROPERTY_NAME: "AutoAddGroupOwner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html#cfn-greengrass-resourcedefinitionversion-groupownersetting-autoaddgroupowner"""
p_GroupOwner: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-groupownersetting.html#cfn-greengrass-resourcedefinitionversion-groupownersetting-groupowner"""
@attr.s
class FunctionDefinitionVersionResourceAccessPolicy(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.ResourceAccessPolicy"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html
Property Document:
- ``rp_ResourceId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html#cfn-greengrass-functiondefinitionversion-resourceaccesspolicy-resourceid
- ``p_Permission``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html#cfn-greengrass-functiondefinitionversion-resourceaccesspolicy-permission
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.ResourceAccessPolicy"
rp_ResourceId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ResourceId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html#cfn-greengrass-functiondefinitionversion-resourceaccesspolicy-resourceid"""
p_Permission: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Permission"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-resourceaccesspolicy.html#cfn-greengrass-functiondefinitionversion-resourceaccesspolicy-permission"""
@attr.s
class LoggerDefinitionLoggerDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::LoggerDefinition.LoggerDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-loggerdefinitionversion.html
Property Document:
- ``rp_Loggers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-loggerdefinitionversion.html#cfn-greengrass-loggerdefinition-loggerdefinitionversion-loggers
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::LoggerDefinition.LoggerDefinitionVersion"
rp_Loggers: typing.List[typing.Union['LoggerDefinitionLogger', dict]] = attr.ib(
default=None,
converter=LoggerDefinitionLogger.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(LoggerDefinitionLogger), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Loggers"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-loggerdefinition-loggerdefinitionversion.html#cfn-greengrass-loggerdefinition-loggerdefinitionversion-loggers"""
@attr.s
class ConnectorDefinitionVersionConnector(Property):
"""
AWS Object Type = "AWS::Greengrass::ConnectorDefinitionVersion.Connector"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html
Property Document:
- ``rp_ConnectorArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-connectorarn
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-id
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-parameters
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ConnectorDefinitionVersion.Connector"
rp_ConnectorArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ConnectorArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-connectorarn"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-id"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinitionversion-connector.html#cfn-greengrass-connectordefinitionversion-connector-parameters"""
@attr.s
class FunctionDefinitionVersionExecution(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.Execution"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html
Property Document:
- ``p_IsolationMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html#cfn-greengrass-functiondefinitionversion-execution-isolationmode
- ``p_RunAs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html#cfn-greengrass-functiondefinitionversion-execution-runas
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.Execution"
p_IsolationMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "IsolationMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html#cfn-greengrass-functiondefinitionversion-execution-isolationmode"""
p_RunAs: typing.Union['FunctionDefinitionVersionRunAs', dict] = attr.ib(
default=None,
converter=FunctionDefinitionVersionRunAs.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionVersionRunAs)),
metadata={AttrMeta.PROPERTY_NAME: "RunAs"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-execution.html#cfn-greengrass-functiondefinitionversion-execution-runas"""
@attr.s
class ResourceDefinitionS3MachineLearningModelResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.S3MachineLearningModelResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html
Property Document:
- ``rp_DestinationPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-destinationpath
- ``rp_S3Uri``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-s3uri
- ``p_OwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-ownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.S3MachineLearningModelResourceData"
rp_DestinationPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DestinationPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-destinationpath"""
rp_S3Uri: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "S3Uri"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-s3uri"""
p_OwnerSetting: typing.Union['ResourceDefinitionResourceDownloadOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionResourceDownloadOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionResourceDownloadOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "OwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-s3machinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinition-s3machinelearningmodelresourcedata-ownersetting"""
@attr.s
class SubscriptionDefinitionSubscriptionDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::SubscriptionDefinition.SubscriptionDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscriptiondefinitionversion.html
Property Document:
- ``rp_Subscriptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinition-subscriptiondefinitionversion-subscriptions
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::SubscriptionDefinition.SubscriptionDefinitionVersion"
rp_Subscriptions: typing.List[typing.Union['SubscriptionDefinitionSubscription', dict]] = attr.ib(
default=None,
converter=SubscriptionDefinitionSubscription.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(SubscriptionDefinitionSubscription), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Subscriptions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-subscriptiondefinition-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinition-subscriptiondefinitionversion-subscriptions"""
@attr.s
class ResourceDefinitionVersionLocalDeviceResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.LocalDeviceResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html
Property Document:
- ``rp_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html#cfn-greengrass-resourcedefinitionversion-localdeviceresourcedata-sourcepath
- ``p_GroupOwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html#cfn-greengrass-resourcedefinitionversion-localdeviceresourcedata-groupownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.LocalDeviceResourceData"
rp_SourcePath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SourcePath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html#cfn-greengrass-resourcedefinitionversion-localdeviceresourcedata-sourcepath"""
p_GroupOwnerSetting: typing.Union['ResourceDefinitionVersionGroupOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionGroupOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionGroupOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localdeviceresourcedata.html#cfn-greengrass-resourcedefinitionversion-localdeviceresourcedata-groupownersetting"""
@attr.s
class FunctionDefinitionVersionDefaultConfig(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.DefaultConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-defaultconfig.html
Property Document:
- ``rp_Execution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-defaultconfig.html#cfn-greengrass-functiondefinitionversion-defaultconfig-execution
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.DefaultConfig"
rp_Execution: typing.Union['FunctionDefinitionVersionExecution', dict] = attr.ib(
default=None,
converter=FunctionDefinitionVersionExecution.from_dict,
validator=attr.validators.instance_of(FunctionDefinitionVersionExecution),
metadata={AttrMeta.PROPERTY_NAME: "Execution"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-defaultconfig.html#cfn-greengrass-functiondefinitionversion-defaultconfig-execution"""
@attr.s
class ConnectorDefinitionConnectorDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::ConnectorDefinition.ConnectorDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connectordefinitionversion.html
Property Document:
- ``rp_Connectors``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connectordefinitionversion.html#cfn-greengrass-connectordefinition-connectordefinitionversion-connectors
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ConnectorDefinition.ConnectorDefinitionVersion"
rp_Connectors: typing.List[typing.Union['ConnectorDefinitionConnector', dict]] = attr.ib(
default=None,
converter=ConnectorDefinitionConnector.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ConnectorDefinitionConnector), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Connectors"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-connectordefinition-connectordefinitionversion.html#cfn-greengrass-connectordefinition-connectordefinitionversion-connectors"""
@attr.s
class ResourceDefinitionVersionLocalVolumeResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.LocalVolumeResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html
Property Document:
- ``rp_DestinationPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-destinationpath
- ``rp_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-sourcepath
- ``p_GroupOwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-groupownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.LocalVolumeResourceData"
rp_DestinationPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DestinationPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-destinationpath"""
rp_SourcePath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SourcePath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-sourcepath"""
p_GroupOwnerSetting: typing.Union['ResourceDefinitionVersionGroupOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionGroupOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionGroupOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "GroupOwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-localvolumeresourcedata.html#cfn-greengrass-resourcedefinitionversion-localvolumeresourcedata-groupownersetting"""
@attr.s
class FunctionDefinitionExecution(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.Execution"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html
Property Document:
- ``p_IsolationMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html#cfn-greengrass-functiondefinition-execution-isolationmode
- ``p_RunAs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html#cfn-greengrass-functiondefinition-execution-runas
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.Execution"
p_IsolationMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "IsolationMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html#cfn-greengrass-functiondefinition-execution-isolationmode"""
p_RunAs: typing.Union['FunctionDefinitionRunAs', dict] = attr.ib(
default=None,
converter=FunctionDefinitionRunAs.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionRunAs)),
metadata={AttrMeta.PROPERTY_NAME: "RunAs"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-execution.html#cfn-greengrass-functiondefinition-execution-runas"""
@attr.s
class ResourceDefinitionVersionSageMakerMachineLearningModelResourceData(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.SageMakerMachineLearningModelResourceData"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html
Property Document:
- ``rp_DestinationPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-destinationpath
- ``rp_SageMakerJobArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-sagemakerjobarn
- ``p_OwnerSetting``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-ownersetting
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.SageMakerMachineLearningModelResourceData"
rp_DestinationPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DestinationPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-destinationpath"""
rp_SageMakerJobArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SageMakerJobArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-sagemakerjobarn"""
p_OwnerSetting: typing.Union['ResourceDefinitionVersionResourceDownloadOwnerSetting', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionResourceDownloadOwnerSetting.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionResourceDownloadOwnerSetting)),
metadata={AttrMeta.PROPERTY_NAME: "OwnerSetting"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata.html#cfn-greengrass-resourcedefinitionversion-sagemakermachinelearningmodelresourcedata-ownersetting"""
@attr.s
class FunctionDefinitionEnvironment(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.Environment"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html
Property Document:
- ``p_AccessSysfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-accesssysfs
- ``p_Execution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-execution
- ``p_ResourceAccessPolicies``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-resourceaccesspolicies
- ``p_Variables``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-variables
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.Environment"
p_AccessSysfs: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "AccessSysfs"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-accesssysfs"""
p_Execution: typing.Union['FunctionDefinitionExecution', dict] = attr.ib(
default=None,
converter=FunctionDefinitionExecution.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionExecution)),
metadata={AttrMeta.PROPERTY_NAME: "Execution"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-execution"""
p_ResourceAccessPolicies: typing.List[typing.Union['FunctionDefinitionResourceAccessPolicy', dict]] = attr.ib(
default=None,
converter=FunctionDefinitionResourceAccessPolicy.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(FunctionDefinitionResourceAccessPolicy), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "ResourceAccessPolicies"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-resourceaccesspolicies"""
p_Variables: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Variables"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-environment.html#cfn-greengrass-functiondefinition-environment-variables"""
@attr.s
class FunctionDefinitionVersionEnvironment(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.Environment"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html
Property Document:
- ``p_AccessSysfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-accesssysfs
- ``p_Execution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-execution
- ``p_ResourceAccessPolicies``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-resourceaccesspolicies
- ``p_Variables``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-variables
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.Environment"
p_AccessSysfs: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "AccessSysfs"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-accesssysfs"""
p_Execution: typing.Union['FunctionDefinitionVersionExecution', dict] = attr.ib(
default=None,
converter=FunctionDefinitionVersionExecution.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionVersionExecution)),
metadata={AttrMeta.PROPERTY_NAME: "Execution"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-execution"""
p_ResourceAccessPolicies: typing.List[typing.Union['FunctionDefinitionVersionResourceAccessPolicy', dict]] = attr.ib(
default=None,
converter=FunctionDefinitionVersionResourceAccessPolicy.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(FunctionDefinitionVersionResourceAccessPolicy), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "ResourceAccessPolicies"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-resourceaccesspolicies"""
p_Variables: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Variables"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-environment.html#cfn-greengrass-functiondefinitionversion-environment-variables"""
@attr.s
class ResourceDefinitionVersionResourceDataContainer(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.ResourceDataContainer"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html
Property Document:
- ``p_LocalDeviceResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-localdeviceresourcedata
- ``p_LocalVolumeResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-localvolumeresourcedata
- ``p_S3MachineLearningModelResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-s3machinelearningmodelresourcedata
- ``p_SageMakerMachineLearningModelResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-sagemakermachinelearningmodelresourcedata
- ``p_SecretsManagerSecretResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-secretsmanagersecretresourcedata
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.ResourceDataContainer"
p_LocalDeviceResourceData: typing.Union['ResourceDefinitionVersionLocalDeviceResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionLocalDeviceResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionLocalDeviceResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "LocalDeviceResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-localdeviceresourcedata"""
p_LocalVolumeResourceData: typing.Union['ResourceDefinitionVersionLocalVolumeResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionLocalVolumeResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionLocalVolumeResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "LocalVolumeResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-localvolumeresourcedata"""
p_S3MachineLearningModelResourceData: typing.Union['ResourceDefinitionVersionS3MachineLearningModelResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionS3MachineLearningModelResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionS3MachineLearningModelResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "S3MachineLearningModelResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-s3machinelearningmodelresourcedata"""
p_SageMakerMachineLearningModelResourceData: typing.Union['ResourceDefinitionVersionSageMakerMachineLearningModelResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionSageMakerMachineLearningModelResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionSageMakerMachineLearningModelResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "SageMakerMachineLearningModelResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-sagemakermachinelearningmodelresourcedata"""
p_SecretsManagerSecretResourceData: typing.Union['ResourceDefinitionVersionSecretsManagerSecretResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionSecretsManagerSecretResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionVersionSecretsManagerSecretResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "SecretsManagerSecretResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourcedatacontainer.html#cfn-greengrass-resourcedefinitionversion-resourcedatacontainer-secretsmanagersecretresourcedata"""
@attr.s
class ResourceDefinitionResourceDataContainer(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.ResourceDataContainer"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html
Property Document:
- ``p_LocalDeviceResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-localdeviceresourcedata
- ``p_LocalVolumeResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-localvolumeresourcedata
- ``p_S3MachineLearningModelResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-s3machinelearningmodelresourcedata
- ``p_SageMakerMachineLearningModelResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-sagemakermachinelearningmodelresourcedata
- ``p_SecretsManagerSecretResourceData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-secretsmanagersecretresourcedata
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.ResourceDataContainer"
p_LocalDeviceResourceData: typing.Union['ResourceDefinitionLocalDeviceResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionLocalDeviceResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionLocalDeviceResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "LocalDeviceResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-localdeviceresourcedata"""
p_LocalVolumeResourceData: typing.Union['ResourceDefinitionLocalVolumeResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionLocalVolumeResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionLocalVolumeResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "LocalVolumeResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-localvolumeresourcedata"""
p_S3MachineLearningModelResourceData: typing.Union['ResourceDefinitionS3MachineLearningModelResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionS3MachineLearningModelResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionS3MachineLearningModelResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "S3MachineLearningModelResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-s3machinelearningmodelresourcedata"""
p_SageMakerMachineLearningModelResourceData: typing.Union['ResourceDefinitionSageMakerMachineLearningModelResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionSageMakerMachineLearningModelResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionSageMakerMachineLearningModelResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "SageMakerMachineLearningModelResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-sagemakermachinelearningmodelresourcedata"""
p_SecretsManagerSecretResourceData: typing.Union['ResourceDefinitionSecretsManagerSecretResourceData', dict] = attr.ib(
default=None,
converter=ResourceDefinitionSecretsManagerSecretResourceData.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionSecretsManagerSecretResourceData)),
metadata={AttrMeta.PROPERTY_NAME: "SecretsManagerSecretResourceData"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedatacontainer.html#cfn-greengrass-resourcedefinition-resourcedatacontainer-secretsmanagersecretresourcedata"""
@attr.s
class FunctionDefinitionVersionFunctionConfiguration(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.FunctionConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html
Property Document:
- ``p_EncodingType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-encodingtype
- ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-environment
- ``p_ExecArgs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-execargs
- ``p_Executable``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-executable
- ``p_MemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-memorysize
- ``p_Pinned``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-pinned
- ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-timeout
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.FunctionConfiguration"
p_EncodingType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "EncodingType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-encodingtype"""
p_Environment: typing.Union['FunctionDefinitionVersionEnvironment', dict] = attr.ib(
default=None,
converter=FunctionDefinitionVersionEnvironment.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionVersionEnvironment)),
metadata={AttrMeta.PROPERTY_NAME: "Environment"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-environment"""
p_ExecArgs: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ExecArgs"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-execargs"""
p_Executable: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Executable"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-executable"""
p_MemorySize: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "MemorySize"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-memorysize"""
p_Pinned: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "Pinned"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-pinned"""
p_Timeout: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Timeout"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-functionconfiguration.html#cfn-greengrass-functiondefinitionversion-functionconfiguration-timeout"""
@attr.s
class ResourceDefinitionVersionResourceInstance(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion.ResourceInstance"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html
Property Document:
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-id
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-name
- ``rp_ResourceDataContainer``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-resourcedatacontainer
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion.ResourceInstance"
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-id"""
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-name"""
rp_ResourceDataContainer: typing.Union['ResourceDefinitionVersionResourceDataContainer', dict] = attr.ib(
default=None,
converter=ResourceDefinitionVersionResourceDataContainer.from_dict,
validator=attr.validators.instance_of(ResourceDefinitionVersionResourceDataContainer),
metadata={AttrMeta.PROPERTY_NAME: "ResourceDataContainer"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinitionversion-resourceinstance.html#cfn-greengrass-resourcedefinitionversion-resourceinstance-resourcedatacontainer"""
@attr.s
class FunctionDefinitionFunctionConfiguration(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.FunctionConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html
Property Document:
- ``p_EncodingType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-encodingtype
- ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-environment
- ``p_ExecArgs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-execargs
- ``p_Executable``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-executable
- ``p_MemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-memorysize
- ``p_Pinned``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-pinned
- ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-timeout
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.FunctionConfiguration"
p_EncodingType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "EncodingType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-encodingtype"""
p_Environment: typing.Union['FunctionDefinitionEnvironment', dict] = attr.ib(
default=None,
converter=FunctionDefinitionEnvironment.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionEnvironment)),
metadata={AttrMeta.PROPERTY_NAME: "Environment"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-environment"""
p_ExecArgs: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ExecArgs"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-execargs"""
p_Executable: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Executable"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-executable"""
p_MemorySize: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "MemorySize"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-memorysize"""
p_Pinned: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "Pinned"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-pinned"""
p_Timeout: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Timeout"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functionconfiguration.html#cfn-greengrass-functiondefinition-functionconfiguration-timeout"""
@attr.s
class FunctionDefinitionVersionFunction(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion.Function"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html
Property Document:
- ``rp_FunctionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-functionarn
- ``rp_FunctionConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-functionconfiguration
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-id
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion.Function"
rp_FunctionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "FunctionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-functionarn"""
rp_FunctionConfiguration: typing.Union['FunctionDefinitionVersionFunctionConfiguration', dict] = attr.ib(
default=None,
converter=FunctionDefinitionVersionFunctionConfiguration.from_dict,
validator=attr.validators.instance_of(FunctionDefinitionVersionFunctionConfiguration),
metadata={AttrMeta.PROPERTY_NAME: "FunctionConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-functionconfiguration"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinitionversion-function.html#cfn-greengrass-functiondefinitionversion-function-id"""
@attr.s
class FunctionDefinitionDefaultConfig(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.DefaultConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-defaultconfig.html
Property Document:
- ``rp_Execution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-defaultconfig.html#cfn-greengrass-functiondefinition-defaultconfig-execution
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.DefaultConfig"
rp_Execution: typing.Union['FunctionDefinitionExecution', dict] = attr.ib(
default=None,
converter=FunctionDefinitionExecution.from_dict,
validator=attr.validators.instance_of(FunctionDefinitionExecution),
metadata={AttrMeta.PROPERTY_NAME: "Execution"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-defaultconfig.html#cfn-greengrass-functiondefinition-defaultconfig-execution"""
@attr.s
class ResourceDefinitionResourceInstance(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.ResourceInstance"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html
Property Document:
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-id
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-name
- ``rp_ResourceDataContainer``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-resourcedatacontainer
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.ResourceInstance"
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-id"""
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-name"""
rp_ResourceDataContainer: typing.Union['ResourceDefinitionResourceDataContainer', dict] = attr.ib(
default=None,
converter=ResourceDefinitionResourceDataContainer.from_dict,
validator=attr.validators.instance_of(ResourceDefinitionResourceDataContainer),
metadata={AttrMeta.PROPERTY_NAME: "ResourceDataContainer"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourceinstance.html#cfn-greengrass-resourcedefinition-resourceinstance-resourcedatacontainer"""
@attr.s
class FunctionDefinitionFunction(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.Function"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html
Property Document:
- ``rp_FunctionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-functionarn
- ``rp_FunctionConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-functionconfiguration
- ``rp_Id``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-id
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.Function"
rp_FunctionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "FunctionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-functionarn"""
rp_FunctionConfiguration: typing.Union['FunctionDefinitionFunctionConfiguration', dict] = attr.ib(
default=None,
converter=FunctionDefinitionFunctionConfiguration.from_dict,
validator=attr.validators.instance_of(FunctionDefinitionFunctionConfiguration),
metadata={AttrMeta.PROPERTY_NAME: "FunctionConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-functionconfiguration"""
rp_Id: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Id"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-function.html#cfn-greengrass-functiondefinition-function-id"""
@attr.s
class FunctionDefinitionFunctionDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition.FunctionDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html
Property Document:
- ``rp_Functions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html#cfn-greengrass-functiondefinition-functiondefinitionversion-functions
- ``p_DefaultConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html#cfn-greengrass-functiondefinition-functiondefinitionversion-defaultconfig
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition.FunctionDefinitionVersion"
rp_Functions: typing.List[typing.Union['FunctionDefinitionFunction', dict]] = attr.ib(
default=None,
converter=FunctionDefinitionFunction.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(FunctionDefinitionFunction), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Functions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html#cfn-greengrass-functiondefinition-functiondefinitionversion-functions"""
p_DefaultConfig: typing.Union['FunctionDefinitionDefaultConfig', dict] = attr.ib(
default=None,
converter=FunctionDefinitionDefaultConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionDefaultConfig)),
metadata={AttrMeta.PROPERTY_NAME: "DefaultConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-functiondefinition-functiondefinitionversion.html#cfn-greengrass-functiondefinition-functiondefinitionversion-defaultconfig"""
@attr.s
class ResourceDefinitionResourceDefinitionVersion(Property):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition.ResourceDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedefinitionversion.html
Property Document:
- ``rp_Resources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedefinitionversion.html#cfn-greengrass-resourcedefinition-resourcedefinitionversion-resources
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition.ResourceDefinitionVersion"
rp_Resources: typing.List[typing.Union['ResourceDefinitionResourceInstance', dict]] = attr.ib(
default=None,
converter=ResourceDefinitionResourceInstance.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ResourceDefinitionResourceInstance), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Resources"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-greengrass-resourcedefinition-resourcedefinitionversion.html#cfn-greengrass-resourcedefinition-resourcedefinitionversion-resources"""
#--- Resource declaration ---
@attr.s
class ConnectorDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::ConnectorDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html
Property Document:
- ``rp_ConnectorDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html#cfn-greengrass-connectordefinitionversion-connectordefinitionid
- ``rp_Connectors``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html#cfn-greengrass-connectordefinitionversion-connectors
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ConnectorDefinitionVersion"
rp_ConnectorDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ConnectorDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html#cfn-greengrass-connectordefinitionversion-connectordefinitionid"""
rp_Connectors: typing.List[typing.Union['ConnectorDefinitionVersionConnector', dict]] = attr.ib(
default=None,
converter=ConnectorDefinitionVersionConnector.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ConnectorDefinitionVersionConnector), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Connectors"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinitionversion.html#cfn-greengrass-connectordefinitionversion-connectors"""
@attr.s
class ResourceDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-name"""
p_InitialVersion: typing.Union['ResourceDefinitionResourceDefinitionVersion', dict] = attr.ib(
default=None,
converter=ResourceDefinitionResourceDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ResourceDefinitionResourceDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#cfn-greengrass-resourcedefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#aws-resource-greengrass-resourcedefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#aws-resource-greengrass-resourcedefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#aws-resource-greengrass-resourcedefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinition.html#aws-resource-greengrass-resourcedefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
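# Usage sketch (illustrative only): a complete ResourceDefinition resource built
# from the property classes above. The logical id is passed as the first
# argument, assuming the Resource base class used throughout this module takes
# it positionally; names, ids and tags are placeholders.
def _example_resource_definition() -> ResourceDefinition:
    res_def = ResourceDefinition(
        "MyResourceDefinition",
        rp_Name="my-resource-definition",
        p_InitialVersion=ResourceDefinitionResourceDefinitionVersion(
            rp_Resources=[
                ResourceDefinitionResourceInstance(
                    rp_Id="ml-model",
                    rp_Name="MLModel",
                    rp_ResourceDataContainer=ResourceDefinitionResourceDataContainer(
                        p_S3MachineLearningModelResourceData=_example_resource_definition_s3_ml_model(),
                    ),
                ),
            ],
        ),
        p_Tags={"project": "greengrass-demo"},
    )
    # res_def.rv_LatestVersionArn / res_def.rv_Id / res_def.rv_Arn return GetAtt
    # objects that can be referenced elsewhere in the same template.
    return res_def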
@attr.s
class DeviceDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::DeviceDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::DeviceDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-name"""
p_InitialVersion: typing.Union['DeviceDefinitionDeviceDefinitionVersion', dict] = attr.ib(
default=None,
converter=DeviceDefinitionDeviceDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(DeviceDefinitionDeviceDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#cfn-greengrass-devicedefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#aws-resource-greengrass-devicedefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#aws-resource-greengrass-devicedefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#aws-resource-greengrass-devicedefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinition.html#aws-resource-greengrass-devicedefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class LoggerDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::LoggerDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html
Property Document:
- ``rp_LoggerDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html#cfn-greengrass-loggerdefinitionversion-loggerdefinitionid
- ``rp_Loggers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html#cfn-greengrass-loggerdefinitionversion-loggers
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::LoggerDefinitionVersion"
rp_LoggerDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "LoggerDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html#cfn-greengrass-loggerdefinitionversion-loggerdefinitionid"""
rp_Loggers: typing.List[typing.Union['LoggerDefinitionVersionLogger', dict]] = attr.ib(
default=None,
converter=LoggerDefinitionVersionLogger.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(LoggerDefinitionVersionLogger), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Loggers"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinitionversion.html#cfn-greengrass-loggerdefinitionversion-loggers"""
@attr.s
class FunctionDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html
Property Document:
- ``rp_FunctionDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-functiondefinitionid
- ``rp_Functions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-functions
- ``p_DefaultConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-defaultconfig
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinitionVersion"
rp_FunctionDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "FunctionDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-functiondefinitionid"""
rp_Functions: typing.List[typing.Union['FunctionDefinitionVersionFunction', dict]] = attr.ib(
default=None,
converter=FunctionDefinitionVersionFunction.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(FunctionDefinitionVersionFunction), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Functions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-functions"""
p_DefaultConfig: typing.Union['FunctionDefinitionVersionDefaultConfig', dict] = attr.ib(
default=None,
converter=FunctionDefinitionVersionDefaultConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionVersionDefaultConfig)),
metadata={AttrMeta.PROPERTY_NAME: "DefaultConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinitionversion.html#cfn-greengrass-functiondefinitionversion-defaultconfig"""
@attr.s
class Group(Resource):
"""
AWS Object Type = "AWS::Greengrass::Group"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-initialversion
- ``p_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-rolearn
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::Group"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-name"""
p_InitialVersion: typing.Union['GroupGroupVersion', dict] = attr.ib(
default=None,
converter=GroupGroupVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(GroupGroupVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-initialversion"""
p_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-rolearn"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#cfn-greengrass-group-tags"""
@property
def rv_RoleAttachedAt(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#aws-resource-greengrass-group-return-values"""
return GetAtt(resource=self, attr_name="RoleAttachedAt")
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#aws-resource-greengrass-group-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#aws-resource-greengrass-group-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#aws-resource-greengrass-group-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_RoleArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#aws-resource-greengrass-group-return-values"""
return GetAtt(resource=self, attr_name="RoleArn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-group.html#aws-resource-greengrass-group-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class ConnectorDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::ConnectorDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ConnectorDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-name"""
p_InitialVersion: typing.Union['ConnectorDefinitionConnectorDefinitionVersion', dict] = attr.ib(
default=None,
converter=ConnectorDefinitionConnectorDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ConnectorDefinitionConnectorDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#cfn-greengrass-connectordefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#aws-resource-greengrass-connectordefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#aws-resource-greengrass-connectordefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#aws-resource-greengrass-connectordefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-connectordefinition.html#aws-resource-greengrass-connectordefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class FunctionDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::FunctionDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::FunctionDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-name"""
p_InitialVersion: typing.Union['FunctionDefinitionFunctionDefinitionVersion', dict] = attr.ib(
default=None,
converter=FunctionDefinitionFunctionDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(FunctionDefinitionFunctionDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#cfn-greengrass-functiondefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#aws-resource-greengrass-functiondefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#aws-resource-greengrass-functiondefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#aws-resource-greengrass-functiondefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-functiondefinition.html#aws-resource-greengrass-functiondefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class SubscriptionDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::SubscriptionDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html
Property Document:
- ``rp_SubscriptionDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinitionversion-subscriptiondefinitionid
- ``rp_Subscriptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinitionversion-subscriptions
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::SubscriptionDefinitionVersion"
rp_SubscriptionDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SubscriptionDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinitionversion-subscriptiondefinitionid"""
rp_Subscriptions: typing.List[typing.Union['SubscriptionDefinitionVersionSubscription', dict]] = attr.ib(
default=None,
converter=SubscriptionDefinitionVersionSubscription.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(SubscriptionDefinitionVersionSubscription), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Subscriptions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinitionversion.html#cfn-greengrass-subscriptiondefinitionversion-subscriptions"""
@attr.s
class CoreDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::CoreDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html
Property Document:
- ``rp_CoreDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html#cfn-greengrass-coredefinitionversion-coredefinitionid
- ``rp_Cores``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html#cfn-greengrass-coredefinitionversion-cores
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::CoreDefinitionVersion"
rp_CoreDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CoreDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html#cfn-greengrass-coredefinitionversion-coredefinitionid"""
rp_Cores: typing.List[typing.Union['CoreDefinitionVersionCore', dict]] = attr.ib(
default=None,
converter=CoreDefinitionVersionCore.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(CoreDefinitionVersionCore), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Cores"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinitionversion.html#cfn-greengrass-coredefinitionversion-cores"""
@attr.s
class LoggerDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::LoggerDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::LoggerDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-name"""
p_InitialVersion: typing.Union['LoggerDefinitionLoggerDefinitionVersion', dict] = attr.ib(
default=None,
converter=LoggerDefinitionLoggerDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(LoggerDefinitionLoggerDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#cfn-greengrass-loggerdefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#aws-resource-greengrass-loggerdefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#aws-resource-greengrass-loggerdefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#aws-resource-greengrass-loggerdefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-loggerdefinition.html#aws-resource-greengrass-loggerdefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class CoreDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::CoreDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::CoreDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-name"""
p_InitialVersion: typing.Union['CoreDefinitionCoreDefinitionVersion', dict] = attr.ib(
default=None,
converter=CoreDefinitionCoreDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(CoreDefinitionCoreDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#cfn-greengrass-coredefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#aws-resource-greengrass-coredefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#aws-resource-greengrass-coredefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#aws-resource-greengrass-coredefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-coredefinition.html#aws-resource-greengrass-coredefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class DeviceDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::DeviceDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html
Property Document:
- ``rp_DeviceDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html#cfn-greengrass-devicedefinitionversion-devicedefinitionid
- ``rp_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html#cfn-greengrass-devicedefinitionversion-devices
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::DeviceDefinitionVersion"
rp_DeviceDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DeviceDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html#cfn-greengrass-devicedefinitionversion-devicedefinitionid"""
rp_Devices: typing.List[typing.Union['DeviceDefinitionVersionDevice', dict]] = attr.ib(
default=None,
converter=DeviceDefinitionVersionDevice.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(DeviceDefinitionVersionDevice), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Devices"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-devicedefinitionversion.html#cfn-greengrass-devicedefinitionversion-devices"""
@attr.s
class SubscriptionDefinition(Resource):
"""
AWS Object Type = "AWS::Greengrass::SubscriptionDefinition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-name
- ``p_InitialVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-initialversion
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-tags
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::SubscriptionDefinition"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-name"""
p_InitialVersion: typing.Union['SubscriptionDefinitionSubscriptionDefinitionVersion', dict] = attr.ib(
default=None,
converter=SubscriptionDefinitionSubscriptionDefinitionVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(SubscriptionDefinitionSubscriptionDefinitionVersion)),
metadata={AttrMeta.PROPERTY_NAME: "InitialVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-initialversion"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#cfn-greengrass-subscriptiondefinition-tags"""
@property
def rv_LatestVersionArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#aws-resource-greengrass-subscriptiondefinition-return-values"""
return GetAtt(resource=self, attr_name="LatestVersionArn")
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#aws-resource-greengrass-subscriptiondefinition-return-values"""
return GetAtt(resource=self, attr_name="Id")
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#aws-resource-greengrass-subscriptiondefinition-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_Name(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-subscriptiondefinition.html#aws-resource-greengrass-subscriptiondefinition-return-values"""
return GetAtt(resource=self, attr_name="Name")
@attr.s
class ResourceDefinitionVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::ResourceDefinitionVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html
Property Document:
- ``rp_ResourceDefinitionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html#cfn-greengrass-resourcedefinitionversion-resourcedefinitionid
- ``rp_Resources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html#cfn-greengrass-resourcedefinitionversion-resources
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::ResourceDefinitionVersion"
rp_ResourceDefinitionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ResourceDefinitionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html#cfn-greengrass-resourcedefinitionversion-resourcedefinitionid"""
rp_Resources: typing.List[typing.Union['ResourceDefinitionVersionResourceInstance', dict]] = attr.ib(
default=None,
converter=ResourceDefinitionVersionResourceInstance.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ResourceDefinitionVersionResourceInstance), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Resources"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-resourcedefinitionversion.html#cfn-greengrass-resourcedefinitionversion-resources"""
@attr.s
class GroupVersion(Resource):
"""
AWS Object Type = "AWS::Greengrass::GroupVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html
Property Document:
- ``rp_GroupId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-groupid
- ``p_ConnectorDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-connectordefinitionversionarn
- ``p_CoreDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-coredefinitionversionarn
- ``p_DeviceDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-devicedefinitionversionarn
- ``p_FunctionDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-functiondefinitionversionarn
- ``p_LoggerDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-loggerdefinitionversionarn
- ``p_ResourceDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-resourcedefinitionversionarn
- ``p_SubscriptionDefinitionVersionArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-subscriptiondefinitionversionarn
"""
AWS_OBJECT_TYPE = "AWS::Greengrass::GroupVersion"
rp_GroupId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "GroupId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-groupid"""
p_ConnectorDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectorDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-connectordefinitionversionarn"""
p_CoreDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CoreDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-coredefinitionversionarn"""
p_DeviceDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeviceDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-devicedefinitionversionarn"""
p_FunctionDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "FunctionDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-functiondefinitionversionarn"""
p_LoggerDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "LoggerDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-loggerdefinitionversionarn"""
p_ResourceDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ResourceDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-resourcedefinitionversionarn"""
p_SubscriptionDefinitionVersionArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SubscriptionDefinitionVersionArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-greengrass-groupversion.html#cfn-greengrass-groupversion-subscriptiondefinitionversionarn"""
|
py | 1a33ffc125ea9157c66e1f3945667b69140a6f1f | from jcudc24ingesterapi.models.sampling import _Sampling
from jcudc24ingesterapi import typed, APIDomainObject
from simplesos.client import SOSVersions
from simplesos.varients import _52North, SOSVariants, getSOSVariant
"""
Defines all possible data sources, in other words the data input methods that can be provisioned.
"""
__author__ = 'Casey Bajema'
class _DataSource(APIDomainObject):
"""
Base data source class that does nothing beyond defining a known type.
Data sources are known types that provide a known set of information but are unrelated to the data type.
The ingester platform will need to implement data type specific ingesters for each data source.
"""
processing_script = typed("_processing_script", str, "Script to run after download")
def __init__(self, processing_script=None):
self.processing_script = processing_script
class DatasetDataSource(_DataSource):
"""
Uses the resulting data_entry from another dataset and processes it further.
"""
__xmlrpc_class__ = "dataset_data_source"
dataset_id = typed("_dataset_id", int, "")
def __init__(self, dataset_id=None, processing_script=None):
self.dataset_id = dataset_id
self.processing_script = processing_script
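# Hedged sketch (illustrative only): chain one dataset's data entries into another.
# ``source_dataset_id`` is assumed to be the integer id of an existing dataset, and the
# processing script body below is a placeholder.
def _example_dataset_chain(source_dataset_id):
    return DatasetDataSource(
        dataset_id=source_dataset_id,
        processing_script="# post-process the incoming data_entry here",
    )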
class PullDataSource(_DataSource):
"""
A data source that polls a URI for data of the dataset's data type.
"""
__xmlrpc_class__ = "pull_data_source"
url = typed("_url", (str,unicode), "URL of the directory to scan")
pattern = typed("_pattern", (str,unicode), "Pattern for identifying files, regex")
recursive = typed("_recursive", bool, "Should the URL be treated as an index page")
mime_type = typed("_mime_type", (str,unicode), "Mime type of the file")
field = typed("_field", (str,unicode), "Field name to ingest into")
sampling = typed("_sampling", _Sampling, "Script to run to determine when to sample")
def __init__(self, url=None, pattern=None, recursive=False, mime_type=None, field=None, processing_script=None, sampling=None):
"""Initialise the PullDataSource with a URI for the source file, and the field that
the uri will be saved to.
"""
self.url = url
self.field = field
self.pattern = pattern
self.mime_type = mime_type
self.processing_script = processing_script
self.sampling = sampling
self.recursive = recursive
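# Hedged usage sketch (illustrative only; the URL, pattern and field below are hypothetical).
def _example_pull_data_source():
    """Build a PullDataSource that scans an index page for CSV files.

    A real configuration would normally also supply a ``_Sampling`` instance and,
    optionally, a post-download ``processing_script``.
    """
    return PullDataSource(
        url="http://example.org/sensor-data/",
        pattern=r".*\.csv$",
        recursive=True,
        mime_type="text/csv",
        field="raw_file",
        processing_script=None,
        sampling=None,
    )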
class PushDataSource(_DataSource):
"""
A data source where the external application will use the ingester platform API to pass data into.
"""
__xmlrpc_class__ = "push_data_source"
path = typed("_path", (str,unicode), "Path to monitor for new files")
pattern = typed("_pattern", (str,unicode), "Pattern for identifying files, regex")
archive = typed("_archive", (str,unicode), "Path where processed files are archived")
field = typed("_field", (str,unicode), "Field name to ingest into")
sampling = typed("_sampling", _Sampling, "Script to run to determine when to sample")
def __init__(self, path=None, pattern=None, archive=None, field=None, sampling=None):
self.path = path
self.pattern = pattern
self.archive = archive
self.field = field
self.sampling = sampling
class SOSScraperDataSource(_DataSource):
__xmlrpc_class__ = "sos_scraper_data_source"
url = typed("_url", (str,unicode), "URL of the directory to scan")
field = typed("_field", (str,unicode), "Field name to ingest into")
sampling = typed("_sampling", _Sampling, "Script to run to determine when to sample")
variant = typed("_variant", (str,unicode), "The SOS varient.")
version = typed("_version", (str,unicode), "The SOS API version to use.")
def __init__(self, url=None, field=None, sampling=None, processing_script=None, version=SOSVersions.v_1_0_0, variant="52North"):
self.url = url
self.field = field
self.sampling = sampling
self.variant = variant
self.version = version
self.processing_script = processing_script
class SOSDataSource(_DataSource):
"""
A data source that provides a Sensor Observation Service accessible over the web.
SOS standards will be followed such as:
* No authentication required
* Invalid data is dropped
""" # TODO: Work out the exact implementation details
sensor_id = None # Need to check the sensor_id type
sensorml = None
pass
class UploadDataSource(_DataSource):
"""
A data source where the user manually uploads a file using the provisioning system.
This data source will be very similar to PushDataSource but:
* Won't require authentication as it is using the standard provisioning system API by passing a data_entry object
* The provisioning system will setup an upload form.
"""
pass
class FormDataSource(_DataSource):
"""
A data source where the user manually enters data into a form within the provisioning interface
The data entry's will be passed to the ingester platform through the API as data_entry objects.
"""
__xmlrpc_class__ = "form_data_source"
pass
class DataTurbineDataSource(_DataSource):
"""
A data source that implements a data turbine sink.
"""
__xmlrpc_class__ = "data_turbine_data_source"
url = typed("_url", (str,unicode), "URL of the directory to scan")
mime_type = typed("_mime_type", (str,unicode), "Mime type of the channels to read from.")
data_type = typed("_data_type", (str,unicode), "What type data will be read from data turbine as (eg Float32)")
field = typed("_field", (str,unicode), "Field name to ingest into")
sampling = typed("_sampling", _Sampling, "Script to run to determine when to sample")
    def __init__(self, url=None, data_type=None, mime_type=None, field=None, processing_script=None, sampling=None):
        """Initialise the DataTurbineDataSource with the URL of the data turbine source, and the field that
        the data will be saved to.
        """
self.url = url
self.field = field
self.mime_type = mime_type
self.data_type = data_type
self.processing_script = processing_script
self.sampling = sampling
|
py | 1a3401969c065a5cecb98311672a6c43b6e32c10 | import torch
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
from torch.utils.data import DataLoader
import dataset
import simple_net
def train_one_epoch(network, criterion, trainloader, optimizer):
network.train()
losses = []
correct = 0
total = 0
for idx, (feature, label) in enumerate(trainloader):
optimizer.zero_grad()
output = network(feature)
_, ind = torch.max(output, dim = 1)
correct += (ind == label).sum().item()
total += len(label)
loss = criterion(output, label)
losses.append(loss.item())
loss.backward()
optimizer.step()
message = '\r[{:5d}/{:5d}({:3.0%})] train loss: {:.2f}\ttrain acc: {:.2%}'.format(len(label) * idx, 40000, len(label) * idx / 40000, loss, correct / total)
print(message, end = '')
print()
message = 'Train Avg loss: {:.2f}\tTrain Acc: {:.2%}'.format(sum(losses) / len(losses), correct / total)
print(message)
def valid(network, validloader):
network.eval()
correct = 0
total = 0
with torch.no_grad():
for (feature, label) in validloader:
output = network(feature)
_, idx = torch.max(output, dim = 1)
correct += (idx == label).sum().item()
total += len(label)
message = 'Valid Acc: {:.2%}'.format(correct / total)
print(message)
def train(network, criterion, trainloader, validloader, optimizer, scheduler, start_epoch = 0, n_epochs = 20):
for _ in range(start_epoch):
scheduler.step()
for epoch in range(start_epoch, n_epochs):
train_one_epoch(network, criterion, trainloader, optimizer)
scheduler.step()
if (epoch + 1) % 3 == 0:
valid(network, validloader)
            torch.save({'state_dict': network.state_dict(),
'optimizer': optimizer.state_dict()},
'checkpoint.pth')
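# Hedged sketch (not in the original script): reload the checkpoint written by train() above.
# It mirrors the dict layout and hard-coded file name used in the torch.save call.
def resume_from_checkpoint(network, optimizer, path = 'checkpoint.pth'):
    checkpoint = torch.load(path)
    network.load_state_dict(checkpoint['state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer'])
    return network, optimizer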
def main():
trainset = dataset.Trainset()
validset = dataset.Trainset(training = False)
trainloader = DataLoader(trainset, batch_size = 64, shuffle = True, num_workers = 4)
validloader = DataLoader(validset, batch_size = 16, shuffle = True, num_workers = 4)
network = simple_net.SimpleNet()
optimizer = optim.SGD(network.parameters(), lr = 0.001, momentum = 0.9, weight_decay = 0.00001)
criterion = torch.nn.CrossEntropyLoss()
scheduler = lr_scheduler.StepLR(optimizer, step_size = 5, gamma = 0.5, last_epoch = -1)
train(network, criterion, trainloader, validloader, optimizer, scheduler)
if __name__ == "__main__":
main() |
py | 1a3401f3c8bc87828201970c3fafe708430643e5 | from typing import List, Optional, Callable, Union, Any, Tuple
import re
import copy
import warnings
import numpy as np
import os.path as osp
from collections.abc import Sequence
import torch.utils.data
from torch import Tensor
from .data import Data
from .utils import makedirs
IndexType = Union[slice, Tensor, np.ndarray, Sequence]
class Dataset(torch.utils.data.Dataset):
r"""Dataset base class for creating graph datasets.
See `here <https://pytorch-geometric.readthedocs.io/en/latest/notes/
create_dataset.html>`__ for the accompanying tutorial.
Args:
root (string, optional): Root directory where the dataset should be
saved. (optional: :obj:`None`)
transform (callable, optional): A function/transform that takes in an
:obj:`torch_geometric.data.Data` object and returns a transformed
version. The data object will be transformed before every access.
(default: :obj:`None`)
pre_transform (callable, optional): A function/transform that takes in
an :obj:`torch_geometric.data.Data` object and returns a
transformed version. The data object will be transformed before
being saved to disk. (default: :obj:`None`)
pre_filter (callable, optional): A function that takes in an
:obj:`torch_geometric.data.Data` object and returns a boolean
value, indicating whether the data object should be included in the
final dataset. (default: :obj:`None`)
"""
@property
def raw_file_names(self) -> Union[str, List[str], Tuple]:
r"""The name of the files to find in the :obj:`self.raw_dir` folder in
order to skip the download."""
raise NotImplementedError
@property
def processed_file_names(self) -> Union[str, List[str], Tuple]:
r"""The name of the files to find in the :obj:`self.processed_dir`
folder in order to skip the processing."""
raise NotImplementedError
def download(self):
r"""Downloads the dataset to the :obj:`self.raw_dir` folder."""
raise NotImplementedError
def process(self):
r"""Processes the dataset to the :obj:`self.processed_dir` folder."""
raise NotImplementedError
def len(self) -> int:
raise NotImplementedError
def get(self, idx: int) -> Data:
r"""Gets the data object at index :obj:`idx`."""
raise NotImplementedError
def __init__(
self,
root: Optional[str] = None,
transform: Optional[Callable] = None,
pre_transform: Optional[Callable] = None,
pre_filter: Optional[Callable] = None,
):
super().__init__()
if isinstance(root, str):
root = osp.expanduser(osp.normpath(root))
self.root = root
self.transform = transform
self.pre_transform = pre_transform
self.pre_filter = pre_filter
self._indices: Optional[Sequence] = None
if "download" in self.__class__.__dict__.keys():
self._download()
if "process" in self.__class__.__dict__.keys():
self._process()
def indices(self) -> Sequence:
return range(self.len()) if self._indices is None else self._indices
@property
def raw_dir(self) -> str:
return osp.join(self.root, "raw")
@property
def processed_dir(self) -> str:
return osp.join(self.root, "processed")
@property
def num_node_features(self) -> int:
r"""Returns the number of features per node in the dataset."""
data = self[0]
if hasattr(data, "num_node_features"):
return data.num_node_features
raise AttributeError(
f"'{data.__class__.__name__}' object has no "
f"attribute 'num_node_features'"
)
@property
def num_features(self) -> int:
r"""Alias for :py:attr:`~num_node_features`."""
return self.num_node_features
@property
def num_edge_features(self) -> int:
r"""Returns the number of features per edge in the dataset."""
data = self[0]
if hasattr(data, "num_edge_features"):
return data.num_edge_features
raise AttributeError(
f"'{data.__class__.__name__}' object has no "
f"attribute 'num_edge_features'"
)
@property
def raw_paths(self) -> List[str]:
r"""The filepaths to find in order to skip the download."""
files = to_list(self.raw_file_names)
return [osp.join(self.raw_dir, f) for f in files]
@property
def processed_paths(self) -> List[str]:
r"""The filepaths to find in the :obj:`self.processed_dir`
folder in order to skip the processing."""
files = to_list(self.processed_file_names)
return [osp.join(self.processed_dir, f) for f in files]
def _download(self):
if files_exist(self.raw_paths): # pragma: no cover
return
makedirs(self.raw_dir)
self.download()
def _process(self):
f = osp.join(self.processed_dir, "pre_transform.pt")
if osp.exists(f) and torch.load(f) != _repr(self.pre_transform):
warnings.warn(
f"The `pre_transform` argument differs from the one used in "
f"the pre-processed version of this dataset. If you want to "
f"make use of another pre-processing technique, make sure to "
f"sure to delete '{self.processed_dir}' first"
)
f = osp.join(self.processed_dir, "pre_filter.pt")
if osp.exists(f) and torch.load(f) != _repr(self.pre_filter):
warnings.warn(
"The `pre_filter` argument differs from the one used in the "
"pre-processed version of this dataset. If you want to make "
"use of another pre-fitering technique, make sure to delete "
"'{self.processed_dir}' first"
)
if files_exist(self.processed_paths): # pragma: no cover
return
print("Processing...")
makedirs(self.processed_dir)
self.process()
path = osp.join(self.processed_dir, "pre_transform.pt")
torch.save(_repr(self.pre_transform), path)
path = osp.join(self.processed_dir, "pre_filter.pt")
torch.save(_repr(self.pre_filter), path)
print("Done!")
def __len__(self) -> int:
r"""The number of examples in the dataset."""
return len(self.indices())
def __getitem__(
self,
idx: Union[int, np.integer, IndexType],
) -> Union["Dataset", Data]:
r"""In case :obj:`idx` is of type integer, will return the data object
at index :obj:`idx` (and transforms it in case :obj:`transform` is
present).
In case :obj:`idx` is a slicing object, *e.g.*, :obj:`[2:5]`, a list, a
tuple, a PyTorch :obj:`LongTensor` or a :obj:`BoolTensor`, or a numpy
:obj:`np.array`, will return a subset of the dataset at the specified
indices."""
if (
isinstance(idx, (int, np.integer))
or (isinstance(idx, Tensor) and idx.dim() == 0)
or (isinstance(idx, np.ndarray) and np.isscalar(idx))
):
data = self.get(self.indices()[idx])
data = data if self.transform is None else self.transform(data)
return data
else:
return self.index_select(idx)
def index_select(self, idx: IndexType) -> "Dataset":
indices = self.indices()
if isinstance(idx, slice):
indices = indices[idx]
elif isinstance(idx, Tensor) and idx.dtype == torch.long:
return self.index_select(idx.flatten().tolist())
elif isinstance(idx, Tensor) and idx.dtype == torch.bool:
idx = idx.flatten().nonzero(as_tuple=False)
return self.index_select(idx.flatten().tolist())
elif isinstance(idx, np.ndarray) and idx.dtype == np.int64:
return self.index_select(idx.flatten().tolist())
        elif isinstance(idx, np.ndarray) and idx.dtype == bool:
idx = idx.flatten().nonzero()[0]
return self.index_select(idx.flatten().tolist())
elif isinstance(idx, Sequence) and not isinstance(idx, str):
indices = [indices[i] for i in idx]
else:
raise IndexError(
f"Only integers, slices (':'), list, tuples, torch.tensor and "
f"np.ndarray of dtype long or bool are valid indices (got "
f"'{type(idx).__name__}')"
)
dataset = copy.copy(self)
dataset._indices = indices
return dataset
def shuffle(
self,
return_perm: bool = False,
) -> Union["Dataset", Tuple["Dataset", Tensor]]:
r"""Randomly shuffles the examples in the dataset.
Args:
return_perm (bool, optional): If set to :obj:`True`, will return
the random permutation used to shuffle the dataset in addition.
(default: :obj:`False`)
"""
perm = torch.randperm(len(self))
dataset = self.index_select(perm)
return (dataset, perm) if return_perm is True else dataset
def __repr__(self) -> str:
arg_repr = str(len(self)) if len(self) > 1 else ""
return f"{self.__class__.__name__}({arg_repr})"
def to_list(value: Any) -> Sequence:
if isinstance(value, Sequence) and not isinstance(value, str):
return value
else:
return [value]
def files_exist(files: List[str]) -> bool:
# NOTE: We return `False` in case `files` is empty, leading to a
# re-processing of files on every instantiation.
return len(files) != 0 and all([osp.exists(f) for f in files])
def _repr(obj: Any) -> str:
if obj is None:
return "None"
return re.sub("(<.*?)\\s.*(>)", r"\1\2", obj.__repr__())
|
py | 1a3403717b4af5d29c174843fdff29a1ed2f8362 | #!/usr/bin/env python
from cereal import car, log
from common.realtime import sec_since_boot
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.drive_helpers import create_event, EventTypes as ET
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.car.gm.values import DBC, CAR, STOCK_CONTROL_MSGS, AUDIO_HUD
from selfdrive.car.gm.carstate import CarState, CruiseButtons, get_powertrain_can_parser
try:
from selfdrive.car.gm.carcontroller import CarController
except ImportError:
CarController = None
class CanBus(object):
def __init__(self):
self.powertrain = 0
self.obstacle = 1
self.chassis = 2
self.sw_gmlan = 3
class CarInterface(object):
def __init__(self, CP, sendcan=None):
self.CP = CP
self.frame = 0
self.gas_pressed_prev = False
self.brake_pressed_prev = False
self.can_invalid_count = 0
self.acc_active_prev = 0
# *** init the major players ***
canbus = CanBus()
self.CS = CarState(CP, canbus)
self.VM = VehicleModel(CP)
self.pt_cp = get_powertrain_can_parser(CP, canbus)
self.ch_cp_dbc_name = DBC[CP.carFingerprint]['chassis']
# sending if read only is False
if sendcan is not None:
self.sendcan = sendcan
self.CC = CarController(canbus, CP.carFingerprint, CP.enableCamera)
@staticmethod
def compute_gb(accel, speed):
return float(accel) / 4.0
@staticmethod
def calc_accel_override(a_ego, a_target, v_ego, v_target):
return 1.0
@staticmethod
def get_params(candidate, fingerprint):
ret = car.CarParams.new_message()
ret.carName = "gm"
ret.carFingerprint = candidate
ret.enableCruise = False
# Presence of a camera on the object bus is ok.
# Have to go passive if ASCM is online (ACC-enabled cars),
# or camera is on powertrain bus (LKA cars without ACC).
ret.enableCamera = not any(x for x in STOCK_CONTROL_MSGS[candidate] if x in fingerprint)
std_cargo = 136
if candidate == CAR.VOLT:
      # supports stop and go, but initial engage must be above 18mph (which includes conservatism)
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
# kg of standard extra cargo to count for driver, gas, etc...
ret.mass = 1607 + std_cargo
ret.safetyModel = car.CarParams.SafetyModels.gm
ret.wheelbase = 2.69
ret.steerRatio = 15.7
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4 # wild guess
elif candidate == CAR.MALIBU:
      # supports stop and go, but initial engage must be above 18mph (which includes conservatism)
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1496 + std_cargo
ret.safetyModel = car.CarParams.SafetyModels.gm
ret.wheelbase = 2.83
ret.steerRatio = 15.8
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4 # wild guess
elif candidate == CAR.HOLDEN_ASTRA:
# kg of standard extra cargo to count for driver, gas, etc...
ret.mass = 1363 + std_cargo
ret.wheelbase = 2.662
# Remaining parameters copied from Volt for now
ret.centerToFront = ret.wheelbase * 0.4
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.safetyModel = car.CarParams.SafetyModels.gm
ret.steerRatio = 15.7
ret.steerRatioRear = 0.
    elif candidate == CAR.EQUINOX:
# kg of standard extra cargo to count for driver, gas, etc...
ret.mass = 1363 + std_cargo
ret.wheelbase = 2.662
      # Remaining parameters copied from Volt for now. Lowered set speed to test
ret.centerToFront = ret.wheelbase * 0.4
ret.minEnableSpeed = 5 * CV.MPH_TO_MS
ret.safetyModel = car.CarParams.SafetyModels.gm
ret.steerRatio = 15.7
ret.steerRatioRear = 0.
elif candidate == CAR.CADILLAC_CT6:
# engage speed is decided by pcm
ret.minEnableSpeed = -1
# kg of standard extra cargo to count for driver, gas, etc...
ret.mass = 4016. * CV.LB_TO_KG + std_cargo
ret.safetyModel = car.CarParams.SafetyModels.cadillac
ret.wheelbase = 3.11
ret.steerRatio = 14.6 # it's 16.3 without rear active steering
ret.steerRatioRear = 0. # TODO: there is RAS on this car!
ret.centerToFront = ret.wheelbase * 0.465
# hardcoding honda civic 2016 touring params so they can be used to
# scale unknown params for other cars
mass_civic = 2923. * CV.LB_TO_KG + std_cargo
wheelbase_civic = 2.70
centerToFront_civic = wheelbase_civic * 0.4
centerToRear_civic = wheelbase_civic - centerToFront_civic
rotationalInertia_civic = 2500
tireStiffnessFront_civic = 85400
tireStiffnessRear_civic = 90000
centerToRear = ret.wheelbase - ret.centerToFront
# TODO: get actual value, for now starting with reasonable value for
# civic and scaling by mass and wheelbase
ret.rotationalInertia = rotationalInertia_civic * \
ret.mass * ret.wheelbase**2 / (mass_civic * wheelbase_civic**2)
# TODO: start from empirically derived lateral slip stiffness for the civic and scale by
# mass and CG position, so all cars will have approximately similar dyn behaviors
ret.tireStiffnessFront = tireStiffnessFront_civic * \
ret.mass / mass_civic * \
(centerToRear / ret.wheelbase) / (centerToRear_civic / wheelbase_civic)
ret.tireStiffnessRear = tireStiffnessRear_civic * \
ret.mass / mass_civic * \
(ret.centerToFront / ret.wheelbase) / (centerToFront_civic / wheelbase_civic)
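    # Descriptive note (added, not in the original source): the Civic-referenced scaling
    # above makes each axle's tire stiffness proportional to the car's mass times the
    # fraction of that mass carried by the axle (centerToRear/wheelbase for the front),
    # so cars without measured values inherit roughly Civic-like lateral dynamics.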
# same tuning for Volt and CT6 for now
ret.steerKiBP, ret.steerKpBP = [[0.], [0.]]
ret.steerKpV, ret.steerKiV = [[0.2], [0.00]]
ret.steerKf = 0.00004 # full torque for 20 deg at 80mph means 0.00007818594
ret.steerMaxBP = [0.] # m/s
ret.steerMaxV = [1.]
ret.gasMaxBP = [0.]
ret.gasMaxV = [.5]
ret.brakeMaxBP = [0.]
ret.brakeMaxV = [1.]
ret.longPidDeadzoneBP = [0.]
ret.longPidDeadzoneV = [0.]
ret.longitudinalKpBP = [5., 35.]
ret.longitudinalKpV = [2.4, 1.5]
ret.longitudinalKiBP = [0.]
ret.longitudinalKiV = [0.36]
ret.steerLimitAlert = True
ret.stoppingControl = True
ret.startAccel = 0.8
ret.steerActuatorDelay = 0.1 # Default delay, not measured yet
ret.steerRateCost = 1.0
ret.steerControlType = car.CarParams.SteerControlType.torque
return ret
# returns a car.CarState
def update(self, c):
self.pt_cp.update(int(sec_since_boot() * 1e9), False)
self.CS.update(self.pt_cp)
# create message
ret = car.CarState.new_message()
# speeds
ret.vEgo = self.CS.v_ego
ret.aEgo = self.CS.a_ego
ret.vEgoRaw = self.CS.v_ego_raw
ret.yawRate = self.VM.yaw_rate(self.CS.angle_steers * CV.DEG_TO_RAD, self.CS.v_ego)
ret.standstill = self.CS.standstill
ret.wheelSpeeds.fl = self.CS.v_wheel_fl
ret.wheelSpeeds.fr = self.CS.v_wheel_fr
ret.wheelSpeeds.rl = self.CS.v_wheel_rl
ret.wheelSpeeds.rr = self.CS.v_wheel_rr
# gas pedal information.
ret.gas = self.CS.pedal_gas / 254.0
ret.gasPressed = self.CS.user_gas_pressed
# brake pedal
ret.brake = self.CS.user_brake / 0xd0
ret.brakePressed = self.CS.brake_pressed
# steering wheel
ret.steeringAngle = self.CS.angle_steers
# torque and user override. Driver awareness
# timer resets when the user uses the steering wheel.
ret.steeringPressed = self.CS.steer_override
ret.steeringTorque = self.CS.steer_torque_driver
# cruise state
ret.cruiseState.available = bool(self.CS.main_on)
cruiseEnabled = self.CS.pcm_acc_status != 0
ret.cruiseState.enabled = cruiseEnabled
ret.cruiseState.standstill = self.CS.pcm_acc_status == 4
ret.leftBlinker = self.CS.left_blinker_on
ret.rightBlinker = self.CS.right_blinker_on
ret.doorOpen = not self.CS.door_all_closed
ret.seatbeltUnlatched = not self.CS.seatbelt
ret.gearShifter = self.CS.gear_shifter
buttonEvents = []
# blinkers
if self.CS.left_blinker_on != self.CS.prev_left_blinker_on:
be = car.CarState.ButtonEvent.new_message()
be.type = 'leftBlinker'
be.pressed = self.CS.left_blinker_on
buttonEvents.append(be)
if self.CS.right_blinker_on != self.CS.prev_right_blinker_on:
be = car.CarState.ButtonEvent.new_message()
be.type = 'rightBlinker'
be.pressed = self.CS.right_blinker_on
buttonEvents.append(be)
if self.CS.cruise_buttons != self.CS.prev_cruise_buttons:
be = car.CarState.ButtonEvent.new_message()
be.type = 'unknown'
if self.CS.cruise_buttons != CruiseButtons.UNPRESS:
be.pressed = True
but = self.CS.cruise_buttons
else:
be.pressed = False
but = self.CS.prev_cruise_buttons
if but == CruiseButtons.RES_ACCEL:
if not (cruiseEnabled and self.CS.standstill):
be.type = 'accelCruise' # Suppress resume button if we're resuming from stop so we don't adjust speed.
elif but == CruiseButtons.DECEL_SET:
be.type = 'decelCruise'
elif but == CruiseButtons.CANCEL:
be.type = 'cancel'
elif but == CruiseButtons.MAIN:
be.type = 'altButton3'
buttonEvents.append(be)
ret.buttonEvents = buttonEvents
events = []
if not self.CS.can_valid:
self.can_invalid_count += 1
if self.can_invalid_count >= 5:
events.append(create_event('commIssue', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE]))
else:
self.can_invalid_count = 0
if self.CS.steer_error:
events.append(create_event('steerUnavailable', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE, ET.PERMANENT]))
if self.CS.steer_not_allowed:
events.append(create_event('steerTempUnavailable', [ET.NO_ENTRY, ET.WARNING]))
if ret.doorOpen:
events.append(create_event('doorOpen', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if ret.seatbeltUnlatched:
events.append(create_event('seatbeltNotLatched', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if self.CS.car_fingerprint in (CAR.VOLT, CAR.MALIBU, CAR.HOLDEN_ASTRA):
if self.CS.brake_error:
events.append(create_event('brakeUnavailable', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE, ET.PERMANENT]))
if not self.CS.gear_shifter_valid:
events.append(create_event('wrongGear', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if self.CS.esp_disabled:
events.append(create_event('espDisabled', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if not self.CS.main_on:
events.append(create_event('wrongCarMode', [ET.NO_ENTRY, ET.USER_DISABLE]))
if self.CS.gear_shifter == 3:
events.append(create_event('reverseGear', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE]))
if ret.vEgo < self.CP.minEnableSpeed:
events.append(create_event('speedTooLow', [ET.NO_ENTRY]))
if self.CS.park_brake:
events.append(create_event('parkBrake', [ET.NO_ENTRY, ET.USER_DISABLE]))
# disable on pedals rising edge or when brake is pressed and speed isn't zero
if (ret.gasPressed and not self.gas_pressed_prev) or \
(ret.brakePressed): # and (not self.brake_pressed_prev or ret.vEgo > 0.001)):
events.append(create_event('pedalPressed', [ET.NO_ENTRY, ET.USER_DISABLE]))
if ret.gasPressed:
events.append(create_event('pedalPressed', [ET.PRE_ENABLE]))
if ret.cruiseState.standstill:
events.append(create_event('resumeRequired', [ET.WARNING]))
# handle button presses
for b in ret.buttonEvents:
# do enable on both accel and decel buttons
if b.type in ["accelCruise", "decelCruise"] and not b.pressed:
events.append(create_event('buttonEnable', [ET.ENABLE]))
# do disable on button down
if b.type == "cancel" and b.pressed:
events.append(create_event('buttonCancel', [ET.USER_DISABLE]))
if self.CS.car_fingerprint == CAR.CADILLAC_CT6:
if self.CS.acc_active and not self.acc_active_prev:
events.append(create_event('pcmEnable', [ET.ENABLE]))
if not self.CS.acc_active:
events.append(create_event('pcmDisable', [ET.USER_DISABLE]))
ret.events = events
# update previous brake/gas pressed
self.acc_active_prev = self.CS.acc_active
self.gas_pressed_prev = ret.gasPressed
self.brake_pressed_prev = ret.brakePressed
# cast to reader so it can't be modified
return ret.as_reader()
# pass in a car.CarControl
# to be called @ 100hz
def apply(self, c, perception_state=log.Live20Data.new_message()):
hud_v_cruise = c.hudControl.setSpeed
if hud_v_cruise > 70:
hud_v_cruise = 0
chime, chime_count = AUDIO_HUD[c.hudControl.audibleAlert.raw]
# For Openpilot, "enabled" includes pre-enable.
# In GM, PCM faults out if ACC command overlaps user gas.
enabled = c.enabled and not self.CS.user_gas_pressed
self.CC.update(self.sendcan, enabled, self.CS, self.frame, \
c.actuators,
hud_v_cruise, c.hudControl.lanesVisible, \
c.hudControl.leadVisible, \
chime, chime_count)
self.frame += 1
|
py | 1a34039405901e442c010d269d410ecfcb2ab7dd | from .configuration import Configuration
from .driver import Driver
from .benchmark import Benchmark
from .pipeline import Pipeline
from .job import Job |
py | 1a3403d6195624fe7e32e2940b7e78f613f8a624 | from time import sleep
import pyautogui
from textblob import TextBlob
from yandex_music_parser import YandexMusicParser
# add your Yandex mail, password and full link to your VK music page
YANDEX_MAIL = "*@yandex.com"
PASSWORD = "*"
VK_MUSIC_LINK = "https://vk.com/audios240917398"
CHROME_ICON = (215, 1055)
CHROME_URL = (410, 70)
SEARCH = (901, 406)
ADD_TRACK = (1462, 525)
SWITCH_LANGUAGE_step1 = (1732, 1059)
SWITCH_LANGUAGE_RUS = (1817, 834)
SWITCH_LANGUAGE_ENG = (1835, 919)
# used to determine the location of the cursor
screenWidth, screenHeight = pyautogui.size()
x, y = pyautogui.position()
print((x, y))
def open_browser():
print("Opening Google Chrome browser")
pyautogui.click(CHROME_ICON)
sleep(1)
def add_track(track_fullname):
sleep(1)
pyautogui.click(SEARCH)
sleep(1)
pyautogui.hotkey('ctrl', 'a')
sleep(1)
pyautogui.keyDown('backspace')
sleep(1)
pyautogui.typewrite(track_fullname)
sleep(1)
pyautogui.keyDown('enter')
sleep(1)
start = None
count = 5
    while not start:
        start = pyautogui.locateCenterOnScreen('images/pattern_screenshot.png')
        count -= 1
        if count == 0:
            break
pyautogui.moveTo(start)
x, y = pyautogui.position()
print((x, y))
ADD_TRACK = (x + 417, y + 74)
pyautogui.moveTo(ADD_TRACK)
pyautogui.click(ADD_TRACK)
sleep(1)
def fix_layout(track_fullname):
eng_chars = u"~!@#$%^&qwertyuiop[]asdfghjkl;'zxcvbnm,./QWERTYUIOP{}ASDFGHJKL:\"|ZXCVBNM<>?"
rus_chars = u"ё!\"№;%:?йцукенгшщзхъфывапролджэячсмитьбю.ЙЦУКЕНГШЩЗХЪФЫВАПРОЛДЖЭ/ЯЧСМИТЬБЮ,"
trans_table = dict(zip(rus_chars, eng_chars))
return ''.join([trans_table.get(c, c) for c in track_fullname])
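# Illustrative example (added note, not part of the original script): fix_layout maps
# characters typed on the Russian ЙЦУКЕН layout to the QWERTY keys at the same positions,
# e.g. fix_layout(u"привет") == "ghbdtn", so pyautogui.typewrite can reproduce the query
# after the page language has been switched to Russian.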
if __name__ == "__main__":
data = YandexMusicParser(YANDEX_MAIL, PASSWORD)
tracks_fullnames = data.parse_tracks()
open_browser()
for track_fullname in tracks_fullnames[::-1]:
language = TextBlob(track_fullname).detect_language()
if language == "ru":
pyautogui.moveTo(SWITCH_LANGUAGE_step1)
pyautogui.click(SWITCH_LANGUAGE_step1)
pyautogui.moveTo(SWITCH_LANGUAGE_RUS)
pyautogui.click(SWITCH_LANGUAGE_RUS)
add_track(fix_layout(track_fullname))
continue
else:
pyautogui.moveTo(SWITCH_LANGUAGE_step1)
pyautogui.click(SWITCH_LANGUAGE_step1)
pyautogui.moveTo(SWITCH_LANGUAGE_ENG)
pyautogui.click(SWITCH_LANGUAGE_ENG)
add_track(track_fullname)
sleep(1)
|
py | 1a34048614cbd1ccb0641d1288d510e54f8edb91 | from typing import Any
import requests
import pytest
from _pytest.monkeypatch import MonkeyPatch
from unittest.mock import Mock
from weather.libs.api.open_weather_map import OpenWeatherMap
from weather.libs.api.request_flow_controller import RequestFlowController
class TestOpenWeatherMap:
def test_init(self, fake_token: str, fake_owm: OpenWeatherMap) -> None:
assert fake_owm._token == fake_token
assert fake_owm._BASE_URL == 'https://api.openweathermap.org/data/'
assert fake_owm._VERSION == '2.5'
assert fake_owm.units == 'metric'
assert isinstance(fake_owm.flow_ctrl, RequestFlowController)
def test__url(self, fake_owm: OpenWeatherMap) -> None:
assert fake_owm._url == 'https://api.openweathermap.org/data/2.5/'
def test__get(
self,
fake_owm: OpenWeatherMap,
location_fake_data: dict[str, Any],
monkeypatch: MonkeyPatch,
) -> None:
class ResponsePatch:
def raise_for_status(self) -> None:
pass
def json(self) -> None:
return {'hello': 'world!'}
fake_get: Mock = Mock(return_value=ResponsePatch())
monkeypatch.setattr(requests, 'get', fake_get)
params: dict[str, Any] = location_fake_data
res: dict[str, Any] = fake_owm._get(
url=fake_owm._url + 'weather', params=params
)
assert res == fake_get.return_value.json()
def test_get_weather_by_coord(
self,
fake_owm: OpenWeatherMap,
location_fake_data: dict[str, Any],
monkeypatch: MonkeyPatch,
) -> None:
fake_get: Mock = Mock(return_value={'weather': 'Good'})
monkeypatch.setattr(OpenWeatherMap, '_get', fake_get)
res: dict[str, Any] = fake_owm.get_weather_by_coord(
**location_fake_data
)
assert res == fake_get.return_value
def test_sub_map(self, fake_owm: OpenWeatherMap) -> None:
assert len(list(fake_owm.sub_map(5))) == 2_592
|
py | 1a3404cc54ecc772bb412c183e1668e77263c22c | # coding: utf-8
"""
Feedback Submissions
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.crm.objects.feedback_submissions.configuration import Configuration
class BatchInputSimplePublicObjectBatchInput(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {"inputs": "list[SimplePublicObjectBatchInput]"}
attribute_map = {"inputs": "inputs"}
def __init__(self, inputs=None, local_vars_configuration=None): # noqa: E501
"""BatchInputSimplePublicObjectBatchInput - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._inputs = None
self.discriminator = None
self.inputs = inputs
@property
def inputs(self):
"""Gets the inputs of this BatchInputSimplePublicObjectBatchInput. # noqa: E501
:return: The inputs of this BatchInputSimplePublicObjectBatchInput. # noqa: E501
:rtype: list[SimplePublicObjectBatchInput]
"""
return self._inputs
@inputs.setter
def inputs(self, inputs):
"""Sets the inputs of this BatchInputSimplePublicObjectBatchInput.
:param inputs: The inputs of this BatchInputSimplePublicObjectBatchInput. # noqa: E501
:type: list[SimplePublicObjectBatchInput]
"""
if (
self.local_vars_configuration.client_side_validation and inputs is None
): # noqa: E501
raise ValueError(
"Invalid value for `inputs`, must not be `None`"
) # noqa: E501
self._inputs = inputs
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BatchInputSimplePublicObjectBatchInput):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, BatchInputSimplePublicObjectBatchInput):
return True
return self.to_dict() != other.to_dict()
|
py | 1a3405bb7e830eda7cfceba0f8342c4eaab3ef70 | from unittest import TestCase
from unittest import main as unittest_main
from offconf import funcs, get_func, pipe
class TestUtils(TestCase):
expr = "foo|prepend('bar_')|append('_can')"
expect = "bar_foo_can"
def test_pipe(self):
self.assertEqual(pipe("foo|b64encode", funcs), "Zm9v")
self.assertEqual(pipe(self.expr, funcs), self.expect)
def test_get_func(self):
for idx, expr in enumerate(self.expr.split("|")):
if idx == 0:
self.assertEqual(get_func(funcs, expr, "null"), "foo")
else:
self.assertTrue(callable(get_func(funcs, expr, None)))
if __name__ == "__main__":
unittest_main()
|
py | 1a3405da5879627065c5a2486386df825c73e5f9 | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
# oneflow.python.onnx.oneflow.python.onnx - rewrite oneflow graph to onnx graph
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import collections
import itertools
import logging
import os
import os.path
import sys
import traceback
from typing import Text, Optional, Dict, Callable, List
import numpy as np
from onnx import helper, onnx_pb
import oneflow
import oneflow.python.framework.c_api_util as c_api_util
import oneflow.python.framework.session_context as session_ctx
from oneflow.python.oneflow_export import oneflow_export
import oneflow.python.onnx
from oneflow.python.onnx import constants, schemas, util, handler, optimizer
from oneflow.python.onnx.graph import Graph
import oneflow.python.onnx.onnx_opset # pylint: disable=unused-import
logger = logging.getLogger(__name__)
def FlowToOnnxNaive(graph, shape_override):
"""
Convert node from oneflow format to onnx format.
Convert the oneflow nodes into an onnx graph with minimal rewrites so
we can use the onnx graph as intermediate graph.
The input/output/attr of each node are kept here and will be converted in other
following functions.
"""
dtypes = {}
for lbn in graph.helper.lbn2logical_blob_desc:
lbd = graph.helper.lbn2logical_blob_desc[lbn]
if lbn not in shape_override:
shape_override[lbn] = list(lbd.body.shape.dim)
dtypes[lbn] = util.Flow2OnnxDtype(lbd.body.data_type)
# some stats
op_cnt = collections.Counter()
attr_cnt = collections.Counter()
onnx_nodes = []
def is_user_op(node):
return node.WhichOneof("op_type") == "user_conf"
def get_op_conf(node):
conf_type = node.WhichOneof("op_type")
conf = getattr(node, conf_type)
return conf
def get_op_type(node):
if is_user_op(node):
return node.user_conf.op_type_name
return node.WhichOneof("op_type")[:-5]
def get_inputs(node):
if is_user_op(node):
ibns = handler.flow_op.ibn4op_type(get_op_type(node))
if ibns is None:
return list(
itertools.chain(*[x.s for x in node.user_conf.input.values()])
)
ipts = []
for ibn in ibns:
for key, val in node.user_conf.input.items():
if key == ibn:
assert len(val.s) == 1
ipts.append(val.s[0])
break
else:
raise ValueError(
"ibn {} of node {} (type {}) not found".format(
ibn, node.name, get_op_type(node)
)
)
return ipts
else:
conf = get_op_conf(node)
            # it cannot cover all legacy ops but it's enough
if hasattr(conf, "in"):
op_in = getattr(conf, "in")
if isinstance(op_in, str):
return [op_in]
else:
return op_in
else:
return []
def get_outputs(node):
if is_user_op(node):
obns = handler.flow_op.obn4op_type(get_op_type(node))
if obns is None:
assert all([len(x.s) == 1 for x in node.user_conf.output.values()])
return [x.s[0] for x in node.user_conf.output.values()]
outputs = []
for obn in obns:
for key, val in node.user_conf.output.items():
if key == obn:
assert len(val.s) == 1
outputs.append(val.s[0])
break
else:
raise ValueError(
"obn {} of node {} (type {}) not found".format(
obn, node.name, get_op_type(node)
)
)
else:
conf = get_op_conf(node)
            # it cannot cover all legacy ops but it's enough
if hasattr(conf, "out"):
out = getattr(conf, "out")
if isinstance(out, str):
outputs = [out]
else:
outputs = out
else:
outputs = []
outputs = ["{}/{}".format(node.name, output) for output in outputs]
return outputs
# minimal conversion of attributes
for node in graph.net.op:
attr = {}
op_cnt[get_op_type(node)] += 1
attrs = node.user_conf.attr.keys() if is_user_op(node) else []
for a in attrs:
attr_cnt[a] += 1
if a == "dtype":
attr[a] = util.Flow2OnnxDtype(util.get_flow_node_attr(node, "dtype"))
else:
attr[a] = util.get_flow_node_attr(node, a)
try:
op_type = get_op_type(node)
input_names = get_inputs(node)
output_names = get_outputs(node)
onnx_node = helper.make_node(
op_type, input_names, output_names, name=node.name, **attr
)
onnx_nodes.append(onnx_node)
except Exception as ex:
logger.error("pass1 convert failed for %s, ex=%s", node, ex)
raise
return onnx_nodes, op_cnt, attr_cnt, dtypes, shape_override
def FlowOnnxMapping(g, ops_mapping):
logger.debug("Mapping Oneflow node to ONNX node(s)")
mapped_op = collections.Counter()
unmapped_op = collections.Counter()
exceptions = []
ops = list(g.get_nodes())
for node in ops:
logger.debug("Process node: %s\n%s", node.name, node.summary)
if node.skip_conversion:
logger.debug("explicitly skip node " + node.name)
continue
op = node.op_type
map_info = ops_mapping.get(op)
if map_info is None:
unmapped_op[op] += 1
logger.error("oneflow op [%s: %s] is not supported", node.name, op)
continue
mapped_op[op] += 1
func, onnx_op, kwargs = map_info
if onnx_op is not None:
node.op_type = onnx_op
try:
func(g, node, **kwargs)
node.skip_conversion = True
except Exception as ex:
logger.error(
"Failed to convert node %s\n%s", node.name, node.summary, exc_info=1
)
exceptions.append(ex)
return mapped_op, unmapped_op, exceptions
def TopologicalSort(g, continue_on_error):
ops = g.get_nodes()
if not continue_on_error:
g.TopologicalSort(ops)
else:
try:
g.TopologicalSort(ops)
except: # pylint: disable=bare-except
# if we continue on error, ignore graph cycles so we can report all missing ops
pass
@session_ctx.try_init_default_session
@oneflow_export("onnx.export")
def Export(
job_func: Callable,
model_save_dir: Text,
onnx_filename: Text,
continue_on_error: bool = False,
opset: Optional[int] = None,
extra_opset: Optional[int] = None,
shape_override: Optional[Dict[Text, List[int]]] = None,
external_data: bool = False,
):
r"""Export a oneflow model into ONNX format.
Args:
job_func: The job function
        model_save_dir: The directory containing oneflow model weights. Users are expected to call check_point.save(dir), wait for the model saving to finish, and pass the argument 'dir' as model_save_dir.
onnx_filename: a string for the output filename
continue_on_error: if an op can't be processed (aka there is no mapping), continue
opset: the opset to be used (int, default is oneflow.python.onnx.constants.PREFERRED_OPSET)
        extra_opset: list of extra opsets, for example the opsets used by custom ops
shape_override: dict with inputs that override the shapes given by oneflow
external_data: Save weights as ONNX external data, usually to bypass the 2GB file size limit of protobuf.
"""
assert os.getenv("ENABLE_USER_OP") != "False"
assert os.path.isdir(model_save_dir)
job_set = c_api_util.GetJobSet()
job_name = job_func.__name__
for job in job_set.job:
if job.job_conf.job_name == job_name:
onnx_graph = ProcessFlowGraph(
job,
model_save_dir,
continue_on_error=continue_on_error,
opset=opset,
extra_opset=extra_opset,
shape_override=shape_override,
)
onnx_graph = optimizer.OptimizeGraph(onnx_graph)
model_proto = onnx_graph.MakeModel(
job_name, onnx_filename, external_data=external_data
)
with open(onnx_filename, "wb") as f:
try:
f.write(model_proto.SerializeToString())
except ValueError as e:
raise ValueError(
"Error occured when running model_proto.SerializeToString(). If the model is larger than 2GB, please specify external_data=True when calling flow.onnx.export. Original error message:\n{}".format(
e
)
)
return
raise ValueError('Cannot find job "{}" in jobset'.format(job_name))
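# Minimal usage sketch (added; paths, the job function and the checkpoint API are
# illustrative assumptions, not taken from this file):
#   check_point.save("./model_weights")  # wait for saving to finish before exporting
#   flow.onnx.export(my_job, "./model_weights", "my_job.onnx", opset=10)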
def ProcessFlowGraph(
flow_graph,
model_save_dir,
continue_on_error=False,
opset=None,
extra_opset=None,
shape_override=None,
):
opset = util.FindOpset(opset)
logger.info("Using opset <onnx, %s>", opset)
if opset > schemas.get_max_supported_opset_version():
logger.warning(
"Currently installed onnx package %s is too low to support opset %s, "
"please upgrade onnx package to avoid potential conversion issue.",
util.get_onnx_version(),
opset,
)
if shape_override is None:
shape_override = {}
(onnx_nodes, op_cnt, attr_cnt, dtypes, output_shapes,) = FlowToOnnxNaive(
flow_graph, shape_override
)
g = Graph(onnx_nodes, model_save_dir, output_shapes, dtypes, opset, extra_opset,)
# create ops mapping for the desired opsets
ops_mapping = handler.flow_op.CreateMapping(g.opset, g.extra_opset)
    # some nodes may already be copied into the inner Graph, so remove them from the main Graph.
TopologicalSort(g, continue_on_error)
mapped_op, unmapped_op, exceptions = FlowOnnxMapping(g, ops_mapping)
if unmapped_op:
logger.error("Unsupported ops: %s", unmapped_op)
if exceptions and not continue_on_error:
raise exceptions[0]
# onnx requires topological sorting
TopologicalSort(g, continue_on_error)
g.UpdateProto()
logger.debug(
"Summay Stats:\n"
"\toneflow ops: {}\n"
"\toneflow attr: {}\n"
"\tonnx mapped: {}\n"
"\tonnx unmapped: {}".format(op_cnt, attr_cnt, mapped_op, unmapped_op)
)
return g
|
py | 1a34060a6040f5a09656cfdb57fe8c152284115f | # Copyright (c) 2015 Yubico AB
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import click
import logging
from threading import Timer
from .util import (
click_force_option,
click_postpone_execution,
click_callback,
click_parse_b32_key,
click_prompt,
prompt_for_touch,
EnumChoice,
)
from yubikit.core import USB_INTERFACE
from yubikit.core.smartcard import ApduError, SW
from yubikit.oath import (
OathSession,
CredentialData,
OATH_TYPE,
HASH_ALGORITHM,
parse_b32_key,
)
from ..oath import is_steam, calculate_steam, is_hidden
from ..device import is_fips_version
from ..settings import Settings
logger = logging.getLogger(__name__)
click_touch_option = click.option(
"-t", "--touch", is_flag=True, help="Require touch on YubiKey to generate code."
)
click_show_hidden_option = click.option(
"-H", "--show-hidden", is_flag=True, help="Include hidden credentials."
)
def _string_id(credential):
return credential.id.decode("utf-8")
@click_callback()
def _clear_callback(ctx, param, clear):
if clear:
ensure_validated(ctx)
app = ctx.obj["controller"]
settings = ctx.obj["settings"]
app.unset_key()
keys = settings.setdefault("keys", {})
if app.info.device_id in keys:
del keys[app.info.device_id]
settings.write()
click.echo("Password cleared.")
ctx.exit()
return clear
@click_callback()
def click_parse_uri(ctx, param, val):
try:
return CredentialData.parse_uri(val)
except ValueError:
raise click.BadParameter("URI seems to have the wrong format.")
@click.group()
@click.pass_context
@click_postpone_execution
@click.option("-p", "--password", help="Provide a password to unlock the " "YubiKey.")
def oath(ctx, password):
"""
Manage OATH Application.
Examples:
\b
Generate codes for credentials starting with 'yubi':
$ ykman oath code yubi
\b
Add a touch credential with the secret key f5up4ub3dw and the name yubico:
$ ykman oath add yubico f5up4ub3dw --touch
\b
Set a password for the OATH application:
$ ykman oath set-password
"""
try:
controller = OathSession(ctx.obj["conn"])
ctx.obj["controller"] = controller
ctx.obj["settings"] = Settings("oath")
except ApduError as e:
if e.sw == SW.FILE_NOT_FOUND:
ctx.fail("The OATH application can't be found on this YubiKey.")
raise
if password:
ctx.obj["key"] = controller.derive_key(password)
@oath.command()
@click.pass_context
def info(ctx):
"""
Display status of OATH application.
"""
app = ctx.obj["controller"]
version = app.info.version
click.echo("OATH version: {}.{}.{}".format(version[0], version[1], version[2]))
click.echo("Password protection " + ("enabled" if app.locked else "disabled"))
keys = ctx.obj["settings"].get("keys", {})
if app.locked and app.info.device_id in keys:
click.echo("The password for this YubiKey is remembered by ykman.")
if is_fips_version(version):
click.echo("FIPS Approved Mode: {}".format("Yes" if app.locked else "No"))
@oath.command()
@click.pass_context
@click.confirmation_option(
"-f",
"--force",
prompt="WARNING! This will delete "
"all stored OATH credentials and restore factory settings?",
)
def reset(ctx):
"""
Reset all OATH data.
This action will wipe all credentials and reset factory settings for
the OATH application on the YubiKey.
"""
app = ctx.obj["controller"]
click.echo("Resetting OATH data...")
old_id = app.info.device_id
app.reset()
settings = ctx.obj["settings"]
keys = settings.setdefault("keys", {})
if old_id in keys:
del keys[old_id]
settings.write()
click.echo("Success! All OATH credentials have been cleared from your YubiKey.")
@oath.command()
@click.argument("name")
@click.argument("secret", callback=click_parse_b32_key, required=False)
@click.option(
"-o",
"--oath-type",
type=EnumChoice(OATH_TYPE),
default=OATH_TYPE.TOTP.name,
help="Time-based (TOTP) or counter-based (HOTP) credential.",
show_default=True,
)
@click.option(
"-d",
"--digits",
type=click.Choice(["6", "7", "8"]),
default="6",
help="Number of digits in generated code.",
show_default=True,
)
@click.option(
"-a",
"--algorithm",
type=EnumChoice(HASH_ALGORITHM),
default=HASH_ALGORITHM.SHA1.name,
show_default=True,
help="Algorithm to use for code generation.",
)
@click.option(
"-c",
"--counter",
type=click.INT,
default=0,
help="Initial counter value for HOTP credentials.",
)
@click.option("-i", "--issuer", help="Issuer of the credential.")
@click.option(
"-p",
"--period",
help="Number of seconds a TOTP code is valid.",
default=30,
show_default=True,
)
@click_touch_option
@click_force_option
@click.pass_context
def add(
ctx,
secret,
name,
issuer,
period,
oath_type,
digits,
touch,
algorithm,
counter,
force,
):
"""
Add a new credential.
This will add a new credential to your YubiKey.
"""
digits = int(digits)
if not secret:
while True:
secret = click_prompt("Enter a secret key (base32)")
try:
secret = parse_b32_key(secret)
break
except Exception as e:
click.echo(e)
ensure_validated(ctx)
_add_cred(
ctx,
CredentialData(
name, oath_type, algorithm, secret, digits, period, counter, issuer
),
touch,
force,
)
@oath.command()
@click.argument("uri", callback=click_parse_uri, required=False)
@click_touch_option
@click_force_option
@click.pass_context
def uri(ctx, uri, touch, force):
"""
Add a new credential from URI.
Use a URI to add a new credential to your YubiKey.
"""
if not uri:
while True:
uri = click_prompt("Enter an OATH URI")
try:
uri = CredentialData.parse_uri(uri)
break
except Exception as e:
click.echo(e)
ensure_validated(ctx)
data = uri
# Steam is a special case where we allow the otpauth
# URI to contain a 'digits' value of '5'.
if data.digits == 5 and is_steam(data):
data.digits = 6
_add_cred(ctx, data, touch, force)
def _add_cred(ctx, data, touch, force):
app = ctx.obj["controller"]
version = app.info.version
if not (0 < len(data.name) <= 64):
ctx.fail("Name must be between 1 and 64 bytes.")
if len(data.secret) < 2:
ctx.fail("Secret must be at least 2 bytes.")
if touch and version < (4, 2, 6):
ctx.fail("Touch-required credentials not supported on this key.")
if data.counter and data.oath_type != OATH_TYPE.HOTP:
ctx.fail("Counter only supported for HOTP credentials.")
if data.hash_algorithm == HASH_ALGORITHM.SHA512 and (
version < (4, 3, 1) or is_fips_version(version)
):
ctx.fail("Algorithm SHA512 not supported on this YubiKey.")
creds = app.list_credentials()
cred_id = data.get_id()
if not force and any(cred.id == cred_id for cred in creds):
click.confirm(
"A credential called {} already exists on this YubiKey."
" Do you want to overwrite it?".format(data.name),
abort=True,
err=True,
)
firmware_overwrite_issue = (4, 0, 0) < version < (4, 3, 5)
cred_is_subset = any(
(cred.id.startswith(cred_id) and cred.id != cred_id) for cred in creds
)
# YK4 has an issue with credential overwrite in firmware versions < 4.3.5
if firmware_overwrite_issue and cred_is_subset:
ctx.fail("Choose a name that is not a subset of an existing credential.")
try:
app.put_credential(data, touch)
except ApduError as e:
if e.sw == SW.NO_SPACE:
ctx.fail("No space left on your YubiKey for OATH credentials.")
elif e.sw == SW.COMMAND_ABORTED:
# Some NEOs do not use the NO_SPACE error.
ctx.fail("The command failed. Is there enough space on your YubiKey?")
else:
raise
@oath.command()
@click_show_hidden_option
@click.pass_context
@click.option("-o", "--oath-type", is_flag=True, help="Display the OATH type.")
@click.option("-p", "--period", is_flag=True, help="Display the period.")
def list(ctx, show_hidden, oath_type, period):
"""
List all credentials.
List all credentials stored on your YubiKey.
"""
ensure_validated(ctx)
controller = ctx.obj["controller"]
creds = [
cred
for cred in controller.list_credentials()
if show_hidden or not is_hidden(cred)
]
creds.sort()
for cred in creds:
click.echo(_string_id(cred), nl=False)
if oath_type:
click.echo(u", {}".format(cred.oath_type.name), nl=False)
if period:
click.echo(", {}".format(cred.period), nl=False)
click.echo()
@oath.command()
@click_show_hidden_option
@click.pass_context
@click.argument("query", required=False, default="")
@click.option(
"-s",
"--single",
is_flag=True,
help="Ensure only a single match, and output only the code.",
)
def code(ctx, show_hidden, query, single):
"""
Generate codes.
Generate codes from credentials stored on your YubiKey.
Provide a query string to match one or more specific credentials.
Touch and HOTP credentials require a single match to be triggered.
"""
ensure_validated(ctx)
app = ctx.obj["controller"]
entries = app.calculate_all()
creds = _search(entries.keys(), query, show_hidden)
if len(creds) == 1:
cred = creds[0]
code = entries[cred]
if cred.touch_required:
prompt_for_touch()
try:
if cred.oath_type == OATH_TYPE.HOTP:
# HOTP might require touch, we don't know.
# Assume yes after 500ms.
hotp_touch_timer = Timer(0.500, prompt_for_touch)
hotp_touch_timer.start()
code = app.calculate_code(cred)
hotp_touch_timer.cancel()
elif code is None:
code = app.calculate_code(cred)
except ApduError as e:
if e.sw == SW.SECURITY_CONDITION_NOT_SATISFIED:
ctx.fail("Touch credential timed out!")
entries[cred] = code
elif single and len(creds) > 1:
_error_multiple_hits(ctx, creds)
elif single and len(creds) == 0:
ctx.fail("No matching credential found.")
if single and creds:
if is_steam(cred):
click.echo(calculate_steam(app, cred))
else:
click.echo(code.value)
else:
outputs = []
for cred in sorted(creds):
code = entries[cred]
if code:
code = code.value
elif cred.touch_required:
code = "[Touch Credential]"
elif cred.oath_type == OATH_TYPE.HOTP:
code = "[HOTP Credential]"
else:
code = ""
if is_steam(cred):
code = calculate_steam(app, cred)
outputs.append((_string_id(cred), code))
longest_name = max(len(n) for (n, c) in outputs) if outputs else 0
longest_code = max(len(c) for (n, c) in outputs) if outputs else 0
format_str = u"{:<%d} {:>%d}" % (longest_name, longest_code)
for name, result in outputs:
click.echo(format_str.format(name, result))
@oath.command()
@click.pass_context
@click.argument("query")
@click.option("-f", "--force", is_flag=True, help="Confirm deletion without prompting")
def delete(ctx, query, force):
"""
Delete a credential.
Delete a credential from your YubiKey.
Provide a query string to match the credential to delete.
"""
ensure_validated(ctx)
app = ctx.obj["controller"]
creds = app.list_credentials()
hits = _search(creds, query, True)
if len(hits) == 0:
click.echo("No matches, nothing to be done.")
elif len(hits) == 1:
cred = hits[0]
if force or (
click.confirm(
u"Delete credential: {} ?".format(_string_id(cred)),
default=False,
err=True,
)
):
app.delete_credential(cred.id)
click.echo(u"Deleted {}.".format(_string_id(cred)))
else:
click.echo("Deletion aborted by user.")
else:
_error_multiple_hits(ctx, hits)
@oath.command("set-password")
@click.pass_context
@click.option(
"-c",
"--clear",
is_flag=True,
expose_value=False,
callback=_clear_callback,
is_eager=True,
help="Clear the current password.",
)
@click.option("-n", "--new-password", help="Provide a new password as an argument.")
@click.option(
"-r", "--remember", is_flag=True, help="Remember the new password on this machine.",
)
def set_password(ctx, new_password, remember):
"""
Password protect the OATH credentials.
Allows you to set a password that will be required to access the OATH
credentials stored on your YubiKey.
"""
ensure_validated(ctx, prompt="Enter your current password")
if not new_password:
new_password = click_prompt(
"Enter your new password", hide_input=True, confirmation_prompt=True
)
app = ctx.obj["controller"]
device_id = app.info.device_id
settings = ctx.obj["settings"]
keys = settings.setdefault("keys", {})
key = app.derive_key(new_password)
app.set_key(key)
click.echo("Password updated.")
if remember:
keys[device_id] = key.hex()
settings.write()
click.echo("Password remembered")
elif device_id in keys:
del keys[device_id]
settings.write()
@oath.command("remember-password")
@click.pass_context
@click.option("-F", "--forget", is_flag=True, help="Forget a password.")
@click.option(
"-c",
"--clear-all",
is_flag=True,
help="Remove all stored passwords from this computer.",
)
def remember_password(ctx, forget, clear_all):
"""
Manage local password storage.
    Store your YubiKey's password on this computer to avoid having to enter it
on each use, or delete stored passwords.
"""
app = ctx.obj["controller"]
device_id = app.info.device_id
settings = ctx.obj["settings"]
keys = settings.setdefault("keys", {})
if clear_all:
del settings["keys"]
settings.write()
click.echo("All passwords have been cleared.")
elif forget:
if device_id in keys:
del keys[device_id]
settings.write()
click.echo("Password forgotten.")
else:
ensure_validated(ctx, remember=True)
def ensure_validated(ctx, prompt="Enter your password", remember=False):
app = ctx.obj["controller"]
device_id = app.info.device_id
if app.locked:
# If password given as arg, use it
if "key" in ctx.obj:
_validate(ctx, ctx.obj["key"], remember)
return
# Use stored key if available
keys = ctx.obj["settings"].setdefault("keys", {})
if device_id in keys:
try:
app.validate(bytes.fromhex(keys[device_id]))
return
except Exception as e:
logger.debug("Error", exc_info=e)
del keys[device_id]
# Prompt for password
password = click_prompt(prompt, hide_input=True)
key = app.derive_key(password)
_validate(ctx, key, remember)
def _validate(ctx, key, remember):
try:
app = ctx.obj["controller"]
app.validate(key)
if remember:
settings = ctx.obj["settings"]
keys = settings.setdefault("keys", {})
keys[app.info.device_id] = key.hex()
settings.write()
click.echo("Password remembered.")
except Exception:
ctx.fail("Authentication to the YubiKey failed. Wrong password?")
def _search(creds, query, show_hidden):
hits = []
for c in creds:
cred_id = _string_id(c)
if not show_hidden and is_hidden(c):
continue
if cred_id == query:
return [c]
if query.lower() in cred_id.lower():
hits.append(c)
return hits
def _error_multiple_hits(ctx, hits):
click.echo(
"Error: Multiple matches, please make the query more specific.", err=True
)
click.echo("", err=True)
for cred in hits:
click.echo(_string_id(cred), err=True)
ctx.exit(1)
oath.interfaces = USB_INTERFACE.CCID # type: ignore
|
py | 1a3406aa5a632bb3c909ff3bfb3cfe402372c5c0 | # Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from batchgenerators.utilities.file_and_folder_operations import *
def remove_trailing_slash(filename: str):
while filename.endswith('\\'):
filename = filename[:-1]
return filename
def maybe_add_0000_to_all_niigz(folder):
nii_gz = subfiles(folder, suffix='.nii.gz')
for n in nii_gz:
n = remove_trailing_slash(n)
if not n.endswith('_0000.nii.gz'):
os.rename(n, n[:-7] + '_0000.nii.gz')
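# Minimal usage sketch (the folder path is hypothetical):
#   maybe_add_0000_to_all_niigz("/data/nnUNet_raw/Task501_Example/imagesTs")
# would rename e.g. case001.nii.gz to case001_0000.nii.gz, the modality-suffix naming
# scheme nnU-Net expects for single-modality images.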
|
py | 1a3406d70d513d88acb95686fbf569f3516e9ac2 | from datetime import datetime
from .worker.notebook import create_notebook
from .worker.image import create_image
from .worker.writer import path
from .worker.date import resolve_date
def run_generator():
print('Aplikasi Generator Ujian Logic Pondok Programmer')
name = input('Masukkan Nama Lengkap : ')
email = input('Masukkan Email : ')
date = resolve_date(datetime.now())
create_image()
create_notebook(name, email, date)
def run_notebook():
from subprocess import call
call(['jupyter', 'notebook', path])
def run():
run_generator()
run_notebook() |
py | 1a3407214a44b413b6f625b4ab60c741ab7de1fd | # -*- coding: utf-8 -*-
"""
The MIT License
Copyright (c) 2009 Cedric RICARD
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
class Config(RawConfigParser):
def get(self, section, option, default=None, *args, **kwargs):
if self.has_option(section, option) or default is None:
return RawConfigParser.get(self, section, option)
else:
return default
def getint(self, section, option, default=None, *args, **kwargs):
if self.has_option(section, option) or not isinstance(default, int):
return RawConfigParser.getint(self, section, option)
else:
return default
def getfloat(self, section, option, default=None, *args, **kwargs):
if self.has_option(section, option) or not isinstance(default, float):
return RawConfigParser.getfloat(self, section, option)
else:
return default
def getboolean(self, section, option, default=None, *args, **kwargs):
if self.has_option(section, option) or not isinstance(default, bool):
return RawConfigParser.getboolean(self, section, option)
else:
return default
def set(self, section, option, value=None):
if not self.has_section(section):
self.add_section(section)
RawConfigParser.set(self, section, option, value)
def getlist(self, section, option, default=None):
if self.has_option(section, option) or default is None:
return RawConfigParser.get(self, section, option).split(',')
else:
return default
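# Minimal usage sketch (file name, section and option names are hypothetical):
#   cfg = Config()
#   cfg.read("app.ini")
#   port = cfg.getint("smtp", "port", default=25)       # falls back to 25 if absent
#   hosts = cfg.getlist("smtp", "relays", default=[])   # comma-separated list, or []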
|
py | 1a340724c423de23b8ca9ac71562c9931a94b7ed | import logging
import re
from geopy import Point
from pytz import timezone, UTC
from typing import Optional, Union
from idunn.api.utils import Verbosity, build_blocks
from idunn.datasources.wiki_es import wiki_es
from idunn.utils import maps_urls, tz
from .place import Place, PlaceMeta
logger = logging.getLogger(__name__)
ZONE_TYPE_ORDER_KEY = {
"suburb": 1,
"city_district": 2,
"city": 3,
"state_district": 4,
"state": 5,
"country_region": 6,
"country": 7,
}
# List of official languages for given country codes.
COUNTRY_LANGUAGES = {
# European coutries (source: https://en.wikipedia.org/wiki/Member_state_of_the_European_Union)
"at": ["de"],
"be": ["nl", "fr", "de"],
"bg": ["bg"],
"hr": ["hr"],
"cy": ["el", "tr"],
"cz": ["cs"],
"dk": ["da"],
"ee": ["et"],
"fi": ["fi", "sv"],
"fr": ["fr"],
"de": ["de"],
"gr": ["el"],
"hu": ["hu"],
"ie": ["en", "ga"],
"it": ["it"],
"lv": ["lv"],
"lt": ["lt"],
"lu": ["fr", "de", "lu"],
"mt": ["mt", "en"],
"nl": ["nl"],
"pl": ["pl"],
"pt": ["pt"],
"ro": ["ro"],
"sk": ["sk"],
"si": ["sl"],
"es": ["es", "gl", "ca", "oc", "eu"],
"se": ["sv"],
# Other countries
"gb": ["en"],
"us": ["en"],
}
class BasePlace(dict):
PLACE_TYPE = ""
def __init__(self, d):
if not self.PLACE_TYPE:
raise Exception(f"Missing PLACE_TYPE in class {self.__class__.__name__}")
super().__init__(d)
self._wiki_resp = {}
self.properties = {}
@property
def wikidata_id(self):
return self.properties.get("wikidata")
def get_wiki_resp(self, lang):
if lang not in self._wiki_resp:
self._wiki_resp[lang] = None
if (
self.wikidata_id is not None
and wiki_es.enabled()
and wiki_es.is_lang_available(lang)
):
self._wiki_resp[lang] = wiki_es.get_info(self.wikidata_id, lang)
return self._wiki_resp.get(lang)
def get_name(self, _lang):
return self.get_local_name()
def get_local_name(self):
return self.get("name", "")
def get_class_name(self):
return self.PLACE_TYPE
def get_subclass_name(self):
return self.PLACE_TYPE
def get_raw_address(self):
return self.get("address") or {}
def get_raw_street(self):
raw_address = self.get_raw_address()
if raw_address.get("type") == "street":
return raw_address
return raw_address.get("street") or {}
def get_raw_admins(self):
return self.get("administrative_regions") or []
def get_country_codes(self):
"""
The list of codes is ordered from the least specific to the most specific
        For example, for a place located in La Réunion: ["FR","RE","RE"]
for the country, the state ("région") and the state_district ("département")
:return: List of ISO 3166-1 alpha-2 country codes
"""
ordered_admins = sorted(
self.get_raw_admins(),
key=lambda a: ZONE_TYPE_ORDER_KEY.get(a.get("zone_type"), 0),
reverse=True,
)
return [c.upper() for admin in ordered_admins for c in admin.get("country_codes", [])]
def get_country_code(self):
return next(iter(self.get_country_codes()), None)
def get_postcodes(self):
return self.get_raw_address().get("zip_codes")
def build_address(self, lang):
"""
Method to build the address field for an Address,
a Street, an Admin or a POI.
"""
raw_address = self.get_raw_address()
postcodes = self.get_postcodes()
if postcodes is not None:
if isinstance(postcodes, list):
if len(postcodes) == 1:
postcodes = postcodes[0]
else:
postcodes = None
addr_id = raw_address.get("id")
name = raw_address.get("name")
label = raw_address.get("label")
street = self.build_street()
# ES raw data uses "house_number" whereas Bragi returns "housenumber"
housenumber = raw_address.get("house_number") or raw_address.get("housenumber")
return {
"id": addr_id,
"name": name or street.get("name"),
"housenumber": housenumber,
"postcode": postcodes,
"label": label or street.get("label"),
"admin": self.build_admin(lang),
"street": street,
"admins": self.build_admins(lang),
"country_code": self.get_country_code(),
}
def build_admin(self, _lang=None):
return None
def build_admins(self, lang=None) -> list:
raw_admins = self.get_raw_admins()
admins = []
        if raw_admins is not None:
for raw_admin in raw_admins:
admin = {
"id": raw_admin.get("id"),
"label": raw_admin.get("labels", {}).get(lang) or raw_admin.get("label"),
"name": raw_admin.get("names", {}).get(lang) or raw_admin.get("name"),
"class_name": raw_admin.get("zone_type"),
"postcodes": raw_admin.get("zip_codes"),
}
admins.append(admin)
return admins
def build_street(self):
raw_street = self.get_raw_street()
return {
"id": raw_street.get("id"),
"name": raw_street.get("name"),
"label": raw_street.get("label"),
"postcodes": raw_street.get("zip_codes"),
}
def get_id(self):
return self.get("id", "")
def find_property_value(self, fallback_keys):
for k in fallback_keys:
val = self.properties.get(k)
if val:
return val
return None
def get_phone(self):
phone = self.find_property_value(["phone", "contact:phone", "contact:mobile"])
if phone is None:
return None
return phone.split(";")[0]
def get_website(self):
return self.find_property_value(["contact:website", "website"])
def get_website_label(self):
return None
@staticmethod
def build_social_if_not_url(template, field):
if field is None or re.match("^https?://", field):
return field
return template.format(field.lstrip("@"))
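    # Illustrative note (added): a bare handle such as "@yubico" becomes
    # e.g. "https://twitter.com/yubico", while a value that already starts with
    # http:// or https:// is returned unchanged.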
def get_facebook(self):
return self.build_social_if_not_url(
"https://www.facebook.com/{}",
self.find_property_value(["facebook", "contact:facebook"]),
)
def get_twitter(self):
return self.build_social_if_not_url(
"https://twitter.com/{}",
self.find_property_value(["twitter", "contact:twitter"]),
)
def get_instagram(self):
return self.build_social_if_not_url(
"https://www.instagram.com/{}",
self.find_property_value(["instagram", "contact:instagram"]),
)
def get_youtube(self):
return self.build_social_if_not_url(
"https://www.youtube.com/{}",
self.find_property_value(["contact:youtube"]),
)
def get_coord(self):
return self.get("coord")
def get_point(self):
coord = self.get_coord()
return Point(latitude=coord["lat"], longitude=coord["lon"])
def get_raw_opening_hours(self):
return self.properties.get("opening_hours")
def get_raw_wheelchair(self):
return self.properties.get("wheelchair")
def get_source(self):
return None
def get_source_url(self):
return None
def get_contribute_url(self):
return None
def get_meta(self):
place_id = self.get_id()
return PlaceMeta(
source=self.get_source(),
source_url=self.get_source_url(),
contribute_url=self.get_contribute_url(),
maps_place_url=maps_urls.get_place_url(place_id),
maps_directions_url=maps_urls.get_directions_url(place_id),
)
def load_place(self, lang, verbosity: Verbosity = Verbosity.default()) -> Place:
return Place(
type=self.PLACE_TYPE,
id=self.get_id(),
name=self.get_name(lang),
local_name=self.get_local_name(),
class_name=self.get_class_name(),
subclass_name=self.get_subclass_name(),
geometry=self.get_geometry(),
address=self.build_address(lang),
blocks=build_blocks(self, lang, verbosity),
meta=self.get_meta(),
)
def get_images_urls(self):
return []
def get_raw_grades(self):
return {}
def get_reviews_url(self):
return ""
def get_booking_url(self):
return None
def get_appointment_url(self):
return None
def get_quotation_request_url(self):
return None
def get_description(self, lang):
if f"description:{lang}" in self.properties:
return self.properties.get(f"description:{lang}")
country_code = self.get_country_code()
# Check that there is little to no ambiguity on local language and that
# it matches `lang`.
if not country_code or COUNTRY_LANGUAGES.get(country_code.lower()) != [lang.lower()]:
return None
return self.properties.get("description")
def get_description_url(self, _lang):
return None
def has_click_and_collect(self):
return False
def has_delivery(self):
return self.properties.get("delivery") == "yes"
def has_takeaway(self):
return self.properties.get("takeaway") in ("yes", "only")
def get_bbox(self):
return None
def get_tz(self):
"""
>>> from idunn.places import POI
>>> poi1 = POI({"coord": {"lon": 2.3, "lat":48.9}})
>>> poi1.get_tz().zone
'Europe/Paris'
>>> poi2 = POI({'coord':{"lon":-12.8218, "lat": 37.5118}})
>>> poi2.get_tz().zone
'UTC'
"""
coords = self.get_coord()
tz_name = tz.tzNameAt(latitude=coords["lat"], longitude=coords["lon"], forceTZ=True)
if tz_name is None:
return UTC
return timezone(tz_name)
def get_geometry(self):
"""Returns GeoJSON-like geometry. Requires "lon" and "lat" coordinates.
>>> from idunn.places import POI
>>> assert POI({}).get_geometry() is None
>>> assert POI({'coord':{"lon": None, "lat": 48.85}}).get_geometry() is None
>>> assert POI({'coord':{"lon": 2.29, "lat": None}}).get_geometry() is None
>>> POI({'coord':{"lon": 2.29, "lat": 48.85}}).get_geometry()
{'type': 'Point', 'coordinates': [2.29, 48.85], 'center': [2.29, 48.85]}
"""
geom = None
coord = self.get_coord()
if coord is not None:
lon = coord.get("lon")
lat = coord.get("lat")
if lon is not None and lat is not None:
geom = {"type": "Point", "coordinates": [lon, lat], "center": [lon, lat]}
bbox = self.get_bbox() # pylint: disable=assignment-from-none
if bbox is not None:
geom["bbox"] = bbox
return geom
STARS_REGEX = re.compile(r"(?P<rating>\d+(\.\d+)?)S?")
def _get_stars_value(self):
raw_stars = self.properties.get("stars")
if not raw_stars:
return None
if raw_stars == "0":
return False
match_stars = self.STARS_REGEX.match(raw_stars)
if not match_stars:
return None
return float(match_stars.group("rating"))
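    # Illustrative examples (added note): a raw "stars" tag of "3" or "3S" yields 3.0,
    # "0" yields False (explicitly unrated), and an unparseable value yields None.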
def get_lodging_stars(self) -> Optional[Union[bool, float]]:
if self.get_class_name() != "lodging":
return None
return self._get_stars_value()
def get_restaurant_stars(self) -> Optional[Union[bool, float]]:
if self.get_class_name() == "lodging":
return None
return self._get_stars_value()
|
py | 1a3407c767f866fbb5ceb102af71c5fc3af1ea8c | # coding=utf-8
"""TEC === Tools to calculate total electron content value in the ionosphere
using data derived from global navigation satellite systems."""
# Shortcut
from .glo import collect_freq_nums
from .gnss import BAND_PRIORITY
from .rinex import ObsFileV2
from .rinex import ObsFileV3
# General information
__version__ = '1.1.1'
__author__ = __maintainer__ = 'Ilya Zhivetiev'
__email__ = '[email protected]'
def rnx(file, band_priority=BAND_PRIORITY, glo_freq_nums=None):
"""Return a reader object which will iterate over observation records in
the given file. Each iteration will return Tec object. The file can be any
object which supports iterator protocol.
Parameters
----------
file : file-like object
band_priority : dict
glo_freq_nums : dict
Returns
-------
reader : iterator
Yields Tec object for each satellite of the epoch.
"""
if glo_freq_nums is None:
glo_freq_nums = {}
try:
row = next(file)
rinex_version = float(row[:9])
rinex_type = row[20]
# rinex_sat_system = row[40]
except StopIteration:
raise ValueError("rnx: Empty input file")
except ValueError:
raise ValueError("rnx: Unknown file type")
if rinex_type.upper() != 'O':
raise Exception('rnx: Not an observation file')
rinex_reader = {
(2.0, 2.1, 2.11, 2.12): ObsFileV2,
(3.0, 3.01, 3.02, 3.03): ObsFileV3
}
reader = None
for ver in rinex_reader:
if rinex_version in ver:
reader = rinex_reader[ver]
if reader is None:
raise Exception('Unknown RINEX version: {}'.format(rinex_version))
return reader(
file,
version=rinex_version,
band_priority=band_priority,
glo_freq_nums=glo_freq_nums,
)
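# Minimal usage sketch (file names are hypothetical; collect_freq_nums is assumed to take
# a GLONASS navigation file object):
#   with open('site0010.19g') as nav_file:
#       glo_freq_nums = collect_freq_nums(nav_file)
#   with open('site0010.19o') as obs_file:
#       for tec in rnx(obs_file, glo_freq_nums=glo_freq_nums):
#           print(tec)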
|
py | 1a3408117b5d75df47acef3f97d021df921fbf9f | # Generated by Django 2.2.12 on 2020-04-10 10:54
from django.db import migrations, models
import django.utils.timezone
import users.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('email', models.EmailField(max_length=254, unique=True, verbose_name='email address')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', users.models.UserManager()),
],
),
]
|
py | 1a34085b4a72722455af1b1e0817a93e8c4e8a16 | import random, time
from UNP.Core import Account, ActTable
class Loginer:
def __init__(self):
self.mode = "001"
self.timer = -1
self.file = ""
def __str__(self):
string = "mode-" + self.mode + "_timer-" + str(self.timer)
if self.file != "":
string = string + "_filename-" + self.file
return string
def _iterator(self):
userlist_201901 = []
userlist_201906 = []
userlist_201907 = []
userlist = []
if self.mode[0] == '1':
userlist = userlist + userlist_201901
if self.mode[1] == '1':
userlist = userlist + userlist_201906
if self.mode[2] == '1':
userlist = userlist + userlist_201907
random.shuffle(userlist)
for user in userlist:
yield Account(username=user[0], name=user[1])
def active(self):
if self.mode[0].lower() in ['f', 't']:
for account in ActTable(input("Enter filename:")).iterator():
account.load()
if account.accessibility:
print("Welcome! " + account.name)
if self.timer == -1:
if input("Enter Y to stop:").lower() == 'y':
return
else:
print("refresh in " + str(self.timer) + " seconds...")
time.sleep(self.timer)
elif self.mode[0].lower() in ['p', 'c']:
for account in ActTable("customize.csv").iterator():
account.load()
if account.accessibility:
print("Welcome! " + account.name)
if self.timer == -1:
if input("Enter Y to stop:").lower() == 'y':
return
else:
print("refresh in " + str(self.timer) + " seconds...")
time.sleep(self.timer)
else:
for account in self._iterator():
account.load()
if account.accessibility:
print("Welcome! " + account.name)
if self.timer == -1:
if input("Enter Y to stop:").lower() == 'y':
return
else:
print("refresh in " + str(self.timer) + " seconds...")
time.sleep(self.timer)
if self.timer == -1:
input("accounts out")
else:
print("...another run...")
self.active()
def passive(self):
for account in self._iterator():
account.load()
if account.accessibility:
return
|
py | 1a340918ca1191b35000f43783e42332aba1bd51 | import pandas as pd
pd.read_csv('cities.csv').to_html('table.html', classes='table table-striped') |
py | 1a340a1cc30c97508d211353bf497b19a54361d8 | #!/usr/bin/env python
import logging
from contextlib import redirect_stdout
from io import StringIO
from itertools import count
from unittest import main
from unittest.mock import Mock
from cli_command_parser import Command, Action, no_exit_handler, ActionFlag, ParamGroup
from cli_command_parser.actions import help_action
from cli_command_parser.context import Context
from cli_command_parser.parameters import before_main, after_main, action_flag
from cli_command_parser.exceptions import CommandDefinitionError, ParameterDefinitionError, ParamConflict
from cli_command_parser.testing import ParserTest
log = logging.getLogger(__name__)
class ActionFlagTest(ParserTest):
def test_help_action(self):
mock = Mock(__name__='bar')
class Foo(Command, error_handler=no_exit_handler):
action = Action()
action.register(mock)
sio = StringIO()
with redirect_stdout(sio):
foo = Foo.parse(['bar', '-h'])
foo()
self.assertTrue(sio.getvalue().startswith('usage: '))
self.assertEqual(mock.call_count, 0)
def test_af_func_missing(self):
class Foo(Command):
foo = ActionFlag()
with self.assertRaisesRegex(ParameterDefinitionError, 'No function was registered'):
Foo.parse([])
def test_af_order_conflict(self):
class Foo(Command):
foo = ActionFlag()(Mock())
bar = ActionFlag()(Mock())
with self.assertRaisesRegex(CommandDefinitionError, 'different order values'):
Foo.parse([])
def test_af_non_me_group_conflict(self):
class Foo(Command):
with ParamGroup() as group:
foo = ActionFlag()(Mock())
bar = ActionFlag()(Mock())
with self.assertRaisesRegex(CommandDefinitionError, 'different order values'):
Foo.parse([])
def test_af_md_group_conflict(self):
class Foo(Command):
with ParamGroup(mutually_dependent=True) as group:
foo = ActionFlag()(Mock())
bar = ActionFlag()(Mock())
with self.assertRaisesRegex(CommandDefinitionError, 'different order values'):
Foo.parse([])
def test_af_me_group_ok(self):
class Foo(Command):
with ParamGroup(mutually_exclusive=True) as group:
foo = ActionFlag()(Mock())
bar = ActionFlag()(Mock())
self.assert_parse_results(Foo, [], {'foo': False, 'bar': False})
def test_af_mixed_grouping_rejected(self):
class Foo(Command):
with ParamGroup(mutually_exclusive=True) as group:
foo = ActionFlag()(Mock())
bar = ActionFlag()(Mock())
baz = ActionFlag()(Mock())
with self.assertRaisesRegex(CommandDefinitionError, 'different order values'):
Foo.parse([])
def test_af_mixed_grouping_ordered_ok(self):
attrs = ('foo', 'bar', 'baz')
for i, attr in enumerate(attrs):
with self.subTest(attr=attr):
mocks = [Mock(), Mock(), Mock()]
class Foo(Command):
with ParamGroup(mutually_exclusive=True) as group:
foo = ActionFlag()(mocks[0])
bar = ActionFlag()(mocks[1])
baz = ActionFlag(order=2)(mocks[2])
foo = Foo.parse([f'--{attr}'])
foo()
self.assertTrue(mocks[i].called)
for j in {0, 1, 2} - {i}:
self.assertFalse(mocks[j].called)
parsed = foo.ctx.get_parsed()
self.assertTrue(parsed[attr])
for a in set(attrs) - {attr}:
self.assertFalse(parsed[a])
def test_no_reassign(self):
with self.assertRaises(CommandDefinitionError):
class Foo(Command):
foo = ActionFlag()(Mock())
@foo
def bar(self):
pass
def test_short_option_conflict_rejected(self):
class Foo(Command):
bar = ActionFlag('-b', order=1)(Mock())
baz = ActionFlag('-b', order=2)(Mock())
with self.assertRaises(CommandDefinitionError):
Foo.parse([])
def test_extra_flags_provided_cause_error(self):
mocks = [Mock(), Mock()]
class Foo(Command, error_handler=None, multiple_action_flags=False):
foo = ActionFlag('-f', order=1)(mocks[0])
bar = ActionFlag('-b', order=2)(mocks[1])
expected_error_text = r'--foo / -f, --bar / -b \(combining multiple action flags is disabled\)'
with self.assertRaisesRegex(ParamConflict, expected_error_text):
Foo.parse_and_run(['-fb'])
with self.assertRaisesRegex(ParamConflict, expected_error_text):
Foo.parse_and_run(['--foo', '--bar'])
def test_multi_flag_order_followed(self):
class Foo(Command, multiple_action_flags=True):
def __init__(self):
self.call_order = {}
self.counter = count()
@action_flag('-f', order=1)
def foo(self):
self.call_order['foo'] = next(self.counter)
@action_flag('-b', order=2)
def bar(self):
self.call_order['bar'] = next(self.counter)
for case, args in {'combined': ['-fb'], 'split': ['-b', '-f']}.items():
with self.subTest(case=case):
foo = Foo.parse_and_run(args)
self.assertLess(foo.call_order['foo'], foo.call_order['bar'])
def test_before_and_after_flags(self):
class Foo(Command, multiple_action_flags=True):
def __init__(self):
self.call_order = {}
self.counter = count()
@before_main('-f', order=1)
def foo(self):
self.call_order['foo'] = next(self.counter)
def main(self):
super().main()
self.call_order['main'] = next(self.counter)
@after_main('-b', order=2)
def bar(self):
self.call_order['bar'] = next(self.counter)
for case, args in {'combined': ['-fb'], 'split': ['-b', '-f']}.items():
with self.subTest(case=case):
foo = Foo.parse_and_run(args)
self.assertLess(foo.call_order['foo'], foo.call_order['main'])
self.assertLess(foo.call_order['main'], foo.call_order['bar'])
self.assertEqual(2, foo.ctx.actions_taken) # 2 because no non-flag Actions
with self.subTest(case='only after'):
foo = Foo.parse_and_run(['-b'])
self.assertNotIn('foo', foo.call_order)
self.assertLess(foo.call_order['main'], foo.call_order['bar'])
self.assertEqual(1, foo.ctx.actions_taken) # 1 because no non-flag Actions
with self.subTest(case='only before'):
foo = Foo.parse_and_run(['-f'])
self.assertLess(foo.call_order['foo'], foo.call_order['main'])
self.assertNotIn('bar', foo.call_order)
self.assertEqual(1, foo.ctx.actions_taken) # 1 because no non-flag Actions
def test_af_before_and_after_with_action(self):
class Foo(Command):
action = Action()
def __init__(self):
self.call_order = {}
self.counter = count()
@action(default=True)
def default_action(self):
self.call_order['default_action'] = next(self.counter)
@before_main('-f')
def foo(self):
self.call_order['foo'] = next(self.counter)
@after_main('-b')
def bar(self):
self.call_order['bar'] = next(self.counter)
foo = Foo.parse_and_run(['-fb'])
self.assertLess(foo.call_order['foo'], foo.call_order['default_action'])
self.assertLess(foo.call_order['default_action'], foo.call_order['bar'])
self.assertEqual(3, foo.ctx.actions_taken)
def test_bad_action(self):
with self.assertRaises(ParameterDefinitionError):
class Foo(Command):
action_flag(action='store')(Mock())
def test_equals(self):
self.assertEqual(help_action, help_action)
def test_dunder_get(self):
mock = Mock()
class Foo(Command):
@action_flag('-f')
def foo(self):
mock()
Foo.parse(['-f']).foo()
self.assertTrue(mock.called)
def test_no_result(self):
mock = Mock()
class Foo(Command):
@action_flag('-b')
def bar(self):
mock()
foo = Foo.parse(['-b'])
self.assertIsInstance(Foo.bar, ActionFlag)
with foo.ctx:
self.assertFalse(Foo.bar.result()(foo))
def test_no_func(self):
flag = ActionFlag()
with Context() as ctx:
flag.store_const()
with self.assertRaises(ParameterDefinitionError):
flag.result()
def test_not_provided(self):
flag = ActionFlag()
with Context() as ctx:
self.assertFalse(flag.result())
def test_before_main_sorts_before_after_main(self):
a, b = ActionFlag(before_main=False), ActionFlag(before_main=True)
expected = [b, a]
self.assertListEqual(expected, sorted([a, b]))
def test_after_main_always_available(self):
with self.assertRaisesRegex(ParameterDefinitionError, 'cannot be combined with'):
ActionFlag(before_main=False, always_available=True)
def test_nargs_not_allowed(self):
with self.assertRaises(TypeError):
ActionFlag(nargs='+')
def test_type_not_allowed(self):
with self.assertRaises(TypeError):
ActionFlag(type=int)
def test_choices_not_allowed(self):
with self.assertRaises(TypeError):
ActionFlag(choices=(1, 2))
if __name__ == '__main__':
try:
main(warnings='ignore', verbosity=2, exit=False)
except KeyboardInterrupt:
print()
|
py | 1a340ab4251cf1e281c6c39ff9c75e93970ff884 | from django.contrib import admin
from .models import Device
# Register your models here.
class DeviceAdmin(admin.ModelAdmin):
pass
admin.site.register(Device, DeviceAdmin)
|
py | 1a340b8a9d96aa897b9928e71f13cf1334e2808c | #!/usr/bin/env python3
"""Fetch RSS feed from phpBB forum and post it to Slack channel.
2017/Nov/15 @ Zdenek Styblik <[email protected]>
"""
import argparse
import logging
import sys
import time
import traceback
from typing import Dict, List
import feedparser
import rss2irc
import rss2slack
CACHE_EXPIRATION = 86400 # seconds
HTTP_TIMEOUT = 30 # seconds
def format_message(
url: str, msg_attrs: Dict[str, str], handle: str = ''
) -> Dict:
"""Return formatted message as Slack's BlockKit section.
:raises: `KeyError`
"""
if handle:
if 'category' in msg_attrs and msg_attrs['category']:
tag = '[{:s}-{:s}] '.format(handle, msg_attrs['category'])
else:
tag = '[{:s}] '.format(handle)
else:
tag = ''
return {
'type': 'section',
'text': {
'type': 'mrkdwn',
'text': '{:s}<{:s}|{:s}> ({:d})'.format(
tag, url, msg_attrs['title'], msg_attrs['comments_cnt']
)
}
}
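# Illustrative sketch (added): with the hypothetical inputs below, format_message()
# builds a Slack Block Kit section; the URL, handle and attribute values are made up.
# format_message(
#     'https://forum.example.org/viewtopic.php?t=1',
#     {'title': 'New release', 'category': 'announcements', 'comments_cnt': 3},
#     handle='phpbb',
# )
# -> {'type': 'section',
#     'text': {'type': 'mrkdwn',
#              'text': '[phpbb-announcements] <https://forum.example.org/viewtopic.php?t=1|New release> (3)'}}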
def get_authors_from_file(logger: logging.Logger, fname: str) -> List[str]:
"""Return list of authors of interest from given file."""
if not fname:
return []
try:
with open(fname, 'rb') as fhandle:
authors = [
line.decode('utf-8').strip()
for line in fhandle.readlines()
if line.decode('utf-8').strip() != ''
]
except Exception:
logger.error(traceback.format_exc())
authors = []
return authors
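# Illustrative sketch (added): given a UTF-8 file with one author name per line (the
# filename 'authors.txt' is an assumption), the helper returns the stripped names and
# falls back to an empty list on read errors.
# authors = get_authors_from_file(logging.getLogger('phpbb2slack'), 'authors.txt')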
def main():
"""Fetch phpBB RSS feed and post RSS news to Slack."""
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
logger = logging.getLogger('phpbb2slack')
args = parse_args()
if args.verbosity:
logger.setLevel(logging.DEBUG)
if args.cache_expiration < 0:
logger.error("Cache expiration can't be less than 0.")
sys.exit(1)
try:
slack_token = rss2slack.get_slack_token()
authors = get_authors_from_file(logger, args.authors_file)
data = rss2irc.get_rss(logger, args.rss_url, args.rss_http_timeout)
if not data:
logger.error('Failed to get RSS from %s', args.rss_url)
sys.exit(1)
news = parse_news(data, authors)
if not news:
logger.info('No news?')
sys.exit(0)
cache = rss2irc.read_cache(logger, args.cache)
scrub_cache(logger, cache)
for key in list(news.keys()):
if key not in cache.items:
continue
logger.debug('Key %s found in cache', key)
comments_cached = int(cache.items[key]['comments_cnt'])
comments_actual = int(news[key]['comments_cnt'])
if comments_cached == comments_actual:
cache.items[key]['expiration'] = (
int(time.time()) + args.cache_expiration
)
news.pop(key)
slack_client = rss2slack.get_slack_web_client(
slack_token, args.slack_base_url, args.slack_timeout
)
if not args.cache_init:
for url in list(news.keys()):
msg_blocks = [
format_message(url, news[url], args.handle)
]
try:
rss2slack.post_to_slack(
logger, msg_blocks, slack_client, args.slack_channel,
)
except ValueError:
news.pop(url)
finally:
time.sleep(args.sleep)
expiration = int(time.time()) + args.cache_expiration
update_cache(cache, news, expiration)
rss2irc.write_cache(cache, args.cache)
except Exception:
logger.debug(traceback.format_exc())
# TODO(zstyblik):
# 1. touch error file
# 2. send error message to the channel
finally:
sys.exit(0)
def parse_args() -> argparse.Namespace:
"""Return parsed CLI args."""
parser = argparse.ArgumentParser()
parser.add_argument(
'--authors-of-interest',
dest='authors_file', type=str, default=None,
help='Path to file which contains list of authors, one per line. '
'Only threads which are started by one of the authors on the '
'list will be pushed.'
)
parser.add_argument(
'--cache',
dest='cache', type=str, default=None,
help='Path to cache file.'
)
parser.add_argument(
'--cache-expiration',
dest='cache_expiration', type=int,
default=CACHE_EXPIRATION,
help='Time, in seconds, for how long to keep items in cache.'
)
parser.add_argument(
'--cache-init',
dest='cache_init', action='store_true', default=False,
help='Prevents posting news to Slack. This is useful '
'when bootstrapping a new RSS feed.'
)
parser.add_argument(
'--handle',
dest='handle', type=str, default=None,
help='Handle/callsign of this feed.'
)
parser.add_argument(
'--rss-url',
dest='rss_url', type=str, required=True,
help='URL of RSS Feed.'
)
parser.add_argument(
'--rss-http-timeout',
dest='rss_http_timeout', type=int,
default=HTTP_TIMEOUT,
help='HTTP Timeout. Defaults to {:d} seconds.'.format(HTTP_TIMEOUT)
)
parser.add_argument(
'--slack-base-url',
dest='slack_base_url', type=str,
default=rss2slack.SLACK_BASE_URL,
help='Base URL for Slack client.'
)
parser.add_argument(
'--slack-channel',
dest='slack_channel', type=str, required=True,
help='Name of Slack channel to send formatted news to.'
)
parser.add_argument(
'--slack-timeout',
dest='slack_timeout', type=int,
default=HTTP_TIMEOUT,
help='Slack API Timeout. Defaults to {:d} seconds.'.format(
HTTP_TIMEOUT
)
)
parser.add_argument(
'--sleep',
dest='sleep', type=int, default=2,
help='Sleep between messages in order to avoid '
'possible excess flood/API call rate limit.'
)
parser.add_argument(
'-v', '--verbose',
dest='verbosity', action='store_true', default=False,
help='Increase logging verbosity.'
)
return parser.parse_args()
def parse_news(data: str, authors: List[str]) -> Dict:
"""Parse-out link and title out of XML."""
news = {}
feed = feedparser.parse(data)
for entry in feed['entries']:
link = entry.pop('link', None)
title = entry.pop('title', None)
author_detail = entry.pop('author_detail', {'name': None})
if not link or not title:
continue
if authors and author_detail['name'] not in authors:
continue
category = entry.pop('category', None)
comments_cnt = entry.pop('slash_comments', 0)
try:
comments_cnt = int(comments_cnt)
except ValueError:
comments_cnt = 0
news[link] = {
'title': title,
'category': category,
'comments_cnt': int(comments_cnt),
}
return news
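# Illustrative sketch (added): parse_news() maps each feed entry's link to its title,
# category and comment count, e.g. (values invented for the example):
# {'https://forum.example.org/viewtopic.php?t=42':
#     {'title': 'Some thread', 'category': 'general', 'comments_cnt': 2}}
# A non-empty 'authors' list keeps only entries whose author name is on that list.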
def scrub_cache(logger: logging.Logger, cache: rss2irc.CachedData) -> None:
"""Scrub cache and remove expired items."""
time_now = int(time.time())
for key in list(cache.items.keys()):
try:
expiration = int(cache.items[key]['expiration'])
except (KeyError, ValueError):
logger.error(traceback.format_exc())
logger.error(
"Invalid cache entry will be removed: '%s'", cache.items[key]
)
cache.items.pop(key)
continue
if expiration < time_now:
logger.debug('URL %s has expired.', key)
cache.items.pop(key)
def update_cache(
cache: rss2irc.CachedData, news: Dict, expiration: int
) -> None:
"""Update cache contents."""
for key in list(news.keys()):
cache.items[key] = {
'expiration': expiration,
'comments_cnt': int(news[key]['comments_cnt']),
}
if __name__ == '__main__':
main()
|
py | 1a340cffab448f20dcc7d9eeb4e1a2c8eefa75ea | import math
import random
import smtplib
import re
import json
import pandas as pd
import requests
from bs4 import BeautifulSoup
class PostalUtils:
def __init__(self, pc):
self.pc = str(pc)
self.data = None
def get_details(self):
import requests
self.data = requests.get(f'https://thezipcodes.com/api/v1/search?zipCode={str(self.pc)}&countryCode=IN&apiKey=66a4d8e95477daca5f139eedbca5ca3d')
if self.data.status_code != 200:
self.data = None
def extract_info(self):
self.data = json.loads(self.data.text)
if self.data['success']:
country = self.data['location'][0]['country']
region = self.data['location'][0]['city']
state = self.data['location'][0]['state']
return region, state, country
return 'Data unavailable. Check PINCODE!'
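# Illustrative sketch (added): typical PostalUtils flow; the PIN code is an arbitrary
# example and the call only works while the API key baked into get_details() is valid.
# utils = PostalUtils(110001)
# utils.get_details()
# if utils.data is not None:
#     region, state, country = utils.extract_info()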
class OTP:
def __init__(self):
self.otp = None
def generate_otp(self, leng = 6):
digits="0123456789"
OTP=""
for i in range(leng):
OTP+=digits[math.floor(random.random()*10)]
return OTP
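# Illustrative sketch (added): generate_otp() draws 'leng' random digits, so a call
# such as OTP().generate_otp(4) could return e.g. '0473'; the value is random, not fixed.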
def send_email(self, to_mail):
try:
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.ehlo()
s.login("[email protected]", "!1Abcderf")
self.otp = self.generate_otp()
s.sendmail("[email protected]", to_mail, self.otp)
s.quit()
return self.otp, 'Success'
except Exception as e:
return self.otp, e
def validate_details(otp, email_otp_field, dob, aadhar, pan, passport):
from datetime import date
try:
today = date.today()
birthDate = dob
age = today.year - birthDate.year - ((today.month, today.day) < (birthDate.month, birthDate.day))
if age < 18:
return 'Should be at least 18 years old!'
elif not aadharNumVerify(aadhar):
return 'Invalid Aadhar!'
elif validate_pan(pan):
return 'Invalid PAN'
elif not passport_validator(passport):
return 'Invalid passport number'
elif str(email_otp_field) != str(otp):
return 'Incorrect OTP!'
except Exception as e:
return e
def validate_pincode(pincode):
try:
postal_details = PostalUtils(pincode)
postal_details.get_details()
r,s,c = postal_details.extract_info()
return r, s, c
except Exception as e:
return e
def validate_pan(pan, flag = 'individual'):
pan = pan.upper()
if flag == 'individual':
regex = "[A-Z]{3}P[A-Z][0-9]{4}[A-Z]{1}"
p = re.compile(regex)
if not (re.search(p, pan) and len(pan) == 10):
return True
def aadharNumVerify(aadhar) :
"""
Reference : https://stackoverflow.com/questions/27686384/validating-the-aadhar-card-number-in-a-application
"""
verhoeff_table_d = (
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),
(1, 2, 3, 4, 0, 6, 7, 8, 9, 5),
(2, 3, 4, 0, 1, 7, 8, 9, 5, 6),
(3, 4, 0, 1, 2, 8, 9, 5, 6, 7),
(4, 0, 1, 2, 3, 9, 5, 6, 7, 8),
(5, 9, 8, 7, 6, 0, 4, 3, 2, 1),
(6, 5, 9, 8, 7, 1, 0, 4, 3, 2),
(7, 6, 5, 9, 8, 2, 1, 0, 4, 3),
(8, 7, 6, 5, 9, 3, 2, 1, 0, 4),
(9, 8, 7, 6, 5, 4, 3, 2, 1, 0))
verhoeff_table_p = (
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),
(1, 5, 7, 6, 2, 8, 3, 0, 9, 4),
(5, 8, 0, 3, 7, 9, 6, 1, 4, 2),
(8, 9, 1, 6, 0, 4, 3, 5, 2, 7),
(9, 4, 5, 3, 1, 2, 6, 8, 7, 0),
(4, 2, 8, 6, 5, 7, 3, 9, 0, 1),
(2, 7, 9, 3, 8, 0, 6, 4, 1, 5),
(7, 0, 4, 6, 9, 1, 3, 2, 5, 8))
# verhoeff_table_inv = (0, 4, 3, 2, 1, 5, 6, 7, 8, 9)
def checksum(aadhar_inner):
"""For a given number generates a Verhoeff digit and
returns number + digit"""
c = 0
for i, item in enumerate(reversed(aadhar_inner)):
c = verhoeff_table_d[c][verhoeff_table_p[i % 8][int(item)]]
return c
# Validate Verhoeff checksum
return checksum(str(aadhar)) == 0 and len(str(aadhar)) == 12
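# Illustrative sketch (added): aadharNumVerify() accepts only 12-digit strings whose
# Verhoeff checksum evaluates to 0; the number below is an invented placeholder, not
# a real Aadhaar, so it will most likely fail the check.
# is_valid = aadharNumVerify('123456789012')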
def passport_validator(passp):
skeleton = "^[A-PR-WYa-pr-wy][1-9]\\d\\s?\\d{4}[1-9]$"
p = re.compile(skeleton)
m = re.match(p, passp)
if m is None or len(passp) != 8:
return False
else:
return True
class Scraper_1:
def __init__(self, c_name, cin):
self.data = {}
self.c_name = c_name
self.cin = cin
self.dins_reference = []
self.link = f'https://www.zaubacorp.com/company/{self.c_name.replace(" ", "-").upper()}/{self.cin}'
def scrape(self):
try:
table_MN = pd.read_html(self.link)
if table_MN is not None:
self.data = {table_MN[0].columns[0]:table_MN[0].columns[1]}
self.data.update({value[0]:value[1] for value in table_MN[0].values})
for value in table_MN[7].iloc[:,0].values:
if value.isnumeric():
self.dins_reference.append(value)
self.dins_reference = set(self.dins_reference)
response = requests.get(self.link)
content = BeautifulSoup(response.text, "html.parser")
add_c = content.find_all("div", class_= 'col-lg-6 col-md-6 col-sm-12 col-xs-12')[2].text.split('Address: ')[1]
self.data['address'] = add_c
else:
return 'Incorrect name'
except Exception as e:
return e
def check_c(corporate_name, c_city, c_reg_no, cin_no, c_status, c_doi, c_DIN, c_gstin, c_pan, c_cat, f_ly, f_ly_2, v1, v2, c_address, c_state):
score = 0
scr = Scraper_1(corporate_name, cin_no)
content = scr.scrape()
err = ''
if content is not None:
return content
else:
if corporate_name == scr.data['Company Name'].upper():
score += 1
else:
err += 'No such Corp. found with the given name;'
if c_address in scr.data['address']:
score += 1
else:
err += 'Address incorrect;'
if c_city in scr.data['address']:
score +=1
else:
err += 'Incorrect city;'
if c_state in scr.data['address']:
score +=1
else:
err += 'Incorrect State;'
if c_status == scr.data['Company Status']:
score += 1
else:
err += 'Incorrect company status;'
if c_cat == scr.data['Company Sub Category']:
score += 1
else:
err += 'Incorrect company category;'
if c_reg_no == str(scr.data['Registration Number']):
score += 1
else:
err += 'Incorrect registration number;'
if cin_no == str(scr.data['CIN']):
score += 1
else:
err += 'Incorrect CIN;'
if set(c_DIN.split(';')) == scr.dins_reference:
score += 1
else:
err += 'DINs missing or not mentioned completely;'
regex = "^[0-9]{2}[A-Z]{5}[0-9]{4}" + "[A-Z]{1}[1-9A-Z]{1}" + "Z[0-9A-Z]{1}$"
p = re.compile(regex)
if (re.search(p, str(c_gstin))):
score += 1
else:
err += 'Invalid GSTIN;'
if str(c_gstin)[2:12] == c_pan:
score += 1
else:
err += 'Invalid PAN;'
if str(v1).replace(',', '') == str(f_ly) and str(v2).replace(',', '') == str(f_ly_2):
score += 1
else:
err += 'Invalid financials'
return err
|
py | 1a340d00b7bef95b3528606b615808a95d3f7a53 | import numpy as np
import pandas as pd
import xarray as xr
import glob
from statsrat.expr.schedule import schedule
from statsrat.expr.oat import oat
from copy import deepcopy
class experiment:
"""
A class used to represent learning experiments.
Attributes
----------
resp_type : str
The type of behavioral response made by the learner. Must be the same for
all schedules in the experiment. Can be either 'choice' (discrete responses),
'exct' (excitatory) or 'supr' (suppression of an ongoing activity).
schedules : dict
A dictionary of the experiment's schedules (sequences of stimuli and feedback etc
that typically correspond to groups in the experimental design).
schedule_names : list
Names of the experiment's schedules.
oats : dict
A dictionary of the experiment's ordinal adequacy tests (OATs).
notes : str or None
Notes on the experiment (e.g. explanation of design, references).
Methods
-------
make_trials(self)
Create a time step level dataset for the whole experiment.
read_csv(self, path, x_col, resp_col, resp_map, ident_col = None, conf_col = None, schedule = None, other_info = None, header = 'infer', n_final = 8)
Import empirical data from .csv files.
See Also
--------
See 'predef.cat' for category learning examples.
See 'predef.pvl_iti' for Pavlovian conditioning examples.
"""
def __init__(self, schedules, oats = None, notes = None):
"""
Parameters
----------
schedules : dict
A dictionary of the experiment's schedules (sequences of stimuli and feedback etc
that typically correspond to groups in the experimental design).
oats : dict or None, optional
A dictionary of the experiment's ordinal adequacy tests (OATs), or
else None (experiment has no OATs). Defaults to None.
notes : str or None, optional
Notes on the experiment (e.g. explanation of design, references).
Defaults to None (i.e. no notes).
"""
# check that everything in the 'schedules' argument is a schedule object
is_scd = []
for s in schedules.values():
is_scd += [isinstance(s, schedule)]
assert not (False in is_scd), 'Non-schedule object input as schedule.'
# check that everything in the 'oat' argument is an oat object
if not oats is None:
if len(oats) > 0:
is_oat = []
for o in oats.values():
is_oat += [isinstance(o, oat)]
assert not (False in is_oat), 'Non-oat object input as oat.'
# check that that all schedules have the same response type
self.resp_type = schedules[list(schedules.keys())[0]].resp_type
if len(schedules) > 1:
match_resp_type = []
for s in schedules.values():
match_resp_type += [self.resp_type == s.resp_type]
assert not (False in match_resp_type), 'Schedules have non-matching response types (resp_type).'
# add other data to 'self'
self.schedules = deepcopy(schedules)
for s in self.schedules:
self.schedules[s].name = s # assign schedule name attributes based on dictionary keys
self.schedule_names = list(self.schedules.keys())
self.oats = oats
self.notes = notes
def make_trials(self, schedule = None):
"""
Create a time step level dataset for the whole experiment.
Parameters
----------
schedule : str, optional
Name of the schedule from which to make trials. By default
selects the first schedule in the experiment object's
definition.
Returns
-------
dataset (xarray)
Contains time step level data (stimuli, outcomes etc.). See
documentation on the schedule class for more details.
Notes
-----
Adds in 'time', an alternative coordinate for time steps (dimension t).
This indicates real world time (in abstract units), including possible delays
since previous time steps (e.g. for an experiment with several sessions
on different days). Starts at 0 for the first time step, and each time
step represents a time unit of 1.
"""
# determine experimental schedule to use
if schedule is None:
scd = self.schedules[list(self.schedules.keys())[0]]
else:
scd = self.schedules[schedule]
# make list of time steps
t_order = []
trial_index = []
m = 0 # index for trials
for st in scd.stages:
iti = scd.stages[st].iti
order = scd.stages[st].order
if scd.stages[st].intro_length > 0:
trial_def_bool = np.array( (scd.trial_def.stage_name == st) & (scd.trial_def.trial_name == 'intro') )
trial_def_index = list( scd.trial_def.t[trial_def_bool].values )
t_order += trial_def_index
trial_index += scd.stages[st].intro_length*[m]
m += 1
for j in range(scd.stages[st].n_rep):
if scd.stages[st].order_fixed == False:
np.random.shuffle(order)
for k in range(scd.stages[st].n_trial):
trial_def_bool = np.array( (scd.trial_def.stage_name == st) & (scd.trial_def.trial == order[k]) )
trial_def_index = list( scd.trial_def.t[trial_def_bool].values )
t_order += trial_def_index
trial_index += (iti + 1)*[m]
m += 1
if scd.stages[st].outro_length > 0:
trial_def_bool = np.array( (scd.trial_def.stage_name == st) & (scd.trial_def.trial_name == 'outro') )
trial_def_index = list( scd.trial_def.t[trial_def_bool].values )
t_order += trial_def_index
trial_index += scd.stages[st].outro_length*[m]
m += 1
# make list for 'time' coordinate
st_names = list(scd.stages.keys())
time = list(np.arange(scd.stages[st_names[0]].n_t))
for i in range(1, scd.n_stage):
time += list(np.arange(scd.stages[st_names[i]].n_t) + scd.delays[i - 1] + time[-1] + 1)
# make new trials object
trials = scd.trial_def.loc[{'t' : t_order}]
trials = trials.assign_coords({'t' : range(scd.n_t)})
trials = trials.assign_coords({'trial' : ('t', trial_index)})
trials = trials.assign_coords({'time' : ('t', time)})
trials = trials.assign_attrs({'schedule': scd.name})
return trials
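# Illustrative sketch (added, not part of the original class): assuming 'expt' is an
# experiment built from predefined schedules and 'design' is one of its schedule
# names (both assumptions), make_trials() yields an xarray Dataset of time-step data.
# trials = expt.make_trials(schedule='design')
# trials['x']      # stimulus (cue) array
# trials['time']   # real-world time coordinate added by make_trials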
def read_csv(self, path, x_col, resp_col, resp_map, ident_col = None, conf_col = None, schedule = None, other_info = None, header = 'infer', n_final = 8):
"""
Import empirical data from .csv files.
Parameters
----------
path: str
Path to the .csv files.
x_col: list
Names of columns (strings) indicating cues (stimulus
attributes, i.e. columns of 'x').
resp_col: list
Names of columns (strings) indicating responses.
resp_map: dict
Maps response names in the raw data to response names in the
schedule definition.
ident_col: str or None, optional
If string, name of column indicating individual identifier
(the 'ident' variable). If None, then file names are used
as 'ident'. Defaults to None.
conf_col: str or None, optional
Name of the column indicating confidence responses (i.e.
a measure of confidence following choices, typically
obtained in the test stages of human classification tasks).
Defaults to None (suitable for data without confidence responses).
schedule: str, optional
Name of the schedule from which to make trials. By default
selects the first schedule in the experiment object's
definition.
other_info: dict or None, optional
Specifies other information (e.g. demographics) to be imported.
Dictionary keys are variable names (e.g. 'sex', 'age'), while the
values give the corresponding row index (e.g. a question such as
'What is your age?') and column name as a tuple. Defaults to None
(do not import any additional data).
header: int or list of int, default ‘infer’
Passed to pandas.read_csv. Row number(s) to use as the column names,
and the start of the data.
n_final: int, optional
Number of trials at end of each stage to use for calculating percent correct
choices. For example, set n_final = 10 to compute percent correct choices
using the last 10 trials of each stage.
Returns
-------
ds : dataset (xarray)
Contains time step level data (stimuli, outcomes, behavior,
possible outcomes etc.).
summary : dataframe (pandas)
Each row corresponds to a participant. Contains proportion of
correct responses in each non-test stage, plus OAT scores.
Notes
-----
To avoid confusion, data from different schedules (e.g. different experimental
groups) should be kept in separate directories.
It is assumed that any numeric participant identifiers ('ident') are
integers rather than floats.
The 'correct' variable encodes whether participant behavior ('b') matched
the outcome ('y'). It is only really valid for category learning and similar
experiments, and does not mean anything for stages without feedback (i.e. test stages).
Participant IDs (called 'ident') should be unique. Any duplicates will be modified by
adding '-1', '-2', '-3' etc. (respectively for the second, third, fourth etc. instance
of the ID) to the end of the ID string.
Current Limitations:
For now, I assume that each time step represents a trial (i.e. iti = 0).
I also assume that all 'x_names' in the Python schedule object are lower case.
I also assume that each stage has at most one trial type for any set of punctate cues.
I also assume that the Python schedule object has exactly the right number of trials.
It is assumed that there are no intros or outros to any stages.
Currently, the 'time' (real world time) coordinate is only a copy of 't' (the time step
number). This represents the assumption that there are no delays between stages of the
experiment.
"""
# list .csv files in the directory
file_set = [file for file in glob.glob(path + "**/*.csv", recursive=True)]
assert len(file_set) > 0, 'Cannot find any files in specified path.'
# determine experimental schedule to use
if schedule is None:
scd = self.schedules[list(self.schedules.keys())[0]]
else:
scd = self.schedules[schedule]
# set up pct_correct
n_stage = len(scd.stages)
pct_correct = dict()
for st in scd.stages:
not_test = scd.stages[st].lrn
if not_test:
var_name = st + '_' + 'last' + str(n_final) + '_pct_correct'
pct_correct[var_name] = []
# **** loop through files ****
n_f = len(file_set)
ds_dict = {}
did_not_work_read = []
did_not_work_ident = []
did_not_work_b = []
did_not_work_misc = []
raw_ident = [] # raw particpant IDs (used to detect duplicates)
n_xc = len(x_col) # number of cue columns in raw data frame
n_rc = len(resp_col) # number of response columns in raw data frame
if conf_col is None:
usecols = x_col + resp_col # columns to import as the data frame 'raw'
else:
usecols = x_col + resp_col + [conf_col] # columns to import as the data frame 'raw'
for i in range(n_f):
# **** import raw data ****
try:
raw = pd.read_csv(file_set[i], error_bad_lines = False, warn_bad_lines = False, header = header, usecols = usecols)
raw.dropna(subset = x_col, thresh = 1, inplace = True) # drop rows without recorded cues ('x')
raw.dropna(subset = resp_col, thresh = 1, inplace = True) # drop rows without recorded responses
raw_full = pd.read_csv(file_set[i], error_bad_lines = False, warn_bad_lines = False, header = header, na_filter = True) # copy of 'raw' whose rows won't be dropped (used for importing 'ident' and 'other info', e.g. demographics)
index = np.zeros(raw.shape[0])
# drop rows in which none of the response columns has one of the expected responses
for col in resp_col:
index += raw[col].isin(list(resp_map.keys()))
raw = raw.loc[np.array(index > 0)]
n_r = raw.shape[0] # number of rows in raw data frame
raw.index = range(n_r) # re-index 'raw'
assert n_r == scd.n_t, 'wrong number of trials for file {}'.format(file_set[i]) + '\n' + 'trials found: ' + str(n_r) + '\n' + 'trials expected: ' + str(scd.n_t)
except Exception as e:
print(e)
did_not_work_read += [file_set[i]]
if not file_set[i] in did_not_work_read:
# **** figure out 'ident' (participant ID) ****
if ident_col is None:
ident = file_set[i].replace('.csv', '').replace(path + '/', '') # participant ID is file name
else:
try:
ident_col_vals = np.array(raw_full[ident_col].values, dtype = 'str')
lengths = np.char.str_len(ident_col_vals)
ident = ident_col_vals[np.argmax(lengths)]
if not isinstance(ident, str): # change participant ID to string if it's not already a string
if ident.dtype == float:
ident = ident.astype(int)
ident = ident.astype(str)
# **** if the participant ID is a duplicate, modify it ****
if i > 0:
ident_array = np.array(raw_ident) # array of IDs already imported
n_repeat = np.sum(ident_array == ident) # number of times the ID has already been imported
else:
n_repeat = 0 # obviously the first ID won't already be in the imported data
raw_ident += [ident]
if n_repeat > 0:
ident += '-' + str(n_repeat)
except Exception as e:
print(e)
did_not_work_ident += [file_set[i]]
if not file_set[i] in (did_not_work_read + did_not_work_ident + did_not_work_misc):
try:
# **** determine b (response) from raw data ****
b = xr.DataArray(0, coords = [range(scd.n_t), scd.y_names], dims = ['t', 'y_name']) # observed responses
for m in range(scd.n_t):
for k in range(n_rc):
if pd.notna(raw.loc[m, resp_col[k]]):
raw_y_name = raw.loc[m, resp_col[k]].lower()
assert raw_y_name in resp_map.keys(), 'raw data response name "{}" is not found in "resp_map" (trial {})'.format(raw_y_name, m)
mapped_y_name = resp_map[raw_y_name]
b.loc[{'t' : m, 'y_name' : mapped_y_name}] = 1
except Exception as e:
print(e)
did_not_work_b += [file_set[i]]
if not file_set[i] in (did_not_work_read + did_not_work_ident + did_not_work_b + did_not_work_misc):
try:
# **** determine trial type from raw data ****
t_order = [] # list of time steps to produce the 'trials' data frame
trial_list = []
m = 0 # index for trials
for st in scd.stages:
iti = scd.stages[st].iti
n_stage_trials = scd.stages[st].n_trial * scd.stages[st].n_rep
for j in range(n_stage_trials):
# determine x (stimulus vector) from raw data
raw_x = pd.Series(0, index = scd.x_names)
for k in range(n_xc):
if pd.notna(raw.loc[m, x_col[k]]):
raw_x_name = raw.loc[m, x_col[k]].lower()
if raw_x_name in scd.x_names:
raw_x[raw_x_name] = 1
# find corresponding trial definition (will only work if ITI = 0)
match_raw_x = (scd.trial_def['x'] == np.array(raw_x)).all(axis = 1)
match_stage = scd.trial_def['stage_name'] == st
trial_def_bool = match_stage & match_raw_x
trial_def_index = list(scd.trial_def['t'].loc[{'t' : trial_def_bool}])
if np.sum(trial_def_bool) == 0:
print('cue combination found that is not in schedule definition for stage:') # for debugging
print('stage')
print(st)
print('trial')
print(m)
print('cue combination')
print(raw_x)
# add to list of time steps indices, etc.
t_order += trial_def_index
trial_list += (iti + 1)*[m]
m += 1
# **** make new dataset ****
ds_new = scd.trial_def.loc[{'t' : t_order}]
n_t = len(t_order)
ds_new = ds_new.assign_coords({'t' : range(n_t), 'trial' : ('t', range(len(t_order))), 'time': ('t', range(n_t))})
ds_new = ds_new.assign(b = b)
ds_new = ds_new.expand_dims(ident = [ident])
# **** add confidence ratings ****
if not conf_col is None:
conf_val = np.array(raw[conf_col].values, dtype = 'float')
conf = xr.DataArray(conf_val, coords = [range(scd.n_t)], dims = ['t'])
ds_new = ds_new.assign(conf = conf)
# **** add other information (e.g. demographics) ****
if not other_info is None:
other_dict = dict()
for var_name in other_info:
row = raw_full[other_info[var_name][0]] == other_info[var_name][1]
column = other_info[var_name][2]
var = raw_full.loc[row, column].values[0]
other_dict[var_name] = (['ident'], np.array([var]))
ds_other = xr.Dataset(data_vars = other_dict, coords = {'ident': [ident]})
ds_new = ds_new.merge(ds_other)
# **** code each trial as correct (u matches b) or incorrect ****
u = ds_new['y'].squeeze()
b = ds_new['b'].squeeze()
correct = np.all(u == b, axis = 1)
ds_new = ds_new.assign(correct = correct)
# **** calculate percent correct per stage (excluding test stages) ****
for st in scd.stages:
not_test = scd.stages[st].lrn
if not_test:
stage_name = scd.stages[st].name
index = np.array(ds_new.stage_name == stage_name)
var_name = stage_name + '_' + 'last' + str(n_final) + '_pct_correct'
pct_correct[var_name] += [100*ds_new['correct'].loc[{'t': index}][-n_final:].mean().values]
# **** add individual's dataset to ds_dict ****
ds_dict[ident] = ds_new
except Exception as e:
print(e)
did_not_work_misc += [file_set[i]]
n_dnw_r = len(did_not_work_read)
if n_dnw_r > 0:
print('The following files could not be read by Pandas:')
for i in range(n_dnw_r):
print(did_not_work_read[i])
n_dnw_i = len(did_not_work_ident)
if n_dnw_i > 0:
print('Participant ID (ident) could not be read from the following files:')
for i in range(n_dnw_i):
print(did_not_work_ident[i])
n_dnw_b = len(did_not_work_b)
if n_dnw_b > 0:
print('Behavior (b) could not be read from the following files:')
for i in range(n_dnw_b):
print(did_not_work_b[i])
n_dnw_m = len(did_not_work_misc)
if n_dnw_m > 0:
print('There was a problem importing the following files:')
for i in range(n_dnw_m):
print(did_not_work_misc[i])
# **** merge datasets together ****
try:
ds = xr.combine_nested(list(ds_dict.values()), concat_dim = 'ident', combine_attrs = 'override')
except Exception as e:
print(e)
print('There was a problem merging individual datasets together.')
# **** create summary data frame (each row corresponds to a participant) ****
summary = ds.drop_dims(['t', 'x_name', 'y_name']).to_dataframe()
# **** add pct_correct ****
for st in scd.stages:
not_test = scd.stages[st].lrn
if not_test:
stage_name = scd.stages[st].name
var_name = stage_name + '_' + 'last' + str(n_final) + '_pct_correct'
summary[var_name] = pct_correct[var_name]
# **** calculate behavioral scores ****
n_oats = len(self.oats)
if conf_col is None:
has_conf = False
else:
has_conf = True
for oat in range(n_oats):
oat_name = list(self.oats.keys())[oat]
oat = self.oats[oat_name]
if scd.name in oat.schedule_pos:
summary[oat_name] = oat.behav_score_pos.compute_scores(ds, has_conf)
else:
if scd.name in oat.schedule_neg:
summary[oat_name] = oat.behav_score_neg.compute_scores(ds, has_conf)
summary = summary.set_index(ds.ident.to_series(), drop = True)
return (ds, summary) |
py | 1a34100298b46e7f90670c71a96632e936f0bed1 | #!/usr/bin/env python
"""Copyright (c) 2005-2017, University of Oxford.
All rights reserved.
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
This file is part of Chaste.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the University of Oxford nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import re
import sys
deprecated_notice = re.compile(r"""Copyright \(c\) 2005-\d{4}, University of Oxford.
All rights reserved.
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
((
This file is part of Chaste.
)?)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
\* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
\* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
\* Neither the name of the University of Oxford nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""", re.MULTILINE)
deprecated_notice_GPL = re.compile(r"""Copyright \(C\) University of Oxford, 2005-\d{4}
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
((
This file is part of Chaste.
)?)
Chaste is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
\(at your option\) any later version.
Chaste is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details. The offer of Chaste under the terms of the
License is subject to the License being interpreted in accordance with
English Law and subject to any action against the University of Oxford
being under the jurisdiction of the English Courts.
You should have received a copy of the GNU Lesser General Public License
along with Chaste. If not, see <http://www.gnu.org/licenses/>.
""", re.MULTILINE)
current_notice="""Copyright (c) 2005-2017, University of Oxford.
All rights reserved.
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
This file is part of Chaste.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the University of Oxford nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
py_current_notice='"""'+current_notice+'"""\n'
cpp_current_notice='/*\n\n'+current_notice+'\n*/'
cpp_notice_to_add = cpp_current_notice + "\n\n"
# This is used when replacing a deprecated notice with the latest version,
# to account for the optional text.
replacement_notice = current_notice.replace("\nThis file is part of Chaste.\n", r"\1")
output_notice=current_notice.replace("\nThis file is part of Chaste.\n", "")
boost_normal_distribution_notice = """/* boost random/normal_distribution.hpp header file
*
* Copyright Jens Maurer 2000-2001
* Copyright Steven Watanabe 2010-2011
* Distributed under the Boost Software License, Version 1.0. (See
* accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*"""
pycml_notice=" Processed by pycml - CellML Tools in Python"
xsd2_notice="// Copyright (C) 2005-2007 Code Synthesis Tools CC"
xsd3_notice="// Copyright (C) 2005-2008 Code Synthesis Tools CC"
triangle_notice="""/* Copyright 1993, 1995, 1997, 1998, 2002, 2005 */
/* Jonathan Richard Shewchuk */"""
tetgen_notice="""///////////////////////////////////////////////////////////////////////////////
// //
// TetGen //
// //
// A Quality Tetrahedral Mesh Generator and 3D Delaunay Triangulator //
// //
// Version 1.4 //
// April 16, 2007 //
// //
// Copyright (C) 2002--2007 //
// Hang Si //
// Research Group Numerical Mathematics and Scientific Computing //
// Weierstrass Institute for Applied Analysis and Stochastics //
// Mohrenstr. 39, 10117 Berlin, Germany //
// [email protected] //
// //
// TetGen is freely available through the website: http://tetgen.berlios.de. //
// It may be copied, modified, and redistributed for non-commercial use. //
// Please consult the file LICENSE for the detailed copyright notices. //
// //
///////////////////////////////////////////////////////////////////////////////
"""
tetgen_predicates_notice="""/*****************************************************************************/
/* */
/* Routines for Arbitrary Precision Floating-point Arithmetic */
/* and Fast Robust Geometric Predicates */
/* (predicates.c) */
/* */
/* May 18, 1996 */
/* */
/* Placed in the public domain by */
/* Jonathan Richard Shewchuk */
/* School of Computer Science */
/* Carnegie Mellon University */
/* 5000 Forbes Avenue */
/* Pittsburgh, Pennsylvania 15213-3891 */
/* [email protected] */
"""
py_lgpl_notice = """# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details."""
def CheckForCopyrightNotice(findStrOrRe, fileIn):
"""Test if the (possibly multi-line) string/regexp findStr is contained anywhere in fileIn."""
fileIn.seek(0)
file_text = fileIn.read()
if isinstance(findStrOrRe, type('')):
found = file_text.find(findStrOrRe) >= 0
else:
found = findStrOrRe.search(file_text) is not None
return found
def UpdateFile(oldFilePath, newFilePath):
"""Replace the contents of oldFilePath with newFilePath.
This removes the old file and renames the new to match, but also
transfers permissions etc.
"""
perm = os.stat(oldFilePath).st_mode
os.rename(newFilePath, oldFilePath)
os.chmod(oldFilePath, perm)
def ReplaceStringInFile(findRe, repStr, filePath):
"""Replaces all strings matching findRe by repStr in file filePath."""
tempName = filePath+'~'
input = open(filePath)
output = open(tempName, 'w')
s = input.read()
output.write(findRe.sub(repStr, s))
output.close()
input.close()
UpdateFile(filePath, tempName)
print 'Notice: replaced deprecated copyright notice in', filePath
def HeadAppendStringInFile(appendString, filePath):
"""Adds appendStr to the top of file filePath"""
tempName = filePath+'~'
input = open(filePath)
output = open(tempName, 'w')
s = input.read()
output.write(appendString)
output.write(s)
output.close()
input.close()
UpdateFile(filePath, tempName)
print 'Notice: applied copyright notice in ', filePath
def InspectFile(fileName):
file_in = open(fileName)
if fileName[-21:] == 'CheckForCopyrights.py':
#Can't really check this one, since it knows all the licences
return True
valid_notice = False
if (CheckForCopyrightNotice(cpp_current_notice, file_in) or
CheckForCopyrightNotice(py_current_notice, file_in) or
CheckForCopyrightNotice(output_notice, file_in)):
#print 'Found current notice in '+file_name
valid_notice=True
if (CheckForCopyrightNotice(pycml_notice, file_in) or
CheckForCopyrightNotice(boost_normal_distribution_notice, file_in) or
CheckForCopyrightNotice(xsd2_notice, file_in) or
CheckForCopyrightNotice(xsd3_notice, file_in) or
CheckForCopyrightNotice(triangle_notice, file_in) or
CheckForCopyrightNotice(tetgen_predicates_notice, file_in) or
CheckForCopyrightNotice(tetgen_notice, file_in) or
CheckForCopyrightNotice(py_lgpl_notice, file_in)):
#print 'Found 3rd party notice in '+file_name
if valid_notice:
print "Multiple notices on", file_name
return False
else:
return True
if valid_notice:
return True
if CheckForCopyrightNotice(deprecated_notice, file_in):
print 'Found deprecated copyright notice for', fileName
if apply_update:
ReplaceStringInFile(deprecated_notice, replacement_notice, fileName)
return True
else:
print 'Fix this by doing:',sys.argv[0],'-update'
return False
if CheckForCopyrightNotice(deprecated_notice_GPL, file_in):
print 'Found deprecated GPL copyright notice for', fileName
if apply_update:
ReplaceStringInFile(deprecated_notice_GPL, replacement_notice, fileName)
return True
else:
print 'Fix this by doing:',sys.argv[0],'-update'
return False
print 'Found no copyright notice for', fileName
if apply_new:
if fileName[-3:] == '.py':
print 'Not implemented for .py files'
return False
else:
HeadAppendStringInFile(cpp_notice_to_add, fileName)
return True
else:
print 'Fix this by doing:',sys.argv[0],'-new'
return False
if __name__ == '__main__':
# Check, apply or modify the copyright notices.
# .cpp, .hpp., .py, .java are C++, Python and Java code.
exts = ['.cpp', '.hpp', '.py', '.java']
# SCons files
# output.chaste files in acceptance tests (all Chaste executables should output the valid copyright notice)
# Version.cpp.in is the provenance file
named_files = ['SConscript', 'SConstruct', 'output.chaste', 'Version.cpp.in']
dir_ignores = ['Debug', 'Release', 'build', 'cxxtest', 'testoutput', 'doc', 'projects', 'hierwikiplugin']
startchar_ignores = ['_', '.']
exclusions = ['python/pycml/enum.py', 'python/pycml/pyparsing.py', 'python/pycml/schematron.py']
apply_update = '-update' in sys.argv
apply_new = '-new' in sys.argv
chaste_dir = '.'
if '-dir' in sys.argv:
i = sys.argv.index('-dir')
chaste_dir = os.path.realpath(sys.argv[i+1])
num_no_copyrights = 0
num_copyrights = 0
chaste_dir_len = len(os.path.join(chaste_dir, ''))
for root, dirs, files in os.walk(chaste_dir):
relative_root = root[chaste_dir_len:]
# Check for ignored dirs
for dirname in dirs[:]:
if dirname in dir_ignores or dirname[0] in startchar_ignores:
dirs.remove(dirname)
# Check for source files
for file in files:
relative_path = os.path.join(relative_root, file)
name, ext = os.path.splitext(file)
if ((ext in exts or file in named_files) and
relative_path not in exclusions):
file_name = os.path.join(root, file)
if InspectFile(file_name) == False:
num_no_copyrights += 1
else:
num_copyrights += 1
# Let the test summary script know
if chaste_dir == ".":
dir = os.getcwd()
else:
dir = chaste_dir
print "Copyright test run over ",dir," (",num_no_copyrights+num_copyrights,") files"
if num_no_copyrights > 0:
print
print "The next line is for the benefit of the test summary scripts."
print "Failed",num_no_copyrights,"of",num_no_copyrights+num_copyrights,"tests"
# Return a non-zero exit code if orphans were found
sys.exit(num_no_copyrights)
else:
print "Infrastructure test passed ok."
|
py | 1a34118f7e585938298855a14246c9e558e4925e | # Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List, Optional, Tuple
from synapse.api.errors import StoreError
from synapse.storage._base import SQLBaseStore
from synapse.storage.databases.main.roommember import ProfileInfo
from synapse.types import UserID
from synapse.util.caches.descriptors import cached
BATCH_SIZE = 100
class ProfileWorkerStore(SQLBaseStore):
async def get_profileinfo(self, user_localpart: str) -> ProfileInfo:
try:
profile = await self.db_pool.simple_select_one(
table="profiles",
keyvalues={"user_id": user_localpart},
retcols=("displayname", "avatar_url"),
desc="get_profileinfo",
)
except StoreError as e:
if e.code == 404:
# no match
return ProfileInfo(None, None)
else:
raise
return ProfileInfo(
avatar_url=profile["avatar_url"], display_name=profile["displayname"]
)
@cached(max_entries=5000)
async def get_profile_displayname(self, user_localpart: str) -> Optional[str]:
return await self.db_pool.simple_select_one_onecol(
table="profiles",
keyvalues={"user_id": user_localpart},
retcol="displayname",
desc="get_profile_displayname",
)
@cached(max_entries=5000)
async def get_profile_avatar_url(self, user_localpart: str) -> Optional[str]:
return await self.db_pool.simple_select_one_onecol(
table="profiles",
keyvalues={"user_id": user_localpart},
retcol="avatar_url",
desc="get_profile_avatar_url",
)
async def get_latest_profile_replication_batch_number(self):
def f(txn):
txn.execute("SELECT MAX(batch) as maxbatch FROM profiles")
rows = self.db_pool.cursor_to_dict(txn)
return rows[0]["maxbatch"]
return await self.db_pool.runInteraction(
"get_latest_profile_replication_batch_number", f
)
async def get_profile_batch(self, batchnum):
return await self.db_pool.simple_select_list(
table="profiles",
keyvalues={"batch": batchnum},
retcols=("user_id", "displayname", "avatar_url", "active"),
desc="get_profile_batch",
)
async def assign_profile_batch(self):
def f(txn):
sql = (
"UPDATE profiles SET batch = "
"(SELECT COALESCE(MAX(batch), -1) + 1 FROM profiles) "
"WHERE user_id in ("
" SELECT user_id FROM profiles WHERE batch is NULL limit ?"
")"
)
txn.execute(sql, (BATCH_SIZE,))
return txn.rowcount
return await self.db_pool.runInteraction("assign_profile_batch", f)
async def get_replication_hosts(self):
def f(txn):
txn.execute(
"SELECT host, last_synced_batch FROM profile_replication_status"
)
rows = self.db_pool.cursor_to_dict(txn)
return {r["host"]: r["last_synced_batch"] for r in rows}
return await self.db_pool.runInteraction("get_replication_hosts", f)
async def update_replication_batch_for_host(
self, host: str, last_synced_batch: int
):
return await self.db_pool.simple_upsert(
table="profile_replication_status",
keyvalues={"host": host},
values={"last_synced_batch": last_synced_batch},
desc="update_replication_batch_for_host",
)
async def get_from_remote_profile_cache(
self, user_id: str
) -> Optional[Dict[str, Any]]:
return await self.db_pool.simple_select_one(
table="remote_profile_cache",
keyvalues={"user_id": user_id},
retcols=("displayname", "avatar_url"),
allow_none=True,
desc="get_from_remote_profile_cache",
)
async def create_profile(self, user_localpart: str) -> None:
await self.db_pool.simple_insert(
table="profiles", values={"user_id": user_localpart}, desc="create_profile"
)
async def set_profile_displayname(
self, user_localpart: str, new_displayname: Optional[str], batchnum: int
) -> None:
# Invalidate the read cache for this user
self.get_profile_displayname.invalidate((user_localpart,))
await self.db_pool.simple_upsert(
table="profiles",
keyvalues={"user_id": user_localpart},
values={"displayname": new_displayname, "batch": batchnum},
desc="set_profile_displayname",
lock=False, # we can do this because user_id has a unique index
)
async def set_profile_avatar_url(
self, user_localpart: str, new_avatar_url: Optional[str], batchnum: int
) -> None:
# Invalidate the read cache for this user
self.get_profile_avatar_url.invalidate((user_localpart,))
await self.db_pool.simple_upsert(
table="profiles",
keyvalues={"user_id": user_localpart},
values={"avatar_url": new_avatar_url, "batch": batchnum},
desc="set_profile_avatar_url",
lock=False, # we can do this because user_id has a unique index
)
async def set_profiles_active(
self,
users: List[UserID],
active: bool,
hide: bool,
batchnum: int,
) -> None:
"""Given a set of users, set active and hidden flags on them.
Args:
users: A list of UserIDs
active: Whether to set the users to active or inactive
hide: Whether to hide the users (withhold from replication). If
False and active is False, users will have their profiles
erased
batchnum: The batch number, used for profile replication
"""
# Convert list of localparts to list of tuples containing localparts
user_localparts = [(user.localpart,) for user in users]
# Generate list of value tuples for each user
value_names = ("active", "batch")
values = [(int(active), batchnum) for _ in user_localparts] # type: List[Tuple]
if not active and not hide:
# we are deactivating for real (not in hide mode)
# so clear the profile information
value_names += ("avatar_url", "displayname")
values = [v + (None, None) for v in values]
return await self.db_pool.runInteraction(
"set_profiles_active",
self.db_pool.simple_upsert_many_txn,
table="profiles",
key_names=("user_id",),
key_values=user_localparts,
value_names=value_names,
value_values=values,
)
async def add_remote_profile_cache(
self, user_id: str, displayname: str, avatar_url: str
) -> None:
"""Ensure we are caching the remote user's profiles.
This should only be called when `is_subscribed_remote_profile_for_user`
would return true for the user.
"""
await self.db_pool.simple_upsert(
table="remote_profile_cache",
keyvalues={"user_id": user_id},
values={
"displayname": displayname,
"avatar_url": avatar_url,
"last_check": self._clock.time_msec(),
},
desc="add_remote_profile_cache",
)
async def update_remote_profile_cache(
self, user_id: str, displayname: str, avatar_url: str
) -> int:
return await self.db_pool.simple_upsert(
table="remote_profile_cache",
keyvalues={"user_id": user_id},
values={
"displayname": displayname,
"avatar_url": avatar_url,
"last_check": self._clock.time_msec(),
},
desc="update_remote_profile_cache",
)
async def maybe_delete_remote_profile_cache(self, user_id):
"""Check if we still care about the remote user's profile, and if we
don't then remove their profile from the cache
"""
subscribed = await self.is_subscribed_remote_profile_for_user(user_id)
if not subscribed:
await self.db_pool.simple_delete(
table="remote_profile_cache",
keyvalues={"user_id": user_id},
desc="delete_remote_profile_cache",
)
async def is_subscribed_remote_profile_for_user(self, user_id):
"""Check whether we are interested in a remote user's profile."""
res = await self.db_pool.simple_select_one_onecol(
table="group_users",
keyvalues={"user_id": user_id},
retcol="user_id",
allow_none=True,
desc="should_update_remote_profile_cache_for_user",
)
if res:
return True
res = await self.db_pool.simple_select_one_onecol(
table="group_invites",
keyvalues={"user_id": user_id},
retcol="user_id",
allow_none=True,
desc="should_update_remote_profile_cache_for_user",
)
if res:
return True
return False
async def get_remote_profile_cache_entries_that_expire(
self, last_checked: int
) -> List[Dict[str, str]]:
"""Get all users who haven't been checked since `last_checked`"""
def _get_remote_profile_cache_entries_that_expire_txn(txn):
sql = """
SELECT user_id, displayname, avatar_url
FROM remote_profile_cache
WHERE last_check < ?
"""
txn.execute(sql, (last_checked,))
return self.db_pool.cursor_to_dict(txn)
return await self.db_pool.runInteraction(
"get_remote_profile_cache_entries_that_expire",
_get_remote_profile_cache_entries_that_expire_txn,
)
class ProfileStore(ProfileWorkerStore):
def __init__(self, database, db_conn, hs):
super().__init__(database, db_conn, hs)
self.db_pool.updates.register_background_index_update(
"profile_replication_status_host_index",
index_name="profile_replication_status_idx",
table="profile_replication_status",
columns=["host"],
unique=True,
)
async def add_remote_profile_cache(
self, user_id: str, displayname: str, avatar_url: str
) -> None:
"""Ensure we are caching the remote user's profiles.
This should only be called when `is_subscribed_remote_profile_for_user`
would return true for the user.
"""
await self.db_pool.simple_upsert(
table="remote_profile_cache",
keyvalues={"user_id": user_id},
values={
"displayname": displayname,
"avatar_url": avatar_url,
"last_check": self._clock.time_msec(),
},
desc="add_remote_profile_cache",
)
|
py | 1a341290eff04ae574ec824497f381da191f0f57 | import random
def main():
"""This script generates random numbers and asks to the user to guess the
generated number"""
print "Hello there, it's time for you to guess numbers, ready?"
print "Try to guess the number i'm thinking"
while True: # uuhh a forbiden loop
r_input = raw_input("> ")
if r_input == 'exit':
break
else:
number = int(r_input)
random_number = random.randint(1, 9)
if number == random_number:
print "exactly right"
elif number > random_number:
print "too high"
else:
print "too low"
print "Bye bye"
if __name__ == '__main__':
main()
|
py | 1a3412ad92d98c523a7af02a02c5291f76611f7f | from app import app
if __name__=="__main__":
app.run()
|
py | 1a341328f476b84d663a22d062f6e1d1f69dd3fa | """SAC-Agent implementation"""
from typing import Optional, Callable
import jax
import jax.numpy as jnp
import numpy as np
import optax
from jaxdl.rl.networks.actor_nets import create_normal_dist_policy_fn, sample_actions
from jaxdl.rl.networks.critic_nets import create_double_critic_network_fn
from jaxdl.rl.networks.temperature_nets import create_temperature_network_fn
from jaxdl.rl.agents.sac.critic_fns import update_critic, update_target
from jaxdl.rl.agents.sac.actor_fns import update_actor
from jaxdl.rl.agents.sac.temperature_fns import update_temperature
from jaxdl.utils.commons import InfoDict, Module, save_train_state, restore_train_state
from jaxdl.utils.commons import create_train_state
from jaxdl.rl.utils.replay_buffer import Batch
from jaxdl.rl.utils.commons import RLAgent
class SACAgent(RLAgent):
"""An JAX implementation of the Soft-Actor-Critic (SAC)
Original paper: https://arxiv.org/abs/1812.05905
Usage:
agent = SACAgent(0, env.observation_space, env.action_space)
agent.restore('./tmp/')
agent.sample(observation)
agent.save('./tmp/')
"""
def __init__(self,
seed: int,
observations: np.ndarray,
actions: np.ndarray,
critic_net_fn: Callable = create_double_critic_network_fn,
actor_net_fn: Callable = create_normal_dist_policy_fn,
temperature_net_fn: Callable = create_temperature_network_fn,
actor_lr: float = 3e-4,
critic_lr: float = 3e-4,
temperature_lr: float = 3e-4,
discount: float = 0.99,
tau: float = 0.005,
target_update_period: int = 1,
target_entropy: Optional[float] = None):
# split rng and generate keys
rng = jax.random.PRNGKey(seed)
rng, actor_key, critic_key, temperature_key = jax.random.split(rng, 4)
# set target entropy
action_dim = actions.shape[-1]
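# default heuristic when no target entropy is given: -action_dim / 2
# (note: passing 0.0 also falls back to the heuristic because of the `or`)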
self.target_entropy = target_entropy or - action_dim / 2
# actor network
actor_net = create_train_state(
actor_net_fn(action_dim=action_dim), [actor_key, observations],
optax.adam(learning_rate=actor_lr))
# critic networks
critic_net = create_train_state(
critic_net_fn(), [critic_key, observations, actions],
optax.adam(learning_rate=critic_lr))
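# the target critic is created from the same key and network factory, so it starts
# as an exact copy of the online critic and is then soft-updated towards it (see update_target)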
target_critic_net = create_train_state(
critic_net_fn(), [critic_key, observations, actions],
optax.adam(learning_rate=critic_lr))
# temperature network
temperature_net = create_train_state(
temperature_net_fn(), [temperature_key],
tx=optax.adam(learning_rate=temperature_lr))
# networks
self.actor_net = actor_net
self.critic_net = critic_net
self.target_critic_net = target_critic_net
self.temperature_net = temperature_net
# parameters
self.rng = rng
self.step_num = 1
self.target_update_period = target_update_period
self.discount = discount
self.tau = tau
def restore(self, path):
"""Loads the networks of the agents."""
self.actor_net = restore_train_state(self.actor_net, path, prefix="actor")
self.critic_net = restore_train_state(self.critic_net, path, prefix="critic")
self.target_critic_net = restore_train_state(
self.target_critic_net, path, prefix="target_critic")
self.temperature_net = restore_train_state(
self.temperature_net, path, prefix="temperature")
def save(self, path):
"""Saves the networks of the agents."""
save_train_state(self.actor_net, path, prefix="actor")
save_train_state(self.critic_net, path, prefix="critic")
save_train_state(self.target_critic_net, path, prefix="target_critic")
save_train_state(self.temperature_net, path, prefix="temperature")
def sample(self, observations: np.ndarray,
temperature: float = 1.0, evaluate: bool = False) -> np.ndarray:
"""Samples (clipped) actions given an observation"""
self.rng, actions = sample_actions(
self.rng, self.actor_net, observations, temperature)
actions = np.asarray(actions)
# Rescaling of actions is done by gym.RescaleAction
return np.clip(actions, -1, 1)
def update(self, batch: Batch) -> InfoDict:
"""Updates all networks of the SAC-Agent."""
self.step_num += 1
# update critic
self.rng, self.critic_net, critic_info = update_critic(
self.rng, self.actor_net, self.critic_net, self.target_critic_net,
self.temperature_net, batch, self.discount, soft_critic=True)
# update target net
if self.step_num % self.target_update_period == 0:
self.target_critic_net = update_target(
self.critic_net, self.target_critic_net, self.tau)
# update actor
self.rng, self.actor_net, actor_info = update_actor(
self.rng, self.actor_net, self.critic_net, self.temperature_net, batch)
# update temperature
self.temperature_net, alpha_info = update_temperature(
self.temperature_net, actor_info["entropy"], self.target_entropy)
# increase step count
return {**critic_info, **actor_info, **alpha_info} |
py | 1a341353eb2d3ab01cadacafdbbc27a675a12a26 | from django.db import models
from django.utils import timezone
class Post(models.Model):
author = models.ForeignKey(
'auth.User', on_delete=models.CASCADE)
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(
default=timezone.now)
published_date = models.DateTimeField(
blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
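# Example usage (sketch; assumes an existing auth User instance bound to `user`):
#   post = Post.objects.create(author=user, title="First post", text="Hello world")
#   post.publish()  # stamps published_date with the current time and saves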
|
py | 1a341607b9c6a3c7ea51c543bbc6e5c6e4986eb6 | """
dataset_maker.py
================
Holds the DatasetMaker class for building example files.
"""
# Standard library imports
import os, re
import importlib
from itertools import product
import json
import random
import calendar
import logging
# Third-party imports
from netCDF4 import Dataset
import numpy
# Local imports
from time_series_generator import TimeSeriesGenerator
from nc4_maker import *
logging.basicConfig()
log = logging.getLogger(__name__)
RECIPE_DIR = "recipes"
TP_NAME = '__time_period__'
class DatasetMaker(object):
"""
Class to generate example datasets of synthetic data.
"""
def __init__(self, project, dataset_id, constraints=None, base_dir="fakedata"):
"""
:param project: project id [string]
:param dataset_id: dataset id [string]
:param constraints: dictionary of constraints to reduce the amount of data generated.
:param base_dir: base directory for outputs.
"""
self.project = project
self.dataset_id = dataset_id
self.base_dir = base_dir
# Set constraints as empty dictionary at start
self.constraints = {}
# Load main settings from JSON file
self._load_options()
# Update with constraints sent in as argument
self.set_constraints(constraints)
# Set time units
self._set_time_units_from_settings()
def _load_options(self):
"""
Reads the configuration file for a given project/dataset and stores
that information in `self.settings` ready for use.
:return: None
"""
# Check constraints and options
config_file = os.path.join(RECIPE_DIR, self.project, "{}.json".format(self.dataset_id))
if not os.path.isfile(config_file):
raise Exception("[ERROR] file '{}' not found.".format(config_file))
with open(config_file) as reader:
self.settings = json.load(reader)
# Read in any other JSON files from "__include_files__" property
include_files = self.get_setting("__include_files__", default={})
if include_files:
for fpath in include_files:
with open(fpath) as reader:
print "Parsing extra settings from: {}".format(fpath)
_settings = json.load(reader)
for key in _settings.keys():
# Only override if setting does NOT already exist
if key not in self.settings:
self.settings[key] = _settings[key]
# Update settings using "__includes__" in the JSON
self._add_includes_to_settings()
def _add_includes_to_settings(self):
"""
Searches for the "__include__" option in the settings and replaces
with common sections in the "__inclusions__" part of the JSON.
:return:
"""
INCLUSIONS_KEY = "__inclusions__"
INCLUDE_KEY = "__include__"
inclusions = self.settings.get(INCLUSIONS_KEY, {})
def update_dct_from_inclusions(dct):
"""
Updates current dct key if set as an "__include__".
:param dct: a dictionary (part of settings)
:return: None
"""
for key, value in dct.items():
if type(value) is dict:
update_dct_from_inclusions(value)
continue
elif key == INCLUSIONS_KEY or key != INCLUDE_KEY:
continue
# Only main "__include__" will get here, now update it
for dkey, dvalue in inclusions[value].items():
dct[dkey] = dvalue
# And remove the include item to tidy up
del dct[INCLUDE_KEY]
# Start with whole settings and then recursively call the updater function
dct = self.settings
update_dct_from_inclusions(dct)
def _set_time_units_from_settings(self):
"""
Sets the time units based on first date in settings/constraints.
:return: None
"""
# Set the time units for all output files based on the first time step requested
start_time = self.get_setting('time', 'start')
time_units = "days since {:04d}-{:02d}-{:02d} 00:00:00".format(*start_time)
self.settings['time']['attributes']['units'] = time_units
def _load_input_data(self):
"""
Loads input data from data file specified in settings.
:return: None
"""
self.input_data = {}
ds = Dataset(self.get_setting('source', 'source_file'))
self.input_data['ds'] = ds
self.input_data['variables'] = ds.variables
self.input_data['dimensions'] = ds.dimensions
def _setup_facets(self):
"""
Reads settings to generate facet information.
:return: None
"""
# Set up facet order
pattn = re.compile(r'\{(.+?)\}')
file_name_tmpl = self.get_setting('path_template')
self.facet_order = []
for match in pattn.findall(file_name_tmpl):
if match not in self.facet_order:
self.facet_order.append(match)
if TP_NAME in self.facet_order:
self.facet_order.remove(TP_NAME)
# Set up facets super list
self.facet_super_lists = []
for facet_name in self.facet_order:
# Handle dataset_id differently
if facet_name == 'dataset_id':
value = ['__TO_BE_DETERMINED_FROM_TEMPLATE__']
else:
value = self.get_setting('facets', facet_name)
self.facet_super_lists.append(value)
def _get_time_generator(self):
"""
Returns a generator for all date/times required.
:return: TimeSeriesGenerator instance.
"""
time_generator = TimeSeriesGenerator(
self.get_setting('time', 'start'),
self.get_setting('time', 'end'),
self.get_setting('time', 'delta'),
self.get_setting('time', 'attributes', 'calendar'),
format='datetime')
return time_generator
def generate(self, constraints=None, max_num=999999, randomise=False):
"""
Generator to return the next file path based on an optional set of `constraints`.
Specifying `max_num` will return after yielding the number given.
Setting `randomise` to True will return them in a random order.
:param constraints:
:param max_num:
:param randomise:
:return:
"""
if constraints:
log.info("Setting constraints")
self.set_constraints(constraints)
# Load up input data
log.info("Loading input data")
self._load_input_data()
# Set up facets
log.info("Setting up facets")
self._setup_facets()
# Get all permutations of all facets
facet_permutations = [prod for prod in product(*self.facet_super_lists)]
# Randomise order if specified
if randomise:
random.shuffle(facet_permutations)
file_count = 0
stop = False
time_array_len = -1
# Loop through all permutations
for facets in facet_permutations:
if stop: break
# Create instance dictionary to store current options
self.current = {}
self.current['facets'] = dict([(key, facets[i]) for i, key in enumerate(self.facet_order)])
file_name_tmpl = self.get_setting('path_template').replace('{{{}}}'.format(TP_NAME), '__TIME_PERIOD__')
# Set up time generator to step through time values
time_generator = self._get_time_generator()
time_array = []
date_times = []
count_per_file = 0
# Loop through time steps and write a new file whenever the number of times
# matches the number allowed per file
time_items = [_tm for _tm in time_generator]
for value, dt in time_items:
count_per_file += 1
time_array.append(value)
date_times.append(dt)
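# flush a file once 'per_file' time steps have accumulated, or at the final time step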
if count_per_file == self.get_setting('time', 'per_file') or \
(value, dt) == time_items[-1]:
self.current['date_times'] = date_times
# Get output path and write output file
output_path = self._get_output_path(time_array, date_times, file_name_tmpl)
self._write_output_file(output_path, time_array, date_times)
file_count += 1
time_array_len = len(time_array) # for reporting
# Reset some settings ready for next file to be populated
count_per_file = 0
date_times = []
time_array = []
if file_count >= max_num:
stop = True
break
print "Ran {} files; for {} time steps per file".format(file_count, time_array_len)
def _get_output_path(self, time_array, date_times, file_name_tmpl):
"""
Work out output file path and return full path.
:param time_array:
:param date_times:
:param file_name_tmpl:
:return: path for output file.
"""
# Define output file path
time_format = self.get_setting('time', 'format')
start = date_times[0]
end = date_times[-1]
# Check if we should set dates in file name to day 1 of month at start
# and last day of final month (rather than day used in file).
span_month_days = self.get_setting('time', 'span_month_days', default=False)
_calendar = self.get_setting('time', 'attributes', 'calendar')
if span_month_days:
_start_time_format = time_format.replace('%d', '01')
if _calendar == "360_day":
_end_time_format = time_format.replace('%d', '30')
else:
days_in_end_month = calendar.monthrange(end.year, end.month)[1]  # monthrange returns (weekday, num_days)
_end_time_format = time_format.replace('%d', '{}'.format(days_in_end_month))
start = start.strftime(_start_time_format)
end = end.strftime(_end_time_format)
else:
start, end = [_dt.strftime(time_format) for _dt in (start, end)]
fname_time_comp = "{}-{}".format(start, end)
# Add in the current time range to the file name template
file_name_tmpl = file_name_tmpl.replace('__TIME_PERIOD__', fname_time_comp)
# Generate the 'dataset_id' value from the 'dataset_id_template' facet
self.current['facets']['dataset_id'] = \
self.get_setting('dataset_id_template').format(**self.current['facets'])
# Work out file path
fpath = os.path.join(self.base_dir, file_name_tmpl.format(**self.current['facets']))
return fpath
def _get_coord_var_id_from_dim_id(self, dim_id):
"""
Returns coordinate variable ID from facet lookup of dimension ID.
:param dim_id: dimension ID
:return: coordinate variable ID
"""
facet_id = dim_id.split(":")[-1]
coord_var_id = self.current['facets'][facet_id]
return coord_var_id
def _load_extra_coord_vars(self):
"""
Call out to external code to get extra coordinate variables required for this
variable.
:return: a dictionary of coordinate variables.
"""
var_id = self.current['facets']['var_id']
required_dims = self.get_setting('variables', var_id, 'dimensions')
coord_vars = {}
for dim in required_dims:
if dim.find("facet:") == 0:
coord_var_id = self._get_coord_var_id_from_dim_id(dim)
# Import modifier module then call the loader function
lookup = self.get_setting('variables', var_id, 'coord_var_loaders', coord_var_id)
coord_var = self._evaluate_lookup(lookup)
coord_vars[coord_var_id] = coord_var
return coord_vars
def _get_modified_variable(self, variable):
"""
Call out to external code to modify the array if specified in settings.
Returns a tuple of: (new_array, dimensions_list).
:param variable: netCDF4 Variable (from input data).
:return: Tuple of: (new_array, dimensions_list).
"""
var_info = self.get_setting('variables', self.current['facets']['var_id'])
modifier = var_info.get('array_modifier', None)
conversion_factor = var_info.get('conversion_factor', None)
# Call modifier code if set
if modifier != None:
new_array, dims_list = self._evaluate_lookup(modifier, *[variable, self.current["date_times"]],
**self.current["facets"])
# Apply conversion factor if set
elif conversion_factor != None:
new_array = variable[:] * conversion_factor
dims_list = variable.dimensions
else:
return variable[:], variable.dimensions
return new_array, dims_list
def _resolve_variable_arrays_by_facet(self, var_id):
"""
:param var_id:
:return: array
"""
lookup = self.get_setting('variables_by_facet', var_id)
# Import modifier module then send the variable to the modifier function
array = self._evaluate_lookup(lookup, **self.current["facets"])
return array
def _evaluate_lookup(self, lookup, *args, **kwargs):
"""
Resolves a lookup and imports and evaluates a call, returning the response.
:param lookup: look-up string (module import then "#" then function.
:param args: list of arguments
:param kwargs: dictionary of keyword arguments
:return: return call to the relevant function with arguments.
"""
path, func = lookup.split('#')
# Import module then send the args and kwargs to the function
module = importlib.import_module(path)
response = getattr(module, func)(*args, **kwargs)
return response
def get_global_attributes(self):
"""
Looks up and generates a dictionary of global attributes for the NC file.
return: dictionary of global attributes to write.
"""
global_attrs = self.get_setting("global_attributes").copy()
facets = self.current['facets']
# Update global attrs if any values are calculated dynamically
CALC_FROM = 'calculate_from:'
DO_NOT_SET = '__DO_NOT_SET__'
for key in global_attrs.keys():
if key == DO_NOT_SET: continue
value = global_attrs[key]
if value.startswith(CALC_FROM):
lookup = value.replace(CALC_FROM, "")
value = self._evaluate_lookup(lookup, **facets)
global_attrs[key] = value
elif key in facets:
global_attrs[key] = facets[key]
# Now add facets but ignore omissions
not_to_set = global_attrs.get(DO_NOT_SET, [])
for key, value in facets.items():
if key in not_to_set: continue
global_attrs[key] = value
# Remove DO NOT SET value if there
if DO_NOT_SET in global_attrs: del global_attrs[DO_NOT_SET]
return global_attrs
def _write_output_file(self, fpath, time_array, date_times):
"""
Writes the output file to: `fpath`.
Uses information saved in the settings and input data
and associates them with the times in the `time_array`.
:param fpath: path of the output file to write
:param time_array: list of time values (as numbers)
:param date_times: list of datetimes
:return: None
"""
print "Starting to write to: {}".format(fpath)
# Create output file and write contents to it
output = NetCDF4Maker(fpath, verbose=False)
# Get the dimensions from the input file
dim_args = []
for key, value in self.input_data['dimensions'].items():
if value.isunlimited():
length = None
else:
length = len(value)
# Override length of time which is dynamic - set to "unlimited"
if key == "time":
length = None
# length = len(time_array)
dim_args.append((key, length))
# Load up any extra coordinate variables also required by this dataset
extra_coord_vars = self._load_extra_coord_vars()
# Add extra coord vars to dimensions list
for key, value in extra_coord_vars.items():
dim_args.append((key, len(value)))
# Write dimensions
output.create_dimensions(*dim_args)
# Loop through the files in the input data and modify them before writing
# as specified in the settings
# Also loop through the extra_coord_vars
all_vars = {}
for dct in (self.input_data['variables'], extra_coord_vars):
for key, value in dct.items():
all_vars[key] = value
# Now loop through and create all variables
for var_id, variable in all_vars.items():
if var_id == "climatology_bounds":
print "IGNORING WRITING: climatology_bounds - for now!"
continue
if var_id == "season_year":
# Assumes Met Office-style DJF, MAM, JJA, SON seasons
new_var_id = var_id
dtype = numpy.int32
var_attrs = {'long_name': 'season_year', 'units': '1'}
# Extract
years = []
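# December is assigned to the following season_year (the DJF season spans the year boundary)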
for _dt in date_times:
_year = _dt.year
if _dt.month == 12:
_year += 1
years.append(_year)
data = numpy.array(years, 'int32')
dims_list = ['time']
elif var_id == self.get_setting('source', 'source_var'):
new_var_id = self.current['facets']['var_id']
var_info = self.get_setting('variables', new_var_id)
var_attrs = var_info['attributes']
dtype = getattr(numpy, var_info['dtype'])
# Modify array if necessary
data, dims_list = self._get_modified_variable(variable)
elif var_id == self.get_setting('source', 'source_time_var'):
new_var_id = 'time'
var_attrs = self.get_setting('time', 'attributes')
data = numpy.array(time_array, 'f')
dims_list = variable.dimensions
dtype = numpy.float32
# Add time bounds
self._add_time_bounds(output, data, var_attrs)
else:
new_var_id = var_id
data = variable[:]
dtype = variable.dtype
# Resolve the variable array if required
if self.get_setting('variables_by_facet', new_var_id, default=[]):
data = self._resolve_variable_arrays_by_facet(new_var_id)
if hasattr(variable, "dimensions"):
dims_list = variable.dimensions
# Assume that it is a coordinate variable that will have its own dimension
else:
dims_list = [new_var_id]
if hasattr(variable, "ncattrs"):
var_attrs = dict([(key, getattr(variable, key)) for key in variable.ncattrs() if key
not in ('_FillValue',)])
else:
var_attrs = {'long_name': new_var_id}
print "Now writing variable: {}".format(new_var_id)
fill_value = getattr(variable, "_FillValue", None)
output.create_variable(new_var_id, data, dtype, dims_list,
fill_value=fill_value, attributes=var_attrs)
global_attrs = self.get_global_attributes()
output.create_global_attrs(**global_attrs)
output.close()
print "Wrote: {}".format(fpath)
def _add_time_bounds(self, output, time_data, time_var_attrs):
"""
Write the `time_bounds` variable to the output file.
Also modify the attributes dictionary: time_var_attrs
:param output: output writer object
:param time_data: time array
:param time_var_attrs: time attributes
:return: None
"""
var_id = "time_bounds"
time_var_attrs["bounds"] = var_id
interval = (time_data[1] - time_data[0]) / 2.
values = [[value - interval, value + interval] for value in time_data[:]]
array = numpy.array(values)
output.create_variable(var_id, array, "float64", ["time", "bnds"])
def set_constraints(self, constraints=None):
"""
Sets constraints that override the settings to reduce the number of output files.
Takes a dictionary that can include the following keys:
['time']['start'|'end'] - can only override start/end of time
['variables']['*'] - can override any part of variables settings
['facets']['*'] - can override any part of facets settings
:param constraints: dictionary of dictionaries specifying data files to be produced.
:return: None
"""
if not constraints:
return
if type(constraints) != dict:
raise Exception("Constraints must be provided as a dictionary.")
allowed_constraints = ("time", "variables", "facets")
for key, value in constraints.items():
if key not in allowed_constraints:
raise Exception("Constraints on '{}' are not permitted.".format(key))
self.constraints[key] = constraints[key]
def _resolve_nested_lookup(self, dct, keys, default=None):
"""
Resolves and returns item held in nested dictionary `dct` based on a tuple of
`keys` as the lookup.
Returns `default` if not found.
:param dct: nested dictionary.
:param keys: tuple of keys.
:return: value or default.
"""
value = dct
for key in keys:
try:
value = value[key]
except:
return default
return value
def get_setting(self, *options, **kwargs):
"""
Looks up a setting in `self.constraints`. If not held there it looks it up in
`self.settings`. The `options` are defined using a tuple of keys, such as:
("variable", "precip", long_name").
If the setting cannot be found then an exception is raised.
:param options: setting specifier [tuple].
:param kwargs: keyword arguments - to provide default.
:return: The value of the setting.
"""
default = kwargs.get('default', None)
value = self._resolve_nested_lookup(self.constraints, options, default=default)
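# NOTE: a falsy constraint value (0, '', False) falls through to the main settings lookup below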
if value:
return value
value = self._resolve_nested_lookup(self.settings, options, default=default)
if value == None:
raise Exception("Could not find value in constraints or settings for: '{}'.".format(options))
return value
def __iter__(self):
"""
:return:
"""
return self
def __next__(self):
"""
Returns next file path.
:return:
"""
# Returns next path
# use itertools.product here
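# NOTE: left unimplemented here; file generation is driven by generate() above rather than iteration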
def next(self):
"""
:return:
"""
return self.__next__()
|
py | 1a34164d4d0514e66931cb42e2e1b84ca13725ce | # Copyright (C) 2015, Dennis Forster <[email protected]>
#
# LICENSE: THE SOFTWARE IS PROVIDED "AS IS" UNDER THE
# ACADEMIC FREE LICENSE (AFL) v3.0.
#
import os
from mpi4py import MPI
import numpy as np
import math
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
def visualize_inputs(output, model, nmultilayer, config, first=0, last=100, figure=1, show=False, save=True, ion=True):
if ( output._PICTURE_OUTPUT and (MPI.COMM_WORLD.Get_rank() == 0) ):
Layer = model.MultiLayer[nmultilayer].Layer[0]
# create figure if not given
if (issubclass(type(figure), matplotlib.figure.Figure)):
pass
elif (issubclass(type(figure), int)):
figure = plt.figure(figure)
else:
figure = plt.figure()
figure.clf()
if ( (last is None) or (last > Layer.get_input_data().shape[0]) ):
last = Layer.get_input_data().shape[0]
# plot all given images on sub-plots
cols = int(np.ceil(math.sqrt(last-first)))
rows = int(np.ceil((last-first)/float(cols)))
img_2D = Layer.get_input_data()[first:last,:]
pixel_width = config.get()['dataset']['PIXEL_WIDTH']
pixel_height = config.get()['dataset']['PIXEL_HEIGHT']
D = Layer.get_input_data().shape[1]
try: #Grayscale Image
img_2D = np.append(img_2D,np.zeros(shape=(last-first, pixel_width*pixel_height-D)),axis=1)
img_2D = np.reshape(img_2D,(last-first,pixel_height,pixel_width),order='C')
grayscale = True
except: #RGB Image
img_2D = np.reshape(img_2D,(last-first,pixel_height,pixel_width,Layer.get_input_data().shape[2]),order='C')
grayscale = False
scale = 4 #adjust for higher resolution
#scale = int(800/(np.ceil(math.sqrt(D))*cols))
gs_pixel_width = float(scale*pixel_width*cols + (cols+1))
gs_pixel_height = float(scale*pixel_height*rows + (rows+1))
gs = gridspec.GridSpec(rows, cols)
# unfortunately, the spacing seems to not be exact, but to introduce arbitrary deviations, which have to be compensated
# gs.update(left=1./gs_pixel_width, right=1.-1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.*1./gs_pixel_height, wspace = 1.*(cols+1)/float(scale*pixel_width*cols), hspace = 2.14*(rows+1)/float(scale*pixel_height*rows))
gs.update(left=1./gs_pixel_width, right=1.-1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.*1./gs_pixel_height, wspace = 2.14*(cols+1)/float(scale*pixel_width*cols), hspace = 2.14*(rows+1)/float(scale*pixel_height*rows))
# for squares data set:
#gs.update(left=1./gs_pixel_width, right=1.-2.*1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.25/gs_pixel_height, wspace = (cols+1)/float(scale*np.ceil(math.sqrt(D))*cols), hspace = 1.65*(rows+1)/float(scale*np.ceil(math.sqrt(D))*rows))
figure.set_figwidth(gs_pixel_width/100)
figure.set_figheight(gs_pixel_height/100)
figure.set_facecolor('black')
for h in xrange(last-first):
figure.add_subplot(plt.subplot(gs[h]))
plt.axis('off')
if grayscale:
plt.imshow(img_2D[h], cmap="Greys", interpolation="nearest", aspect='auto')
#plt.imshow(img_2D[h], cmap="jet", interpolation="nearest", aspect='auto')
else:
plt.imshow(img_2D[h], interpolation="nearest", aspect='auto')
plt.ioff()
if save:
if not os.path.exists('./output/%s/pictures/' % output._txtfoldername):
os.makedirs('./output/%s/pictures/' % output._txtfoldername)
filename = './output/%s/pictures/%s - Input %d-%d.png' %(output._txtfoldername, output._txtfilename, first+1, last)
plt.savefig(filename,facecolor=figure.get_facecolor())
if show:
if ion:
plt.ion()
plt.draw()
plt.show()
else:
plt.close(figure)
def visualize_weights(output, model, nmultilayer, nlayer, config, first=0, last=100, figure=1, show=False, save=True, ion=True):
# TODO: check for same memory leak as was in VisualizeAllWeights!
if ( output._PICTURE_OUTPUT
and (MPI.COMM_WORLD.Get_rank() == 0)
and ( (model.MultiLayer[nmultilayer].get_iteration() % output._PICTURE_EVERY_N_ITERATIONS == 0)
or (model.MultiLayer[nmultilayer].get_iteration() == model.MultiLayer[nmultilayer].get_iterations()) ) ):
Layer = model.MultiLayer[nmultilayer].Layer[nlayer]
# create figure if not given
if (issubclass(type(figure), matplotlib.figure.Figure)):
pass
elif (issubclass(type(figure), int)):
figure = plt.figure(figure)
else:
figure = plt.figure()
figure.clf()
if ( (last is None) or (last > Layer.GetNumberOfNeurons()) ):
last = Layer.GetNumberOfNeurons()
D = Layer.get_input_data().shape[1]
if nlayer == 1:
pixel_width = config.get()['dataset']['PIXEL_WIDTH']
pixel_height = config.get()['dataset']['PIXEL_HEIGHT']
else:
pixel_width = np.ceil(math.sqrt(Layer.D[0]))
pixel_height = np.ceil(math.sqrt(Layer.D[0]))
# plot all given images on sub-plots
cols = np.ceil(math.sqrt(last-first))
rows = np.ceil((last-first)/cols)
try:
#Grayscale Image
img_2D = np.append(Layer.get_weights()[first:last,:],np.zeros(shape=(last-first,pixel_width*pixel_height-Layer.D[0])),axis=1)
img_2D = np.reshape(img_2D,(last-first,pixel_width,pixel_height),order='C')
except:
#RGB Image
img_2D = np.append(Layer.get_weights()[first:last,:],np.zeros(shape=(last-first,pixel_width*pixel_height-Layer.D[0],Layer.get_weights().shape[2])),axis=1)
img_2D = np.reshape(img_2D,(last-first,pixel_width,pixel_height,Layer.get_weights().shape[2]),order='C')
#gs_pixel_width = 5.*np.ceil(math.sqrt(Layer.D[0]))*cols + (cols-1.)
#gs_pixel_height = 5.*np.ceil(math.sqrt(Layer.D[0]))*rows + (rows-1.)
scale = int(800/(np.ceil(math.sqrt(Layer.D[0]))*cols))
gs_pixel_width = scale*pixel_width*cols + (cols-1.)
gs_pixel_height = scale*pixel_height*rows + (rows-1.)
figure.set_figwidth(gs_pixel_width/100)
figure.set_figheight(gs_pixel_height/100)
figure.set_facecolor('black')
for h in xrange(last-first):
figure.add_subplot(rows, cols, h+1)
plt.axis('off')
plt.subplots_adjust(left = 0., right = 1., bottom = 0., top = 1., wspace = 2.*(cols-1.)/gs_pixel_width, hspace = 3.*(rows-1.)/gs_pixel_height)
plt.imshow(img_2D[h], cmap="Greys", interpolation="nearest", aspect='auto')
plt.ioff()
if (save == True):
if not os.path.exists('./output/%s/pictures/' % output._txtfoldername):
os.makedirs('./output/%s/pictures/' % output._txtfoldername)
filename = './output/%s/pictures/%s - Run%d - M%dL%d - %d.png' %(output._txtfoldername, output._txtfilename, model.MultiLayer[nmultilayer].run(), nmultilayer+1, nlayer, model.MultiLayer[nmultilayer].get_iteration())
plt.savefig(filename,facecolor=figure.get_facecolor())
if (show == True):
if (ion == True):
plt.ion()
plt.draw()
plt.show()
else:
plt.close(figure)
def visualize_all_weights(output, model, nmultilayer, config, first=0, last=100, figure=1, show=False, save=True, ion=True):
# Optimized for two processing layers
if ( output._PICTURE_OUTPUT
and (MPI.COMM_WORLD.Get_rank() == 0)
and ( (model.MultiLayer[nmultilayer].get_iteration() % output._PICTURE_EVERY_N_ITERATIONS == 0)
or (model.MultiLayer[nmultilayer].get_iteration() == model.MultiLayer[nmultilayer].get_iterations()) ) ):
Layer = model.MultiLayer[nmultilayer].Layer
# create figure if not given
if (issubclass(type(figure), matplotlib.figure.Figure)):
pass
elif (issubclass(type(figure), int)):
figure = plt.figure(figure)
else:
figure = plt.figure()
figure.clf()
if ( (last is None) or (last > Layer[1].C) ):
last = Layer[1].C
# plot all given images on sub-plots
cols = int(np.ceil(math.sqrt(last-first)))
rows = int(np.ceil((last-first)/float(cols)))
#for squares data set:
#cols = 1
#rows = last-first
NLAYERS = model.MultiLayer[nmultilayer].number_of_layers()
width_ratios = []
# 1: 1/N1 : 1/N2 = N1N2 : N2 : N1
ratio = 1
for nlayer in xrange(2,NLAYERS):
ratio *= Layer[nlayer].get_weights().shape[0]
for _ in xrange(cols):
for nlayer in xrange(1,NLAYERS):
if (nlayer == 1):
width_ratios.append(ratio)
else:
width_ratios.append(ratio/Layer[nlayer].get_weights().shape[0])
pixel_width = []
pixel_height = []
for nlayer in xrange(1,NLAYERS):
if nlayer == 1 and nmultilayer == 0:
pixel_width.append(config.get()['dataset']['PIXEL_WIDTH'])
pixel_height.append(config.get()['dataset']['PIXEL_HEIGHT'])
elif nlayer == NLAYERS-1 and nmultilayer == 0:
pixel_width.append(1)
pixel_height.append(Layer[nlayer].C)
else:
pixel_width.append(np.ceil(math.sqrt(Layer[nlayer].D[0])))
pixel_height.append(np.ceil(math.sqrt(Layer[nlayer].D[0])))
npixels_width = pixel_width[0]
for nlayer in xrange(2,NLAYERS):
npixels_width += pixel_width[0]/Layer[nlayer].get_weights().shape[0]
npixels_width *= cols
npixels_height = pixel_height[0]*rows
scale = max(4, np.ceil(np.max(pixel_height)/float(pixel_height[0]))) #adjust for higher resolution
gs_pixel_width = scale*npixels_width + (NLAYERS-1)*cols+1
gs_pixel_height = scale*npixels_height + (rows+1)
gs = gridspec.GridSpec(rows, (NLAYERS-1)*cols, width_ratios=width_ratios)
# the spacing has some problems which require the arbitrary factors 2. and 2.14 in 'right', 'top' and 'wspace', 'hspace'
#gs.update(left=1./gs_pixel_width, right=1.-2.*1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.*1./gs_pixel_height, wspace = 2.14*((NLAYERS-1)*cols+1)/(scale*npixels_width), hspace = 2.14*(rows+1)/float((scale*npixels_height)))
# gs.update(left=1./gs_pixel_width, right=1.-1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.*1./gs_pixel_height, wspace = 1.*((NLAYERS-1)*cols+1)/(scale*npixels_width), hspace = 2.14*(rows+1)/float((scale*npixels_height)))
gs.update(left=1./gs_pixel_width, right=1.-1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.*1./gs_pixel_height, wspace = 2.14*((NLAYERS-1)*cols+1)/(scale*npixels_width), hspace = 2.14*(rows+1)/float((scale*npixels_height)))
# for C10:
#gs.update(left=1./gs_pixel_width, right=1.-2.*1./gs_pixel_width, bottom = 1./gs_pixel_height, top = 1.-2.*1./gs_pixel_height, wspace = 1.*((NLAYERS-1)*cols+1)/(scale*npixels_width), hspace = 1.*(rows+1)/float((scale*npixels_height)))
# for squares data set:
#gs.update(left=1./gs_pixel_width, right=1.-1./float(gs_pixel_width), bottom = 1./float(gs_pixel_height), top = 1.-2./float(gs_pixel_height), wspace = 1.*((NLAYERS-1)*cols+1)/(scale*npixels_width), hspace = 1.65*(rows+1)/float((scale*npixels_height)))
figure.set_figwidth(gs_pixel_width/100)
figure.set_figheight(gs_pixel_height/100)
figure.set_facecolor('black')
all_img_2D = [(Layer[nlayer].get_weights()) for nlayer in xrange(1,NLAYERS)]
# # Limits for colormap. If these are not given the colormap of each
# # subplot is scaled independently
# vmin = []
# vmax = []
# for nlayer in xrange(1,NLAYERS):
# vmin.append(np.min(all_img_2D[nlayer-1]))
# vmax.append(np.max(all_img_2D[nlayer-1]))
for nimage in xrange(first,last):
for nlayer in xrange(1,NLAYERS):
if (nlayer == 1):
# for some reason this produces a memory leak in combination with imshow:
#img_2D = Layer[nlayer].get_weights()[nimage,:]
img_2D = all_img_2D[nlayer-1][nimage,:]
# try:
#Grayscale Image
img_2D = np.append(img_2D,np.zeros(shape=(pixel_height[nlayer-1]*pixel_width[nlayer-1]-img_2D.shape[0])),axis=0)
img_2D = np.reshape(img_2D,(pixel_height[nlayer-1],pixel_width[nlayer-1]),order='C')
# except:
# try:
# #RGB Image
# #-- TODO: implement np.append for RGB image
# img_2D = np.reshape(img_2D,(pixel_width[nlayer-1],pixel_height[nlayer-1],Layer.get_weights().shape[2]),order='C')
# except:
# pass
else:
img_2D = Layer[nlayer].get_weights()[:,nimage]
img_2D = np.reshape(img_2D,(img_2D.shape[0],1),order='C')
#figure.add_subplot(plt.subplot(gs[nimage*(NLAYERS-1)-first+nlayer-1]))
plt.subplot(gs[nimage*(NLAYERS-1)-first+nlayer-1])
plt.axis('off')
plt.imshow(img_2D, cmap="Greys", interpolation="nearest", aspect='auto')
# plt.imshow(img_2D, cmap="jet", interpolation="nearest", aspect='auto')
# if nlayer == 1:
# plt.imshow(img_2D, cmap="Greys", interpolation="nearest", aspect='auto')
# else:
# # If vmin and vmax is not given, the colormap of each
# # subplot is saled independently. This can be helpful
# # to better see the class belonging of each patch when
# # only very few labels are given.
# # plt.imshow(img_2D, cmap="Greys", interpolation="nearest",
# # aspect='auto', vmin=0, vmax=vmax[nlayer-1])
# plt.imshow(img_2D, cmap="jet", interpolation="nearest",
# aspect='auto', vmin=0, vmax=1)
plt.ioff()
if (save == True):
if not os.path.exists('./output/%s/pictures/' % output._txtfoldername):
os.makedirs('./output/%s/pictures/' % output._txtfoldername)
filename = './output/%s/pictures/%s - Run%d - M%d - %d.png' %(output._txtfoldername, output._txtfilename, model.MultiLayer[nmultilayer].run(), nmultilayer+1, model.MultiLayer[nmultilayer].get_iteration())
plt.savefig(filename,facecolor=figure.get_facecolor())
if (show == True):
if (ion == True):
plt.ion()
plt.draw()
plt.show()
else:
figure.clf()
plt.clf()
plt.close()
|
py | 1a341736cfc00fd5d838a946e621d781951b0a32 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/ipv6-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ("_path_helper", "_extmethods", "__config", "__state")
_yang_name = "prefix-limit"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"ipv6-unicast",
"prefix-limit",
]
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([("config", config), ("state", state)])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/ipv6-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ("_path_helper", "_extmethods", "__config", "__state")
_yang_name = "prefix-limit"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"ipv6-unicast",
"prefix-limit",
]
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([("config", config), ("state", state)])
|
py | 1a34178be17b85357216c5ca133aa6e256c1a7fa | #!/usr/bin/env python3.9
# -*- coding: utf-8 -*-
# if you're interested in development, my test server is usually
# up at https://c.cmyui.xyz. just use the same `-devserver cmyui.xyz`
# connection method you would with any other modern server and you
# should have no problems connecting. registration is done in-game
# with osu!'s built-in registration (if you're worried about not being
# properly connected while registering, the server should send back
# https://i.cmyui.xyz/8-Vzy9NllPBp5K7L.png if you use a random login).
# you can also test gulag's rest api using my test server,
# e.g https://osu.cmyui.xyz/api/get_player_scores?id=3&scope=best
import asyncio
import io
import os
import sys
from datetime import datetime
from pathlib import Path
import aiohttp
import aiomysql
import cmyui
import datadog
import orjson
import geoip2.database
import subprocess
from cmyui.logging import Ansi
from cmyui.logging import log
import bg_loops
import utils.misc
from constants.privileges import Privileges
from objects.achievement import Achievement
from objects.collections import Players
from objects.collections import Matches
from objects.collections import Channels
from objects.collections import Clans
from objects.collections import MapPools
from objects.player import Player
from utils.updater import Updater
__all__ = ()
# we print utf-8 content quite often
if isinstance(sys.stdout, io.TextIOWrapper):
sys.stdout.reconfigure(encoding='utf-8')
# set cwd to /gulag
os.chdir(os.path.dirname(os.path.realpath(__file__)))
try:
from objects import glob
except ModuleNotFoundError as exc:
if exc.name == 'config':
# config file doesn't exist; create it from the default.
import shutil
shutil.copy('ext/config.sample.py', 'config.py')
log('A config file has been generated, '
'please configure it to your needs.', Ansi.LRED)
raise SystemExit(1)
else:
raise
utils.misc.install_excepthook()
# current version of gulag
# NOTE: this is used internally for the updater, it may be
# worth reading through its code before playing with it.
glob.version = cmyui.Version(3, 5, 4)
OPPAI_PATH = Path.cwd() / 'oppai-ng'
GEOLOC_DB_FILE = Path.cwd() / 'ext/GeoLite2-City.mmdb'
DEBUG_HOOKS_PATH = Path.cwd() / '_testing/runtime.py'
DATA_PATH = Path.cwd() / '.data'
ACHIEVEMENTS_ASSETS_PATH = DATA_PATH / 'assets/medals/client'
async def setup_collections(db_cursor: aiomysql.DictCursor) -> None:
"""Setup & cache many global collections."""
# dynamic (active) sets, only in ram
glob.players = Players()
glob.matches = Matches()
# static (inactive) sets, in ram & sql
glob.channels = await Channels.prepare(db_cursor)
glob.clans = await Clans.prepare(db_cursor)
glob.pools = await MapPools.prepare(db_cursor)
# create bot & add it to online players
glob.bot = Player(
id=1,
name=await utils.misc.fetch_bot_name(db_cursor),
login_time=float(0x7fffffff), # (never auto-dc)
priv=Privileges.Normal,
bot_client=True
)
glob.players.append(glob.bot)
# global achievements (sorted by vn gamemodes)
glob.achievements = []
await db_cursor.execute('SELECT * FROM achievements')
async for row in db_cursor:
# NOTE: achievement conditions are stored as stringified python
# expressions in the database to allow for extensive customizability.
condition = eval(f'lambda score, mode_vn: {row.pop("cond")}')
achievement = Achievement(**row, cond=condition)
glob.achievements.append(achievement)
# static api keys
await db_cursor.execute(
'SELECT id, api_key FROM users '
'WHERE api_key IS NOT NULL'
)
glob.api_keys = {
row['api_key']: row['id']
async for row in db_cursor
}
async def before_serving() -> None:
"""Called before the server begins serving connections."""
glob.loop = asyncio.get_running_loop()
if glob.has_internet:
# retrieve a client session to use for http connections.
glob.http = aiohttp.ClientSession(json_serialize=orjson.dumps) # type: ignore
else:
glob.http = None
# retrieve a pool of connections to use for mysql interaction.
glob.db = cmyui.AsyncSQLPool()
await glob.db.connect(glob.config.mysql)
# run the sql & submodule updater (uses http & db).
    # TODO: updating cmyui_pkg should run before its import
updater = Updater(glob.version)
await updater.run()
await updater.log_startup()
# open a connection to our local geoloc database,
# if the database file is present.
if GEOLOC_DB_FILE.exists():
glob.geoloc_db = geoip2.database.Reader(GEOLOC_DB_FILE)
else:
glob.geoloc_db = None
# support for https://datadoghq.com
if all(glob.config.datadog.values()):
datadog.initialize(**glob.config.datadog)
glob.datadog = datadog.ThreadStats()
glob.datadog.start(flush_in_thread=True,
flush_interval=15)
# wipe any previous stats from the page.
glob.datadog.gauge('gulag.online_players', 0)
else:
glob.datadog = None
new_coros = []
# cache many global collections/objects from sql,
# such as channels, mappools, clans, bot, etc.
async with glob.db.pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as db_cursor:
await setup_collections(db_cursor)
# create a task for each donor expiring in 30d.
new_coros.extend(await bg_loops.donor_expiry(db_cursor))
# setup a loop to kick inactive ghosted players.
new_coros.append(bg_loops.disconnect_ghosts())
'''
# if the surveillance webhook has a value, run
# automatic (still very primitive) detections on
# replays deemed by the server's configurable values.
if glob.config.webhooks['surveillance']:
new_coros.append(bg_loops.replay_detections())
'''
# reroll the bot's random status every `interval` sec.
new_coros.append(bg_loops.reroll_bot_status(interval=300))
for coro in new_coros:
glob.app.add_pending_task(coro)
async def after_serving() -> None:
"""Called after the server stops serving connections."""
if hasattr(glob, 'http') and glob.http is not None:
await glob.http.close()
if hasattr(glob, 'db') and glob.db.pool is not None:
await glob.db.close()
if hasattr(glob, 'geoloc_db') and glob.geoloc_db is not None:
glob.geoloc_db.close()
if hasattr(glob, 'datadog') and glob.datadog is not None:
glob.datadog.stop()
glob.datadog.flush()
def ensure_supported_platform() -> int:
"""Ensure we're running on an appropriate platform for gulag."""
if sys.platform != 'linux':
log('gulag currently only supports linux', Ansi.LRED)
if sys.platform == 'win32':
log("you could also try wsl(2), i'd recommend ubuntu 18.04 "
"(i use it to test gulag)", Ansi.LBLUE)
return 1
if sys.version_info < (3, 9):
log('gulag uses many modern python features, '
'and the minimum python version is 3.9.', Ansi.LRED)
return 1
return 0
def ensure_local_services_are_running() -> int:
"""Ensure all required services (mysql) are running."""
# NOTE: if you have any problems with this, please contact me
# @cmyui#0425/[email protected]. i'm interested in knowing
# how people are using the software so that i can keep it
# in mind while developing new features & refactoring.
if glob.config.mysql['host'] in ('localhost', '127.0.0.1', None):
# sql server running locally, make sure it's running
for service in ('mysqld', 'mariadb'):
if os.path.exists(f'/var/run/{service}/{service}.pid'):
break
else:
# not found, try pgrep
pgrep_exit_code = os.system('pgrep mysqld')
if pgrep_exit_code != 0:
log('Please start your mysqld server.', Ansi.LRED)
return 1
return 0
def ensure_directory_structure() -> int:
"""Ensure the .data directory and git submodules are ready."""
# create /.data and its subdirectories.
DATA_PATH.mkdir(exist_ok=True)
for sub_dir in ('avatars', 'logs', 'osu', 'osr', 'ss'):
subdir = DATA_PATH / sub_dir
subdir.mkdir(exist_ok=True)
if not ACHIEVEMENTS_ASSETS_PATH.exists():
if not glob.has_internet:
# TODO: make it safe to run without achievements
return 1
ACHIEVEMENTS_ASSETS_PATH.mkdir(parents=True)
utils.misc.download_achievement_images(ACHIEVEMENTS_ASSETS_PATH)
return 0
def ensure_dependencies_and_requirements() -> int:
"""Make sure all of gulag's dependencies are ready."""
if not OPPAI_PATH.exists():
log('No oppai-ng submodule found, attempting to clone.', Ansi.LMAGENTA)
p = subprocess.Popen(args=['git', 'submodule', 'init'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
if exit_code := p.wait():
log('Failed to initialize git submodules.', Ansi.LRED)
return exit_code
p = subprocess.Popen(args=['git', 'submodule', 'update'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
if exit_code := p.wait():
log('Failed to update git submodules.', Ansi.LRED)
return exit_code
if not (OPPAI_PATH / 'liboppai.so').exists():
log('No oppai-ng library found, attempting to build.', Ansi.LMAGENTA)
p = subprocess.Popen(args=['./libbuild'], cwd='oppai-ng',
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
if exit_code := p.wait():
log('Failed to build oppai-ng automatically.', Ansi.LRED)
return exit_code
return 0
def __install_debugging_hooks() -> None:
"""Change internals to help with debugging & active development."""
if DEBUG_HOOKS_PATH.exists():
from _testing import runtime # type: ignore
runtime.setup()
def display_startup_dialog() -> None:
"""Print any general information or warnings to the console."""
if glob.config.advanced:
log('running in advanced mode', Ansi.LRED)
    # running on root grants the software potentially dangerous and
# unnecessary power over the operating system and is not advised.
if os.geteuid() == 0:
log('It is not recommended to run gulag as root, '
'especially in production..', Ansi.LYELLOW)
if glob.config.advanced:
log('The risk is even greater with features '
'such as config.advanced enabled.', Ansi.LRED)
if not glob.has_internet:
log('Running in offline mode, some features '
'will not be available.', Ansi.LRED)
def main() -> int:
for safety_check in (
ensure_supported_platform, # linux only at the moment
ensure_local_services_are_running, # mysql (if local)
ensure_directory_structure, # .data/ & achievements/ dir structure
ensure_dependencies_and_requirements # submodules & oppai-ng built
):
if (exit_code := safety_check()) != 0:
return exit_code
'''Server is safe to start up'''
glob.boot_time = datetime.now()
# install any debugging hooks from
# _testing/runtime.py, if present
__install_debugging_hooks()
# check our internet connection status
glob.has_internet = utils.misc.check_connection(timeout=1.5)
# show info & any contextual warnings.
display_startup_dialog()
# create the server object; this will handle http connections
# for us via the transport (tcp/ip) socket interface, and will
# handle housekeeping (setup, cleanup) for us automatically.
glob.app = cmyui.Server(
name=f'gulag v{glob.version}',
gzip=4, debug=glob.config.debug
)
# add the domains and their respective endpoints to our server object
from domains.cho import domain as cho_domain # c[e4-6]?.ppy.sh
from domains.osu import domain as osu_domain # osu.ppy.sh
from domains.ava import domain as ava_domain # a.ppy.sh
from domains.map import domain as map_domain # b.ppy.sh
glob.app.add_domains({cho_domain, osu_domain,
ava_domain, map_domain})
# attach housekeeping tasks (setup, cleanup)
glob.app.before_serving = before_serving
glob.app.after_serving = after_serving
# run the server (this is a blocking call)
glob.app.run(addr=glob.config.server_addr,
handle_restart=True) # (using SIGUSR1)
return 0
if __name__ == '__main__':
raise SystemExit(main())
elif __name__ == 'main':
# check specifically for asgi servers since many related projects
# (such as gulag-web) use them, so people may assume we do as well.
if utils.misc.running_via_asgi_webserver(sys.argv[0]):
raise RuntimeError(
"gulag does not use an ASGI framework, and uses it's own custom "
"web framework implementation; please run it directly (./main.py)."
)
else:
raise RuntimeError('gulag should only be run directly (./main.py).')
|
py | 1a341795cd102aaa7e8d9f5ad57c226d736e901f | from nightwatch.model import API
from typing import Any, List, Tuple
def source(api: API, errors: List[Any]) -> Tuple[str, str]:
guestlib_srcs = api.guestlib_srcs.split()
guestlib_srcs = ["${CMAKE_SOURCE_DIR}/guestlib/" + src for src in guestlib_srcs]
worker_srcs = api.worker_srcs.split()
worker_srcs = ["${CMAKE_SOURCE_DIR}/worker/" + src for src in worker_srcs]
common_utility_srcs = api.common_utility_srcs.split()
common_utility_srcs = ["${CMAKE_SOURCE_DIR}/common/" + src for src in common_utility_srcs]
so_link_code = [
f"""install(CODE "
EXECUTE_PROCESS(COMMAND ln -sf libguestlib.so {api_so_name}
WORKING_DIRECTORY ${{CMAKE_INSTALL_PREFIX}}/{api.identifier.lower()}/${{CMAKE_INSTALL_LIBDIR}})
")
"""
for api_so_name in api.soname.split(" ")
]
cmakelists = f"""
cmake_minimum_required(VERSION 3.13)
project({api.identifier.lower()}_nw C CXX)
set(SUBPROJECT_PREFIX "{api.identifier.lower()}")
list(APPEND CMAKE_MODULE_PATH "${{CMAKE_CURRENT_BINARY_DIR}}/../..")
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF) #...without compiler extensions like gnu++11
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
set(c_flags {api.cflags})
set(cxx_flags {api.cxxflags})
add_compile_options("$<$<COMPILE_LANGUAGE:C>:${{c_flags}}>")
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:${{cxx_flags}}>")
add_compile_options(-Wall -Wextra -pedantic -D_FILE_OFFSET_BITS=64 -fPIC -rdynamic -fpermissive -Wno-unused-parameter)
string(TOUPPER "${{CMAKE_BUILD_TYPE}}" cmake_build_type_upper)
if (cmake_build_type_upper MATCHES RELEASE)
add_compile_options(-DNDEBUG -flto)
endif()
###### Required dependencies ######
###### Compile ######
add_definitions(-D_GNU_SOURCE)
add_executable(${{SUBPROJECT_PREFIX}}_worker
${{CMAKE_SOURCE_DIR}}/worker/worker.cpp
${{CMAKE_SOURCE_DIR}}/worker/cmd_channel_socket_tcp.cpp
${{CMAKE_SOURCE_DIR}}/worker/provision_gpu.cpp
{' '.join(worker_srcs)}
{' '.join(common_utility_srcs)}
{api.c_worker_spelling}
${{CMAKE_SOURCE_DIR}}/common/cmd_channel.cpp
${{CMAKE_SOURCE_DIR}}/common/logging.cpp
${{CMAKE_SOURCE_DIR}}/common/murmur3.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_handler.cpp
${{CMAKE_SOURCE_DIR}}/common/endpoint_lib.cpp
${{CMAKE_SOURCE_DIR}}/common/socket.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_record.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_hv.cpp
${{CMAKE_SOURCE_DIR}}/common/shadow_thread_pool.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_socket_utilities.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_socket_tcp.cpp
)
target_link_libraries(${{SUBPROJECT_PREFIX}}_worker
${{GLIB2_LIBRARIES}}
${{Boost_LIBRARIES}}
Threads::Threads
fmt::fmt
GSL
{api.libs}
)
set_target_properties(${{SUBPROJECT_PREFIX}}_worker PROPERTIES OUTPUT_NAME "worker")
add_library(${{SUBPROJECT_PREFIX}}_guestlib SHARED
${{CMAKE_SOURCE_DIR}}/guestlib/init.cpp
${{CMAKE_SOURCE_DIR}}/guestlib/guest_config.cpp
${{CMAKE_SOURCE_DIR}}/guestlib/migration.cpp
${{CMAKE_SOURCE_DIR}}/guestlib/cmd_channel_socket_tcp.cpp
{' '.join(guestlib_srcs)}
{' '.join(common_utility_srcs)}
{api.c_library_spelling}
${{CMAKE_SOURCE_DIR}}/common/cmd_channel.cpp
${{CMAKE_SOURCE_DIR}}/common/logging.cpp
${{CMAKE_SOURCE_DIR}}/common/murmur3.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_handler.cpp
${{CMAKE_SOURCE_DIR}}/common/endpoint_lib.cpp
${{CMAKE_SOURCE_DIR}}/common/socket.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_record.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_hv.cpp
${{CMAKE_SOURCE_DIR}}/common/shadow_thread_pool.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_socket_utilities.cpp
${{CMAKE_SOURCE_DIR}}/common/cmd_channel_socket_tcp.cpp
${{CMAKE_SOURCE_DIR}}/proto/manager_service.proto.cpp
)
target_link_libraries(${{SUBPROJECT_PREFIX}}_guestlib
${{GLIB2_LIBRARIES}}
${{Boost_LIBRARIES}}
Threads::Threads
fmt::fmt
GSL
${{Config++}}
)
target_compile_options(${{SUBPROJECT_PREFIX}}_guestlib
PUBLIC -fvisibility=hidden
)
set_target_properties(${{SUBPROJECT_PREFIX}}_guestlib PROPERTIES OUTPUT_NAME "guestlib")
include(GNUInstallDirs)
install(TARGETS ${{SUBPROJECT_PREFIX}}_worker
RUNTIME DESTINATION {api.identifier.lower()}/${{CMAKE_INSTALL_BINDIR}})
install(TARGETS ${{SUBPROJECT_PREFIX}}_guestlib
LIBRARY DESTINATION {api.identifier.lower()}/${{CMAKE_INSTALL_LIBDIR}})
if(CMAKE_HOST_UNIX)
{''.join(so_link_code).strip()}
endif(CMAKE_HOST_UNIX)
""".strip()
return "CMakeLists.txt", cmakelists
|
py | 1a3417985542fdaa86a261dc94b0bf3b7b5231a6 | # send_notification_email.py
""" This routine sends and email alerting the user of missing fields. """
import os
import sys
where_i_am = os.path.dirname(os.path.realpath(__file__))
sys.path.append(where_i_am)
sys.path.append(where_i_am + "/dependencies")
import boto3 # noqa: E402
from botocore.errorfactory import ClientError # noqa: E402
from sentry_sdk import capture_exception # noqa: E402
def create_and_send_email_notification(missing_fields, notification_email_address, sender):
""" Create and then send an email alerting someone about missing fields """
recipients = notification_email_address.split(",")
subject = "Metadata is missing required fields"
body_html = _create_email_html_body(missing_fields)
body_text = ''
_send_email(sender, recipients, subject, body_html, body_text)
def _create_email_html_body(missing_fields):
""" Create the body of the email in html format """
body_html = """<html>
<head></head>
<body>
<h1>Missing required fields when processing metadata</h1>
<p> """ + missing_fields + """</p>
</body>
</html>"""
body_html = body_html.replace('\n', '<br/>')
return body_html
def _send_email(sender, recipients, subject, body_html, body_text):
""" Actually send the email. """
AWS_REGION = "us-east-1"
CHARSET = "UTF-8"
client = boto3.client('ses', region_name=AWS_REGION)
email_message_json = {
'Body': {},
'Subject': {
'Charset': CHARSET,
'Data': subject,
},
}
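    # prefer the HTML body when provided; otherwise fall back to the plain-text body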
if body_html > '':
email_message_json['Body']['Html'] = {'Charset': CHARSET, 'Data': body_html}
elif body_text > '':
email_message_json['Body']['Text'] = {'Charset': CHARSET, 'Data': body_text}
try:
response = client.send_email(
Destination={'ToAddresses': recipients},
Message=email_message_json,
Source=sender
)
except ClientError as e:
capture_exception(e.response['Error']['Message'])
else:
print("Email sent! Message ID:"),
print(response['MessageId'])
return
|
py | 1a3417fa729d30ed9ce7623d59db1bc327ce4e13 | import vim
import re
from os.path import abspath, basename, dirname, relpath
from vim_pad.timestamps import timestamp
from vim_pad.utils import get_save_dir
class PadInfo(object):
__slots__ = "id", "summary", "body", "isEmpty", "folder"
def __init__(self, source):
"""
source can be:
* a vim buffer
* a file object
* a list of strings, one per line
"""
nchars = int(vim.eval("g:pad#read_nchars_from_files"))
self.summary = ""
self.body = ""
self.isEmpty = True
self.folder = ""
self.id = timestamp()
if source is vim.current.buffer:
source = source[:10]
elif source.__class__ == file:
save_dir = get_save_dir()
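            # store the pad's folder relative to the save dir when the file lives
            # there, otherwise relative to the current working directory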
if abspath(source.name).startswith(save_dir):
pos = len(get_save_dir()), len(basename(source.name))
self.folder = abspath(source.name)[pos[0]:-pos[1]]
else:
self.folder = dirname(relpath(source.name, vim.eval('getcwd()')))
if vim.eval("g:pad#title_first_line") == '1':
source = source.readline().split("\n")
else:
source = source.read(nchars).split('\n')
data = [line.strip() for line in source if line != ""]
if data != []:
# we discard modelines
if re.match("^.* vim: set .*:.*$", data[0]):
data = data[1:]
self.summary = data[0].strip()
# vim-orgmode adds tags after whitespace
org_tags_data = re.search("\s+(?P<tags>:.*$)", self.summary)
if org_tags_data:
self.summary = re.sub("\s+:.*$", "", self.summary)
if self.summary[0] in ("%", "#"): # pandoc and markdown titles
self.summary = str(self.summary[1:]).strip()
self.body = u'\u21b2'.encode('utf-8').join(data[1:]).strip()
# if we have orgmode tag data, add it to the body
if org_tags_data:
self.body = ' '.join(\
[" ".join(\
map(lambda a: "@" + a, \
filter(lambda a: a != "", \
org_tags_data.group("tags").split(":")))), \
self.body])
# remove extra spaces in bodies
self.body = re.sub("\s{2,}", "", self.body)
if self.summary != "":
self.isEmpty = False
self.id = self.summary.lower().replace(" ", "_")
                # remove illegal characters from names (using rules for windows
# systems to err on the side of precaution)
self.id = re.sub("[*:<>/\|^]", "", self.id)
if self.id.startswith("."):
self.id = re.sub("^\.*", "", self.id)
|
py | 1a34186c67ee7e12efa0c403c74263ab209a9145 | import boto3
import json
import os
class ApiClient():
def __init__(self):
apiId = os.environ['WEBSOCKET_API_ID']
region = os.environ['AWS_REGION']
stage = os.environ['STAGE']
url = f'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}'
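        # the Management API endpoint lets the backend push messages to (and
        # disconnect) clients of the websocket API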
self.client = boto3.client('apigatewaymanagementapi', endpoint_url=url)
def send(self, connectionId, message):
dumped = json.dumps(message)
binMessage = bytes(dumped, 'utf-8')
self.client.post_to_connection(
Data = binMessage,
ConnectionId = connectionId)
def deregister(self, connectionId):
self.client.delete_connection(ConnectionId = connectionId)
|
py | 1a3419d9a369b6178fead14fb5ba597f06af7429 | '''
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
hi = (n1 ** 2 + n2 ** 2) ** (1/2)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''
'''
from math import hypot
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
hi = hypot(n1, n2)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''
'''
import math
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
hi = math.hypot(n1, n2)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''
'''import math
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
print('A hipotenusa vai medir {:.2f}'.format(math.hypot(n1, n2)))'''
from math import hypot
n1 = float(input('Comprimento do cateto oposto: '))
n2 = float(input('Comprimento do cateto adjacente: '))
print('A hipotenusa vai medir {:.2f}'.format(hypot(n1,n2))) |
py | 1a341a235597070a2d201da1cbcdc6ba1d1ca025 | from .._tier0 import execute
from .._tier0 import create
from .._tier0 import create_none
from .._tier0 import plugin_function
from .._tier0 import Image
@plugin_function(output_creator=create_none)
def crop(input : Image, output : Image = None, start_x : int = 0, start_y : int = 0, start_z : int = 0, width : int = 1, height : int = 1, depth : int = 1):
"""Crops a given sub-stack out of a given image stack.
Note: If the destination image pre-exists already, it will be overwritten and
    keep its dimensions.
Parameters
----------
source : Image
destination : Image
start_x : Number
start_y : Number
start_z : Number
width : Number
height : Number
depth : Number
Returns
-------
destination
Examples
--------
>>> import pyclesperanto_prototype as cle
>>> cle.crop(source, destination, start_x, start_y, start_z, width, height, depth)
References
----------
.. [1] https://clij.github.io/clij2-docs/reference_crop3D
"""
if output is None:
if len(input.shape) == 2:
output = create([height, width])
else:
output = create([depth, height, width])
parameters = {
"dst": output,
"src": input,
"start_x": int(start_x),
"start_y": int(start_y),
}
if len(output.shape) == 3:
# 3D image
parameters.update({"start_z": int(start_z)})
execute(__file__, '../clij-opencl-kernels/kernels/crop_' + str(len(output.shape)) + 'd_x.cl', 'crop_' + str(len(output.shape)) + 'd', output.shape, parameters)
return output
|
py | 1a341a69d976a91a05932e7939e10b9e559e6f88 | #!/usr/bin/env python
#pylint: skip-file
# This source code is licensed under the Apache license found in the
# LICENSE file in the root directory of this project.
class ApplicationListResult(object):
def __init__(self):
"""
Attributes:
swaggerTypes (dict): The key is attribute name and the value is attribute type.
attributeMap (dict): The key is attribute name and the value is json key in definition.
"""
self.swaggerTypes = {
'version': 'str',
'response': 'list[ApplicationDTO]'
}
self.attributeMap = {
'version': 'version',
'response': 'response'
}
self.version = None # str
self.response = None # list[ApplicationDTO]
|
py | 1a341a7c939077967eb617baef48e108de9ea0d5 | import gmpy2
from Crypto.Cipher import AES
from Crypto.Util.number import long_to_bytes
g = 10
p = 0x13862420eba6fc60ee4d0d85ca7ab02705bb17da22a8ecb43f20208f08cf9b6b3d34cd6a8f14650a7c1
pubA = 0xe58b9d1d41dfc8c82984e8bd6f06148c74d651a0e1fc51ddbed14a9c4918ad2826201a5ca70e3c89cb
pubB = 0xb95280ad174b58689cafba85ad968a7448d7074dafbf5fb319495380e8d444275ad2f952e7cfffc84b
enc_key = 0x639d0641f794654b0e7f30b17bca3cafb4fa8b87d514485816eabffdd8c29f5b91ccea9a4ba4e2d8f9
cipher = "\x8d\xaa\x19\x2c\x19\xdc\x40\x37\xb5\x8d\xef\x29\x35\x62\x37\x04\x85\x67\x79\xce\xfe\x83\xff\x90\x42\x67\x7b\x9b\x62\x66\x1c\x59"
privA = 333623895364814584400934325632016654841259729259576270868893933041709102871414502757155867187502100
privB = 68366528803802774494102028092185614536187281887082630883946649435775005432542
'''
F = GF(p)
g = F(10)
b = F(pubA)
N = p-1
qi = [p^N.valuation(p) for p in prime_factors(N)]
l = len(qi)
Nqi = [ N/q for q in qi ]
ai = [g^r for r in Nqi ]
bi = [b^r for r in Nqi ]
xi = [ discrete_log(bi[i],ai[i]) for i in range(l) ]
x = CRT(xi,qi)
'''
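# the commented Sage script above recovers privA by taking the discrete log in
# each prime-power subgroup of GF(p)* and combining the results with the CRT
# (the Pohlig-Hellman approach)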
assert((gmpy2.powmod(g, privA, p) == pubA) and (gmpy2.powmod(g, privB, p) == pubB))
shared_secret = gmpy2.powmod(pubA, privB, p)
print shared_secret
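# the script treats enc_key as the AES key blinded by multiplication with the
# shared secret mod p, so multiplying by the inverse of the shared secret
# recovers the key bytes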
k = gmpy2.invert(shared_secret, p)
k = long_to_bytes((k * enc_key) % p)
aes = AES.new(k, AES.MODE_ECB)
print aes.decrypt(cipher)
|
py | 1a341b6b01d330c7084b9594913f2b97b06fb00c | import jsonpickle
from model.group import Group
import random, string
import os.path
import getopt
import sys
# n sets the number of groups to generate; the -f option sets the file the data is written to
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["numbers of groups", "file"])
except getopt.GetoptError as err:
    print(err)
sys.exit(2)
n = 2
f = "data/groups.json"
for o, a in opts:
if o =="-n":
n = int(a)
elif o == "-f":
f = a
#+ string.punctuation + " "*10
def random_string(maxlen):
symbols = string.ascii_letters + string.digits
return "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
test_data = [Group(name="", header="", footer="")] + [
Group(name=random_string(10), header=random_string(10), footer=random_string(10))
for i in range(n)
]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
jsonpickle.set_encoder_options("json", indent=2)
out.write(jsonpickle.encode(test_data))
|
py | 1a341b8a0cb56c758f495c42823dab48b38ba040 | last_names = [
"Smith",
"Johnson",
"Williams",
"Brown",
"Jones",
"Miller",
"Davis",
"Garcia",
"Rodriguez",
"Wilson",
"Martinez",
"Anderson",
"Taylor",
"Thomas",
"Hernandez",
"Moore",
"Martin",
"Jackson",
"Thompson",
"White",
"Lopez",
"Lee",
"Gonzalez",
"Harris",
"Clark",
"Lewis",
"Robinson",
"Walker",
"Perez",
"Hall",
"Young",
"Allen",
"Sanchez",
"Wright",
"King",
"Scott",
"Green",
"Baker",
"Adams",
"Nelson",
"Hill",
"Ramirez",
"Campbell",
"Mitchell",
"Roberts",
"Carter",
"Phillips",
"Evans",
"Turner",
"Torres",
"Parker",
"Collins",
"Edwards",
"Stewart",
"Flores",
"Morris",
"Nguyen",
"Murphy",
"Rivera",
"Cook",
"Rogers",
"Morgan",
"Peterson",
"Cooper",
"Reed",
"Bailey",
"Bell",
"Gomez",
"Kelly",
"Howard",
"Ward",
"Cox",
"Diaz",
"Richardson",
"Wood",
"Watson",
"Brooks",
"Bennett",
"Gray",
"James",
"Reyes",
"Cruz",
"Hughes",
"Price",
"Myers",
"Long",
"Foster",
"Sanders",
"Ross",
"Morales",
"Powell",
"Sullivan",
"Russell",
"Ortiz",
"Jenkins",
"Gutierrez",
"Perry",
"Butler",
"Barnes",
"Fisher",
"Henderson",
"Coleman",
"Simmons",
"Patterson",
"Jordan",
"Reynolds",
"Hamilton",
"Graham",
"Kim",
"Gonzales",
"Alexander",
"Ramos",
"Wallace",
"Griffin",
"West",
"Cole",
"Hayes",
"Chavez",
"Gibson",
"Bryant",
"Ellis",
"Stevens",
"Murray",
"Ford",
"Marshall",
"Owens",
"Mcdonald",
"Harrison",
"Ruiz",
"Kennedy",
"Wells",
"Alvarez",
"Woods",
"Mendoza",
"Castillo",
"Olson",
"Webb",
"Washington",
"Tucker",
"Freeman",
"Burns",
"Henry",
"Vasquez",
"Snyder",
"Simpson",
"Crawford",
"Jimenez",
"Porter",
"Mason",
"Shaw",
"Gordon",
"Wagner",
"Hunter",
"Romero",
"Hicks",
"Dixon",
"Hunt",
"Palmer",
"Robertson",
"Black",
"Holmes",
"Stone",
"Meyer",
"Boyd",
"Mills",
"Warren",
"Fox",
"Rose",
"Rice",
"Moreno",
"Schmidt",
"Patel",
"Ferguson",
"Nichols",
"Herrera",
"Medina",
"Ryan",
"Fernandez",
"Weaver",
"Daniels",
"Stephens",
"Gardner",
"Payne",
"Kelley",
"Dunn",
"Pierce",
"Arnold",
"Tran",
"Spencer",
"Peters",
"Hawkins",
"Grant",
"Hansen",
"Castro",
"Hoffman",
"Hart",
"Elliott",
"Cunningham",
"Knight",
"Bradley",
"Carroll",
"Hudson",
"Duncan",
"Armstrong",
"Berry",
"Andrews",
"Johnston",
"Ray",
"Lane",
"Riley",
"Carpenter",
"Perkins",
"Aguilar",
"Silva",
"Richards",
"Willis",
"Matthews",
"Chapman",
"Lawrence",
"Garza",
"Vargas",
"Watkins",
"Wheeler",
"Larson",
"Carlson",
"Harper",
"George",
"Greene",
"Burke",
"Guzman",
"Morrison",
"Munoz",
"Jacobs",
"Obrien",
"Lawson",
"Franklin",
"Lynch",
"Bishop",
"Carr",
"Salazar",
"Austin",
"Mendez",
"Gilbert",
"Jensen",
"Williamson",
"Montgomery",
"Harvey",
"Oliver",
"Howell",
"Dean",
"Hanson",
"Weber",
"Garrett",
"Sims",
"Burton",
"Fuller",
"Soto",
"Mccoy",
"Welch",
"Chen",
"Schultz",
"Walters",
"Reid",
"Fields",
"Walsh",
"Little",
"Fowler",
"Bowman",
"Davidson",
"May",
"Day",
"Schneider",
"Newman",
"Brewer",
"Lucas",
"Holland",
"Wong",
"Banks",
"Santos",
"Curtis",
"Pearson",
"Delgado",
"Valdez",
"Pena",
"Rios",
"Douglas",
"Sandoval",
"Barrett",
"Hopkins",
"Keller",
"Guerrero",
"Stanley",
"Bates",
"Alvarado",
"Beck",
"Ortega",
"Wade",
"Estrada",
"Contreras",
"Barnett",
"Caldwell",
"Santiago",
"Lambert",
"Powers",
"Chambers",
"Nunez",
"Craig",
"Leonard",
"Lowe",
"Rhodes",
"Byrd",
"Gregory",
"Shelton",
"Frazier",
"Becker",
"Maldonado",
"Fleming",
"Vega",
"Sutton",
"Cohen",
"Jennings",
"Parks",
"Mcdaniel",
"Watts",
"Barker",
"Norris",
"Vaughn",
"Vazquez",
"Holt",
"Schwartz",
"Steele",
"Benson",
"Neal",
"Dominguez",
"Horton",
"Terry",
"Wolfe",
"Hale",
"Lyons",
"Graves",
"Haynes",
"Miles",
"Park",
"Warner",
"Padilla",
"Bush",
"Thornton",
"Mccarthy",
"Mann",
"Zimmerman",
"Erickson",
"Fletcher",
"Mckinney",
"Page",
"Dawson",
"Joseph",
"Marquez",
"Reeves",
"Klein",
"Espinoza",
"Baldwin",
"Moran",
"Love",
"Robbins",
"Higgins",
"Ball",
"Cortez",
"Le",
"Griffith",
"Bowen",
"Sharp",
"Cummings",
"Ramsey",
"Hardy",
"Swanson",
"Barber",
"Acosta",
"Luna",
"Chandler",
"Blair",
"Daniel",
"Cross",
"Simon",
"Dennis",
"Oconnor",
"Quinn",
"Gross",
"Navarro",
"Moss",
"Fitzgerald",
"Doyle",
"Mclaughlin",
"Rojas",
"Rodgers",
"Stevenson",
"Singh",
"Yang",
"Figueroa",
"Harmon",
"Newton",
"Paul",
"Manning",
"Garner",
"Mcgee",
"Reese",
"Francis",
"Burgess",
"Adkins",
"Goodman",
"Curry",
"Brady",
"Christensen",
"Potter",
"Walton",
"Goodwin",
"Mullins",
"Molina",
"Webster",
"Fischer",
"Campos",
"Avila",
"Sherman",
"Todd",
"Chang",
"Blake",
"Malone",
"Wolf",
"Hodges",
"Juarez",
"Gill",
"Farmer",
"Hines",
"Gallagher",
"Duran",
"Hubbard",
"Cannon",
"Miranda",
"Wang",
"Saunders",
"Tate",
"Mack",
"Hammond",
"Carrillo",
"Townsend",
"Wise",
"Ingram",
"Barton",
"Mejia",
"Ayala",
"Schroeder",
"Hampton",
"Rowe",
"Parsons",
"Frank",
"Waters",
"Strickland",
"Osborne",
"Maxwell",
"Chan",
"Deleon",
"Norman",
"Harrington",
"Casey",
"Patton",
"Logan",
"Bowers",
"Mueller",
"Glover",
"Floyd",
"Hartman",
"Buchanan",
"Cobb",
"French",
"Kramer",
"Mccormick",
"Clarke",
"Tyler",
"Gibbs",
"Moody",
"Conner",
"Sparks",
"Mcguire",
"Leon",
"Bauer",
"Norton",
"Pope",
"Flynn",
"Hogan",
"Robles",
"Salinas",
"Yates",
"Lindsey",
"Lloyd",
"Marsh",
"Mcbride",
"Owen",
"Solis",
"Pham",
"Lang",
"Pratt",
"Lara",
"Brock",
"Ballard",
"Trujillo",
"Shaffer",
"Drake",
"Roman",
"Aguirre",
"Morton",
"Stokes",
"Lamb",
"Pacheco",
"Patrick",
"Cochran",
"Shepherd",
"Cain",
"Burnett",
"Hess",
"Li",
"Cervantes",
"Olsen",
"Briggs",
"Ochoa",
"Cabrera",
"Velasquez",
"Montoya",
"Roth",
"Meyers",
"Cardenas",
"Fuentes",
"Weiss",
"Hoover",
"Wilkins",
"Nicholson",
"Underwood",
"Short",
"Carson",
"Morrow",
"Colon",
"Holloway",
"Summers",
"Bryan",
"Petersen",
"Mckenzie",
"Serrano",
"Wilcox",
"Carey",
"Clayton",
"Poole",
"Calderon",
"Gallegos",
"Greer",
"Rivas",
"Guerra",
"Decker",
"Collier",
"Wall",
"Whitaker",
"Bass",
"Flowers",
"Davenport",
"Conley",
"Houston",
"Huff",
"Copeland",
"Hood",
"Monroe",
"Massey",
"Roberson",
"Combs",
"Franco",
"Larsen",
"Pittman",
"Randall",
"Skinner",
"Wilkinson",
"Kirby",
"Cameron",
"Bridges",
"Anthony",
"Richard",
"Kirk",
"Bruce",
"Singleton",
"Mathis",
"Bradford",
"Boone",
"Abbott",
"Charles",
"Allison",
"Sweeney",
"Atkinson",
"Horn",
"Jefferson",
"Rosales",
"York",
"Christian",
"Phelps",
"Farrell",
"Castaneda",
"Nash",
"Dickerson",
"Bond",
"Wyatt",
"Foley",
"Chase",
"Gates",
"Vincent",
"Mathews",
"Hodge",
"Garrison",
"Trevino",
"Villarreal",
"Heath",
"Dalton",
"Valencia",
"Callahan",
"Hensley",
"Atkins",
"Huffman",
"Roy",
"Boyer",
"Shields",
"Lin",
"Hancock",
"Grimes",
"Glenn",
"Cline",
"Delacruz",
"Camacho",
"Dillon",
"Parrish",
"Oneill",
"Melton",
"Booth",
"Kane",
"Berg",
"Harrell",
"Pitts",
"Savage",
"Wiggins",
"Brennan",
"Salas",
"Marks",
"Russo",
"Sawyer",
"Baxter",
"Golden",
"Hutchinson",
"Liu",
"Walter",
"Mcdowell",
"Wiley",
"Rich",
"Humphrey",
"Johns",
"Koch",
"Suarez",
"Hobbs",
"Beard",
"Gilmore",
"Ibarra",
"Keith",
"Macias",
"Khan",
"Andrade",
"Ware",
"Stephenson",
"Henson",
"Wilkerson",
"Dyer",
"Mcclure",
"Blackwell",
"Mercado",
"Tanner",
"Eaton",
"Clay",
"Barron",
"Beasley",
"Oneal",
"Preston",
"Small",
"Wu",
"Zamora",
"Macdonald",
"Vance",
"Snow",
"Mcclain",
"Stafford",
"Orozco",
"Barry",
"English",
"Shannon",
"Kline",
"Jacobson",
"Woodard",
"Huang",
"Kemp",
"Mosley",
"Prince",
"Merritt",
"Hurst",
"Villanueva",
"Roach",
"Nolan",
"Lam",
"Yoder",
"Mccullough",
"Lester",
"Santana",
"Valenzuela",
"Winters",
"Barrera",
"Leach",
"Orr",
"Berger",
"Mckee",
"Strong",
"Conway",
"Stein",
"Whitehead",
"Bullock",
"Escobar",
"Knox",
"Meadows",
"Solomon",
"Velez",
"Odonnell",
"Kerr",
"Stout",
"Blankenship",
"Browning",
"Kent",
"Lozano",
"Bartlett",
"Pruitt",
"Buck",
"Barr",
"Gaines",
"Durham",
"Gentry",
"Mcintyre",
"Sloan",
"Melendez",
"Rocha",
"Herman",
"Sexton",
"Moon",
"Hendricks",
"Rangel",
"Stark",
"Lowery",
"Hardin",
"Hull",
"Sellers",
"Ellison",
"Calhoun",
"Gillespie",
"Mora",
"Knapp",
"Mccall",
"Morse",
"Dorsey",
"Weeks",
"Nielsen",
"Livingston",
"Leblanc",
"Mclean",
"Bradshaw",
"Glass",
"Middleton",
"Buckley",
"Schaefer",
"Frost",
"Howe",
"House",
"Mcintosh",
"Ho",
"Pennington",
"Reilly",
"Hebert",
"Mcfarland",
"Hickman",
"Noble",
"Spears",
"Conrad",
"Arias",
"Galvan",
"Velazquez",
"Huynh",
"Frederick",
"Randolph",
"Cantu",
"Fitzpatrick",
"Mahoney",
"Peck",
"Villa",
"Michael",
"Donovan",
"Mcconnell",
"Walls",
"Boyle",
"Mayer",
"Zuniga",
"Giles",
"Pineda",
"Pace",
"Hurley",
"Mays",
"Mcmillan",
"Crosby",
"Ayers",
"Case",
"Bentley",
"Shepard",
"Everett",
"Pugh",
"David",
"Mcmahon",
"Dunlap",
"Bender",
"Hahn",
"Harding",
"Acevedo",
"Raymond",
"Blackburn",
"Duffy",
"Landry",
"Dougherty",
"Bautista",
"Shah",
"Potts",
"Arroyo",
"Valentine",
"Meza",
"Gould",
"Vaughan",
"Fry",
"Rush",
"Avery",
"Herring",
"Dodson",
"Clements",
"Sampson",
"Tapia",
"Bean",
"Lynn",
"Crane",
"Farley",
"Cisneros",
"Benton",
"Ashley",
"Mckay",
"Finley",
"Best",
"Blevins",
"Friedman",
"Moses",
"Sosa",
"Blanchard",
"Huber",
"Frye",
"Krueger",
"Bernard",
"Rosario",
"Rubio",
"Mullen",
"Benjamin",
"Haley",
"Chung",
"Moyer",
"Choi",
"Horne",
"Yu",
"Woodward",
"Ali",
"Nixon",
"Hayden",
"Rivers",
"Estes",
"Mccarty",
"Richmond",
"Stuart",
"Maynard",
"Brandt",
"Oconnell",
"Hanna",
"Sanford",
"Sheppard",
"Church",
"Burch",
"Levy",
"Rasmussen",
"Coffey",
"Ponce",
"Faulkner",
"Donaldson",
"Schmitt",
"Novak",
"Costa",
"Montes",
"Booker",
"Cordova",
"Waller",
"Arellano",
"Maddox",
"Mata",
"Bonilla",
"Stanton",
"Compton",
"Kaufman",
"Dudley",
"Mcpherson",
"Beltran",
"Dickson",
"Mccann",
"Villegas",
"Proctor",
"Hester",
"Cantrell",
"Daugherty",
"Cherry",
"Bray",
"Davila",
"Rowland",
"Levine",
"Madden",
"Spence",
"Good",
"Irwin",
"Werner",
"Krause",
"Petty",
"Whitney",
"Baird",
"Hooper",
"Pollard",
"Zavala",
"Jarvis",
"Holden",
"Haas",
"Hendrix",
"Mcgrath",
"Bird",
"Lucero",
"Terrell",
"Riggs",
"Joyce",
"Mercer",
"Rollins",
"Galloway",
"Duke",
"Odom",
"Andersen",
"Downs",
"Hatfield",
"Benitez",
"Archer",
"Huerta",
"Travis",
"Mcneil",
"Hinton",
"Zhang",
"Hays",
"Mayo",
"Fritz",
"Branch",
"Mooney",
"Ewing",
"Ritter",
"Esparza",
"Frey",
"Braun",
"Gay",
"Riddle",
"Haney",
"Kaiser",
"Holder",
"Chaney",
"Mcknight",
"Gamble",
"Vang",
"Cooley",
"Carney",
"Cowan",
"Forbes",
"Ferrell",
"Davies",
"Barajas",
"Shea",
"Osborn",
"Bright",
"Cuevas",
"Bolton",
"Murillo",
"Lutz",
"Duarte",
"Kidd",
"Key",
"Cooke",
"Goff",
"Dejesus",
"Marin",
"Dotson",
"Bonner",
"Cotton",
"Merrill",
"Lindsay",
"Lancaster",
"Mcgowan",
"Felix",
"Salgado",
"Slater",
"Carver",
"Guthrie",
"Holman",
"Fulton",
"Snider",
"Sears",
"Witt",
"Newell",
"Byers",
"Lehman",
"Gorman",
"Costello",
"Donahue",
"Delaney",
"Albert",
"Workman",
"Rosas",
"Springer",
"Justice",
"Kinney",
"Odell",
"Lake",
"Donnelly",
"Law",
"Dailey",
"Guevara",
"Shoemaker",
"Barlow",
"Marino",
"Winter",
"Craft",
"Katz",
"Pickett",
"Espinosa",
"Daly",
"Maloney",
"Goldstein",
"Crowley",
"Vogel",
"Kuhn",
"Pearce",
"Hartley",
"Cleveland",
"Palacios",
"Mcfadden",
"Britt"
];
|
py | 1a341be8ba0c02ffb145e77168920d3f154c6b5d | import os
import unittest
from smqtk_core.configuration import configuration_test_helper
import numpy
import pytest
from smqtk_classifier import ClassifyDescriptor
from smqtk_classifier.impls.classify_descriptor.classify_index_label_descriptor import ClassifyIndexLabelDescriptor
from tests import TEST_DATA_DIR
class TestClassifyIndexLabelDescriptor(unittest.TestCase):
EXPECTED_LABEL_VEC = [
b'label_1',
b'label_2',
b'negative',
b'label_3',
b'Kitware',
b'label_4',
]
FILEPATH_TEST_LABELS = os.path.join(TEST_DATA_DIR, 'test_labels.txt')
def test_is_usable(self) -> None:
# Should always be available
self.assertTrue(ClassifyIndexLabelDescriptor.is_usable())
def test_impl_findable(self) -> None:
self.assertIn(ClassifyIndexLabelDescriptor,
ClassifyDescriptor.get_impls())
def test_configurable(self) -> None:
c = ClassifyIndexLabelDescriptor(self.FILEPATH_TEST_LABELS)
for inst in configuration_test_helper(c):
assert inst.index_to_label_uri == self.FILEPATH_TEST_LABELS
def test_new(self) -> None:
c = ClassifyIndexLabelDescriptor(self.FILEPATH_TEST_LABELS)
self.assertEqual(c.label_vector, self.EXPECTED_LABEL_VEC)
def test_get_labels(self) -> None:
c = ClassifyIndexLabelDescriptor(self.FILEPATH_TEST_LABELS)
self.assertEqual(c.get_labels(), self.EXPECTED_LABEL_VEC)
def test_configuration(self) -> None:
cfg = ClassifyIndexLabelDescriptor.get_default_config()
self.assertEqual(cfg, {'index_to_label_uri': None})
cfg['index_to_label_uri'] = self.FILEPATH_TEST_LABELS
c = ClassifyIndexLabelDescriptor.from_config(cfg)
self.assertEqual(c.get_config(), cfg)
def test_classify_arrays(self) -> None:
c = ClassifyIndexLabelDescriptor(self.FILEPATH_TEST_LABELS)
c_expected = {
b'label_1': 1,
b'label_2': 2,
b'negative': 3,
b'label_3': 4,
b'Kitware': 5,
b'label_4': 6,
}
a = numpy.array([1, 2, 3, 4, 5, 6])
c_result = list(c._classify_arrays([a]))[0]
self.assertEqual(c_result, c_expected)
def test_classify_arrays_invalid_descriptor_dimensions(self) -> None:
c = ClassifyIndexLabelDescriptor(self.FILEPATH_TEST_LABELS)
# One less
a = numpy.array([1, 2, 3, 4, 5])
with pytest.raises(RuntimeError):
list(c._classify_arrays([a]))
# One more
a = numpy.array([1, 2, 3, 4, 5, 6, 7])
with pytest.raises(RuntimeError):
list(c._classify_arrays([a]))
|
py | 1a341d14a7e9eeeb88c6b5df91fc19a9a80ec967 | from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from config import *
from app import app, db
import pymysql
pymysql.install_as_MySQLdb()
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
py | 1a341da83e288c401035fd40c14ef782b1bcd6d4 | from abc import ABC, abstractclassmethod
from typing import Any
class UnitOfWork(ABC):
"""
Port (Interface) for transaction management (usually, but not only database
transactions).
"""
@abstractclassmethod
def __enter__(self):
raise NotImplementedError
@abstractclassmethod
def __exit__(self, *args: Any) -> None:
raise NotImplementedError
|
py | 1a341f1dbb469c7e83aeffac343dd4dec249dceb | import json
import discord
import logging
from pantheon import pantheon
from util.decorator import only_owner
logger = logging.getLogger("Verif")
with open("private/rgapikey") as key:
panth = pantheon.Pantheon("euw1", key.read(), True)
#verified = {"discordId":"summonerId"}
NOT_VERIFIED = "Vous n'êtes vérifié.\nPour le devenir, connectez vous sur le "\
+ "client League of Legends, puis paramètre > code de vérification tier.\n"\
+ "Entrez votre ID discord ({}) puis cliquez sur valider.\n"\
+ "Entrez ensuite /verif {{votre_nom_d'invocateur}}"
VERIFIED = "Vous êtes vérifié !\nNom d'invocateur : {name}\nNiveau : {summonerLevel}"
BAD_CODE = "Erreur : Le code que vous avez rentrez rentrer ne corespond pas à votre"\
+ " id discord, veuillez résayer. Si le problème persiste, "\
+ "essayez de redémarrer votre client"
ICON_URL = "http://ddragon.canisback.com/latest/img/profileicon/{}.png"
def load_verif():
with open("data/summoners", 'r') as fd:
return json.loads(fd.read())
def save_verif(dic):
with open("data/summoners", 'w') as fd:
fd.write(json.dumps(dic))
class CmdVerif:
@only_owner
async def cmd_importverif(self, *args, message, client, **_):
guild = client.get_guild(511938608475930644)
count = 0
members = [member for member in guild.members if "Joueur" in [
role.name for role in member.roles]
]
verified = load_verif()
for member in members:
if str(member.id) not in verified.keys():
logger.info("Verifing " + member.display_name)
try:
summ_data = await panth.getSummonerByName(member.display_name)
except:
await message.channel.send("Impossible de vérifier {}".format(member.display_name))
continue
verified[str(member.id)] = summ_data['id']
count += 1
save_verif(verified)
await message.channel.send("{} membres ont été ajouté".format(count))
async def cmd_verif(self, *args, channel, member, message, **_):
verified = load_verif()
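        # no arguments: report the caller's current verification status;
        # with a summoner name: check that the third-party code set in the LoL
        # client matches the caller's discord id before storing the mapping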
if not args:
if str(member.id) in verified.keys():
data = await panth.getSummoner(verified[str(member.id)])
em = discord.Embed(title="Vérification",
description=VERIFIED.format(**data)
)
em.set_author(name=data['name'], icon_url=ICON_URL.format(data['profileIconId']))
await channel.send(embed=em)
else:
await channel.send(NOT_VERIFIED.format(member.id))
else:
try:
summ_data = await panth.getSummonerByName(" ".join(args))
except:
await channel.send("Impossible de trouver l'invocateur")
return False
try:
code = await panth.getThirdPartyCode(summ_data['id'])
if code != str(member.id):
raise Exception('bad_code')
except:
await channel.send(BAD_CODE)
return False
verified[str(member.id)] = summ_data['id']
save_verif(verified)
await self.cmd_verif(*args, message=message, channel=channel, member=member)
|
py | 1a341f29cb94601623ea044e02dd964e5d3d87e5 | import functools
from types import FunctionType
def log_request_and_response(func):
"""
Decorator that logs the responses (and the requests they are responses to) returned by any given 'func'. Useful if
you want to log all the responses returned to / requests made by an API wrapper.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
response = None
try:
response = func(*args, **kwargs)
except ResponseException as e:
response = e.response # Got a non-20x response
raise # May need to change this to preserve the original traceback in Python 3
finally:
            # We still want to log the request/response if we receive a ResponseException (i.e. an error response)
if response is not None: # Note: failure responses are falsey!
# Attempt to find a logger, and log the request and response
logger = getattr(args[0], 'logger', None)
if logger: # Only exists if the calling class has a logger attribute
logger.info(logger.format.format_request(response.request))
logger.info(logger.format.format_response(response))
return response
return wrapper
class MetaApi(type):
"""
Metaclass for API wrapper classes that allows all requests/responses to be pretty-printed and logged (at 'info'
logging level and above).
"""
def __new__(mcs, class_name, bases, class_dict):
new_class_dict = {}
ancestor = MetaApi.get_furthest_ancestor(bases[0])
for attribute_name, attribute in class_dict.items():
# Log the pretty-printed request and response, if this method represents an API call
if not attribute_name.startswith('__') and isinstance(attribute, FunctionType):
if hasattr(ancestor, attribute_name): # I.e. this method overrides a method in the furthest ancestor
attribute = log_request_and_response(attribute)
new_class_dict[attribute_name] = attribute
return type.__new__(mcs, class_name, bases, new_class_dict)
@classmethod
def get_furthest_ancestor(mcs, base):
"""
Gets the first class in an inheritance hierarchy that has this class as its metaclass.
"""
ancestor = base
while getattr(base.__base__, '__metaclass__', None) == mcs:
            base = base.__base__
            ancestor = base
return ancestor
class ResponseException(Exception):
"""
Thrown when an error response is received from an API.
"""
def __init__(self, message, response):
"""
:param message: Message for the exception
:param response HTTP response object
"""
super(Exception, self).__init__(message)
self.status_code = response.status_code
self.error_code = int(response.headers.get('X-Serato-ErrorCode') or 0)
self.response = response
|
py | 1a3420721aa189e569cc7bdd9d370b1a896d5e2e | from rest_framework import serializers
from profiles_api import models
class HelloSerializer(serializers.Serializer):
"""Serializes a name field for esting our APIView"""
name = serializers.CharField(max_length = 10)
class UserProfileSerializer(serializers.ModelSerializer):
"""Serializes a user profile object"""
class Meta:
model = models.UserProfile
fields = ('id', 'email', 'name', 'password')
extra_kwargs = {
'password': {
'write_only': True,
'style': {'input_type': 'password'}
}
}
def create(self, validated_data):
"""Create and return a new user"""
user = models.UserProfile.objects.create_user(
email = validated_data['email'],
name = validated_data['name'],
password = validated_data['password']
)
return user
def update(self, instance, validated_data):
"""Handle updating user account"""
if 'password' in validated_data:
password = validated_data.pop('password')
instance.set_password(password)
return super().update(instance, validated_data)
class ProfileFeedItemSerializer(serializers.ModelSerializer):
"""Serializes profile feed items"""
class Meta:
model = models.ProfileFeedItem
fields = ('id', 'user_profile', 'status_text', 'created_on')
extra_kwargs = {'user_profile': {'read_only': True}}
|
py | 1a34213d677f3e2abc672bde0b1c3f6bb74af196 | import tensorflow as tf
from tensorflow.python import debug
import constants as const
import utils
import os
import models
import exports
from time import time, sleep
from os import path
import random
from tensorflow.python.client import timeline
import inputs
import keras
import keras.backend as K
import keras.layers as KL
import keras.engine as KE
import keras.models as KM
from ipdb import set_trace as st
class SessionOperator(object):
def __init__(self):
if not const.eager:
config = tf.ConfigProto()
if const.DEBUG_PLACEMENT:
config.log_device_placement = True
self.sess = tf.Session(config=config)
K.set_session(self.sess)
self.run = self.sess.run
else:
self.sess = None
def save(self):
utils.utils.nyi()
def load(self):
return 0
def setup(self):
T1 = time()
print('finished graph creation in %f seconds' % (time() - const.T0))
if not const.eager:
self.run(tf.global_variables_initializer())
self.run(tf.local_variables_initializer())
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=self.sess)
#must come after the queue runners
if const.DEBUG_NAN:
self.sess = debug.LocalCLIDebugWrapperSession(self.sess)
self.sess.add_tensor_filter("has_inf_or_nan", debug.has_inf_or_nan)
self.step = self.load()
#it's in another graph
# if const.generate_views: #not sure why this is necessary....
# #self.run(tf.variables_initializer(inputs.foo_counters))
# self.run(inputs.foo_counters)
if not const.eager:
tf.get_default_graph().finalize()
print('finished graph initialization in %f seconds' % (time() - T1))
def go(self, mode):
self.setup()
if mode == 'train':
self.train()
elif mode == 'test':
#tf.logging.set_verbosity(tf.logging.FATAL)
#prevents end of iterator error print outs
self.test()
def test(self):
utils.utils.nyi()
def train(self):
utils.utils.nyi()
class ModelOperator(SessionOperator):
def __init__(self, model,
savename=None, loadname=None,
vis=None, tb=None, evaluator=None):
self.model = model
self.savename = savename
self.loadname = loadname
self.vis = vis
self.tb = tb
self.evaluator = evaluator
# self.run_metadata = tf.RunMetadata()
super(ModelOperator, self).__init__()
def load(self):
if not self.loadname:
return 0
else:
return self.model.load(self.sess, self.loadname)
def save(self):
if not self.savename:
return
self.model.save(self.sess, self.savename, self.step)
def fd_for_mode(self, mode):
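        # feed the index of the requested input collection (train/val/test)
        # through the model's data selector so the right dataset is used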
input_collection_to_number = {'train': 0, 'val': 1, 'test': 2}
data_name = self.model.get_data_name(mode)
fd = {self.model.data_selector: input_collection_to_number[data_name]}
if self.model.data_selector is None:
return {}
else:
return fd
def run_steps(self, modes, same_batch = True):
if const.DEBUG_SPEED:
print('====')
print('running', modes)
t0 = time()
stuff = []
for mode in modes:
if const.SKIP_RUN:
print('skipping run')
continue
if const.DEBUG_SPEED:
print('running mode:', mode)
stuff_ = self.model.run(mode, self.sess,self.kl_coeff)
stuff.append(stuff_)
if const.DEBUG_SPEED:
t1 = time()
print('time: %f' % (t1 - t0))
print('====')
return stuff
def train(self):
print('STARTING TRAIN')
self.kl_coeff = 0.0
if const.DEBUG_MEMORY:
#need to write to log, since leak means process would be killed
utils.utils.ensure('memory_log')
f = open('memory_log/%s.log' % const.exp_name, 'w')
for step in range(self.step, const.NB_STEPS):
self.step = step
print('step %d' % step)
if const.DEBUG_MEMORY:
m = utils.utils.memory_consumption()
print('memory consumption is', m)
f.write(str(m)+'\n')
f.flush()
os.fsync(f.fileno())
if not(step % const.savep) and step != 0:
print('saving')
self.save()
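            # gradually ramp the KL penalty coefficient up with the step count,
            # capping it at 0.6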
if step % 5000 == 0:
self.kl_coeff = step / (float(100 + 1) * float(625))
if self.kl_coeff >= 0.6:
self.kl_coeff = 0.6
print('kl penalty coefficient: ', self.kl_coeff, 'alpha upperbound:', 0.6)
a = time()
self.train_step(step)
print("time taken ",time()-a)
if not(step % const.valp):
self.val_step(step)
def test(self):
step = 0
#while 1:
self.kl_coeff = 0.0
for _ in range(10000):
step += 1
if not self.test_step(step):
break
print('test step %d' % step)
if self.evaluator:
self.evaluator.finish()
def train_step(self, step):
utils.utils.nyi()
def val_step(self, step):
utils.utils.nyi()
def test_step(self, step):
utils.utils.nyi()
class ModalOperator(ModelOperator):
def __init__(self, model, train_modes, val_modes, test_modes,
savename=None, loadname=None,
vis=None, tb=None, evaluator=None):
if not isinstance(train_modes, list):
train_modes = [train_modes]
if not isinstance(val_modes, list):
val_modes = [val_modes]
if not isinstance(test_modes, list):
test_modes = [test_modes]
self.train_modes = train_modes
self.val_modes = val_modes
self.test_modes = test_modes
super(ModalOperator, self).__init__(
model, savename=savename, loadname=loadname, vis=vis, tb=tb, evaluator=evaluator
)
if const.DEBUG_FULL_TRACE:
self.graph_writer = tf.summary.FileWriter(path.join(const.tb_dir, 'graph'),
self.sess.graph)
def train_step(self, step):
train_stuffs = self.run_steps(self.train_modes, same_batch = True)
# st()
if const.SKIP_EXPORT or const.SKIP_TRAIN_EXPORT:
print('skipping exports')
return
if const.DEBUG_SPEED:
print('processing outputs')
for mode, train_stuff in zip(self.train_modes, train_stuffs):
if not train_stuff:
continue
if 'summary' in train_stuff:
self.tb.process(train_stuff['summary'], mode, step)
def val_step(self, step):
val_stuffs = self.run_steps(self.val_modes, same_batch = False)
if const.SKIP_EXPORT or const.SKIP_VAL_EXPORT:
print('skipping exports')
return
if const.DEBUG_SPEED:
print('processing outputs')
for mode, val_stuff in zip(self.val_modes, val_stuffs):
if not val_stuff:
return
if 'vis' in val_stuff and self.vis:
self.vis.process(val_stuff['vis'], mode, step)
if 'summary' in val_stuff and self.tb:
self.tb.process(val_stuff['summary'], mode, step)
def test_step(self, step):
assert len(self.test_modes) == 1, "can't have multiple test modes"
# st()
try:
test_stuff = self.run_steps(self.test_modes)[0]
except tf.errors.OutOfRangeError:
return False
if 'evaluator' in test_stuff and self.evaluator:
self.evaluator.process(test_stuff['evaluator'], None, None)
if 'vis' in test_stuff and self.vis:
self.vis.process(test_stuff['vis'], self.test_modes[0], step)
if 'summary' in test_stuff and self.tb:
self.tb.process(test_stuff['summary'], self.test_modes[0], step)
return True
class GenerateViews(ModalOperator):
def test_step(self, step):
try:
test_stuffs = self.run_steps(self.test_modes)
except tf.errors.OutOfRangeError:
return False
visualizations = [test_stuff['vis']['pred_views'][0] for test_stuff in test_stuffs]
self.vis.process(test_stuffs[0]['vis'], self.test_modes[0], step)
self.vis.process({'gen_views': visualizations}, self.test_modes[0], step)
if False: #plot immediately
#just for visualization purposes
def chunks(l, n):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
yield l[i:i + n]
import numpy as np
row_size = const.AZIMUTH_GRANULARITY if (const.ELEV_GRANULARITY > 1) else 12
rows = list(chunks(visualizations, row_size))
rows = [np.concatenate(row, axis = 1) for row in rows]
total = np.concatenate(rows, axis = 0)
import matplotlib.pyplot as plt
plt.imshow(total)
plt.show()
return True
|
py | 1a3421cecaf46eccf0a0db27fc271d8f5ec1e511 | """Helpers that help with state related things."""
import json
import logging
from collections import defaultdict
import homeassistant.util.dt as dt_util
from homeassistant.components.media_player import (
ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_SEEK_POSITION,
ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOURCE, ATTR_INPUT_SOURCE)
from homeassistant.components.notify import (
ATTR_MESSAGE, SERVICE_NOTIFY)
from homeassistant.components.sun import (
STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON)
from homeassistant.components.thermostat import (
ATTR_AWAY_MODE, ATTR_FAN, SERVICE_SET_AWAY_MODE, SERVICE_SET_FAN_MODE,
SERVICE_SET_TEMPERATURE)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER,
SERVICE_CLOSE, SERVICE_LOCK, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_SEEK, SERVICE_MOVE_DOWN, SERVICE_MOVE_UP, SERVICE_OPEN,
SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_UNLOCK, SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_LOCKED,
STATE_OFF, STATE_ON, STATE_OPEN, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN, STATE_UNLOCKED)
from homeassistant.core import State
_LOGGER = logging.getLogger(__name__)
GROUP_DOMAIN = 'group'
HASS_DOMAIN = 'homeassistant'
# Update this dict of lists when new services are added to HA.
# Each item is a service with a list of required attributes.
SERVICE_ATTRIBUTES = {
SERVICE_PLAY_MEDIA: [ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_CONTENT_ID],
SERVICE_MEDIA_SEEK: [ATTR_MEDIA_SEEK_POSITION],
SERVICE_VOLUME_MUTE: [ATTR_MEDIA_VOLUME_MUTED],
SERVICE_VOLUME_SET: [ATTR_MEDIA_VOLUME_LEVEL],
SERVICE_NOTIFY: [ATTR_MESSAGE],
SERVICE_SET_AWAY_MODE: [ATTR_AWAY_MODE],
SERVICE_SET_FAN_MODE: [ATTR_FAN],
SERVICE_SET_TEMPERATURE: [ATTR_TEMPERATURE],
SERVICE_SELECT_SOURCE: [ATTR_INPUT_SOURCE],
}
# Update this dict when new services are added to HA.
# Each item is a service with a corresponding state.
SERVICE_TO_STATE = {
SERVICE_TURN_ON: STATE_ON,
SERVICE_TURN_OFF: STATE_OFF,
SERVICE_MEDIA_PLAY: STATE_PLAYING,
SERVICE_MEDIA_PAUSE: STATE_PAUSED,
SERVICE_ALARM_ARM_AWAY: STATE_ALARM_ARMED_AWAY,
SERVICE_ALARM_ARM_HOME: STATE_ALARM_ARMED_HOME,
SERVICE_ALARM_DISARM: STATE_ALARM_DISARMED,
SERVICE_ALARM_TRIGGER: STATE_ALARM_TRIGGERED,
SERVICE_LOCK: STATE_LOCKED,
SERVICE_UNLOCK: STATE_UNLOCKED,
SERVICE_CLOSE: STATE_CLOSED,
SERVICE_OPEN: STATE_OPEN,
SERVICE_MOVE_UP: STATE_OPEN,
SERVICE_MOVE_DOWN: STATE_CLOSED,
}
# pylint: disable=too-few-public-methods, attribute-defined-outside-init
class TrackStates(object):
"""
Record the time when the with-block is entered.
Add all states that have changed since the start time to the return list
    when the with-block is exited.
"""
def __init__(self, hass):
"""Initialize a TrackStates block."""
self.hass = hass
self.states = []
def __enter__(self):
"""Record time from which to track changes."""
self.now = dt_util.utcnow()
return self.states
def __exit__(self, exc_type, exc_value, traceback):
"""Add changes states to changes list."""
self.states.extend(get_changed_since(self.hass.states.all(), self.now))
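# Illustrative usage sketch (not part of the original module; `hass` is
# assumed to be a running Home Assistant instance):
#
#     with TrackStates(hass) as changed:
#         hass.services.call('light', 'turn_on',
#                            {ATTR_ENTITY_ID: 'light.kitchen'}, blocking=True)
#     # `changed` now holds every State that was updated inside the block.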
def get_changed_since(states, utc_point_in_time):
"""Return list of states that have been changed since utc_point_in_time."""
return [state for state in states
if state.last_updated >= utc_point_in_time]
def reproduce_state(hass, states, blocking=False):
"""Reproduce given state."""
if isinstance(states, State):
states = [states]
to_call = defaultdict(list)
for state in states:
if hass.states.get(state.entity_id) is None:
_LOGGER.warning('reproduce_state: Unable to find entity %s',
state.entity_id)
continue
if state.domain == GROUP_DOMAIN:
service_domain = HASS_DOMAIN
else:
service_domain = state.domain
domain_services = hass.services.services[service_domain]
service = None
for _service in domain_services.keys():
if (_service in SERVICE_ATTRIBUTES and
all(attr in state.attributes
for attr in SERVICE_ATTRIBUTES[_service]) or
_service in SERVICE_TO_STATE and
SERVICE_TO_STATE[_service] == state.state):
service = _service
if (_service in SERVICE_TO_STATE and
SERVICE_TO_STATE[_service] == state.state):
break
if not service:
_LOGGER.warning("reproduce_state: Unable to reproduce state %s",
state)
continue
        # We group service calls for entities by service call.
        # JSON is used to build a hashable key from the attributes dict,
        # which may contain unhashable values such as lists.
key = (service_domain, service,
json.dumps(dict(state.attributes), sort_keys=True))
to_call[key].append(state.entity_id)
for (service_domain, service, service_data), entity_ids in to_call.items():
data = json.loads(service_data)
data[ATTR_ENTITY_ID] = entity_ids
hass.services.call(service_domain, service, data, blocking)
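# Illustrative usage sketch (entity ids are assumptions): replay previously
# captured states onto the running system.
#
#     saved = [State('light.kitchen', STATE_ON),
#              State('media_player.tv', STATE_PAUSED)]
#     reproduce_state(hass, saved, blocking=True)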
def state_as_number(state):
"""
Try to coerce our state to a number.
Raises ValueError if this is not possible.
"""
if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON,
STATE_OPEN):
return 1
elif state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN,
STATE_BELOW_HORIZON, STATE_CLOSED):
return 0
return float(state.state)
|
py | 1a342322423daf937705546b409a0976b1c6a3cb | from classes.requester import Requester
from classes.specializedMatchers import MD5Matcher, StringMatcher, RegexMatcher, HeaderMatcher
from collections import Counter
class CMSReq(Requester):
def __init__(self, host, cache, results):
super().__init__(host, cache, results)
self.category = "CMS"
self.match_class = None
def prepare_results(self, matches):
data = []
weight_dict = Counter()
        # calculate the total weights for URLs in the matches
for m in matches:
url = m['response'].url
weight = m['weight'] if 'weight' in m else 1
weight_dict[url] += weight
# apply the weights just calculated
for m in matches:
url = m['response'].url
version = m['output']
weight = weight_dict[url]
m['count'] = weight
data.append( {'url': url, 'count': weight, 'version': version} )
return data
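    # Worked example of the weighting above (hypothetical matches): two matches
    # for the same URL with weights 2 and 1 give weight_dict[url] == 3, so both
    # result entries for that URL are reported with 'count' == 3.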
def run(self):
# make requests
requested = self.request_uniq()
# find matches
matcher = self.match_class(requested)
matches = matcher.get_matches()
# add to results
intermediate_results = self.prepare_results(matches)
self.add_results(intermediate_results)
class CMSReqMD5(CMSReq):
def __init__(self, host, cache, results):
super().__init__(host, cache, results)
self.match_class = MD5Matcher
self.use_weights = True
class CMSReqString(CMSReq):
def __init__(self, host, cache, results):
super().__init__(host, cache, results)
self.match_class = StringMatcher
class CMSReqRegex(CMSReq):
def __init__(self, host, cache, results):
super().__init__(host, cache, results)
self.match_class = RegexMatcher
class CMSReqHeader(CMSReq):
def __init__(self, host, cache, results):
super().__init__(host, cache, results)
self.match_class = HeaderMatcher
|
py | 1a3423de816070bb83d147ecdf5dc6c89fc5ef98 | import _plotly_utils.basevalidators
class ShowticklabelsValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self,
plotly_name='showticklabels',
parent_name='choropleth.colorbar',
**kwargs
):
super(ShowticklabelsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'colorbars'),
role=kwargs.pop('role', 'style'),
**kwargs
)
|
py | 1a34253bb5004f3dfeae74eb43813c2a0ed1969a | # -*- coding:utf-8 -*-
# ------------------------
# written by Songjian Chen
# 2019-02
# ------------------------
from scipy.ndimage.filters import gaussian_filter
import scipy
import scipy.spatial
import math
import numpy as np
#this is borrowed from https://github.com/davideverona/deep-crowd-counting_crowdnet
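# Usage sketch (illustrative, not from the original repository): `gt` is a 2D
# array with a 1 at every annotated head position and 0 elsewhere.
#
#     gt = np.zeros((h, w), dtype=np.float32)   # h, w assumed known
#     for x, y in points:                       # annotated head coordinates
#         gt[int(y), int(x)] = 1.0
#     density = gaussian_filter_density(gt)
#     # density.sum() stays close to the number of annotated points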
def gaussian_filter_density(gt):
density = np.zeros(gt.shape, dtype=np.float32)
gt_count = np.count_nonzero(gt)
if gt_count == 0:
return density
pts = np.array(list(zip(np.nonzero(gt)[1], np.nonzero(gt)[0])))
leafsize = 2048
# build kdtree
tree = scipy.spatial.KDTree(pts.copy(), leafsize=leafsize)
# query kdtree
distances, locations = tree.query(pts, k=4)
print('generate density...')
num = pts.shape[0] - 1
for i, pt in enumerate(pts):
pt2d = np.zeros(gt.shape, dtype=np.float32)
pt2d[math.floor(pt[1]), math.floor(pt[0])] = 1.
if gt_count > 1:
sigma = (distances[i][1]+distances[i][2]+distances[i][3])*0.1
else:
sigma = np.average(np.array(gt.shape))/2./2. #case: 1 point
density += scipy.ndimage.filters.gaussian_filter(pt2d, sigma, mode='constant')
print('done.')
return density |
py | 1a3426dcdcb47e1ef63c29f35aa76450fc361eb4 | import PySimpleGUI as sg
from elevate import elevate
import write
import sys
import os
elevate()
path = os.getcwd()
chosenPath = path
sg.theme('DarkBrown4')
sg.set_global_icon('ASSINGMENTS.ico')
data = []
heading = ['Index','Name','Price','Quantity','Total','Link']
index = 0
tempStore = {}
layout = [
[sg.Text("Amazon Estimation List \nDeveloped by Adwait Narayan Pradhan", relief=sg.RELIEF_RAISED, size = (99,0), justification='center',)],
[sg.Table(values=data,headings = heading,justification='center',key = '-table-',
auto_size_columns=False,size=(90,15),hide_vertical_scroll=True,col_widths=(5,30,8,8,8,30),
header_background_color='brown',alternating_row_color='Yellow'),],
[sg.Text('Name'),sg.Input(key='name', size=(34,0),do_not_clear=False),
sg.Text('Price'),sg.Input(key='price', size=(8,0),do_not_clear=False),
sg.Text('Quantity'),sg.Input(key='quantity', size=(4,0),do_not_clear=False),
sg.Text('Link'),sg.Input(key='link', size=(34,0),do_not_clear=False),],
[sg.Text("",size = (6,0)),
sg.Button('Choose Location', size = (12,0),enable_events=True),
sg.Text("",size = (4,0)),
sg.Button('Generate Total', size = (12,0),enable_events=True),
sg.Text("",size = (4,0)),
sg.Button('Update',enable_events=True),
sg.Text("",size = (4,0)),
sg.Button('Write to File', size = (12,0),enable_events=True,disabled=True),
sg.Text("",size = (4,0)),
sg.Button('Close', size = (12,0),enable_events=True)],
]
testWin = sg.Window('Amazon Estimation List',layout,keep_on_top=True)
sg.PopupAnnoying("Please be careful while entering the data, Duplicates are not checked and are directly written to the file.\nEverytime you click on the Write to file, New data pack is created and is written iinto the file,\nso ensure that before writting into the file you have entered all the dataor you can have multiple datapacks.",keep_on_top=True,grab_anywhere=False)
while True:
event, values = testWin.read()
if event in (None, 'cancel','Close'):
testWin.Close()
sg.PopupAnnoying("Thank You for using my Application.",keep_on_top=True,auto_close=True,auto_close_duration=4)
break
    elif event == 'Update':
if values['price'] != '' and values['name'] != '' and values['quantity'] != '' and values['link'] != '':
try:
index+=1
dat1 = [index,values['name'],values['price'],values['quantity'],str( int(values['quantity']) * int(values['price'])),values['link']]
data.append(dat1)
testWin['-table-'].update(values = data)
write.ParseData(values)
testWin['Write to File'].update(disabled = False)
except ValueError:
sg.PopupNoTitlebar('Problems with values of Price or Quantity',keep_on_top=True)
else:
            sg.PopupNoTitlebar('Empty Fields detected',button_type=None,keep_on_top=True)
pass
    elif event == 'Generate Total':
total = write.Total()
testWin['Update'].update(disabled = True)
testWin['Generate Total'].update(disabled = True)
testWin['Choose Location'].update(disabled = True)
testWin['Write to File'].update(disabled = True)
testWin['Close'].update(disabled = True)
sg.popup_annoying(f"Your total expense will be {total}",icon=sg.EVENT_SYSTEM_TRAY_ICON_ACTIVATED,keep_on_top=True,)
testWin['Update'].update(disabled = False)
testWin['Generate Total'].update(disabled = False)
testWin['Choose Location'].update(disabled = False)
testWin['Write to File'].update(disabled = False)
testWin['Close'].update(disabled = False)
    elif event == 'Choose Location':
testWin['Update'].update(disabled = True)
testWin['Generate Total'].update(disabled = True)
testWin['Choose Location'].update(disabled = True)
testWin['Write to File'].update(disabled = True)
testWin['Close'].update(disabled = True)
        chosenPath = sg.PopupGetFolder("Please browse to the location to save the file or continue with the default path.",default_path=path,keep_on_top=True,)
testWin['Update'].update(disabled = False)
testWin['Generate Total'].update(disabled = False)
testWin['Choose Location'].update(disabled = False)
testWin['Write to File'].update(disabled = False)
testWin['Close'].update(disabled = False)
    elif event == 'Write to File':
testWin['Update'].update(disabled = True)
testWin['Generate Total'].update(disabled = True)
testWin['Choose Location'].update(disabled = True)
testWin['Write to File'].update(disabled = True)
testWin['Close'].update(disabled = True)
status = write.WritetoCSV(path = chosenPath)
sg.popup_annoying(f"Writing process sucessful.\nFile saved as 'Amazon Estimation list.csv' at {chosenPath}.\nOpen the file using Excel or any other application.",keep_on_top=True)
testWin['Update'].update(disabled = False)
testWin['Generate Total'].update(disabled = False)
testWin['Choose Location'].update(disabled = False)
testWin['Write to File'].update(disabled = False)
testWin['Close'].update(disabled = False)
sys.exit(0)
|
py | 1a342747eb9af39a679aaef9b59aa23fbbd4bfa8 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"module_name": "WebApp",
"color": "grey",
"icon": "octicon octicon-file-directory",
"type": "module",
"label": _("WebApp")
}
]
|
py | 1a342756645b6235cf36c17632a00125dce11da4 | import os
import sys
import random
import math
import numpy as np
import skimage.io
import matplotlib
import cv2
import matplotlib.pyplot as plt
# Root directory of the project
ROOT_DIR = os.path.abspath("../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn import utils
import mrcnn.model as modellib
from mrcnn import visualize
# Import COCO config
sys.path.append(os.path.join(ROOT_DIR, "samples/coco/")) # To find local version
import coco
# Directory to save logs and trained model
MODEL_DIR = os.path.join(ROOT_DIR, "logs")
# Local path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
# Download COCO trained weights from Releases if needed
if not os.path.exists(COCO_MODEL_PATH):
utils.download_trained_weights(COCO_MODEL_PATH)
class InferenceConfig(coco.CocoConfig):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
# COCO Class names
# Index of the class in the list is its ID. For example, to get ID of
# the teddy bear class, use: class_names.index('teddy bear')
class_names = ['BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
'bus', 'train', 'truck', 'boat', 'traffic light',
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird',
'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear',
'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
'kite', 'baseball bat', 'baseball glove', 'skateboard',
'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup',
'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed',
'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
'keyboard', 'cell phone', 'microwave', 'oven', 'toaster',
'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors',
'teddy bear', 'hair drier', 'toothbrush']
def write_seg(Dir, Count, R):
seg = np.zeros(R['masks'].shape[:2])
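    # COCO class IDs kept as "movable" objects: 1=person, 2=bicycle, 3=car,
    # 4=motorcycle, 6=bus, 8=truck (see the class_names list above).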
movable_objects = [1,2,3,4,6,8]
for objec_idx in range(R['class_ids'].shape[0]):
if R['class_ids'][objec_idx] in movable_objects:
seg = np.where(np.invert(R['masks'][...,objec_idx]), seg, R['class_ids'][objec_idx])
if not os.path.isdir(Dir):
os.mkdir(Dir)
cv2.imwrite(os.path.join(Dir, "%06d.png"%Count), seg)
# Load a random image from the images folder
def run_folder(file_names, model):
for f in file_names:
if os.path.isfile(BASE_DIR + "/rcnnseg_" + Folder + "/" + f):
print(f + "continue")
continue
if not os.path.splitext(f)[-1] == ".png":
continue
image = skimage.io.imread(os.path.join(IMAGE_DIR, f))
# Run detection
results = model.detect([image], verbose=1)
# Visualize results
r = results[0]
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
class_names, r['scores'], save_path = BASE_DIR + "/mrcnn_" + Folder + "/" + f)
write_seg(BASE_DIR + "/rcnnseg_" + Folder, int(os.path.splitext(f)[0]), r)
config = InferenceConfig()
config.display()
# Create model object in inference mode.
model = modellib.MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=config)
# Load weights trained on MS-COCO
model.load_weights(COCO_MODEL_PATH, by_name=True)
BASE_DIR = "/data/shibuya_640_360_fov45_few_people_bags/2020-08-29-03-56-21"
Folder = "image_0"
IMAGE_DIR = os.path.join(BASE_DIR, Folder)
file_names = next(os.walk(IMAGE_DIR))[2]
file_names.sort()
if not os.path.isdir(BASE_DIR + "/mrcnn_" + Folder):
os.mkdir(BASE_DIR + "/mrcnn_" + Folder)
run_folder(file_names, model)
Folder = "image_1"
IMAGE_DIR = os.path.join(BASE_DIR, Folder)
file_names = next(os.walk(IMAGE_DIR))[2]
if not os.path.isdir(BASE_DIR + "/mrcnn_" + Folder):
os.mkdir(BASE_DIR + "/mrcnn_" + Folder)
run_folder(file_names, model)
|
py | 1a3427977ebd76ec1ac20298adfe104b75d3eb27 | import psycopg2
import psycopg2.extras
from website_monitor.stats import Stats
from website_monitor.url_probe import UrlProbe
class Repository:
"""
The URL probe repository.
Implements the repository pattern to hide the database interaction details.
"""
def __init__(self, connection_string) -> None:
self.connection_string = connection_string
def setup(self):
with psycopg2.connect(self.connection_string) as conn:
with conn.cursor() as cursor:
cursor.execute(
"""
create table if not exists url_probes(
id bigserial primary key,
url text not null,
timestamp timestamp not null,
http_status_code int not null,
response_time_ms int not null
);
"""
)
def delete_all(self):
with psycopg2.connect(self.connection_string) as conn:
with conn.cursor() as cursor:
cursor.execute("truncate table url_probes;")
def find_all(self) -> list[UrlProbe]:
with psycopg2.connect(self.connection_string) as conn:
with conn.cursor() as cursor:
cursor.execute(
"select url, timestamp, http_status_code, response_time_ms from url_probes;"
)
return list(map(UrlProbe._make, cursor.fetchall()))
def save(self, url_probes: list[UrlProbe]):
with psycopg2.connect(self.connection_string) as conn:
with conn.cursor() as cursor:
psycopg2.extras.execute_values(
cursor,
"insert into url_probes(url, timestamp, http_status_code, response_time_ms) values %s",
[
(up.url, up.timestamp, up.http_status_code, up.response_time_ms)
for up in url_probes
],
)
def get_stats(self) -> list[Stats]:
with psycopg2.connect(self.connection_string) as conn:
with conn.cursor() as cursor:
cursor.execute(
"""
select url,
count(*) as probes,
percentile_cont(0.5) within group (order by url_probes.response_time_ms) as p50_ms,
percentile_cont(0.95) within group (order by url_probes.response_time_ms) as p95_ms,
percentile_cont(0.99) within group (order by url_probes.response_time_ms) as p99_ms
from url_probes
group by url;
"""
)
return list(map(Stats._make, cursor.fetchall()))
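# Illustrative usage sketch (connection string and field values are assumptions):
#
#     repo = Repository("postgresql://localhost/website_monitor")
#     repo.setup()
#     repo.save([UrlProbe(url="https://example.com",
#                         timestamp=datetime.datetime.utcnow(),
#                         http_status_code=200,
#                         response_time_ms=123)])
#     print(repo.get_stats())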
|
py | 1a3427d93d2af5e7308264c893c4ee23d2b382db | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-classes-have-attributes
"""Keras layers that implement explicit (approximate) kernel feature maps."""
import tensorflow.compat.v2 as tf
import numpy as np
from keras import initializers
from keras.engine import base_layer
from keras.engine import input_spec
from tensorflow.python.util.tf_export import keras_export
_SUPPORTED_RBF_KERNEL_TYPES = ['gaussian', 'laplacian']
@keras_export('keras.layers.experimental.RandomFourierFeatures')
class RandomFourierFeatures(base_layer.Layer):
r"""Layer that projects its inputs into a random feature space.
This layer implements a mapping from input space to a space with `output_dim`
dimensions, which approximates shift-invariant kernels. A kernel function
`K(x, y)` is shift-invariant if `K(x, y) == k(x - y)` for some function `k`.
Many popular Radial Basis Functions (RBF), including Gaussian and
Laplacian kernels, are shift-invariant.
The implementation of this layer is based on the following paper:
["Random Features for Large-Scale Kernel Machines"](
https://people.eecs.berkeley.edu/~brecht/papers/07.rah.rec.nips.pdf)
by Ali Rahimi and Ben Recht.
The distribution from which the parameters of the random features map (layer)
are sampled determines which shift-invariant kernel the layer approximates
(see paper for more details). You can use the distribution of your
choice. The layer supports out-of-the-box
approximations of the following two RBF kernels:
- Gaussian: `K(x, y) == exp(- square(x - y) / (2 * square(scale)))`
- Laplacian: `K(x, y) = exp(-abs(x - y) / scale))`
**Note:** Unlike what is described in the paper and unlike what is used in
the Scikit-Learn implementation, the output of this layer does not apply
the `sqrt(2 / D)` normalization factor.
**Usage:** Typically, this layer is used to "kernelize" linear models by
applying a non-linear transformation (this layer) to the input features and
then training a linear model on top of the transformed features. Depending on
the loss function of the linear model, the composition of this layer and the
linear model results to models that are equivalent (up to approximation) to
kernel SVMs (for hinge loss), kernel logistic regression (for logistic loss),
kernel linear regression (for squared loss), etc.
Examples:
A kernel multinomial logistic regression model with Gaussian kernel for MNIST:
```python
model = keras.Sequential([
keras.Input(shape=(784,)),
RandomFourierFeatures(
output_dim=4096,
scale=10.,
kernel_initializer='gaussian'),
layers.Dense(units=10, activation='softmax'),
])
model.compile(
optimizer='adam',
loss='categorical_crossentropy',
metrics=['categorical_accuracy']
)
```
A quasi-SVM classifier for MNIST:
```python
model = keras.Sequential([
keras.Input(shape=(784,)),
RandomFourierFeatures(
output_dim=4096,
scale=10.,
kernel_initializer='gaussian'),
layers.Dense(units=10),
])
model.compile(
optimizer='adam',
loss='hinge',
metrics=['categorical_accuracy']
)
```
To use another kernel, just replace the layer creation line with:
```python
random_features_layer = RandomFourierFeatures(
output_dim=500,
kernel_initializer=<my_initializer>,
scale=...,
...)
```
Args:
output_dim: Positive integer, the dimension of the layer's output, i.e., the
number of random features used to approximate the kernel.
kernel_initializer: Determines the distribution of the parameters of the
random features map (and therefore the kernel approximated by the layer).
It can be either a string identifier or a Keras `Initializer` instance.
Currently only 'gaussian' and 'laplacian' are supported string
identifiers (case insensitive). Note that the kernel matrix is not
trainable.
scale: For Gaussian and Laplacian kernels, this corresponds to a scaling
factor of the corresponding kernel approximated by the layer (see concrete
definitions above). When provided, it should be a positive float. If None,
a default value is used: if the kernel initializer is set to "gaussian",
`scale` defaults to `sqrt(input_dim / 2)`, otherwise, it defaults to 1.0.
Both the approximation error of the kernel and the classification quality
are sensitive to this parameter. If `trainable` is set to `True`, this
parameter is learned end-to-end during training and the provided value
serves as the initial value.
**Note:** When features from this layer are fed to a linear model,
by making `scale` trainable, the resulting optimization problem is
no longer convex (even if the loss function used by the linear model
is convex).
trainable: Whether the scaling parameter of the layer should be trainable.
Defaults to `False`.
name: String, name to use for this layer.
"""
def __init__(self,
output_dim,
kernel_initializer='gaussian',
scale=None,
trainable=False,
name=None,
**kwargs):
if output_dim <= 0:
raise ValueError(
f'`output_dim` should be a positive integer. Received: {output_dim}')
if isinstance(kernel_initializer, str):
if kernel_initializer.lower() not in _SUPPORTED_RBF_KERNEL_TYPES:
raise ValueError(
f'Unsupported `kernel_initializer`: {kernel_initializer} '
f'Expected one of: {_SUPPORTED_RBF_KERNEL_TYPES}')
if scale is not None and scale <= 0.0:
raise ValueError('When provided, `scale` should be a positive float. '
f'Received: {scale}')
super(RandomFourierFeatures, self).__init__(
trainable=trainable, name=name, **kwargs)
self.output_dim = output_dim
self.kernel_initializer = kernel_initializer
self.scale = scale
def build(self, input_shape):
input_shape = tf.TensorShape(input_shape)
# TODO(pmol): Allow higher dimension inputs. Currently the input is expected
# to have shape [batch_size, dimension].
if input_shape.rank != 2:
raise ValueError(
'The rank of the input tensor should be 2. '
f'Received input with rank {input_shape.ndims} instead. '
f'Full input shape received: {input_shape}')
if input_shape.dims[1].value is None:
raise ValueError(
'The last dimension of the input tensor should be defined. '
f'Found `None`. Full input shape received: {input_shape}')
self.input_spec = input_spec.InputSpec(
ndim=2, axes={1: input_shape.dims[1].value})
input_dim = input_shape.dims[1].value
kernel_initializer = _get_random_features_initializer(
self.kernel_initializer, shape=(input_dim, self.output_dim))
self.unscaled_kernel = self.add_weight(
name='unscaled_kernel',
shape=(input_dim, self.output_dim),
dtype=tf.float32,
initializer=kernel_initializer,
trainable=False)
self.bias = self.add_weight(
name='bias',
shape=(self.output_dim,),
dtype=tf.float32,
initializer=initializers.RandomUniform(minval=0.0, maxval=2 * np.pi),
trainable=False)
if self.scale is None:
self.scale = _get_default_scale(self.kernel_initializer, input_dim)
self.kernel_scale = self.add_weight(
name='kernel_scale',
shape=(1,),
dtype=tf.float32,
initializer=tf.compat.v1.constant_initializer(self.scale),
trainable=True,
constraint='NonNeg')
super(RandomFourierFeatures, self).build(input_shape)
def call(self, inputs):
inputs = tf.convert_to_tensor(inputs, dtype=self.dtype)
inputs = tf.cast(inputs, tf.float32)
kernel = (1.0 / self.kernel_scale) * self.unscaled_kernel
outputs = tf.raw_ops.MatMul(a=inputs, b=kernel)
outputs = tf.nn.bias_add(outputs, self.bias)
return tf.cos(outputs)
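  # Clarifying note (not from the original docs): if phi(x) denotes the output
  # of call(), the kernel is approximated by
  # (2. / output_dim) * tf.reduce_sum(phi(x) * phi(y), axis=-1); the explicit
  # 2 / output_dim factor compensates for the sqrt(2 / D) normalization that
  # this layer deliberately omits (see the class docstring).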
def compute_output_shape(self, input_shape):
input_shape = tf.TensorShape(input_shape)
input_shape = input_shape.with_rank(2)
if input_shape.dims[-1].value is None:
raise ValueError(
'The last dimension of the input tensor should be defined. '
f'Found `None`. Full input shape received: {input_shape}')
return input_shape[:-1].concatenate(self.output_dim)
def get_config(self):
kernel_initializer = self.kernel_initializer
if not isinstance(kernel_initializer, str):
kernel_initializer = initializers.serialize(kernel_initializer)
config = {
'output_dim': self.output_dim,
'kernel_initializer': kernel_initializer,
'scale': self.scale,
}
base_config = super(RandomFourierFeatures, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def _get_random_features_initializer(initializer, shape):
"""Returns Initializer object for random features."""
def _get_cauchy_samples(loc, scale, shape):
probs = np.random.uniform(low=0., high=1., size=shape)
return loc + scale * np.tan(np.pi * (probs - 0.5))
random_features_initializer = initializer
if isinstance(initializer, str):
if initializer.lower() == 'gaussian':
random_features_initializer = initializers.RandomNormal(stddev=1.0)
elif initializer.lower() == 'laplacian':
random_features_initializer = initializers.Constant(
_get_cauchy_samples(loc=0.0, scale=1.0, shape=shape))
else:
raise ValueError(
f'Unsupported `kernel_initializer`: "{initializer}" '
f'Expected one of: {_SUPPORTED_RBF_KERNEL_TYPES}')
return random_features_initializer
def _get_default_scale(initializer, input_dim):
if (isinstance(initializer, str) and
initializer.lower() == 'gaussian'):
return np.sqrt(input_dim / 2.0)
return 1.0
|
py | 1a3427f546d270d3e5a9f0aea309e7e59bfc8788 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
----------------------------------------------------------------------------
PROJECT BLACKBOX - https://comunidadconocimiento.org
----------------------------------------------------------------------------
BlackBox
~~~~~~~~~~~~~~
Project by Comunidad de Conocimiento to automate the analysis of
wireless networks.
With this project we want to learn about Raspberry Pi, Unix, networking,
and Python programming collaboratively, and have fun.
Based on the crozono idea.
"""
def banner():
from pyfiglet import figlet_format
print(figlet_format('Black', font='isometric3'))
print(figlet_format(' Box', font='isometric3'))
print(figlet_format(' Version 0.1', font='slant'))
print("Comunidad de Conocimiento - https://comunidadconocimiento.org")
#cprint(figlet_format('missile!', font='starwars'),'yellow', 'on_red', attrs=['bold'])
def main():
banner()
# Mainprocess
main()
|
py | 1a342888b79b3e28cde8a7c02c3abc5c92c4df5b | # coding=utf-8
# *** WARNING: this file was generated by the Kulado Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import kulado
import kulado.runtime
from .. import utilities, tables
class Plan(kulado.CustomResource):
app_service_environment_id: kulado.Output[str]
"""
The ID of the App Service Environment where the App Service Plan should be located. Changing forces a new resource to be created.
"""
is_xenon: kulado.Output[bool]
kind: kulado.Output[str]
"""
The kind of the App Service Plan to create. Possible values are `Windows` (also available as `App`), `Linux`, `elastic` (for Premium Consumption) and `FunctionApp` (for a Consumption Plan). Defaults to `Windows`. Changing this forces a new resource to be created.
"""
location: kulado.Output[str]
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
maximum_elastic_worker_count: kulado.Output[float]
"""
The maximum number of total workers allowed for this ElasticScaleEnabled App Service Plan.
"""
maximum_number_of_workers: kulado.Output[float]
"""
The maximum number of workers supported with the App Service Plan's sku.
"""
name: kulado.Output[str]
"""
Specifies the name of the App Service Plan component. Changing this forces a new resource to be created.
"""
per_site_scaling: kulado.Output[bool]
"""
Can Apps assigned to this App Service Plan be scaled independently? If set to `false` apps assigned to this plan will scale to all instances of the plan. Defaults to `false`.
"""
properties: kulado.Output[dict]
reserved: kulado.Output[bool]
"""
Is this App Service Plan `Reserved`. Defaults to `false`.
"""
resource_group_name: kulado.Output[str]
"""
The name of the resource group in which to create the App Service Plan component.
"""
sku: kulado.Output[dict]
"""
A `sku` block as documented below.
"""
tags: kulado.Output[dict]
"""
A mapping of tags to assign to the resource.
"""
def __init__(__self__, resource_name, opts=None, app_service_environment_id=None, is_xenon=None, kind=None, location=None, maximum_elastic_worker_count=None, name=None, per_site_scaling=None, properties=None, reserved=None, resource_group_name=None, sku=None, tags=None, __name__=None, __opts__=None):
"""
Manage an App Service Plan component.
:param str resource_name: The name of the resource.
:param kulado.ResourceOptions opts: Options for the resource.
:param kulado.Input[str] app_service_environment_id: The ID of the App Service Environment where the App Service Plan should be located. Changing forces a new resource to be created.
:param kulado.Input[str] kind: The kind of the App Service Plan to create. Possible values are `Windows` (also available as `App`), `Linux`, `elastic` (for Premium Consumption) and `FunctionApp` (for a Consumption Plan). Defaults to `Windows`. Changing this forces a new resource to be created.
:param kulado.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param kulado.Input[float] maximum_elastic_worker_count: The maximum number of total workers allowed for this ElasticScaleEnabled App Service Plan.
:param kulado.Input[str] name: Specifies the name of the App Service Plan component. Changing this forces a new resource to be created.
:param kulado.Input[bool] per_site_scaling: Can Apps assigned to this App Service Plan be scaled independently? If set to `false` apps assigned to this plan will scale to all instances of the plan. Defaults to `false`.
:param kulado.Input[bool] reserved: Is this App Service Plan `Reserved`. Defaults to `false`.
:param kulado.Input[str] resource_group_name: The name of the resource group in which to create the App Service Plan component.
:param kulado.Input[dict] sku: A `sku` block as documented below.
:param kulado.Input[dict] tags: A mapping of tags to assign to the resource.
> This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/app_service_plan.html.markdown.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if not resource_name:
raise TypeError('Missing resource name argument (for URN creation)')
if not isinstance(resource_name, str):
raise TypeError('Expected resource name to be a string')
if opts and not isinstance(opts, kulado.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
__props__ = dict()
__props__['app_service_environment_id'] = app_service_environment_id
__props__['is_xenon'] = is_xenon
__props__['kind'] = kind
__props__['location'] = location
__props__['maximum_elastic_worker_count'] = maximum_elastic_worker_count
__props__['name'] = name
__props__['per_site_scaling'] = per_site_scaling
__props__['properties'] = properties
__props__['reserved'] = reserved
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if sku is None:
raise TypeError("Missing required property 'sku'")
__props__['sku'] = sku
__props__['tags'] = tags
__props__['maximum_number_of_workers'] = None
super(Plan, __self__).__init__(
'azure:appservice/plan:Plan',
resource_name,
__props__,
opts)
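    # Illustrative usage sketch (resource names, location and sku values are
    # assumptions; see the azurerm provider docs for valid tier/size
    # combinations):
    #
    #     plan = Plan("example-plan",
    #                 resource_group_name="example-resources",
    #                 location="West Europe",
    #                 sku={"tier": "Standard", "size": "S1"})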
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
py | 1a3429c89eed915af0c1ef8a2fdd1463116f2a9b | from plotly.basedatatypes import BaseLayoutHierarchyType as _BaseLayoutHierarchyType
import copy as _copy
class Font(_BaseLayoutHierarchyType):
# class properties
# --------------------
_parent_path_str = "layout.slider"
_path_str = "layout.slider.font"
_valid_props = {"color", "family", "size"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
"""
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Font object
Sets the font of the slider step labels.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.layout.slider.Font`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Font
"""
super(Font, self).__init__("font")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.layout.slider.Font
constructor must be a dict or
an instance of :class:`plotly.graph_objs.layout.slider.Font`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
|
py | 1a342a16442d2f2a90b7411bbd71f8c197a0ac4f | """Subclass of settings_dialog, which is generated by wxFormBuilder."""
import os
import re
import wx
from . import dialog_base
def pop_error(msg):
wx.MessageBox(msg, 'Error', wx.OK | wx.ICON_ERROR)
class SettingsDialog(dialog_base.SettingsDialogBase):
def __init__(self, extra_data_func, config_save_func,
file_name_format_hint, version):
dialog_base.SettingsDialogBase.__init__(self, None)
self.panel = SettingsDialogPanel(
self, extra_data_func, config_save_func, file_name_format_hint)
best_size = self.panel.BestSize
# hack for some gtk themes that incorrectly calculate best size
best_size.IncBy(dx=0, dy=30)
self.SetClientSize(best_size)
self.SetTitle('InteractiveHtmlBom %s' % version)
# hack for new wxFormBuilder generating code incompatible with old wxPython
# noinspection PyMethodOverriding
def SetSizeHints(self, sz1, sz2):
try:
# wxPython 3
self.SetSizeHintsSz(sz1, sz2)
except TypeError:
# wxPython 4
super(SettingsDialog, self).SetSizeHints(sz1, sz2)
def set_extra_data_path(self, extra_data_file):
self.panel.extra.netlistFilePicker.Path = extra_data_file
self.panel.extra.OnNetlistFileChanged(None)
# Implementing settings_dialog
class SettingsDialogPanel(dialog_base.SettingsDialogPanel):
def __init__(self, parent, extra_data_func, config_save_func,
file_name_format_hint):
self.config_save_func = config_save_func
dialog_base.SettingsDialogPanel.__init__(self, parent)
self.general = GeneralSettingsPanel(self.notebook,
file_name_format_hint)
self.html = HtmlSettingsPanel(self.notebook)
self.extra = ExtraFieldsPanel(self.notebook, extra_data_func)
self.notebook.AddPage(self.general, "General")
self.notebook.AddPage(self.html, "Html defaults")
self.notebook.AddPage(self.extra, "Extra fields")
def OnExit(self, event):
self.GetParent().EndModal(wx.ID_CANCEL)
def OnSaveSettings(self, event):
self.config_save_func(self)
def OnGenerateBom(self, event):
self.GetParent().EndModal(wx.ID_OK)
def finish_init(self):
self.html.OnBoardRotationSlider(None)
# Implementing HtmlSettingsPanelBase
class HtmlSettingsPanel(dialog_base.HtmlSettingsPanelBase):
def __init__(self, parent):
dialog_base.HtmlSettingsPanelBase.__init__(self, parent)
# Handlers for HtmlSettingsPanelBase events.
def OnBoardRotationSlider(self, event):
degrees = self.boardRotationSlider.Value * 5
self.rotationDegreeLabel.LabelText = u"{}\u00B0".format(degrees)
# Implementing GeneralSettingsPanelBase
class GeneralSettingsPanel(dialog_base.GeneralSettingsPanelBase):
def __init__(self, parent, file_name_format_hint):
dialog_base.GeneralSettingsPanelBase.__init__(self, parent)
self.file_name_format_hint = file_name_format_hint
# Handlers for GeneralSettingsPanelBase events.
def OnComponentSortOrderUp(self, event):
selection = self.componentSortOrderBox.Selection
if selection != wx.NOT_FOUND and selection > 0:
item = self.componentSortOrderBox.GetString(selection)
self.componentSortOrderBox.Delete(selection)
self.componentSortOrderBox.Insert(item, selection - 1)
self.componentSortOrderBox.SetSelection(selection - 1)
def OnComponentSortOrderDown(self, event):
selection = self.componentSortOrderBox.Selection
size = self.componentSortOrderBox.Count
if selection != wx.NOT_FOUND and selection < size - 1:
item = self.componentSortOrderBox.GetString(selection)
self.componentSortOrderBox.Delete(selection)
self.componentSortOrderBox.Insert(item, selection + 1)
self.componentSortOrderBox.SetSelection(selection + 1)
def OnComponentSortOrderAdd(self, event):
item = wx.GetTextFromUser(
"Characters except for A-Z will be ignored.",
"Add sort order item")
item = re.sub('[^A-Z]', '', item.upper())
if item == '':
return
found = self.componentSortOrderBox.FindString(item)
if found != wx.NOT_FOUND:
self.componentSortOrderBox.SetSelection(found)
return
self.componentSortOrderBox.Append(item)
self.componentSortOrderBox.SetSelection(
self.componentSortOrderBox.Count - 1)
def OnComponentSortOrderRemove(self, event):
selection = self.componentSortOrderBox.Selection
if selection != wx.NOT_FOUND:
item = self.componentSortOrderBox.GetString(selection)
if item == '~':
pop_error("You can not delete '~' item")
return
self.componentSortOrderBox.Delete(selection)
if self.componentSortOrderBox.Count > 0:
self.componentSortOrderBox.SetSelection(max(selection - 1, 0))
def OnComponentBlacklistAdd(self, event):
item = wx.GetTextFromUser(
"Characters except for A-Z 0-9 and * will be ignored.",
"Add blacklist item")
item = re.sub('[^A-Z0-9*]', '', item.upper())
if item == '':
return
found = self.blacklistBox.FindString(item)
if found != wx.NOT_FOUND:
self.blacklistBox.SetSelection(found)
return
self.blacklistBox.Append(item)
self.blacklistBox.SetSelection(
self.blacklistBox.Count - 1)
def OnComponentBlacklistRemove(self, event):
selection = self.blacklistBox.Selection
if selection != wx.NOT_FOUND:
self.blacklistBox.Delete(selection)
if self.blacklistBox.Count > 0:
self.blacklistBox.SetSelection(max(selection - 1, 0))
def OnNameFormatHintClick(self, event):
wx.MessageBox(self.file_name_format_hint, 'File name format help',
style=wx.ICON_NONE | wx.OK)
def OnSize(self, event):
# Trick the listCheckBox best size calculations
tmp = self.componentSortOrderBox.GetStrings()
self.componentSortOrderBox.SetItems([])
self.Layout()
self.componentSortOrderBox.SetItems(tmp)
# Implementing ExtraFieldsPanelBase
class ExtraFieldsPanel(dialog_base.ExtraFieldsPanelBase):
NONE_STRING = '<none>'
def __init__(self, parent, extra_data_func):
dialog_base.ExtraFieldsPanelBase.__init__(self, parent)
self.extra_data_func = extra_data_func
self.extra_field_data = None
# Handlers for ExtraFieldsPanelBase events.
def OnExtraFieldsUp(self, event):
selection = self.extraFieldsList.Selection
if selection != wx.NOT_FOUND and selection > 0:
item = self.extraFieldsList.GetString(selection)
checked = self.extraFieldsList.IsChecked(selection)
self.extraFieldsList.Delete(selection)
self.extraFieldsList.Insert(item, selection - 1)
if checked:
self.extraFieldsList.Check(selection - 1)
self.extraFieldsList.SetSelection(selection - 1)
def OnExtraFieldsDown(self, event):
selection = self.extraFieldsList.Selection
size = self.extraFieldsList.Count
if selection != wx.NOT_FOUND and selection < size - 1:
item = self.extraFieldsList.GetString(selection)
checked = self.extraFieldsList.IsChecked(selection)
self.extraFieldsList.Delete(selection)
self.extraFieldsList.Insert(item, selection + 1)
if checked:
self.extraFieldsList.Check(selection + 1)
self.extraFieldsList.SetSelection(selection + 1)
def OnNetlistFileChanged(self, event):
netlist_file = self.netlistFilePicker.Path
if not os.path.isfile(netlist_file):
return
self.extra_field_data = None
try:
self.extra_field_data = self.extra_data_func(
netlist_file, self.normalizeCaseCheckbox.Value)
except Exception as e:
pop_error(
"Failed to parse file %s\n\n%s" % (netlist_file, e.message))
self.netlistFilePicker.Path = ''
if self.extra_field_data is not None:
field_list = list(self.extra_field_data[0])
self.extraFieldsList.SetItems(field_list)
field_list.append(self.NONE_STRING)
self.boardVariantFieldBox.SetItems(field_list)
self.boardVariantFieldBox.SetStringSelection(self.NONE_STRING)
self.boardVariantWhitelist.Clear()
self.boardVariantBlacklist.Clear()
self.dnpFieldBox.SetItems(field_list)
self.dnpFieldBox.SetStringSelection(self.NONE_STRING)
def OnBoardVariantFieldChange(self, event):
selection = self.boardVariantFieldBox.Value
if not selection or selection == self.NONE_STRING \
or self.extra_field_data is None:
self.boardVariantWhitelist.Clear()
self.boardVariantBlacklist.Clear()
return
variant_set = set()
for _, field_dict in self.extra_field_data[1].items():
if selection in field_dict:
variant_set.add(field_dict[selection])
self.boardVariantWhitelist.SetItems(list(variant_set))
self.boardVariantBlacklist.SetItems(list(variant_set))
def OnSize(self, event):
# Trick the listCheckBox best size calculations
items = self.extraFieldsList.GetStrings()
checked_items = self.extraFieldsList.GetCheckedStrings()
self.extraFieldsList.SetItems([])
self.Layout()
self.extraFieldsList.SetItems(items)
self.extraFieldsList.SetCheckedStrings(checked_items)
|
py | 1a342a3da256d87bc1f0167b5bb87b3593d126c8 | from django.conf.urls import url
from corehq.apps.hqcase.views import ExplodeCasesView
urlpatterns = [
# for load testing
url(r'explode/', ExplodeCasesView.as_view(), name=ExplodeCasesView.url_name)
]
|
py | 1a342acebea52ecade98bdfa0aeb7459c6e325e8 | """
User input utilities
"""
# Author: Ben Gravell
def yes_or_no(question):
reply = str(input(question+' (y/n): ')).lower().strip()
    if reply and reply[0] == 'y':
        return True
    elif reply and reply[0] == 'n':
        return False
    else:
        return yes_or_no("Invalid input... please enter yes or no") |
py | 1a342c4a5ebb1b285d02b6542bdb9fc3d7604021 | from __future__ import print_function
import argparse
import torch.multiprocessing as mp
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
import torch.utils.data.distributed
import horovod.torch as hvd
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
help='input batch size for training (default: 64)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=42, metavar='S',
help='random seed (default: 42)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--fp16-allreduce', action='store_true', default=False,
help='use fp16 compression during allreduce')
parser.add_argument('--use-adasum', action='store_true', default=False,
help='use adasum algorithm to do reduction')
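# Typical launch (illustrative; the script name is an assumption): run under
# the Horovod wrapper, e.g. `horovodrun -np 4 python pytorch_mnist.py`. Each
# worker process then trains on its own shard of the data via the
# DistributedSampler set up below.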
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.conv2_drop = nn.Dropout2d()
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 10)
def forward(self, x):
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
x = x.view(-1, 320)
x = F.relu(self.fc1(x))
x = F.dropout(x, training=self.training)
x = self.fc2(x)
        return F.log_softmax(x, dim=1)
def train(epoch):
model.train()
# Horovod: set epoch to sampler for shuffling.
train_sampler.set_epoch(epoch)
for batch_idx, (data, target) in enumerate(train_loader):
if args.cuda:
data, target = data.cuda(), target.cuda()
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
if batch_idx % args.log_interval == 0:
# Horovod: use train_sampler to determine the number of examples in
# this worker's partition.
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_sampler),
100. * batch_idx / len(train_loader), loss.item()))
def metric_average(val, name):
tensor = torch.tensor(val)
avg_tensor = hvd.allreduce(tensor, name=name)
return avg_tensor.item()
def test():
model.eval()
test_loss = 0.
test_accuracy = 0.
for data, target in test_loader:
if args.cuda:
data, target = data.cuda(), target.cuda()
output = model(data)
# sum up batch loss
test_loss += F.nll_loss(output, target, size_average=False).item()
# get the index of the max log-probability
pred = output.data.max(1, keepdim=True)[1]
test_accuracy += pred.eq(target.data.view_as(pred)).cpu().float().sum()
# Horovod: use test_sampler to determine the number of examples in
# this worker's partition.
test_loss /= len(test_sampler)
test_accuracy /= len(test_sampler)
# Horovod: average metric values across workers.
test_loss = metric_average(test_loss, 'avg_loss')
test_accuracy = metric_average(test_accuracy, 'avg_accuracy')
# Horovod: print output only on first rank.
if hvd.rank() == 0:
print('\nTest set: Average loss: {:.4f}, Accuracy: {:.2f}%\n'.format(
test_loss, 100. * test_accuracy))
if __name__ == '__main__':
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
# Horovod: initialize library.
hvd.init()
torch.manual_seed(args.seed)
if args.cuda:
# Horovod: pin GPU to local rank.
torch.cuda.set_device(hvd.local_rank())
torch.cuda.manual_seed(args.seed)
# Horovod: limit # of CPU threads to be used per worker.
torch.set_num_threads(1)
kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}
# When supported, use 'forkserver' to spawn dataloader workers instead of 'fork' to prevent
# issues with Infiniband implementations that are not fork-safe
if (kwargs.get('num_workers', 0) > 0 and hasattr(mp, '_supports_context') and
mp._supports_context and 'forkserver' in mp.get_all_start_methods()):
kwargs['multiprocessing_context'] = 'forkserver'
train_dataset = \
datasets.MNIST('data-%d' % hvd.rank(), train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
]))
# Horovod: use DistributedSampler to partition the training data.
train_sampler = torch.utils.data.distributed.DistributedSampler(
train_dataset, num_replicas=hvd.size(), rank=hvd.rank())
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, sampler=train_sampler, **kwargs)
test_dataset = \
datasets.MNIST('data-%d' % hvd.rank(), train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
]))
# Horovod: use DistributedSampler to partition the test data.
test_sampler = torch.utils.data.distributed.DistributedSampler(
test_dataset, num_replicas=hvd.size(), rank=hvd.rank())
test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=args.test_batch_size,
sampler=test_sampler, **kwargs)
model = Net()
# By default, Adasum doesn't need scaling up learning rate.
lr_scaler = hvd.size() if not args.use_adasum else 1
if args.cuda:
# Move model to GPU.
model.cuda()
# If using GPU Adasum allreduce, scale learning rate by local_size.
if args.use_adasum and hvd.nccl_built():
lr_scaler = hvd.local_size()
# Horovod: scale learning rate by lr_scaler.
optimizer = optim.SGD(model.parameters(), lr=args.lr * lr_scaler,
momentum=args.momentum)
# Horovod: broadcast parameters & optimizer state.
hvd.broadcast_parameters(model.state_dict(), root_rank=0)
hvd.broadcast_optimizer_state(optimizer, root_rank=0)
# Horovod: (optional) compression algorithm.
compression = hvd.Compression.fp16 if args.fp16_allreduce else hvd.Compression.none
# Horovod: wrap optimizer with DistributedOptimizer.
optimizer = hvd.DistributedOptimizer(optimizer,
named_parameters=model.named_parameters(),
compression=compression,
op=hvd.Adasum if args.use_adasum else hvd.Average)
for epoch in range(1, args.epochs + 1):
train(epoch)
test()
|
py | 1a342c9d16ebc7d212581eff4ac77268321c28f2 | """A notebook manager that uses the local file system for storage.
Authors:
* Brian Granger
* Zach Sailer
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import io
import os
import glob
import shutil
from tornado import web
from .nbmanager import NotebookManager
from IPython.nbformat import current
from IPython.utils.traitlets import Unicode, Bool, TraitError
from IPython.utils.py3compat import getcwd
from IPython.utils import tz
from IPython.html.utils import is_hidden, to_os_path
def sort_key(item):
"""Case-insensitive sorting."""
return item['name'].lower()
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class FileNotebookManager(NotebookManager):
save_script = Bool(False, config=True,
help="""Automatically create a Python script when saving the notebook.
For easier use of import, %run and %load across notebooks, a
<notebook-name>.py script will be created next to any
<notebook-name>.ipynb on each save. This can also be set with the
short `--script` flag.
"""
)
notebook_dir = Unicode(getcwd(), config=True)
def _notebook_dir_changed(self, name, old, new):
"""Do a bit of validation of the notebook dir."""
if not os.path.isabs(new):
# If we receive a non-absolute path, make it absolute.
self.notebook_dir = os.path.abspath(new)
return
if not os.path.exists(new) or not os.path.isdir(new):
raise TraitError("notebook dir %r is not a directory" % new)
checkpoint_dir = Unicode('.ipynb_checkpoints', config=True,
help="""The directory name in which to keep notebook checkpoints
This is a path relative to the notebook's own directory.
By default, it is .ipynb_checkpoints
"""
)
def _copy(self, src, dest):
"""copy src to dest
like shutil.copy2, but log errors in copystat
"""
shutil.copyfile(src, dest)
try:
shutil.copystat(src, dest)
except OSError as e:
self.log.debug("copystat on %s failed", dest, exc_info=True)
def get_notebook_names(self, path=''):
"""List all notebook names in the notebook dir and path."""
path = path.strip('/')
if not os.path.isdir(self._get_os_path(path=path)):
raise web.HTTPError(404, 'Directory not found: ' + path)
names = glob.glob(self._get_os_path('*'+self.filename_ext, path))
names = [os.path.basename(name)
for name in names]
return names
def path_exists(self, path):
"""Does the API-style path (directory) actually exist?
Parameters
----------
path : string
The path to check. This is an API path (`/` separated,
relative to base notebook-dir).
Returns
-------
exists : bool
Whether the path is indeed a directory.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return os.path.isdir(os_path)
def is_hidden(self, path):
"""Does the API style path correspond to a hidden directory or file?
Parameters
----------
path : string
The path to check. This is an API path (`/` separated,
relative to base notebook-dir).
Returns
-------
exists : bool
Whether the path is hidden.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return is_hidden(os_path, self.notebook_dir)
def _get_os_path(self, name=None, path=''):
"""Given a notebook name and a URL path, return its file system
path.
Parameters
----------
name : string
The name of a notebook file with the .ipynb extension
path : string
The relative URL path (with '/' as separator) to the named
notebook.
Returns
-------
path : string
            A file system path that combines notebook_dir (location where
            the server started), the relative path, and the filename, joined
            using the current operating system's path separator.
"""
if name is not None:
path = path + '/' + name
return to_os_path(path, self.notebook_dir)
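    # Illustrative behaviour (hypothetical values): with notebook_dir set to
    # '/home/user/notebooks', _get_os_path('a.ipynb', 'sub/dir') resolves to
    # '/home/user/notebooks/sub/dir/a.ipynb' on a POSIX system, with the '/'
    # separators of the API path translated by to_os_path.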
def notebook_exists(self, name, path=''):
"""Returns a True if the notebook exists. Else, returns False.
Parameters
----------
name : string
The name of the notebook you are checking.
path : string
The relative path to the notebook (with '/' as separator)
Returns
-------
bool
"""
path = path.strip('/')
nbpath = self._get_os_path(name, path=path)
return os.path.isfile(nbpath)
# TODO: Remove this after we create the contents web service and directories are
# no longer listed by the notebook web service.
def list_dirs(self, path):
"""List the directories for a given API style path."""
path = path.strip('/')
os_path = self._get_os_path('', path)
if not os.path.isdir(os_path):
raise web.HTTPError(404, u'directory does not exist: %r' % os_path)
elif is_hidden(os_path, self.notebook_dir):
self.log.info("Refusing to serve hidden directory, via 404 Error")
raise web.HTTPError(404, u'directory does not exist: %r' % os_path)
dir_names = os.listdir(os_path)
dirs = []
for name in dir_names:
os_path = self._get_os_path(name, path)
if os.path.isdir(os_path) and not is_hidden(os_path, self.notebook_dir)\
and self.should_list(name):
try:
model = self.get_dir_model(name, path)
                except IOError:
                    # Skip entries that can no longer be stat'ed rather than
                    # appending a stale (or undefined) model.
                    continue
                dirs.append(model)
dirs = sorted(dirs, key=sort_key)
return dirs
# TODO: Remove this after we create the contents web service and directories are
# no longer listed by the notebook web service.
def get_dir_model(self, name, path=''):
"""Get the directory model given a directory name and its API style path"""
path = path.strip('/')
os_path = self._get_os_path(name, path)
if not os.path.isdir(os_path):
raise IOError('directory does not exist: %r' % os_path)
info = os.stat(os_path)
last_modified = tz.utcfromtimestamp(info.st_mtime)
created = tz.utcfromtimestamp(info.st_ctime)
# Create the notebook model.
        model = {}
model['name'] = name
model['path'] = path
model['last_modified'] = last_modified
model['created'] = created
model['type'] = 'directory'
return model
def list_notebooks(self, path):
"""Returns a list of dictionaries that are the standard model
for all notebooks in the relative 'path'.
Parameters
----------
path : str
the URL path that describes the relative path for the
listed notebooks
Returns
-------
notebooks : list of dicts
a list of the notebook models without 'content'
"""
path = path.strip('/')
notebook_names = self.get_notebook_names(path)
notebooks = [self.get_notebook(name, path, content=False)
for name in notebook_names if self.should_list(name)]
notebooks = sorted(notebooks, key=sort_key)
return notebooks
def get_notebook(self, name, path='', content=True):
""" Takes a path and name for a notebook and returns its model
Parameters
----------
name : str
the name of the notebook
path : str
the URL path that describes the relative path for
the notebook
Returns
-------
model : dict
            the notebook model. If content=True, the model also includes the
            'content' entry with the notebook JSON.
"""
path = path.strip('/')
if not self.notebook_exists(name=name, path=path):
raise web.HTTPError(404, u'Notebook does not exist: %s' % name)
os_path = self._get_os_path(name, path)
info = os.stat(os_path)
last_modified = tz.utcfromtimestamp(info.st_mtime)
created = tz.utcfromtimestamp(info.st_ctime)
# Create the notebook model.
        model = {}
model['name'] = name
model['path'] = path
model['last_modified'] = last_modified
model['created'] = created
model['type'] = 'notebook'
if content:
with io.open(os_path, 'r', encoding='utf-8') as f:
try:
nb = current.read(f, u'json')
except Exception as e:
raise web.HTTPError(400, u"Unreadable Notebook: %s %s" % (os_path, e))
self.mark_trusted_cells(nb, name, path)
model['content'] = nb
return model
def save_notebook(self, model, name='', path=''):
"""Save the notebook model and return the model with no content."""
path = path.strip('/')
if 'content' not in model:
raise web.HTTPError(400, u'No notebook JSON data provided')
# One checkpoint should always exist
if self.notebook_exists(name, path) and not self.list_checkpoints(name, path):
self.create_checkpoint(name, path)
new_path = model.get('path', path).strip('/')
new_name = model.get('name', name)
if path != new_path or name != new_name:
self.rename_notebook(name, path, new_name, new_path)
# Save the notebook file
os_path = self._get_os_path(new_name, new_path)
nb = current.to_notebook_json(model['content'])
self.check_and_sign(nb, new_name, new_path)
if 'name' in nb['metadata']:
nb['metadata']['name'] = u''
try:
self.log.debug("Autosaving notebook %s", os_path)
with io.open(os_path, 'w', encoding='utf-8') as f:
current.write(nb, f, u'json')
except Exception as e:
raise web.HTTPError(400, u'Unexpected error while autosaving notebook: %s %s' % (os_path, e))
# Save .py script as well
if self.save_script:
py_path = os.path.splitext(os_path)[0] + '.py'
self.log.debug("Writing script %s", py_path)
try:
with io.open(py_path, 'w', encoding='utf-8') as f:
current.write(nb, f, u'py')
except Exception as e:
raise web.HTTPError(400, u'Unexpected error while saving notebook as script: %s %s' % (py_path, e))
model = self.get_notebook(new_name, new_path, content=False)
return model
def update_notebook(self, model, name, path=''):
"""Update the notebook's path and/or name"""
path = path.strip('/')
new_name = model.get('name', name)
new_path = model.get('path', path).strip('/')
if path != new_path or name != new_name:
self.rename_notebook(name, path, new_name, new_path)
model = self.get_notebook(new_name, new_path, content=False)
return model
def delete_notebook(self, name, path=''):
"""Delete notebook by name and path."""
path = path.strip('/')
os_path = self._get_os_path(name, path)
if not os.path.isfile(os_path):
raise web.HTTPError(404, u'Notebook does not exist: %s' % os_path)
# clear checkpoints
for checkpoint in self.list_checkpoints(name, path):
checkpoint_id = checkpoint['id']
cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
if os.path.isfile(cp_path):
self.log.debug("Unlinking checkpoint %s", cp_path)
os.unlink(cp_path)
self.log.debug("Unlinking notebook %s", os_path)
os.unlink(os_path)
def rename_notebook(self, old_name, old_path, new_name, new_path):
"""Rename a notebook."""
old_path = old_path.strip('/')
new_path = new_path.strip('/')
if new_name == old_name and new_path == old_path:
return
new_os_path = self._get_os_path(new_name, new_path)
old_os_path = self._get_os_path(old_name, old_path)
# Should we proceed with the move?
if os.path.isfile(new_os_path):
raise web.HTTPError(409, u'Notebook with name already exists: %s' % new_os_path)
if self.save_script:
old_py_path = os.path.splitext(old_os_path)[0] + '.py'
new_py_path = os.path.splitext(new_os_path)[0] + '.py'
if os.path.isfile(new_py_path):
raise web.HTTPError(409, u'Python script with name already exists: %s' % new_py_path)
# Move the notebook file
try:
shutil.move(old_os_path, new_os_path)
except Exception as e:
raise web.HTTPError(500, u'Unknown error renaming notebook: %s %s' % (old_os_path, e))
# Move the checkpoints
old_checkpoints = self.list_checkpoints(old_name, old_path)
for cp in old_checkpoints:
checkpoint_id = cp['id']
old_cp_path = self.get_checkpoint_path(checkpoint_id, old_name, old_path)
new_cp_path = self.get_checkpoint_path(checkpoint_id, new_name, new_path)
if os.path.isfile(old_cp_path):
self.log.debug("Renaming checkpoint %s -> %s", old_cp_path, new_cp_path)
shutil.move(old_cp_path, new_cp_path)
# Move the .py script
if self.save_script:
shutil.move(old_py_path, new_py_path)
# Checkpoint-related utilities
def get_checkpoint_path(self, checkpoint_id, name, path=''):
"""find the path to a checkpoint"""
path = path.strip('/')
basename, _ = os.path.splitext(name)
filename = u"{name}-{checkpoint_id}{ext}".format(
name=basename,
checkpoint_id=checkpoint_id,
ext=self.filename_ext,
)
os_path = self._get_os_path(path=path)
cp_dir = os.path.join(os_path, self.checkpoint_dir)
if not os.path.exists(cp_dir):
os.mkdir(cp_dir)
cp_path = os.path.join(cp_dir, filename)
return cp_path
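    # Example of the resulting layout (names are illustrative, assuming the
    # default '.ipynb' filename_ext): a checkpoint for 'Analysis.ipynb' in the
    # directory 'work' ends up at
    # <notebook_dir>/work/.ipynb_checkpoints/Analysis-checkpoint.ipynb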
def get_checkpoint_model(self, checkpoint_id, name, path=''):
"""construct the info dict for a given checkpoint"""
path = path.strip('/')
cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
stats = os.stat(cp_path)
last_modified = tz.utcfromtimestamp(stats.st_mtime)
info = dict(
id = checkpoint_id,
last_modified = last_modified,
)
return info
# public checkpoint API
def create_checkpoint(self, name, path=''):
"""Create a checkpoint from the current state of a notebook"""
path = path.strip('/')
nb_path = self._get_os_path(name, path)
# only the one checkpoint ID:
checkpoint_id = u"checkpoint"
cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
self.log.debug("creating checkpoint for notebook %s", name)
self._copy(nb_path, cp_path)
# return the checkpoint info
return self.get_checkpoint_model(checkpoint_id, name, path)
def list_checkpoints(self, name, path=''):
"""list the checkpoints for a given notebook
This notebook manager currently only supports one checkpoint per notebook.
"""
path = path.strip('/')
checkpoint_id = "checkpoint"
os_path = self.get_checkpoint_path(checkpoint_id, name, path)
if not os.path.exists(os_path):
return []
else:
return [self.get_checkpoint_model(checkpoint_id, name, path)]
def restore_checkpoint(self, checkpoint_id, name, path=''):
"""restore a notebook to a checkpointed state"""
path = path.strip('/')
self.log.info("restoring Notebook %s from checkpoint %s", name, checkpoint_id)
nb_path = self._get_os_path(name, path)
cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
if not os.path.isfile(cp_path):
self.log.debug("checkpoint file does not exist: %s", cp_path)
raise web.HTTPError(404,
u'Notebook checkpoint does not exist: %s-%s' % (name, checkpoint_id)
)
# ensure notebook is readable (never restore from an unreadable notebook)
with io.open(cp_path, 'r', encoding='utf-8') as f:
current.read(f, u'json')
self._copy(cp_path, nb_path)
self.log.debug("copying %s -> %s", cp_path, nb_path)
def delete_checkpoint(self, checkpoint_id, name, path=''):
"""delete a notebook's checkpoint"""
path = path.strip('/')
cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
if not os.path.isfile(cp_path):
raise web.HTTPError(404,
u'Notebook checkpoint does not exist: %s%s-%s' % (path, name, checkpoint_id)
)
self.log.debug("unlinking %s", cp_path)
os.unlink(cp_path)
def info_string(self):
return "Serving notebooks from local directory: %s" % self.notebook_dir
|
py | 1a342d11fec9ea6e49f9b2c0bc717c390cdd61d2 | import re
from bs4 import BeautifulSoup
from time import sleep
import pickle
import praw
import OAuth2Util
from allpages import getPages
from lookup import findItem
r = praw.Reddit('bot1')
m = re.compile(r"\[\[[^\]]*\]\]")
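# For example (item names purely illustrative), m.findall("try [[Headhunter]]
# or [[Tabula Rasa]]") returns ['[[Headhunter]]', '[[Tabula Rasa]]']; the
# surrounding brackets are stripped later with item[2:-2].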
def respond(lim, rate, subs):
with open('ids.pickle', 'rb') as handle:
ids = pickle.load(handle)
i = 0
while True:
if i % 100 == 0:
getPages()
i += 1
for sub in subs:
subreddit = r.subreddit(sub)
for submission in subreddit.new(limit=lim):
comment_queue = submission.comments[:]
while comment_queue:
com = comment_queue.pop(0)
if "[[" in com.body and "]]" in com.body and com.id not in ids:
print("Found Comment:" + com.id)
reply = ""
for item in m.findall(com.body)[:10]:
isPOE = sub.lower()=="pathofexile"
temp = findItem(item[2:-2], isPOE)
reply += temp
if temp != "":
reply += "\n\n---------\n\n"
if reply != "":
reply += " ^I ^am ^a ^bot. ^Reply ^to ^me ^with ^up ^to ^7 ^[[item names]]."
reply += " ^Please ^contact ^/u/liortulip, ^my ^creator"
reply += " ^with ^any ^questions ^or ^concerns. ^Thanks!"
print("Replying...")
com.reply(reply)
else:
print("False Reply ^")
ids.append(com.id)
comment_queue.extend(com.replies)
with open('ids.pickle', 'wb') as handle:
pickle.dump(ids, handle, protocol=pickle.HIGHEST_PROTOCOL)
sleep(rate)
respond(50,10, ["test"])
|
py | 1a342e125ef400bb2e0de13762c7163a32adb5b2 | from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
from openbook_auth.models import UserNotificationsSubscription
from openbook_notifications.models.notification import Notification
from openbook_posts.models import Post
class UserNewPostNotification(models.Model):
notification = GenericRelation(Notification)
user_notifications_subscription = models.ForeignKey(UserNotificationsSubscription, on_delete=models.CASCADE)
post = models.ForeignKey(Post, on_delete=models.CASCADE)
@classmethod
def create_user_new_post_notification(cls, user_notifications_subscription_id, post_id, owner_id):
user_new_post_notification = cls.objects.create(
post_id=post_id,
user_notifications_subscription_id=user_notifications_subscription_id)
Notification.create_notification(type=Notification.USER_NEW_POST,
content_object=user_new_post_notification,
owner_id=owner_id)
return user_new_post_notification
@classmethod
def delete_user_new_post_notification(cls, user_notifications_subscription_id, post_id, owner_id):
cls.objects.filter(user_notifications_subscription_id=user_notifications_subscription_id,
post_id=post_id,
notification__owner_id=owner_id).delete()
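    # Illustrative usage (ids are hypothetical):
    #   UserNewPostNotification.create_user_new_post_notification(
    #       user_notifications_subscription_id=1, post_id=42, owner_id=7)
    # creates the subscription-post link and its generic Notification row, and
    # the matching delete_user_new_post_notification call removes it again.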
|
py | 1a342f3326de1f24c8b29167c707b2e997c93f83 | from abc import abstractmethod
import datetime
import numpy as np
import xarray as xr
from pyproj import CRS
from RAiDER.logger import *
from RAiDER import utilFcns as util
from RAiDER.models.model_levels import (
LEVELS_137_HEIGHTS,
LEVELS_25_HEIGHTS,
A_137_HRES,
B_137_HRES,
)
from RAiDER.models.weatherModel import WeatherModel
class ECMWF(WeatherModel):
'''
Implement ECMWF models
'''
def __init__(self):
# initialize a weather model
WeatherModel.__init__(self)
# model constants
self._k1 = 0.776 # [K/Pa]
self._k2 = 0.233 # [K/Pa]
self._k3 = 3.75e3 # [K^2/Pa]
self._lon_res = 0.2
self._lat_res = 0.2
self._proj = CRS.from_epsg(4326)
self._model_level_type = 'ml' # Default
def setLevelType(self, levelType):
'''Set the level type to model levels or pressure levels'''
if levelType in ['ml', 'pl']:
self._model_level_type = levelType
else:
raise RuntimeError('Level type {} is not recognized'.format(levelType))
if levelType == 'ml':
self.__model_levels__()
else:
self.__pressure_levels__()
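    # Illustrative use (variable name hypothetical): a concrete subclass
    # instance would call `model.setLevelType('pl')` to work with pressure
    # levels, or keep the default 'ml' to use the 137 ECMWF model levels set
    # up in __model_levels__ below.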
@abstractmethod
def __pressure_levels__(self):
pass
def __model_levels__(self):
self._levels = 137
self._zlevels = np.flipud(LEVELS_137_HEIGHTS)
self._a = A_137_HRES
self._b = B_137_HRES
def load_weather(self, *args, **kwargs):
'''
Consistent class method to be implemented across all weather model types.
As a result of calling this method, all of the variables (x, y, z, p, q,
t, wet_refractivity, hydrostatic refractivity, e) should be fully
populated.
'''
self._load_model_level(*self.files)
def _load_model_level(self, fname):
# read data from netcdf file
lats, lons, xs, ys, t, q, lnsp, z = self._makeDataCubes(
fname,
verbose=False
)
# ECMWF appears to give me this backwards
if lats[0] > lats[1]:
z = z[::-1]
lnsp = lnsp[::-1]
t = t[:, ::-1]
q = q[:, ::-1]
lats = lats[::-1]
# Lons is usually ok, but we'll throw in a check to be safe
if lons[0] > lons[1]:
z = z[..., ::-1]
lnsp = lnsp[..., ::-1]
t = t[..., ::-1]
q = q[..., ::-1]
lons = lons[::-1]
        # pyproj gets fussy with longitudes outside [-180, 180], and our
        # interpolator isn't clever enough to recognize that e.g. 190 deg
        # and -170 deg refer to the same meridian, so wrap longitudes here
lons[lons > 180] -= 360
self._t = t
self._q = q
geo_hgt, pres, hgt = self._calculategeoh(z, lnsp)
# re-assign lons, lats to match heights
_lons = np.broadcast_to(lons[np.newaxis, np.newaxis, :], hgt.shape)
_lats = np.broadcast_to(lats[np.newaxis, :, np.newaxis], hgt.shape)
# ys is latitude
self._get_heights(_lats, hgt)
h = self._zs.copy()
# We want to support both pressure levels and true pressure grids.
# If the shape has one dimension, we'll scale it up to act as a
# grid, otherwise we'll leave it alone.
if len(pres.shape) == 1:
self._p = np.broadcast_to(pres[:, np.newaxis, np.newaxis], self._zs.shape)
else:
self._p = pres
# Re-structure everything from (heights, lats, lons) to (lons, lats, heights)
self._p = np.transpose(self._p, (1, 2, 0))
self._t = np.transpose(self._t, (1, 2, 0))
self._q = np.transpose(self._q, (1, 2, 0))
h = np.transpose(h, (1, 2, 0))
self._lats = np.transpose(_lats, (1, 2, 0))
self._lons = np.transpose(_lons, (1, 2, 0))
# Flip all the axis so that zs are in order from bottom to top
# lats / lons are simply replicated to all heights so they don't need flipped
self._p = np.flip(self._p, axis=2)
self._t = np.flip(self._t, axis=2)
self._q = np.flip(self._q, axis=2)
self._ys = self._lats.copy()
self._xs = self._lons.copy()
self._zs = np.flip(h, axis=2)
def _fetch(self, lats, lons, time, out, Nextra=2):
'''
Fetch a weather model from ECMWF
'''
# bounding box plus a buffer
lat_min, lat_max, lon_min, lon_max = self._get_ll_bounds(lats, lons, Nextra)
# execute the search at ECMWF
try:
self._get_from_ecmwf(
lat_min,
lat_max,
self._lat_res,
lon_min,
lon_max,
self._lon_res,
time,
out
)
except Exception as e:
logger.warning('Query point bounds are {}/{}/{}/{}'.format(lat_min, lat_max, lon_min, lon_max))
logger.warning('Query time: {}'.format(time))
logger.exception(e)
def _get_from_ecmwf(self, lat_min, lat_max, lat_step, lon_min, lon_max,
lon_step, time, out):
import ecmwfapi
server = ecmwfapi.ECMWFDataServer()
corrected_date = util.round_date(time, datetime.timedelta(hours=6))
server.retrieve({
"class": self._classname, # ERA-Interim
'dataset': self._dataset,
"expver": "{}".format(self._expver),
# They warn me against all, but it works well
"levelist": 'all',
"levtype": "ml", # Model levels
"param": "lnsp/q/z/t", # Necessary variables
"stream": "oper",
# date: Specify a single date as "2015-08-01" or a period as
# "2015-08-01/to/2015-08-31".
"date": datetime.datetime.strftime(corrected_date, "%Y-%m-%d"),
# type: Use an (analysis) unless you have a particular reason to
# use fc (forecast).
"type": "an",
# time: With type=an, time can be any of
# "00:00:00/06:00:00/12:00:00/18:00:00". With type=fc, time can
# be any of "00:00:00/12:00:00",
"time": datetime.time.strftime(corrected_date.time(), "%H:%M:%S"),
# step: With type=an, step is always "0". With type=fc, step can
# be any of "3/6/9/12".
"step": "0",
# grid: Only regular lat/lon grids are supported.
"grid": '{}/{}'.format(lat_step, lon_step),
"area": '{}/{}/{}/{}'.format(lat_max, lon_min, lat_min, lon_max), # area: N/W/S/E
"format": "netcdf",
"resol": "av",
"target": out, # target: the name of the output file.
})
def _get_from_cds(
self,
lat_min,
lat_max,
lat_step,
lon_min,
lon_max,
lon_step,
acqTime,
outname
):
import cdsapi
c = cdsapi.Client(verify=0)
if self._model_level_type == 'pl':
var = ['z', 'q', 't']
levType = 'pressure_level'
else:
var = "129/130/133/152" # 'lnsp', 'q', 'z', 't'
levType = 'model_level'
bbox = [lat_max, lon_min, lat_min, lon_max]
dataDict = {
"product_type": "reanalysis",
"{}".format(levType): 'all',
"levtype": "{}".format(self._model_level_type), # 'ml' for model levels or 'pl' for pressure levels
'param': var,
"stream": "oper",
"type": "an",
"year": "{}".format(acqTime.year),
"month": "{}".format(acqTime.month),
"day": "{}".format(acqTime.day),
"time": "{}".format(datetime.time.strftime(acqTime.time(), '%H:%M')),
# step: With type=an, step is always "0". With type=fc, step can
# be any of "3/6/9/12".
"step": "0",
"area": bbox,
"format": "netcdf"}
try:
c.retrieve('reanalysis-era5-pressure-levels', dataDict, outname)
except Exception as e:
logger.warning('Query point bounds are {}/{} latitude and {}/{} longitude'.format(lat_min, lat_max, lon_min, lon_max))
logger.warning('Query time: {}'.format(acqTime))
logger.exception(e)
raise Exception
def _download_ecmwf(self, lat_min, lat_max, lat_step, lon_min, lon_max, lon_step, time, out):
from ecmwfapi import ECMWFService
server = ECMWFService("mars")
corrected_date = util.round_date(time, datetime.timedelta(hours=6))
if self._model_level_type == 'ml':
param = "129/130/133/152"
else:
param = "129.128/130.128/133.128/152"
server.execute(
{
'class': self._classname,
'dataset': self._dataset,
'expver': "{}".format(self._expver),
'resol': "av",
'stream': "oper",
'type': "an",
'levelist': "all",
'levtype': "{}".format(self._model_level_type),
'param': param,
'date': datetime.datetime.strftime(corrected_date, "%Y-%m-%d"),
'time': "{}".format(datetime.time.strftime(corrected_date.time(), '%H:%M')),
'step': "0",
'grid': "{}/{}".format(lon_step, lat_step),
'area': "{}/{}/{}/{}".format(lat_max, util.floorish(lon_min, 0.1), util.floorish(lat_min, 0.1), lon_max),
'format': "netcdf",
},
out
)
def _load_pressure_level(self, filename, *args, **kwargs):
with xr.open_dataset(filename) as block:
# Pull the data
z = np.squeeze(block['z'].values)
t = np.squeeze(block['t'].values)
q = np.squeeze(block['q'].values)
lats = np.squeeze(block.latitude.values)
lons = np.squeeze(block.longitude.values)
levels = np.squeeze(block.level.values) * 100
z = np.flip(z, axis=1)
# ECMWF appears to give me this backwards
if lats[0] > lats[1]:
z = z[::-1]
t = t[:, ::-1]
q = q[:, ::-1]
lats = lats[::-1]
# Lons is usually ok, but we'll throw in a check to be safe
if lons[0] > lons[1]:
z = z[..., ::-1]
t = t[..., ::-1]
q = q[..., ::-1]
lons = lons[::-1]
            # pyproj gets fussy with longitudes outside [-180, 180], and our
            # interpolator isn't clever enough to recognize that e.g. 190 deg
            # and -170 deg refer to the same meridian, so wrap longitudes here
lons[lons > 180] -= 360
self._t = t
self._q = q
geo_hgt = z / self._g0
# re-assign lons, lats to match heights
_lons = np.broadcast_to(lons[np.newaxis, np.newaxis, :],
geo_hgt.shape)
_lats = np.broadcast_to(lats[np.newaxis, :, np.newaxis],
geo_hgt.shape)
# correct heights for latitude
self._get_heights(_lats, geo_hgt)
self._p = np.broadcast_to(levels[:, np.newaxis, np.newaxis],
self._zs.shape)
# Re-structure everything from (heights, lats, lons) to (lons, lats, heights)
self._p = np.transpose(self._p)
self._t = np.transpose(self._t)
self._q = np.transpose(self._q)
self._lats = np.transpose(_lats)
self._lons = np.transpose(_lons)
self._ys = self._lats.copy()
self._xs = self._lons.copy()
self._zs = np.transpose(self._zs)
# check this
# data cube format should be lats,lons,heights
self._lats = self._lats.swapaxes(0, 1)
self._lons = self._lons.swapaxes(0, 1)
self._xs = self._xs.swapaxes(0, 1)
self._ys = self._ys.swapaxes(0, 1)
self._zs = self._zs.swapaxes(0, 1)
self._p = self._p.swapaxes(0, 1)
self._q = self._q.swapaxes(0, 1)
self._t = self._t.swapaxes(0, 1)
# For some reason z is opposite the others
self._p = np.flip(self._p, axis=2)
self._t = np.flip(self._t, axis=2)
self._q = np.flip(self._q, axis=2)
def _makeDataCubes(self, fname, verbose=False):
'''
Create a cube of data representing temperature and relative humidity
at specified pressure levels
'''
# get ll_bounds
S, N, W, E = self._ll_bounds
with xr.open_dataset(fname) as ds:
ds = ds.assign_coords(longitude=(((ds.longitude + 180) % 360) - 180))
# mask based on query bounds
m1 = (S <= ds.latitude) & (N >= ds.latitude)
m2 = (W <= ds.longitude) & (E >= ds.longitude)
block = ds.where(m1 & m2, drop=True)
# Pull the data
z = np.squeeze(block['z'].values)[0, ...]
t = np.squeeze(block['t'].values)
q = np.squeeze(block['q'].values)
lnsp = np.squeeze(block['lnsp'].values)[0, ...]
lats = np.squeeze(block.latitude.values)
lons = np.squeeze(block.longitude.values)
xs = lons.copy()
ys = lats.copy()
if z.size == 0:
raise RuntimeError('There is no data in z, '
'you may have a problem with your mask')
return lats, lons, xs, ys, t, q, lnsp, z
|
py | 1a342f6c00ecb92dbecf9b43ce36855a39491144 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utilities for property-based testing for TFP distributions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import inspect
from absl import logging
import hypothesis as hp
from hypothesis import strategies as hps
import numpy as np
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python import util as tfp_util
from tensorflow_probability.python.bijectors import hypothesis_testlib as bijector_hps
from tensorflow_probability.python.internal import hypothesis_testlib as tfp_hps
from tensorflow_probability.python.internal import tensorshape_util
JAX_MODE = False
# pylint is unable to handle @hps.composite (e.g. complains "No value for
# argument 'batch_shape' in function call"), so disable this lint for the file.
# pylint: disable=no-value-for-parameter
TF2_FRIENDLY_DISTS = (
'Bates',
'Bernoulli',
'Beta',
'BetaBinomial',
'Binomial',
'Chi',
'Chi2',
'CholeskyLKJ',
'Categorical',
'Cauchy',
'ContinuousBernoulli',
'Deterministic',
'DeterminantalPointProcess',
'Dirichlet',
'DirichletMultinomial',
'DoublesidedMaxwell',
'Empirical',
'Exponential',
'ExpGamma',
'ExpInverseGamma',
'FiniteDiscrete',
'Gamma',
'GammaGamma',
'GeneralizedNormal',
'GeneralizedPareto',
'Geometric',
'Gumbel',
'GeneralizedExtremeValue',
'HalfCauchy',
'HalfNormal',
'HalfStudentT',
'Horseshoe',
'InverseGamma',
'InverseGaussian',
'JohnsonSU',
'Kumaraswamy',
'Laplace',
'LKJ',
'LogLogistic',
'LogNormal',
'Logistic',
'Normal',
'Moyal',
'Multinomial',
'NegativeBinomial',
'OneHotCategorical',
'OrderedLogistic',
'Pareto',
'PERT',
'PlackettLuce',
'Poisson',
'PowerSpherical',
# 'PoissonLogNormalQuadratureCompound' TODO(b/137956955): Add support
# for hypothesis testing
'ProbitBernoulli',
'RelaxedBernoulli',
'ExpRelaxedOneHotCategorical',
# 'SinhArcsinh' TODO(b/137956955): Add support for hypothesis testing
'Skellam',
'SphericalUniform',
'StudentT',
'Triangular',
'TruncatedCauchy',
'TruncatedNormal',
'Uniform',
'VonMises',
'VonMisesFisher',
'Weibull',
'WishartTriL',
'Zipf',
)
# SPECIAL_DISTS are distributions that should not be drawn by
# `base_distributions`, because they are parameterized by one or more
# sub-distributions themselves. This list is used to suppress warnings from
# `_instantiable_base_dists`, below.
SPECIAL_DISTS = (
'Autoregressive',
'BatchReshape', # (has strategy)
'Blockwise',
'Distribution', # Base class; not a distribution at all
'Empirical', # Base distribution with custom instantiation; (has strategy)
'JointDistribution',
'JointDistributionCoroutine',
'JointDistributionCoroutineAutoBatched',
'JointDistributionNamed',
'JointDistributionNamedAutoBatched',
'JointDistributionSequential',
'JointDistributionSequentialAutoBatched',
'Independent', # (has strategy)
'Mixture', # (has strategy)
'MixtureSameFamily', # (has strategy)
'Sample', # (has strategy)
'TransformedDistribution', # (has strategy)
'QuantizedDistribution', # (has strategy)
)
# MUTEX_PARAMS are mutually exclusive parameters that cannot be drawn together
# in broadcasting_params.
MUTEX_PARAMS = (
set(['logits', 'probs']),
set(['probits', 'probs']),
set(['rate', 'log_rate']),
set(['rate1', 'log_rate1']),
set(['rate2', 'log_rate2']),
set(['scale', 'log_scale']),
set(['scale', 'scale_tril', 'scale_diag', 'scale_identity_multiplier']),
)
# Allowlist of underlying distributions for QuantizedDistribution (must have
# continuous, infinite support -- QuantizedDistribution also works for finite-
# support distributions for which the length of the support along each dimension
# is at least 1, though it is difficult to construct draws of these
# distributions in general, and wouldn't contribute much to test coverage.)
QUANTIZED_BASE_DISTS = (
'Chi2',
'Exponential',
'LogNormal',
'Logistic',
'Normal',
'Pareto',
'Poisson',
'StudentT',
)
# Functions used to constrain randomly sampled parameter ndarrays.
# TODO(b/128518790): Eliminate / minimize the fudge factors in here.
def constrain_between_eps_and_one_minus_eps(eps=1e-6):
return lambda x: eps + (1 - 2 * eps) * tf.sigmoid(x)
def ensure_high_gt_low(low, high):
"""Returns a value with shape matching `high` and gt broadcastable `low`."""
new_high = tf.maximum(low + tf.abs(low) * .1 + .1, high)
reduce_dims = []
if (tensorshape_util.rank(new_high.shape) >
tensorshape_util.rank(high.shape)):
reduced_leading_axes = tf.range(
tensorshape_util.rank(new_high.shape) -
tensorshape_util.rank(high.shape))
new_high = tf.math.reduce_max(
new_high, axis=reduced_leading_axes)
reduce_dims = [
d for d in range(tensorshape_util.rank(high.shape))
if high.shape[d] < new_high.shape[d]
]
if reduce_dims:
new_high = tf.math.reduce_max(
new_high, axis=reduce_dims, keepdims=True)
return new_high
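# For instance (values chosen purely for illustration): with low = [0., 2.] and
# high = [1., 1.], the result is [1., 2.3] -- each entry is at least
# low + |low| * 0.1 + 0.1, so it is strictly greater than the corresponding low.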
def fix_finite_discrete(d):
size = d.get('probs', d.get('logits', None)).shape[-1]
return dict(d, outcomes=tf.linspace(-1.0, 1.0, size))
def fix_lkj(d):
return dict(d, concentration=d['concentration'] + 1, dimension=3)
def fix_spherical_uniform(d):
return dict(d, dimension=5, batch_shape=[])
def fix_pert(d):
peak = ensure_high_gt_low(d['low'], d['peak'])
high = ensure_high_gt_low(peak, d['high'])
temperature = ensure_high_gt_low(
np.zeros(d['temperature'].shape, dtype=np.float32), d['temperature'])
return dict(d, peak=peak, high=high, temperature=temperature)
def fix_triangular(d):
peak = ensure_high_gt_low(d['low'], d['peak'])
high = ensure_high_gt_low(peak, d['high'])
return dict(d, peak=peak, high=high)
def fix_wishart(d):
df = d['df']
scale = d.get('scale', d.get('scale_tril'))
return dict(d, df=tf.maximum(df, tf.cast(scale.shape[-1], df.dtype)))
def fix_bates(d):
total_count = tf.math.maximum(
tf.math.minimum(
d['total_count'],
tfd.bates.BATES_TOTAL_COUNT_STABILITY_LIMITS[ # pylint: disable=protected-access
d['total_count'].dtype]),
1.)
high = ensure_high_gt_low(d['low'], d['high'])
return dict(d, total_count=total_count, high=high)
CONSTRAINTS = {
'atol':
tf.math.softplus,
'rtol':
tf.math.softplus,
'concentration':
tfp_hps.softplus_plus_eps(),
'GeneralizedPareto.concentration': # Permits +ve and -ve concentrations.
lambda x: tf.math.tanh(x) * 0.24,
'concentration0':
tfp_hps.softplus_plus_eps(),
'concentration1':
tfp_hps.softplus_plus_eps(),
'covariance_matrix':
tfp_hps.positive_definite,
'df':
tfp_hps.softplus_plus_eps(),
'DeterminantalPointProcess.eigenvalues':
tfp_hps.softplus_plus_eps(),
'eigenvectors':
tfp_hps.orthonormal,
'InverseGaussian.loc':
tfp_hps.softplus_plus_eps(),
'JohnsonSU.tailweight':
tfp_hps.softplus_plus_eps(),
'PowerSpherical.mean_direction':
lambda x: tf.math.l2_normalize(tf.math.sigmoid(x) + 1e-6, -1),
'VonMisesFisher.mean_direction': # max ndims is 3 to avoid instability.
lambda x: tf.math.l2_normalize(tf.math.sigmoid(x[..., :3]) + 1e-6, -1),
'Categorical.probs':
tf.math.softmax,
'ExpRelaxedOneHotCategorical.probs':
tf.math.softmax,
'FiniteDiscrete.probs':
tf.math.softmax,
'Multinomial.probs':
tf.math.softmax,
'OneHotCategorical.probs':
tf.math.softmax,
'RelaxedCategorical.probs':
tf.math.softmax,
'Zipf.power':
tfp_hps.softplus_plus_eps(1 + 1e-6), # strictly > 1
'ContinuousBernoulli.probs':
tf.sigmoid,
'Geometric.logits': # TODO(b/128410109): re-enable down to -50
# Capping at 15. so that probability is less than 1, and entropy is
# defined. b/147394924
lambda x: tf.minimum(tf.maximum(x, -16.), 15.), # works around the bug
'Geometric.probs':
constrain_between_eps_and_one_minus_eps(),
'Binomial.probs':
tf.sigmoid,
'NegativeBinomial.probs':
tf.sigmoid,
'Bernoulli.probs':
tf.sigmoid,
'PlackettLuce.scores':
tfp_hps.softplus_plus_eps(),
'ProbitBernoulli.probs':
tf.sigmoid,
'RelaxedBernoulli.probs':
tf.sigmoid,
'cutpoints':
# Permit values that aren't too large
lambda x: tfb.Ascending().forward(10 * tf.math.tanh(x)),
'log_rate':
lambda x: tf.maximum(x, -16.),
# Capping log_rate1 and log_rate2 to 15. This is because if both are large
# (meaning the rates are `inf`), then the Skellam distribution is undefined.
'log_rate1':
lambda x: tf.minimum(tf.maximum(x, -16.), 15.),
'log_rate2':
lambda x: tf.minimum(tf.maximum(x, -16.), 15.),
'log_scale':
lambda x: tf.maximum(x, -16.),
'mixing_concentration':
tfp_hps.softplus_plus_eps(),
'mixing_rate':
tfp_hps.softplus_plus_eps(),
'rate':
tfp_hps.softplus_plus_eps(),
'rate1':
tfp_hps.softplus_plus_eps(),
'rate2':
tfp_hps.softplus_plus_eps(),
'scale':
tfp_hps.softplus_plus_eps(),
'Wishart.scale':
tfp_hps.positive_definite,
'scale_diag':
tfp_hps.softplus_plus_eps(),
'scale_identity_multiplier':
tfp_hps.softplus_plus_eps(),
'scale_tril':
tfp_hps.lower_tril_positive_definite,
'tailweight':
tfp_hps.softplus_plus_eps(),
'temperature':
tfp_hps.softplus_plus_eps(),
'total_count':
lambda x: tf.floor(tf.sigmoid(x / 100) * 100) + 1,
'Bates':
fix_bates,
'Bernoulli':
lambda d: dict(d, dtype=tf.float32),
'CholeskyLKJ':
fix_lkj,
'LKJ':
fix_lkj,
'PERT':
fix_pert,
'Triangular':
fix_triangular,
'TruncatedCauchy':
lambda d: dict(d, high=ensure_high_gt_low(d['low'], d['high'])),
'TruncatedNormal':
lambda d: dict(d, high=ensure_high_gt_low(d['low'], d['high'])),
'Uniform':
lambda d: dict(d, high=ensure_high_gt_low(d['low'], d['high'])),
'SphericalUniform':
fix_spherical_uniform,
'Wishart':
fix_wishart,
'WishartTriL':
fix_wishart,
'Zipf':
lambda d: dict(d, dtype=tf.float32),
'FiniteDiscrete':
fix_finite_discrete,
'GeneralizedNormal.power':
tfp_hps.softplus_plus_eps(),
}
def constraint_for(dist=None, param=None):
if param is not None:
return CONSTRAINTS.get('{}.{}'.format(dist, param),
CONSTRAINTS.get(param, tfp_hps.identity_fn))
return CONSTRAINTS.get(dist, tfp_hps.identity_fn)
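# Lookup examples: constraint_for('Geometric', 'probs') resolves to the
# distribution-specific 'Geometric.probs' entry above; constraint_for(param='scale')
# falls back to the generic 'scale' constraint; and a name with no entry at all
# falls through to tfp_hps.identity_fn.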
class DistInfo(collections.namedtuple(
'DistInfo', ['cls', 'params_event_ndims'])):
"""Sufficient information to instantiate a Distribution.
To wit
- The Python class `cls` giving the class, and
- A Python dict `params_event_ndims` giving the event dimensions for the
parameters (so that parameters can be built with predictable batch shapes).
Specifically, the `params_event_ndims` dict maps string parameter names to
Python integers. Each integer gives how many (trailing) dimensions of that
parameter are part of the event.
"""
__slots__ = ()
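# Illustrative entry (parameter names assumed for demonstration only): a
# multivariate normal parameterized by a location vector and a lower-triangular
# scale factor would carry params_event_ndims={'loc': 1, 'scale_tril': 2},
# i.e. one trailing event dimension for `loc` and two for `scale_tril`.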
def _instantiable_base_dists():
"""Computes the table of mechanically instantiable base Distributions.
A Distribution is mechanically instantiable if
- The class appears as a symbol binding in `tfp.distributions`;
- The class defines a `_params_event_ndims` method (necessary
to generate parameter Tensors with predictable batch shapes); and
- The name is not blocklisted in `SPECIAL_DISTS`.
  Additionally, the Empirical distribution is hardcoded with special
instantiation rules for each choice of event_ndims among 0, 1, and 2.
Compound distributions like TransformedDistribution have their own
instantiation rules hard-coded in the `distributions` strategy.
Returns:
instantiable_base_dists: A Python dict mapping distribution name
(as a string) to a `DistInfo` carrying the information necessary to
instantiate it.
"""
result = {}
for dist_name in dir(tfd):
dist_class = getattr(tfd, dist_name)
if (not inspect.isclass(dist_class) or
not issubclass(dist_class, tfd.Distribution) or
dist_name in SPECIAL_DISTS):
continue
try:
params_event_ndims = dist_class._params_event_ndims() # pylint: disable=protected-access
except NotImplementedError:
msg = 'Unable to test tfd.%s: _params_event_ndims not implemented.'
logging.warning(msg, dist_name)
continue
result[dist_name] = DistInfo(dist_class, params_event_ndims)
# Empirical._params_event_ndims depends on `self.event_ndims`, so we have to
# explicitly list these entries.
result['Empirical|event_ndims=0'] = DistInfo( #
functools.partial(tfd.Empirical, event_ndims=0), dict(samples=1))
result['Empirical|event_ndims=1'] = DistInfo( #
functools.partial(tfd.Empirical, event_ndims=1), dict(samples=2))
result['Empirical|event_ndims=2'] = DistInfo( #
functools.partial(tfd.Empirical, event_ndims=2), dict(samples=3))
return result
# INSTANTIABLE_BASE_DISTS is a map from str->(DistClass, params_event_ndims)
INSTANTIABLE_BASE_DISTS = _instantiable_base_dists()
del _instantiable_base_dists
INSTANTIABLE_META_DISTS = (
'BatchReshape',
'Independent',
'Mixture',
'MixtureSameFamily',
'Sample',
'TransformedDistribution',
'QuantizedDistribution',
)
def _report_non_instantiable_meta_dists():
for dist_name in SPECIAL_DISTS:
if dist_name in ['Distribution', 'Empirical']: continue
if dist_name in INSTANTIABLE_META_DISTS: continue
msg = 'Unable to test tfd.%s: no instantiation strategy.'
logging.warning(msg, dist_name)
_report_non_instantiable_meta_dists()
del _report_non_instantiable_meta_dists
@hps.composite
def valid_slices(draw, batch_shape):
"""Samples a legal (possibly empty) slice for shape batch_shape."""
# We build up a list of slices in several stages:
# 1. Choose 0 to batch_rank slices to come before an Ellipsis (...).
# 2. Decide whether or not to add an Ellipsis; if using, updating the indexing
# used (e.g. batch_shape[i]) to identify safe bounds.
# 3. Choose 0 to [remaining_dims] slices to come last.
# 4. Decide where to insert between 0 and 3 newaxis slices.
batch_shape = tf.TensorShape(batch_shape).as_list()
slices = []
batch_rank = len(batch_shape)
arbitrary_slices = hps.tuples(
hps.one_of(hps.just(None), hps.integers(min_value=-100, max_value=100)),
hps.one_of(hps.just(None), hps.integers(min_value=-100, max_value=100)),
hps.one_of(
hps.just(None),
hps.integers(min_value=-100, max_value=100).filter(lambda x: x != 0))
).map(lambda tup: slice(*tup))
# 1. Choose 0 to batch_rank slices to come before an Ellipsis (...).
nslc_before_ellipsis = draw(hps.integers(min_value=0, max_value=batch_rank))
for i in range(nslc_before_ellipsis):
slc = draw(
hps.one_of(
hps.integers(min_value=0, max_value=batch_shape[i] - 1),
arbitrary_slices))
slices.append(slc)
# 2. Decide whether or not to add an Ellipsis; if using, updating the indexing
# used (e.g. batch_shape[i]) to identify safe bounds.
has_ellipsis = draw(hps.booleans().map(lambda x: (Ellipsis, x)))[1]
nslc_after_ellipsis = draw(
hps.integers(min_value=0, max_value=batch_rank - nslc_before_ellipsis))
if has_ellipsis:
slices.append(Ellipsis)
remain_start, remain_end = (batch_rank - nslc_after_ellipsis, batch_rank)
else:
remain_start = nslc_before_ellipsis
remain_end = nslc_before_ellipsis + nslc_after_ellipsis
# 3. Choose 0 to [remaining_dims] slices to come last.
for i in range(remain_start, remain_end):
slc = draw(
hps.one_of(
hps.integers(min_value=0, max_value=batch_shape[i] - 1),
arbitrary_slices))
slices.append(slc)
# 4. Decide where to insert between 0 and 3 newaxis slices.
newaxis_positions = draw(
hps.lists(hps.integers(min_value=0, max_value=len(slices)), max_size=3))
for i in sorted(newaxis_positions, reverse=True):
slices.insert(i, tf.newaxis)
slices = tuple(slices)
# Since `d[0]` ==> `d.__getitem__(0)` instead of `d.__getitem__((0,))`;
# and similarly `d[:3]` ==> `d.__getitem__(slice(None, 3))` instead of
# `d.__getitem__((slice(None, 3),))`; it is useful to test such scenarios.
if len(slices) == 1 and draw(hps.booleans()):
# Sometimes only a single item non-tuple.
return slices[0]
return slices
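# A draw for batch_shape [3, 4] might come out as e.g. (1, Ellipsis, tf.newaxis)
# or simply as the bare integer 2 (when only a single slice is drawn); both are
# legal indexing expressions for a distribution with that batch shape.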
def stringify_slices(slices):
"""Returns a list of strings describing the items in `slices`.
Each returned string (in order) encodes what to do with one dimension of the
slicee:
- That number for a single integer slice;
- 'a:b:c' for a start-stop-step slice, omitting any missing components;
- 'tf.newaxis' for an axis insertion; or
- The ellipsis '...' for an arbitrary-rank gap.
Args:
slices: A single-dimension slice or a Python tuple of single-dimension
slices.
Returns:
pretty_slices: A list of Python strings encoding each slice.
"""
pretty_slices = []
slices = slices if isinstance(slices, tuple) else (slices,)
for slc in slices:
if slc == Ellipsis:
pretty_slices.append('...')
elif isinstance(slc, slice):
pretty_slices.append('{}:{}:{}'.format(
*['' if s is None else s for s in (slc.start, slc.stop, slc.step)]))
elif isinstance(slc, int) or tf.is_tensor(slc):
pretty_slices.append(str(slc))
elif slc is tf.newaxis:
pretty_slices.append('tf.newaxis')
else:
raise ValueError('Unexpected slice type: {}'.format(type(slc)))
return pretty_slices
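# Worked example of the encoding above (inputs chosen for illustration):
# stringify_slices((0, slice(None, 3), Ellipsis, tf.newaxis)) returns
# ['0', ':3:', '...', 'tf.newaxis'].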
def prime_factors(v):
"""Compute the prime factors of v."""
factors = []
primes = []
factor = 2
while v > 1:
while any(factor % p == 0 for p in primes):
factor += 1
primes.append(factor)
while v % factor == 0:
factors.append(factor)
v //= factor
return factors
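# For example, prime_factors(12) returns [2, 2, 3], and prime_factors(1)
# returns [] because the loop body never runs.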
@hps.composite
def reshapes_of(draw, shape, max_ndims=4):
"""Strategy for valid reshapes of the given shape, rank at most max_ndims."""
factors = draw(hps.permutations(
prime_factors(tensorshape_util.num_elements(shape))))
split_points = sorted(draw(
hps.lists(hps.integers(min_value=0, max_value=len(factors)),
min_size=0, max_size=max_ndims - 1)))
result = ()
for start, stop in zip([0] + split_points, split_points + [len(factors)]):
result += (int(np.prod(factors[start:stop])),)
return result
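# Sketch of what this strategy can yield (assuming the input shape has 12
# elements): the prime factors [2, 2, 3] are permuted and split into groups, so
# draws include (12,), (3, 4), (2, 6) or (2, 2, 3), each a valid reshape of the
# original shape with rank at most max_ndims.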
def assert_shapes_unchanged(target_shaped_dict, possibly_bcast_dict):
for param, target_param_val in six.iteritems(target_shaped_dict):
np.testing.assert_array_equal(
tensorshape_util.as_list(target_param_val.shape),
tensorshape_util.as_list(possibly_bcast_dict[param].shape))
@hps.composite
def base_distribution_unconstrained_params(draw,
dist_name,
batch_shape=None,
event_dim=None,
enable_vars=False,
param_strategy_fn=None,
params=None):
"""Strategy for drawing unconstrained parameters of a base Distribution.
This does not draw parameters for compound distributions like `Independent`,
`MixtureSameFamily`, or `TransformedDistribution`; only base Distributions
that do not accept other Distributions as arguments.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
dist_name: Optional Python `str`. If given, the produced distributions
will all have this type.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
Distribution. Hypothesis will pick a batch shape if omitted.
event_dim: Optional Python int giving the size of each of the
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}.
param_strategy_fn: Optional callable with signature
`strategy = param_strategy_fn(shape, dtype, constraint_fn)`. If provided,
overrides the default strategy for generating float-valued parameters.
Default value: `None`.
params: An optional set of Distribution parameters. If params are not
provided, Hypothesis will choose a set of parameters.
Returns:
dists: A strategy for drawing Distribution parameters with the specified
`batch_shape` (or an arbitrary one if omitted).
"""
if params is not None:
assert batch_shape is not None, ('Need to pass in valid `batch_shape` when'
' passing in `params`.')
return params, batch_shape
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
# Draw raw parameters
if dist_name not in INSTANTIABLE_BASE_DISTS:
raise ValueError('Unknown Distribution name {}'.format(dist_name))
params_event_ndims = INSTANTIABLE_BASE_DISTS[dist_name].params_event_ndims
params_kwargs = draw(
tfp_hps.broadcasting_params(
batch_shape,
params_event_ndims,
event_dim=event_dim,
enable_vars=enable_vars,
constraint_fn_for=lambda param: constraint_for(dist_name, param),
mutex_params=MUTEX_PARAMS,
param_strategy_fn=param_strategy_fn))
hp.note('Forming dist {} with raw parameters {}'.format(dist_name,
params_kwargs))
return params_kwargs, batch_shape
def constrain_params(params_unconstrained, dist_name):
"""Constrains a parameters dictionary to a distribution's parameter space."""
# Constrain them to legal values
params_constrained = constraint_for(dist_name)(params_unconstrained)
# Sometimes the "distribution constraint" fn may replace c2t-tracking
# DeferredTensor params with Tensor params (e.g. fix_triangular). In such
# cases, we preserve the c2t-tracking DeferredTensors by wrapping them but
# ignoring the value. We similarly reinstate raw tf.Variables, so they
# appear in the distribution's `variables` list and can be initialized.
for k in params_constrained:
if (k in params_unconstrained and
isinstance(params_unconstrained[k],
(tfp_util.DeferredTensor, tf.Variable))
and params_unconstrained[k] is not params_constrained[k]):
def constrained_value(v, val=params_constrained[k]): # pylint: disable=cell-var-from-loop
# While the gradient to v will be 0, we only care about the c2t
# counts.
return v * 0 + val
params_constrained[k] = tfp_util.DeferredTensor(
params_unconstrained[k], constrained_value)
assert_shapes_unchanged(params_unconstrained, params_constrained)
hp.note('Forming dist {} with constrained parameters {}'.format(
dist_name, params_constrained))
return params_constrained
def modify_params(params, dist_name, validate_args):
params = dict(params)
params['validate_args'] = validate_args
if dist_name in ['Wishart', 'WishartTriL']:
# With the default `input_output_cholesky = False`, Wishart occasionally
# produces samples for which the Cholesky decompositions fail, causing
# an error in testDistribution when `log_prob` is called on a sample.
params['input_output_cholesky'] = True
return params
@hps.composite
def base_distributions(draw,
dist_name=None,
batch_shape=None,
event_dim=None,
enable_vars=False,
eligibility_filter=lambda name: True,
params=None,
param_strategy_fn=None,
validate_args=True):
"""Strategy for drawing arbitrary base Distributions.
This does not draw compound distributions like `Independent`,
`MixtureSameFamily`, or `TransformedDistribution`; only base Distributions
that do not accept other Distributions as arguments.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
dist_name: Optional Python `str`. If given, the produced distributions
will all have this type.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
Distribution. Hypothesis will pick a batch shape if omitted.
event_dim: Optional Python int giving the size of each of the
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}.
eligibility_filter: Optional Python callable. Blacklists some Distribution
class names so they will not be drawn at the top level.
params: An optional set of Distribution parameters. If params are not
provided, Hypothesis will choose a set of parameters.
param_strategy_fn: Optional callable with signature
`strategy = param_strategy_fn(shape, dtype, constraint_fn)`. If provided,
overrides the default strategy for generating float-valued parameters.
Default value: `None`.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing Distributions with the specified `batch_shape`
(or an arbitrary one if omitted).
"""
if dist_name is None:
names = [k for k in INSTANTIABLE_BASE_DISTS if eligibility_filter(k)]
dist_name = draw(hps.sampled_from(sorted(names)))
if dist_name == 'Empirical':
variants = [k for k in INSTANTIABLE_BASE_DISTS
if eligibility_filter(k) and 'Empirical' in k]
dist_name = draw(hps.sampled_from(sorted(variants)))
if dist_name == 'SphericalUniform':
return draw(spherical_uniforms(
batch_shape=batch_shape, event_dim=event_dim,
validate_args=validate_args))
if params is None:
params_unconstrained, batch_shape = draw(
base_distribution_unconstrained_params(
dist_name,
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
param_strategy_fn=param_strategy_fn))
params = constrain_params(params_unconstrained, dist_name)
params = modify_params(params, dist_name, validate_args=validate_args)
# Actually construct the distribution
dist_cls = INSTANTIABLE_BASE_DISTS[dist_name].cls
result_dist = dist_cls(**params)
# Check that the batch shape came out as expected
if batch_shape != result_dist.batch_shape:
msg = ('Distributions strategy generated a bad batch shape '
'for {}, should have been {}.').format(result_dist, batch_shape)
raise AssertionError(msg)
return result_dist
def depths():
return hps.integers(min_value=0, max_value=4)
def params_used(dist):
return [k for k, v in six.iteritems(dist.parameters) if v is not None]
@hps.composite
def spherical_uniforms(
draw, batch_shape=None, event_dim=None, validate_args=True):
"""Strategy for drawing `SphericalUniform` distributions.
The underlying distribution is drawn from the `distributions` strategy.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`SphericalUniform` distribution.
event_dim: Optional Python int giving the size of the
distribution's event dimension.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `UniformSphere` distributions with the
specified `batch_shape` (or an arbitrary one if omitted).
"""
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes(min_ndims=0, max_side=4))
if event_dim is None:
event_dim = draw(hps.integers(min_value=1, max_value=10))
result_dist = tfd.SphericalUniform(
dimension=event_dim, batch_shape=batch_shape, validate_args=validate_args)
return result_dist
@hps.composite
def batch_reshapes(
draw, batch_shape=None, event_dim=None,
enable_vars=False, depth=None,
eligibility_filter=lambda name: True, validate_args=True):
"""Strategy for drawing `BatchReshape` distributions.
The underlying distribution is drawn from the `distributions` strategy.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`BatchReshape` distribution. Note that the underlying distribution will
in general have a different batch shape, to make the reshaping
non-trivial. Hypothesis will pick one if omitted.
event_dim: Optional Python int giving the size of each of the underlying
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}
depth: Python `int` giving maximum nesting depth of compound Distributions.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `BatchReshape` distributions with the
specified `batch_shape` (or an arbitrary one if omitted).
"""
if depth is None:
depth = draw(depths())
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes(min_ndims=1, max_side=13))
underlying_batch_shape = draw(reshapes_of(batch_shape))
underlying = draw(
distributions(
batch_shape=underlying_batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
depth=depth - 1,
eligibility_filter=eligibility_filter,
validate_args=validate_args))
hp.note('Forming BatchReshape with underlying dist {}; '
'parameters {}; batch_shape {}'.format(
underlying, params_used(underlying), batch_shape))
result_dist = tfd.BatchReshape(
underlying, batch_shape=batch_shape, validate_args=True)
return result_dist
@hps.composite
def independents(
draw, batch_shape=None, event_dim=None,
enable_vars=False, depth=None, eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing `Independent` distributions.
The underlying distribution is drawn from the `distributions` strategy.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`Independent` distribution. Note that the underlying distribution will in
general have a higher-rank batch shape, to make room for reinterpreting
some of those dimensions as the `Independent`'s event. Hypothesis will
pick one if omitted.
event_dim: Optional Python int giving the size of each of the underlying
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}
depth: Python `int` giving maximum nesting depth of compound Distributions.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `Independent` distributions with the specified
`batch_shape` (or an arbitrary one if omitted).
"""
if depth is None:
depth = draw(depths())
reinterpreted_batch_ndims = draw(hps.integers(min_value=0, max_value=2))
if batch_shape is None:
batch_shape = draw(
tfp_hps.shapes(min_ndims=reinterpreted_batch_ndims))
else: # This independent adds some batch dims to its underlying distribution.
batch_shape = tensorshape_util.concatenate(
batch_shape,
draw(tfp_hps.shapes(
min_ndims=reinterpreted_batch_ndims,
max_ndims=reinterpreted_batch_ndims)))
underlying = draw(
distributions(
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
depth=depth - 1,
eligibility_filter=eligibility_filter,
validate_args=validate_args))
hp.note('Forming Independent with underlying dist {}; '
'parameters {}; reinterpreted_batch_ndims {}'.format(
underlying, params_used(underlying), reinterpreted_batch_ndims))
result_dist = tfd.Independent(
underlying,
reinterpreted_batch_ndims=reinterpreted_batch_ndims,
validate_args=validate_args)
expected_shape = batch_shape[:len(batch_shape) - reinterpreted_batch_ndims]
if expected_shape != result_dist.batch_shape:
msg = ('Independent strategy generated a bad batch shape '
'for {}, should have been {}.').format(result_dist, expected_shape)
raise AssertionError(msg)
return result_dist
@hps.composite
def samples(
draw, batch_shape=None, event_dim=None,
enable_vars=False, depth=None, eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing `Sample` distributions.
The underlying distribution is drawn from the `distributions` strategy.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`Sample` distribution. Hypothesis will pick one if omitted.
event_dim: Optional Python int giving the size of each of the underlying
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}
depth: Python `int` giving maximum nesting depth of compound Distributions.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `Sample` distributions with the specified
`batch_shape` (or an arbitrary one if omitted).
"""
if depth is None:
depth = draw(depths())
if event_dim is None:
event_dim = draw(hps.integers(min_value=2, max_value=6))
sample_shape = draw(hps.lists(hps.just(event_dim), min_size=0, max_size=2))
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
underlying = draw(
distributions(
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
depth=depth - 1,
eligibility_filter=eligibility_filter,
validate_args=validate_args))
hp.note('Forming Sample with underlying dist {}; '
'parameters {}; sample_shape {}'.format(
underlying, params_used(underlying), sample_shape))
result_dist = tfd.Sample(
underlying,
sample_shape=sample_shape,
validate_args=validate_args)
if batch_shape != result_dist.batch_shape:
msg = ('`Sample` strategy generated a bad batch shape '
'for {}, should have been {}.').format(result_dist, batch_shape)
raise AssertionError(msg)
return result_dist
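# Hedged shape sketch (illustrative values): Sample appends `sample_shape`
# to the event of the underlying distribution while leaving the batch shape
# alone, mirroring the assertion at the end of the strategy above.
#
#   d = tfd.Sample(tfd.Normal(0., 1.), sample_shape=[5])
#   d.batch_shape   # []
#   d.event_shape   # [5]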
@hps.composite
def transformed_distributions(draw,
batch_shape=None,
event_dim=None,
enable_vars=False,
depth=None,
eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing `TransformedDistribution`s.
The transforming bijector is drawn from the
`bijectors.hypothesis_testlib.unconstrained_bijectors` strategy.
The underlying distribution is drawn from the `distributions` strategy, except
that it must be compatible with the bijector according to
`bijectors.hypothesis_testlib.distribution_filter_for` (these generally check
that vector bijectors are not combined with scalar distributions, etc).
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`TransformedDistribution`. The underlying distribution will sometimes
have the same `batch_shape`, and sometimes have scalar batch shape.
Hypothesis will pick a `batch_shape` if omitted.
event_dim: Optional Python int giving the size of each of the underlying
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}
depth: Python `int` giving maximum nesting depth of compound Distributions.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `TransformedDistribution`s with the specified
`batch_shape` (or an arbitrary one if omitted).
"""
if depth is None:
depth = draw(depths())
bijector = draw(bijector_hps.unconstrained_bijectors(
validate_args=validate_args))
hp.note('Drawing TransformedDistribution with bijector {}'.format(bijector))
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
def eligibility_fn(name):
if not eligibility_filter(name):
return False
return bijector_hps.distribution_eligilibility_filter_for(bijector)(name)
underlyings = distributions(
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
depth=depth - 1,
eligibility_filter=eligibility_fn,
validate_args=validate_args).filter(
bijector_hps.distribution_filter_for(bijector))
to_transform = draw(underlyings)
hp.note('Forming TransformedDistribution with '
'underlying distribution {}; parameters {}'.format(
to_transform, params_used(to_transform)))
result_dist = tfd.TransformedDistribution(
bijector=bijector,
distribution=to_transform,
validate_args=validate_args)
if batch_shape != result_dist.batch_shape:
msg = ('TransformedDistribution strategy generated a bad batch shape '
'for {}, should have been {}.').format(result_dist, batch_shape)
raise AssertionError(msg)
return result_dist
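# Hedged sketch (illustrative choice of bijector; assumes a `tfb` alias for
# `tfp.bijectors`, which is not shown in this excerpt): a bijector-wrapped
# base distribution of the kind this strategy draws.
#
#   d = tfd.TransformedDistribution(distribution=tfd.Normal(0., 1.),
#                                   bijector=tfb.Exp())
#   # d.sample() is exp(z) for a standard-normal z, i.e. log-normal draws,
#   # and d.batch_shape matches the underlying Normal's batch shape.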
@hps.composite
def quantized_distributions(draw,
batch_shape=None,
event_dim=None,
enable_vars=False,
eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing `QuantizedDistribution`s.
The underlying distribution is drawn from the `base_distributions` strategy.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`QuantizedDistribution`. Hypothesis will pick a `batch_shape` if omitted.
event_dim: Optional Python int giving the size of each of the underlying
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all Tensors, never Variables or DeferredTensor.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `QuantizedDistribution`s with the specified
`batch_shape` (or an arbitrary one if omitted).
"""
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
low_quantile = draw(
hps.one_of(
hps.just(None),
hps.floats(min_value=0.01, max_value=0.7)))
high_quantile = draw(
hps.one_of(
hps.just(None),
hps.floats(min_value=0.3, max_value=.99)))
def ok(name):
return eligibility_filter(name) and name in QUANTIZED_BASE_DISTS
underlyings = base_distributions(
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
eligibility_filter=ok,
validate_args=validate_args,
)
underlying = draw(underlyings)
if high_quantile is not None:
high_quantile = tf.convert_to_tensor(high_quantile, dtype=underlying.dtype)
if low_quantile is not None:
low_quantile = tf.convert_to_tensor(low_quantile, dtype=underlying.dtype)
if high_quantile is not None:
high_quantile = ensure_high_gt_low(low_quantile, high_quantile)
hp.note('Drawing QuantizedDistribution with underlying distribution'
' {}'.format(underlying))
try:
low = None if low_quantile is None else underlying.quantile(low_quantile)
high = None if high_quantile is None else underlying.quantile(high_quantile)
except NotImplementedError:
# The following code makes ReproducibilityTest flaky in graph mode (but not
# eager). Failures are due either to partial mismatch in the samples in
# ReproducibilityTest or to `low` and/or `high` being NaN. For now, to avoid
# this, we set `low` and `high` to `None` for distributions not implementing
# `quantile`.
# seed = test_util.test_seed(hardcoded_seed=123)
# low = (None if low_quantile is None
# else underlying.sample(low_quantile.shape, seed=seed))
# high = (None if high_quantile is None else
# underlying.sample(high_quantile.shape, seed=seed))
low = None
high = None
# Ensure that `low` and `high` are ints contained in distribution support
# and span at least a few bins.
if high is not None:
high = tf.clip_by_value(high, -2**23, 2**23)
high = tf.math.ceil(high + 5.)
if low is not None:
low = tf.clip_by_value(low, -2**23, 2**23)
low = tf.math.ceil(low)
result_dist = tfd.QuantizedDistribution(
distribution=underlying,
low=low,
high=high,
validate_args=validate_args)
return result_dist
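# Hedged sketch (illustrative numbers): the kind of object the strategy above
# builds when `low`/`high` are resolvable from quantiles.
#
#   d = tfd.QuantizedDistribution(distribution=tfd.Normal(loc=5., scale=2.),
#                                 low=1., high=10.)
#   # Samples are the underlying Normal draws rounded up to integers and
#   # clipped into [1, 10].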
@hps.composite
def mixtures_same_family(draw,
batch_shape=None,
event_dim=None,
enable_vars=False,
depth=None,
eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing `MixtureSameFamily` distributions.
The component distribution is drawn from the `distributions` strategy.
The Categorical mixture distributions are either shared across all batch
members, or drawn independently for the full batch (as required by
`MixtureSameFamily`).
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
`MixtureSameFamily` distribution. The component distribution will have a
batch shape of 1 rank higher (for the components being mixed). Hypothesis
will pick a batch shape if omitted.
event_dim: Optional Python int giving the size of each of the component
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}
depth: Python `int` giving maximum nesting depth of compound Distributions.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `MixtureSameFamily` distributions with the
specified `batch_shape` (or an arbitrary one if omitted).
"""
if depth is None:
depth = draw(depths())
if batch_shape is None:
# Ensure the components dist has at least one batch dim (a component dim).
batch_shape = draw(tfp_hps.shapes(min_ndims=1, min_lastdimsize=2))
else: # This mixture adds a batch dim to its underlying components dist.
batch_shape = tensorshape_util.concatenate(
batch_shape,
draw(tfp_hps.shapes(min_ndims=1, max_ndims=1, min_lastdimsize=2)))
# Cannot put a BatchReshape into a MixtureSameFamily, because the former
# doesn't support broadcasting, and the latter relies on it. b/161984806.
def nested_eligibility_filter(dist_name):
if dist_name == 'BatchReshape':
return False
return eligibility_filter(dist_name)
component = draw(
distributions(
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
eligibility_filter=nested_eligibility_filter,
depth=depth - 1,
validate_args=validate_args))
hp.note('Drawing MixtureSameFamily with component {}; parameters {}'.format(
component, params_used(component)))
# scalar or same-shaped categorical?
mixture_batch_shape = draw(
hps.one_of(hps.just(batch_shape[:-1]), hps.just(tf.TensorShape([]))))
mixture_dist = draw(base_distributions(
dist_name='Categorical',
batch_shape=mixture_batch_shape,
event_dim=tensorshape_util.as_list(batch_shape)[-1],
enable_vars=enable_vars,
validate_args=validate_args))
hp.note(('Forming MixtureSameFamily with '
'mixture distribution {}; parameters {}').format(
mixture_dist, params_used(mixture_dist)))
result_dist = tfd.MixtureSameFamily(
components_distribution=component,
mixture_distribution=mixture_dist,
validate_args=validate_args)
if batch_shape[:-1] != result_dist.batch_shape:
msg = ('MixtureSameFamily strategy generated a bad batch shape '
'for {}, should have been {}.').format(result_dist, batch_shape[:-1])
raise AssertionError(msg)
return result_dist
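# Hedged sketch (illustrative weights and components): a minimal
# MixtureSameFamily showing the shape relationship the strategy above checks;
# the components' trailing batch dimension becomes the mixture dimension.
#
#   d = tfd.MixtureSameFamily(
#       mixture_distribution=tfd.Categorical(probs=[0.3, 0.7]),
#       components_distribution=tfd.Normal(loc=[-1., 1.], scale=[0.1, 0.5]))
#   d.batch_shape   # [] -- the components' [2] batch was consumed by mixing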
@hps.composite
def mixtures(draw,
batch_shape=None,
event_dim=None,
enable_vars=False,
depth=None,
eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing `Mixture` distributions.
The component distributions are drawn from the `distributions` strategy.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
      `Mixture` distribution. The component distributions share this same
      batch shape. Hypothesis will pick a batch shape if omitted.
event_dim: Optional Python int giving the size of each of the component
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}
depth: Python `int` giving maximum nesting depth of compound Distributions.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing `Mixture` distributions with the specified
`batch_shape` (or an arbitrary one if omitted).
"""
if depth is None:
depth = draw(depths())
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
if event_dim is None:
event_dim = draw(hps.integers(min_value=2, max_value=6))
# TODO(b/169441746): Re-enable nesting MixtureSameFamily inside Mixture when
# the weird edge case gets fixed.
def nested_eligibility_filter(dist_name):
if dist_name in ['MixtureSameFamily']:
return False
return eligibility_filter(dist_name)
component_strategy = distributions(
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
eligibility_filter=nested_eligibility_filter,
depth=depth - 1,
validate_args=validate_args)
# Must ensure matching event shapes and dtypes.
c0 = draw(component_strategy)
components = [c0] + draw(hps.lists(
component_strategy.filter(
lambda d: (d.event_shape, d.dtype) == (c0.event_shape, c0.dtype)),
min_size=1, max_size=5))
hp.note('Drawing Mixture with components {}; parameters {}'.format(
components, [params_used(c) for c in components]))
cat = draw(base_distributions(
dist_name='Categorical',
batch_shape=batch_shape,
event_dim=len(components),
enable_vars=enable_vars,
validate_args=validate_args))
hp.note('Forming Mixture with cat distribution {}; parameters {}'.format(
cat, params_used(cat)))
result_dist = tfd.Mixture(
cat=cat, components=components,
validate_args=validate_args)
if batch_shape != result_dist.batch_shape:
msg = ('Mixture strategy generated a bad batch shape for {}, should have'
' been {}.').format(result_dist, batch_shape)
raise AssertionError(msg)
return result_dist
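# Hedged sketch (illustrative components): unlike MixtureSameFamily, Mixture
# takes a Python list of possibly heterogeneous components whose event shapes
# and dtypes must agree, which is what the `.filter(...)` in the strategy
# above enforces.
#
#   d = tfd.Mixture(
#       cat=tfd.Categorical(probs=[0.4, 0.6]),
#       components=[tfd.Normal(loc=0., scale=1.),
#                   tfd.Laplace(loc=1., scale=2.)])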
@hps.composite
def distributions(draw,
dist_name=None,
batch_shape=None,
event_dim=None,
enable_vars=False,
depth=None,
eligibility_filter=lambda name: True,
validate_args=True):
"""Strategy for drawing arbitrary Distributions.
This may draw compound distributions (i.e., `Independent`,
`MixtureSameFamily`, and/or `TransformedDistribution`), in which case the
underlying distributions are drawn recursively from this strategy as well.
Args:
draw: Hypothesis strategy sampler supplied by `@hps.composite`.
dist_name: Optional Python `str`. If given, the produced distributions
will all have this type.
batch_shape: An optional `TensorShape`. The batch shape of the resulting
Distribution. Hypothesis will pick a batch shape if omitted.
event_dim: Optional Python int giving the size of each of the
distribution's parameters' event dimensions. This is shared across all
parameters, permitting square event matrices, compatible location and
scale Tensors, etc. If omitted, Hypothesis will choose one.
enable_vars: TODO(bjp): Make this `True` all the time and put variable
initialization in slicing_test. If `False`, the returned parameters are
all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`
`tfp.util.TransformedVariable`}.
depth: Python `int` giving maximum nesting depth of compound Distributions.
      If `None`, Hypothesis will choose one, with a bias towards shallow
nests.
eligibility_filter: Optional Python callable. Blocks some Distribution
class names so they will not be drawn.
validate_args: Python `bool`; whether to enable runtime assertions.
Returns:
dists: A strategy for drawing Distributions with the specified `batch_shape`
(or an arbitrary one if omitted).
Raises:
ValueError: If it doesn't know how to instantiate a Distribution of class
`dist_name`.
"""
if depth is None:
depth = draw(depths())
if dist_name is None and depth > 0:
bases = hps.just(None)
candidates = ['BatchReshape', 'Independent',
'MixtureSameFamily', 'TransformedDistribution']
names = [name for name in candidates if eligibility_filter(name)]
compounds = hps.one_of(map(hps.just, names))
dist_name = draw(hps.one_of([bases, compounds]))
if (dist_name is None
or dist_name in INSTANTIABLE_BASE_DISTS
or dist_name == 'Empirical'):
return draw(base_distributions(
dist_name,
batch_shape=batch_shape,
event_dim=event_dim,
enable_vars=enable_vars,
eligibility_filter=eligibility_filter,
validate_args=validate_args))
if dist_name == 'BatchReshape':
return draw(batch_reshapes(
batch_shape, event_dim, enable_vars, depth,
eligibility_filter, validate_args))
if dist_name == 'Independent':
return draw(independents(
batch_shape, event_dim, enable_vars, depth,
eligibility_filter, validate_args))
if dist_name == 'Sample':
return draw(samples(
batch_shape, event_dim, enable_vars, depth,
eligibility_filter, validate_args))
if dist_name == 'MixtureSameFamily':
return draw(mixtures_same_family(
batch_shape, event_dim, enable_vars, depth,
eligibility_filter, validate_args))
if dist_name == 'Mixture':
return draw(mixtures(
batch_shape, event_dim, enable_vars, depth,
eligibility_filter, validate_args))
if dist_name == 'TransformedDistribution':
return draw(transformed_distributions(
batch_shape, event_dim, enable_vars, depth,
eligibility_filter, validate_args))
if dist_name == 'QuantizedDistribution':
return draw(quantized_distributions(
batch_shape, event_dim, enable_vars,
eligibility_filter, validate_args))
raise ValueError('Unknown Distribution name {}'.format(dist_name))
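# Hedged usage sketch, not part of the original file: how a property-based
# test might draw from the top-level `distributions` strategy. The test name
# and property are illustrative; `hp` and `hps` are the hypothesis aliases
# this module already uses.
#
#   @hp.given(dist=distributions(enable_vars=False))
#   def test_can_sample_drawn_distribution(dist):
#     dist.sample()   # assertions about the drawn distribution would go here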
|
py | 1a343043337fd4904992429d2ff8cc905f1529e7 | from rest_framework import serializers
from .models import Entry
from django.contrib.auth.models import User
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=255, min_length=2)
first_name = serializers.CharField(max_length=255, min_length=2)
last_name = serializers.CharField(max_length=255, min_length=2)
password = serializers.CharField(max_length=65, min_length=8, write_only=True)
email = serializers.EmailField(max_length=255, min_length=4)
class Meta:
model = User
fields = [
'id',
'username',
'first_name',
'last_name',
'email'
]
def validate(self, attrs):
email = attrs.get('email', '')
username = attrs.get('username')
if User.objects.filter(email=email).exists():
raise serializers.ValidationError({'email': ('Email already in use')})
if User.objects.filter(username=username).exists():
            raise serializers.ValidationError({'username': ('Username already in use')})
return super().validate(attrs)
def create(self, validated_data):
return User.objects.create_user(**validated_data)
class EntrySerializer(serializers.ModelSerializer):
class Meta:
model = Entry
fields = [
'id',
'owner',
'title',
'state',
'lga',
'ward',
'PMV_name',
'geopoint',
'patientRecordAvailable',
'patientWithFebrileIllness',
'totalNoOfFeverCases',
'testToKnowCauseOfFever',
'typeOfTest',
'noOf5mRDTTestedFeverCases',
'noOfU5mRDTTestedFeverCases',
'noOf5mRDTTestedPositiveFeverCases',
'noOfU5mRDTTestedPositiveFeverCases',
'typeOfTreamentGivenToPositivePatient',
'typeOfTreamentGivenToFebrilePatientAndNotTested',
'IECMaterialAvailableOnDisplay',
'date'
]
# title = serializers.CharField(max_length=255)
# state = serializers.CharField(max_length=255)
# lga =serializers.CharField(max_length=255)
# ward = serializers.CharField(max_length=255)
# PMV_name = serializers.CharField(max_length=255)
# geopoint = serializers.CharField(max_length=255)
# patientRecordAvailable = serializers.BooleanField(default=True)
# patientWithFebrileIllness = serializers.BooleanField(default=False)
# totalNoOfFeverCases = serializers.CharField(max_length=255)
# testToKnowCauseOfFever = serializers.BooleanField(default=True)
# typeOfTest = serializers.CharField(max_length=255)
# noOf5mRDTTestedFeverCases = serializers.CharField(max_length=255)
# noOfU5mRDTTestedFeverCases = serializers.CharField(max_length=255)
# noOf5mRDTTestedPositiveFeverCases = serializers.CharField(max_length=255)
# noOfU5mRDTTestedPositiveFeverCases = serializers.CharField(max_length=255)
# typeOfTreamentGivenToPositivePatient = serializers.CharField(max_length=255)
# typeOfTreamentGivenToFebrilePatientAndNotTested = serializers.CharField(max_length=255)
# IECMaterialAvailableOnDisplay = serializers.BooleanField(default=True)
# date = serializers.DateTimeField()
# def create(self, validated_data):
# return Entry.objects.create(validated_data)
# def update(self, instance, validated_data):
# instance.title = validated_data.get('title', instance.title)
# instance.state = validated_data.get('state', instance.state)
# instance.lga = validated_data.get('lga', instance.lga)
# instance.ward = validated_data.get('ward', instance.ward)
# instance.PMV_name = validated_data.get('PMV_name', instance.PMV_name)
# instance.geopoint = validated_data.get('geopoint', instance.geopoint)
# instance.patientRecordAvailable = validated_data.get('patientRecordAvailable', instance.patientRecordAvailable)
# instance.patientWithFebrileIllness = validated_data.get('patientWithFebrileIllness', instance.patientWithFebrileIllness)
# instance.totalNoOfFeverCases = validated_data.get('totalNoOfFeverCases', instance.totalNoOfFeverCases)
# instance.testToKnowCauseOfFever = validated_data.get('testToKnowCauseOfFever', instance.testToKnowCauseOfFever)
# instance.typeOfTest = validated_data.get('typeOfTest', instance.typeOfTest)
# instance.noOf5mRDTTestedFeverCases = validated_data.get('noOf5mRDTTestedFeverCases', instance.noOf5mRDTTestedFeverCases)
# instance.noOfU5mRDTTestedFeverCases = validated_data.get('noOfU5mRDTTestedFeverCases', instance.noOfU5mRDTTestedFeverCases)
# instance.noOf5mRDTTestedPositiveFeverCases = validated_data.get('noOf5mRDTTestedPositiveFeverCases', instance.noOf5mRDTTestedPositiveFeverCases)
# instance.noOfU5mRDTTestedPositiveFeverCases = validated_data.get('noOfU5mRDTTestedPositiveFeverCases', instance.noOfU5mRDTTestedPositiveFeverCases)
# instance.typeOfTreamentGivenToPositivePatient = validated_data.get('typeOfTreamentGivenToPositivePatient', instance.typeOfTreamentGivenToPositivePatient)
# instance.typeOfTreamentGivenToFebrilePatientAndNotTested = validated_data.get('typeOfTreamentGivenToFebrilePatientAndNotTested', instance.typeOfTreamentGivenToFebrilePatientAndNotTested)
# instance.IECMaterialAvailableOnDisplay = validated_data.get('IECMaterialAvailableOnDisplay', instance.IECMaterialAvailableOnDisplay)
# instance.date = validated_data.get('date', instance.date)
# instance.save()
# return instance
|
py | 1a343047665686e285bf12eb37ea30ca7f907a41 | """An ellipse widget."""
from typing import Optional
from kivy.graphics.vertex_instructions import Ellipse as KivyEllipse
from kivy.graphics.context_instructions import Color, Rotate, Scale
from kivy.properties import NumericProperty
from mpfmc.uix.widget import Widget
MYPY = False
if MYPY: # pragma: no cover
from mpfmc.core.mc import MpfMc # pylint: disable-msg=cyclic-import,unused-import
class Ellipse(Widget):
"""An ellipse widget."""
widget_type_name = 'Ellipse'
animation_properties = ('x', 'y', 'width', 'pos', 'height', 'size', 'color',
'angle_start', 'angle_end', 'opacity', 'rotation', 'scale')
merge_settings = ('width', 'height')
def __init__(self, mc: "MpfMc", config: dict, key: Optional[str] = None, **kwargs) -> None:
del kwargs
super().__init__(mc=mc, config=config, key=key)
# Bind to all properties that when changed need to force
# the widget to be redrawn
self.bind(pos=self._draw_widget,
size=self._draw_widget,
color=self._draw_widget,
rotation=self._draw_widget,
scale=self._draw_widget,
segments=self._draw_widget,
angle_start=self._draw_widget,
angle_end=self._draw_widget)
self._draw_widget()
def _draw_widget(self, *args) -> None:
del args
if self.canvas is None:
return
anchor = (self.x - self.anchor_offset_pos[0], self.y - self.anchor_offset_pos[1])
self.canvas.clear()
with self.canvas:
Color(*self.color)
Rotate(angle=self.rotation, origin=anchor)
Scale(self.scale).origin = anchor
KivyEllipse(pos=self.pos, size=self.size,
segments=self.segments,
angle_start=self.angle_start,
angle_end=self.angle_end)
#
# Properties
#
segments = NumericProperty(180)
'''Defines how many segments will be used for drawing the ellipse. The
drawing will be smoother if you have many segments.
'''
angle_start = NumericProperty(0)
'''Specifies the starting angle, in degrees, of the disk portion of
the ellipse.
'''
angle_end = NumericProperty(360)
'''Specifies the ending angle, in degrees, of the disk portion of
the ellipse.
'''
rotation = NumericProperty(0)
scale = NumericProperty(1.0)
widget_classes = [Ellipse]
|
py | 1a3433933c80eb88689f837ab62a0098bdf86fc5 | # Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Driver for Analog Devices AD5248 digital potentiometer.
AD5248 is a two-channel potentiometer. The resistance Rwb between terminal W1
and B1, or W2 and B2, is determined by each RDAC byte register, which can be
programmed via I2C interface.
Output resistance Rwb(D) = D/256 * full resistance + 2 * wiper resistance
- D: RDAC register value
- full resistance: depend on spec, may be 2.5k, 10k, 50k, 100k Ohm
- wiper resistance: contact resistance on wiper, 160 Ohm in spec
Note that a greater D yields a greater Rwb. On the other hand, Rwa is the
complementary resistance between terminals W and A.
TODO(johnylin): add Rwa support if necessary.
For subtype 'rdac':
- set: set RDAC value
- get: read out RDAC value
For subtype 'r2p5k', 'r10k', 'r50k', 'r100k':
- set: specify a resistance value (in Ohm) within spec range. Since ad5248 is
only 256-step, servo will find the closest step to set.
(Note: you may not 'get' the same value after 'set' due to this)
- get: get equivalent output resistance value (in Ohm).
"""
import hw_driver
WIPER_RESISTANCE = 160
FULL_RESISTANCE_SPEC = {
'r2p5k': 2500,
'r10k': 10000,
'r50k': 50000,
'r100k': 100000
}
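# Worked example of the Rwb formula from the module docstring (numbers are
# illustrative): for the 'r10k' subtype (10000 Ohm full resistance) with RDAC
# byte D = 128,
#
#   Rwb = 128/256 * 10000 + 2 * 160 = 5000 + 320 = 5320 Ohm,
#
# which is the value _get_resistance_value() reports after _set_rdac(128).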
class Ad5248Error(hw_driver.HwDriverError):
"""Error occurred accessing AD5248."""
class ad5248(hw_driver.HwDriver):
"""Object to access drv=ad5248 controls."""
def __init__(self, interface, params):
"""Constructor.
Args:
interface: FTDI interface object to handle low-level communication to
control
params: dictionary of params needed to perform operations on ad5248
devices. All items are strings initially but should be cast to types
detailed below.
Mandatory Params:
child: integer, 7-bit i2c child address
port: integer, either 0 || 1
subtype: string, supporting 'rdac', 'r2p5k', 'r10k', 'r50k', and 'r100k'
Optional Params:
"""
super(ad5248, self).__init__(interface, params)
self._child = self._get_child()
self._port = self._get_port()
self._subtype = self._get_subtype()
def _get_child(self):
"""Check and return child param.
Returns:
child: 7-bit i2c address.
"""
if 'child' not in self._params:
raise Ad5248Error('getting child address')
child = int(self._params['child'], 0)
return child
def _get_port(self):
"""Check and return port param.
Returns:
port: port ( 0 | 1 ) on the ad5248.
"""
if 'port' not in self._params:
raise Ad5248Error('getting port')
port = int(self._params['port'], 0)
if port & 0x1 != port:
raise Ad5248Error('port value should be 0 | 1')
return port
def _get_subtype(self):
"""Check and return subtype param.
Returns:
subtype: subtype for full resistance spec.
"""
if 'subtype' not in self._params:
raise Ad5248Error('getting subtype')
subtype = self._params['subtype']
if subtype != 'rdac' and subtype not in FULL_RESISTANCE_SPEC:
raise Ad5248Error(
"subtype value should be 'rdac' or %s" % FULL_RESISTANCE_SPEC.keys())
return subtype
def _set_rdac(self, byte):
"""Sets RDAC register value of ad5248.
Args:
byte: 8-bit value. The format could be either a string '0xNN' or an
integer.
"""
if isinstance(byte, str):
byte = int(byte, 0)
if not 0 <= byte <= 255:
raise Ad5248Error('setting value out of range 0~255')
self._interface.wr_rd(self._child, [self._port << 7, byte])
def _set_resistance_value(self, value):
"""Sets real output resistance value of ad5248.
    Since the ad5248 is digital, only 256 resistance steps are supported.
    This function finds the step that is closest to, and not greater than,
    the given value.
Args:
value: an integer of proposed output resistance value.
"""
full_resistance = FULL_RESISTANCE_SPEC[self._subtype]
# Rwb(max) = full resistance - 1 LSB + 2 * wiper resistance
lsb_value = full_resistance / 256
max_value = full_resistance + 2 * WIPER_RESISTANCE - lsb_value
if not 0 <= value <= max_value:
raise Ad5248Error('setting value out of range 0~%d' % max_value)
if value <= 2 * WIPER_RESISTANCE:
write_byte = 0
else:
write_byte = (value - 2 * WIPER_RESISTANCE) * 256 / full_resistance
self._set_rdac(write_byte)
def _get_rdac(self):
"""Gets RDAC register value of ad5248.
Returns:
byte: 8-bit value as integer.
"""
values = self._interface.wr_rd(self._child, [self._port << 7], 1)
return values[0]
def _get_resistance_value(self):
"""Gets real output resistance value of ad5248.
Returns:
resistance: output resistance value by Ohm.
"""
rdac_value = self._get_rdac()
full_resistance = FULL_RESISTANCE_SPEC[self._subtype]
return rdac_value * full_resistance / 256 + 2 * WIPER_RESISTANCE
def _Set_rdac(self, byte):
self._set_rdac(byte)
def _Set_r2p5k(self, value):
self._set_resistance_value(value)
def _Set_r10k(self, value):
self._set_resistance_value(value)
def _Set_r50k(self, value):
self._set_resistance_value(value)
def _Set_r100k(self, value):
self._set_resistance_value(value)
def _Get_rdac(self):
return self._get_rdac()
def _Get_r2p5k(self):
return self._get_resistance_value()
def _Get_r10k(self):
return self._get_resistance_value()
def _Get_r50k(self):
return self._get_resistance_value()
def _Get_r100k(self):
return self._get_resistance_value()
|
py | 1a3433f06e7e5632ca3f4d57ba9e40d381b8897b | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Utils for scripts to interface with the outside world."""
import time
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, Tuple
_SCRIPT_START_TIME = time.time()
_CLEAR_LINE = "\x1b[K"
_COLOR_BLUE = "\x1b[34m"
_COLOR_RED = "\x1b[31m"
_COLOR_GREEN = "\x1b[32m"
_COLOR_RESET = "\x1b[0m"
def die(message: str) -> None:
raise SystemExit(f"{_COLOR_RED}{message}{_COLOR_RESET}")
def green(message: str) -> None:
print(f"{_COLOR_GREEN}{message}{_COLOR_RESET}")
def banner(message: str) -> None:
minutes, seconds = elapsed_time()
print(f"{_COLOR_BLUE}[=== {minutes:02d}:{seconds:02d} {message} ===]{_COLOR_RESET}")
def elapsed_time() -> Tuple[int, int]:
now = time.time()
elapsed_seconds = int(now - _SCRIPT_START_TIME)
return elapsed_seconds // 60, elapsed_seconds % 60
@contextmanager
def travis_section(slug: str, message: str) -> Iterator[None]:
travis_fold_state = "/tmp/.travis_fold_current"
def travis_fold(action: str, target: str) -> None:
print(f"travis_fold:{action}:{target}\r{_CLEAR_LINE}", end="")
def read_travis_fold_state() -> str:
with open(travis_fold_state, "r") as f:
return f.readline()
def write_slug_to_travis_fold_state() -> None:
with open(travis_fold_state, "w") as f:
f.write(slug)
def remove_travis_fold_state() -> None:
Path(travis_fold_state).unlink()
travis_fold("start", slug)
write_slug_to_travis_fold_state()
banner(message)
try:
yield
finally:
travis_fold("end", read_travis_fold_state())
remove_travis_fold_state()
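# Illustrative usage sketch (the slug, message, and helper below are made up,
# not part of this module):
#
#   with travis_section("bootstrap", "Bootstrapping pants"):
#       run_bootstrap()
#
# The fold marker opens before the banner prints and is closed when the block
# exits, even if the body raises.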
|
py | 1a3435a207593307974afa12891f9316a627dc4c | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <[email protected]>
# Chris Houseknecht, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_publicipaddress
version_added: "2.1"
short_description: Manage Azure Public IP Addresses
description:
- Create, update and delete a Public IP address.
- Allows setting and updating the address allocation method and domain name label.
- Use the M(azure_rm_networkinterface) module to associate a Public IP with a network interface.
options:
resource_group:
description:
- Name of resource group with which the Public IP is associated.
required: true
allocation_method:
description:
- Control whether the assigned Public IP remains permanently assigned to the object.
            - If not set to C(Static), the IP address may change any time an associated virtual machine is power cycled.
choices:
- dynamic
- static
- Static
- Dynamic
default: dynamic
domain_name:
description:
            - The customizable portion of the FQDN assigned to the public IP address. This is an explicit setting.
- If no value is provided, any existing value will be removed on an existing public IP.
aliases:
- domain_name_label
name:
description:
- Name of the Public IP.
required: true
state:
description:
            - Assert the state of the Public IP. Use C(present) to create or update a Public IP and C(absent) to delete it.
default: present
choices:
- absent
- present
location:
description:
- Valid Azure location. Defaults to location of the resource group.
sku:
description:
- The public IP address SKU.
choices:
- basic
- standard
- Basic
- Standard
version_added: "2.6"
ip_tags:
description:
- List of IpTag associated with the public IP address.
- Each element should contain type:value pair.
suboptions:
type:
description:
- Sets the ip_tags type.
value:
description:
- Sets the ip_tags value.
version_added: "2.8"
idle_timeout:
description:
- Idle timeout in minutes.
type: int
version_added: "2.8"
version:
description:
- The public IP address version.
choices:
- ipv4
- ipv6
default: ipv4
version_added: "2.8"
extends_documentation_fragment:
- azure
- azure_tags
author:
- Chris Houseknecht (@chouseknecht)
- Matt Davis (@nitzmahone)
'''
EXAMPLES = '''
- name: Create a public ip address
azure_rm_publicipaddress:
resource_group: myResourceGroup
name: my_public_ip
allocation_method: static
domain_name: foobar
- name: Delete public ip
azure_rm_publicipaddress:
resource_group: myResourceGroup
name: my_public_ip
state: absent
'''
RETURN = '''
state:
description:
- Facts about the current state of the object.
returned: always
type: complex
contains:
dns_settings:
description:
- The FQDN of the DNS record associated with the public IP address.
returned: always
type: dict
sample: {
"domain_name_label": "ansible-b57dc95985712e45eb8b9c2e",
"fqdn": "ansible-b57dc95985712e45eb8b9c2e.eastus.cloudapp.azure.com",
"reverse_fqdn": null
}
etag:
description:
- A unique read-only string that changes whenever the resource is updated.
returned: always
type: str
sample: "W/'1905ee13-7623-45b1-bc6b-4a12b2fb9d15'"
idle_timeout_in_minutes:
description:
- The idle timeout of the public IP address.
returned: always
type: int
sample: 4
ip_address:
description:
                - The public IP address assigned to the resource.
returned: always
type: str
sample: 52.160.103.93
location:
description:
- Resource location.
returned: always
type: str
example: eastus
name:
description:
- Name of the Public IP Address.
returned: always
type: str
example: publicip002
provisioning_state:
description:
- The provisioning state of the Public IP resource.
returned: always
type: str
example: Succeeded
public_ip_allocation_method:
description:
- The public IP allocation method.
returned: always
type: str
sample: static
public_ip_address_version:
description:
- The public IP address version.
returned: always
type: str
sample: ipv4
sku:
description:
- The public IP address SKU.
returned: always
type: str
sample: Basic
tags:
description:
- The resource tags.
returned: always
type: dict
sample: {
"delete": "on-exit",
"testing": "testing"
}
type:
description:
- Type of the resource.
returned: always
type: str
sample: "Microsoft.Network/publicIPAddresses"
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils._text import to_native
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
def pip_to_dict(pip):
result = dict(
name=pip.name,
type=pip.type,
location=pip.location,
tags=pip.tags,
public_ip_allocation_method=pip.public_ip_allocation_method.lower(),
public_ip_address_version=pip.public_ip_address_version.lower(),
dns_settings=dict(),
ip_address=pip.ip_address,
idle_timeout_in_minutes=pip.idle_timeout_in_minutes,
provisioning_state=pip.provisioning_state,
etag=pip.etag,
sku=pip.sku.name
)
if pip.dns_settings:
result['dns_settings']['domain_name_label'] = pip.dns_settings.domain_name_label
result['dns_settings']['fqdn'] = pip.dns_settings.fqdn
result['dns_settings']['reverse_fqdn'] = pip.dns_settings.reverse_fqdn
if pip.ip_tags:
result['ip_tags'] = [dict(type=to_native(x.ip_tag_type), value=to_native(x.tag)) for x in pip.ip_tags]
return result
ip_tag_spec = dict(
type=dict(type='str', required=True),
value=dict(type='str', required=True)
)
class AzureRMPublicIPAddress(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
location=dict(type='str'),
version=dict(type='str', default='ipv4', choices=['ipv4', 'ipv6']),
allocation_method=dict(type='str', default='dynamic', choices=['Dynamic', 'Static', 'dynamic', 'static']),
domain_name=dict(type='str', aliases=['domain_name_label']),
sku=dict(type='str', choices=['Basic', 'Standard', 'basic', 'standard']),
ip_tags=dict(type='list', elements='dict', options=ip_tag_spec),
idle_timeout=dict(type='int')
)
self.resource_group = None
self.name = None
self.location = None
self.state = None
self.tags = None
self.allocation_method = None
self.domain_name = None
self.sku = None
self.version = None
self.ip_tags = None
self.idle_timeout = None
self.results = dict(
changed=False,
state=dict()
)
super(AzureRMPublicIPAddress, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
results = dict()
changed = False
pip = None
# capitalize the sku and allocation_method. basic => Basic, Basic => Basic.
self.allocation_method = self.allocation_method.capitalize() if self.allocation_method else None
self.sku = self.sku.capitalize() if self.sku else None
self.version = 'IPv4' if self.version == 'ipv4' else 'IPv6'
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
# Set default location
self.location = resource_group.location
try:
self.log("Fetch public ip {0}".format(self.name))
pip = self.network_client.public_ip_addresses.get(self.resource_group, self.name)
self.check_provisioning_state(pip, self.state)
self.log("PIP {0} exists".format(self.name))
if self.state == 'present':
results = pip_to_dict(pip)
                domain_label = results['dns_settings'].get('domain_name_label')
                if self.domain_name is not None and ((self.domain_name or domain_label) and self.domain_name != domain_label):
self.log('CHANGED: domain_name_label')
changed = True
results['dns_settings']['domain_name_label'] = self.domain_name
if self.allocation_method.lower() != results['public_ip_allocation_method'].lower():
self.log("CHANGED: allocation_method")
changed = True
results['public_ip_allocation_method'] = self.allocation_method
if self.sku and self.sku != results['sku']:
self.log("CHANGED: sku")
changed = True
results['sku'] = self.sku
if self.version.lower() != results['public_ip_address_version'].lower():
self.log("CHANGED: version")
changed = True
results['public_ip_address_version'] = self.version
if self.idle_timeout and self.idle_timeout != results['idle_timeout_in_minutes']:
self.log("CHANGED: idle_timeout")
changed = True
results['idle_timeout_in_minutes'] = self.idle_timeout
if str(self.ip_tags or []) != str(results.get('ip_tags') or []):
self.log("CHANGED: ip_tags")
changed = True
results['ip_tags'] = self.ip_tags
update_tags, results['tags'] = self.update_tags(results['tags'])
if update_tags:
changed = True
elif self.state == 'absent':
self.log("CHANGED: public ip {0} exists but requested state is 'absent'".format(self.name))
changed = True
except CloudError:
self.log('Public ip {0} does not exist'.format(self.name))
if self.state == 'present':
self.log("CHANGED: pip {0} does not exist but requested state is 'present'".format(self.name))
changed = True
self.results['state'] = results
self.results['changed'] = changed
if self.check_mode:
return results
if changed:
if self.state == 'present':
if not pip:
self.log("Create new Public IP {0}".format(self.name))
pip = self.network_models.PublicIPAddress(
location=self.location,
public_ip_address_version=self.version,
public_ip_allocation_method=self.allocation_method if self.version == 'IPv4' else None,
sku=self.network_models.PublicIPAddressSku(name=self.sku) if self.sku else None,
idle_timeout_in_minutes=self.idle_timeout if self.idle_timeout and self.idle_timeout > 0 else None
)
if self.ip_tags:
pip.ip_tags = [self.network_models.IpTag(ip_tag_type=x.type, tag=x.value) for x in self.ip_tags]
if self.tags:
pip.tags = self.tags
if self.domain_name:
pip.dns_settings = self.network_models.PublicIPAddressDnsSettings(
domain_name_label=self.domain_name
)
else:
self.log("Update Public IP {0}".format(self.name))
pip = self.network_models.PublicIPAddress(
location=results['location'],
public_ip_allocation_method=results['public_ip_allocation_method'],
tags=results['tags']
)
if self.domain_name:
pip.dns_settings = self.network_models.PublicIPAddressDnsSettings(
domain_name_label=self.domain_name
)
self.results['state'] = self.create_or_update_pip(pip)
elif self.state == 'absent':
self.log('Delete public ip {0}'.format(self.name))
self.delete_pip()
return self.results
def create_or_update_pip(self, pip):
try:
poller = self.network_client.public_ip_addresses.create_or_update(self.resource_group, self.name, pip)
pip = self.get_poller_result(poller)
except Exception as exc:
self.fail("Error creating or updating {0} - {1}".format(self.name, str(exc)))
return pip_to_dict(pip)
def delete_pip(self):
try:
poller = self.network_client.public_ip_addresses.delete(self.resource_group, self.name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deleting {0} - {1}".format(self.name, str(exc)))
# Delete returns nada. If we get here, assume that all is well.
self.results['state']['status'] = 'Deleted'
return True
def main():
AzureRMPublicIPAddress()
if __name__ == '__main__':
main()
|
py | 1a34373bf96291edf1ad9cb8eb345521608d7bd8 | # -*- coding: utf-8 -*-
"""Python's built-in :mod:`functools` module builds several useful
utilities on top of Python's first-class function
support. ``funcutils`` generally stays in the same vein, adding to and
correcting Python's standard metaprogramming facilities.
"""
from __future__ import print_function
import sys
import re
import inspect
import functools
import itertools
from types import MethodType, FunctionType
try:
xrange
make_method = MethodType
except NameError:
# Python 3
make_method = lambda desc, obj, obj_type: MethodType(desc, obj)
basestring = (str, bytes) # Python 3 compat
_IS_PY2 = False
else:
_IS_PY2 = True
try:
_inspect_iscoroutinefunction = inspect.iscoroutinefunction
except AttributeError:
# Python 3.4
_inspect_iscoroutinefunction = lambda func: False
try:
from boltons.typeutils import make_sentinel
NO_DEFAULT = make_sentinel(var_name='NO_DEFAULT')
except ImportError:
NO_DEFAULT = object()
def get_module_callables(mod, ignore=None):
"""Returns two maps of (*types*, *funcs*) from *mod*, optionally
ignoring based on the :class:`bool` return value of the *ignore*
callable. *mod* can be a string name of a module in
:data:`sys.modules` or the module instance itself.
"""
if isinstance(mod, basestring):
mod = sys.modules[mod]
types, funcs = {}, {}
for attr_name in dir(mod):
if ignore and ignore(attr_name):
continue
try:
attr = getattr(mod, attr_name)
except Exception:
continue
try:
attr_mod_name = attr.__module__
except AttributeError:
continue
if attr_mod_name != mod.__name__:
continue
if isinstance(attr, type):
types[attr_name] = attr
elif callable(attr):
funcs[attr_name] = attr
return types, funcs
def mro_items(type_obj):
"""Takes a type and returns an iterator over all class variables
throughout the type hierarchy (respecting the MRO).
>>> sorted(set([k for k, v in mro_items(int) if not k.startswith('__') and 'bytes' not in k and not callable(v)]))
['denominator', 'imag', 'numerator', 'real']
"""
# TODO: handle slots?
return itertools.chain.from_iterable(ct.__dict__.items()
for ct in type_obj.__mro__)
def dir_dict(obj, raise_exc=False):
"""Return a dictionary of attribute names to values for a given
object. Unlike ``obj.__dict__``, this function returns all
attributes on the object, including ones on parent classes.
"""
# TODO: separate function for handling descriptors on types?
ret = {}
for k in dir(obj):
try:
ret[k] = getattr(obj, k)
except Exception:
if raise_exc:
raise
return ret
def copy_function(orig, copy_dict=True):
"""Returns a shallow copy of the function, including code object,
globals, closure, etc.
>>> func = lambda: func
>>> func() is func
True
>>> func_copy = copy_function(func)
>>> func_copy() is func
True
>>> func_copy is not func
True
Args:
orig (function): The function to be copied. Must be a
function, not just any method or callable.
copy_dict (bool): Also copy any attributes set on the function
instance. Defaults to ``True``.
"""
ret = FunctionType(orig.__code__,
orig.__globals__,
name=orig.__name__,
argdefs=getattr(orig, "__defaults__", None),
closure=getattr(orig, "__closure__", None))
if copy_dict:
ret.__dict__.update(orig.__dict__)
return ret
def partial_ordering(cls):
"""Class decorator, similar to :func:`functools.total_ordering`,
except it is used to define `partial orderings`_ (i.e., it is
possible that *x* is neither greater than, equal to, or less than
*y*). It assumes the presence of the ``__le__()`` and ``__ge__()``
method, but nothing else. It will not override any existing
additional comparison methods.
.. _partial orderings: https://en.wikipedia.org/wiki/Partially_ordered_set
>>> @partial_ordering
... class MySet(set):
... def __le__(self, other):
... return self.issubset(other)
... def __ge__(self, other):
... return self.issuperset(other)
...
>>> a = MySet([1,2,3])
>>> b = MySet([1,2])
>>> c = MySet([1,2,4])
>>> b < a
True
>>> b > a
False
>>> b < c
True
>>> a < c
False
>>> c > a
False
"""
def __lt__(self, other): return self <= other and not self >= other
def __gt__(self, other): return self >= other and not self <= other
def __eq__(self, other): return self >= other and self <= other
if not hasattr(cls, '__lt__'): cls.__lt__ = __lt__
if not hasattr(cls, '__gt__'): cls.__gt__ = __gt__
if not hasattr(cls, '__eq__'): cls.__eq__ = __eq__
return cls
class InstancePartial(functools.partial):
""":class:`functools.partial` is a huge convenience for anyone
working with Python's great first-class functions. It allows
developers to curry arguments and incrementally create simpler
callables for a variety of use cases.
Unfortunately there's one big gap in its usefulness:
methods. Partials just don't get bound as methods and
automatically handed a reference to ``self``. The
``InstancePartial`` type remedies this by inheriting from
:class:`functools.partial` and implementing the necessary
descriptor protocol. There are no other differences in
implementation or usage. :class:`CachedInstancePartial`, below,
has the same ability, but is slightly more efficient.
"""
def __get__(self, obj, obj_type):
return make_method(self, obj, obj_type)
class CachedInstancePartial(functools.partial):
"""The ``CachedInstancePartial`` is virtually the same as
:class:`InstancePartial`, adding support for method-usage to
:class:`functools.partial`, except that upon first access, it
caches the bound method on the associated object, speeding it up
for future accesses, and bringing the method call overhead to
about the same as non-``partial`` methods.
See the :class:`InstancePartial` docstring for more details.
"""
def __get__(self, obj, obj_type):
# These assignments could've been in __init__, but there was
# no simple way to do it without breaking one of PyPy or Py3.
self.__name__ = None
self.__doc__ = self.func.__doc__
self.__module__ = self.func.__module__
name = self.__name__
if name is None:
for k, v in mro_items(obj_type):
if v is self:
self.__name__ = name = k
if obj is None:
return make_method(self, obj, obj_type)
try:
# since this is a data descriptor, this block
# is probably only hit once (per object)
return obj.__dict__[name]
except KeyError:
obj.__dict__[name] = ret = make_method(self, obj, obj_type)
return ret
partial = CachedInstancePartial
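# Hedged usage sketch (class and method names are made up): binding a partial
# with a keyword default so that, via the descriptor protocol above, it is
# handed `self` like a normal method.
#
#   class Cat(object):
#       def meow(self, n):
#           return 'meow ' * n
#       meow_twice = CachedInstancePartial(meow, n=2)
#
#   Cat().meow_twice()   # -> 'meow meow '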
# # #
# # # Function builder
# # #
def wraps(func, injected=None, expected=None, **kw):
"""Modeled after the built-in :func:`functools.wraps`, this function is
used to make your decorator's wrapper functions reflect the
wrapped function's:
* Name
* Documentation
* Module
* Signature
The built-in :func:`functools.wraps` copies the first three, but
does not copy the signature. This version of ``wraps`` can copy
the inner function's signature exactly, allowing seamless usage
and :mod:`introspection <inspect>`. Usage is identical to the
built-in version::
>>> from boltons.funcutils import wraps
>>>
>>> def print_return(func):
... @wraps(func)
... def wrapper(*args, **kwargs):
... ret = func(*args, **kwargs)
... print(ret)
... return ret
... return wrapper
...
>>> @print_return
... def example():
... '''docstring'''
... return 'example return value'
>>>
>>> val = example()
example return value
>>> example.__name__
'example'
>>> example.__doc__
'docstring'
In addition, the boltons version of wraps supports modifying the
outer signature based on the inner signature. By passing a list of
*injected* argument names, those arguments will be removed from
the outer wrapper's signature, allowing your decorator to provide
arguments that aren't passed in.
Args:
func (function): The callable whose attributes are to be copied.
injected (list): An optional list of argument names which
should not appear in the new wrapper's signature.
expected (list): An optional list of argument names (or (name,
default) pairs) representing new arguments introduced by
the wrapper (the opposite of *injected*). See
:meth:`FunctionBuilder.add_arg()` for more details.
update_dict (bool): Whether to copy other, non-standard
attributes of *func* over to the wrapper. Defaults to True.
inject_to_varkw (bool): Ignore missing arguments when a
``**kwargs``-type catch-all is present. Defaults to True.
For more in-depth wrapping of functions, see the
:class:`FunctionBuilder` type, on which wraps was built.
"""
if injected is None:
injected = []
elif isinstance(injected, basestring):
injected = [injected]
else:
injected = list(injected)
expected_items = _parse_wraps_expected(expected)
if isinstance(func, (classmethod, staticmethod)):
raise TypeError('wraps does not support wrapping classmethods and'
' staticmethods, change the order of wrapping to'
' wrap the underlying function: %r'
% (getattr(func, '__func__', None),))
update_dict = kw.pop('update_dict', True)
inject_to_varkw = kw.pop('inject_to_varkw', True)
if kw:
raise TypeError('unexpected kwargs: %r' % kw.keys())
fb = FunctionBuilder.from_func(func)
for arg in injected:
try:
fb.remove_arg(arg)
except MissingArgument:
if inject_to_varkw and fb.varkw is not None:
continue # keyword arg will be caught by the varkw
raise
for arg, default in expected_items:
fb.add_arg(arg, default) # may raise ExistingArgument
if fb.is_async:
fb.body = 'return await _call(%s)' % fb.get_invocation_str()
else:
fb.body = 'return _call(%s)' % fb.get_invocation_str()
def wrapper_wrapper(wrapper_func):
execdict = dict(_call=wrapper_func, _func=func)
fully_wrapped = fb.get_func(execdict, with_dict=update_dict)
fully_wrapped.__wrapped__ = func # ref to the original function (#115)
return fully_wrapped
return wrapper_wrapper
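# Hedged sketch of the `injected` parameter (decorator and argument names are
# illustrative): the decorator supplies `verbose` itself, so the wrapped
# function's outer signature no longer includes it.
#
#   def force_verbose(func):
#       @wraps(func, injected=['verbose'])
#       def wrapper(*args, **kwargs):
#           return func(*args, verbose=True, **kwargs)
#       return wrapper
#
#   @force_verbose
#   def report(data, verbose):
#       ...
#
#   report(data)   # callers no longer pass (or see) `verbose`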
def _parse_wraps_expected(expected):
# expected takes a pretty powerful argument, it's processed
# here. admittedly this would be less trouble if I relied on
# OrderedDict (there's an impl of that in the commit history if
    # you look).
if expected is None:
expected = []
elif isinstance(expected, basestring):
expected = [(expected, NO_DEFAULT)]
expected_items = []
try:
expected_iter = iter(expected)
except TypeError as e:
raise ValueError('"expected" takes string name, sequence of string names,'
' iterable of (name, default) pairs, or a mapping of '
' {name: default}, not %r (got: %r)' % (expected, e))
for argname in expected_iter:
if isinstance(argname, basestring):
# dict keys and bare strings
try:
default = expected[argname]
except TypeError:
default = NO_DEFAULT
else:
# pairs
try:
argname, default = argname
except (TypeError, ValueError):
raise ValueError('"expected" takes string name, sequence of string names,'
' iterable of (name, default) pairs, or a mapping of '
                                 ' {name: default}, not %r' % (argname,))
if not isinstance(argname, basestring):
raise ValueError('all "expected" argnames must be strings, not %r' % (argname,))
expected_items.append((argname, default))
return expected_items
class FunctionBuilder(object):
"""The FunctionBuilder type provides an interface for programmatically
creating new functions, either based on existing functions or from
scratch.
Values are passed in at construction or set as attributes on the
instance. For creating a new function based of an existing one,
see the :meth:`~FunctionBuilder.from_func` classmethod. At any
point, :meth:`~FunctionBuilder.get_func` can be called to get a
newly compiled function, based on the values configured.
>>> fb = FunctionBuilder('return_five', doc='returns the integer 5',
... body='return 5')
>>> f = fb.get_func()
>>> f()
5
>>> fb.varkw = 'kw'
>>> f_kw = fb.get_func()
>>> f_kw(ignored_arg='ignored_val')
5
Note that function signatures themselves changed quite a bit in
Python 3, so several arguments are only applicable to
FunctionBuilder in Python 3. Except for *name*, all arguments to
the constructor are keyword arguments.
Args:
name (str): Name of the function.
doc (str): `Docstring`_ for the function, defaults to empty.
module (str): Name of the module from which this function was
imported. Defaults to None.
body (str): String version of the code representing the body
of the function. Defaults to ``'pass'``, which will result
in a function which does nothing and returns ``None``.
args (list): List of argument names, defaults to empty list,
denoting no arguments.
varargs (str): Name of the catch-all variable for positional
arguments. E.g., "args" if the resultant function is to have
``*args`` in the signature. Defaults to None.
varkw (str): Name of the catch-all variable for keyword
arguments. E.g., "kwargs" if the resultant function is to have
``**kwargs`` in the signature. Defaults to None.
defaults (dict): A mapping of argument names to default values.
kwonlyargs (list): Argument names which are only valid as
keyword arguments. **Python 3 only.**
kwonlydefaults (dict): A mapping, same as normal *defaults*,
but only for the *kwonlyargs*. **Python 3 only.**
annotations (dict): Mapping of type hints and so
forth. **Python 3 only.**
filename (str): The filename that will appear in
tracebacks. Defaults to "boltons.funcutils.FunctionBuilder".
indent (int): Number of spaces with which to indent the
function *body*. Values less than 1 will result in an error.
dict (dict): Any other attributes which should be added to the
functions compiled with this FunctionBuilder.
All of these arguments are also made available as attributes which
can be mutated as necessary.
.. _Docstring: https://en.wikipedia.org/wiki/Docstring#Python
"""
if _IS_PY2:
_argspec_defaults = {'args': list,
'varargs': lambda: None,
'varkw': lambda: None,
'defaults': lambda: None}
@classmethod
def _argspec_to_dict(cls, f):
args, varargs, varkw, defaults = inspect.getargspec(f)
return {'args': args,
'varargs': varargs,
'varkw': varkw,
'defaults': defaults}
else:
_argspec_defaults = {'args': list,
'varargs': lambda: None,
'varkw': lambda: None,
'defaults': lambda: None,
'kwonlyargs': list,
'kwonlydefaults': dict,
'annotations': dict}
@classmethod
def _argspec_to_dict(cls, f):
argspec = inspect.getfullargspec(f)
return dict((attr, getattr(argspec, attr))
for attr in cls._argspec_defaults)
_defaults = {'doc': str,
'dict': dict,
'is_async': lambda: False,
'module': lambda: None,
'body': lambda: 'pass',
'indent': lambda: 4,
'filename': lambda: 'boltons.funcutils.FunctionBuilder'}
_defaults.update(_argspec_defaults)
_compile_count = itertools.count()
def __init__(self, name, **kw):
self.name = name
for a, default_factory in self._defaults.items():
val = kw.pop(a, None)
if val is None:
val = default_factory()
setattr(self, a, val)
if kw:
raise TypeError('unexpected kwargs: %r' % kw.keys())
return
# def get_argspec(self): # TODO
if _IS_PY2:
def get_sig_str(self):
return inspect.formatargspec(self.args, self.varargs,
self.varkw, [])
def get_invocation_str(self):
return inspect.formatargspec(self.args, self.varargs,
self.varkw, [])[1:-1]
else:
def get_sig_str(self):
return inspect.formatargspec(self.args,
self.varargs,
self.varkw,
[],
self.kwonlyargs,
{},
self.annotations)
_KWONLY_MARKER = re.compile(r"""
\* # a star
\s* # followed by any amount of whitespace
, # followed by a comma
\s* # followed by any amount of whitespace
""", re.VERBOSE)
def get_invocation_str(self):
kwonly_pairs = None
formatters = {}
if self.kwonlyargs:
kwonly_pairs = dict((arg, arg)
for arg in self.kwonlyargs)
formatters['formatvalue'] = lambda value: '=' + value
sig = inspect.formatargspec(self.args,
self.varargs,
self.varkw,
[],
kwonly_pairs,
kwonly_pairs,
{},
**formatters)
sig = self._KWONLY_MARKER.sub('', sig)
return sig[1:-1]
@classmethod
def from_func(cls, func):
"""Create a new FunctionBuilder instance based on an existing
function. The original function will not be stored or
modified.
"""
# TODO: copy_body? gonna need a good signature regex.
# TODO: might worry about __closure__?
if not callable(func):
raise TypeError('expected callable object, not %r' % (func,))
kwargs = {'name': func.__name__,
'doc': func.__doc__,
'module': func.__module__,
'dict': getattr(func, '__dict__', {})}
kwargs.update(cls._argspec_to_dict(func))
if _inspect_iscoroutinefunction(func):
kwargs['is_async'] = True
return cls(**kwargs)
def get_func(self, execdict=None, add_source=True, with_dict=True):
"""Compile and return a new function based on the current values of
the FunctionBuilder.
Args:
execdict (dict): The dictionary representing the scope in
which the compilation should take place. Defaults to an empty
dict.
add_source (bool): Whether to add the source used to a
special ``__source__`` attribute on the resulting
function. Defaults to True.
with_dict (bool): Add any custom attributes, if
applicable. Defaults to True.
To see an example of usage, see the implementation of
:func:`~boltons.funcutils.wraps`.
"""
execdict = execdict or {}
body = self.body or self._default_body
tmpl = 'def {name}{sig_str}:'
tmpl += '\n{body}'
if self.is_async:
tmpl = 'async ' + tmpl
body = _indent(self.body, ' ' * self.indent)
name = self.name.replace('<', '_').replace('>', '_') # lambdas
src = tmpl.format(name=name, sig_str=self.get_sig_str(),
doc=self.doc, body=body)
self._compile(src, execdict)
func = execdict[name]
func.__name__ = self.name
func.__doc__ = self.doc
func.__defaults__ = self.defaults
if not _IS_PY2:
func.__kwdefaults__ = self.kwonlydefaults
if with_dict:
func.__dict__.update(self.dict)
func.__module__ = self.module
# TODO: caller module fallback?
if add_source:
func.__source__ = src
return func
def get_defaults_dict(self):
"""Get a dictionary of function arguments with defaults and the
respective values.
"""
ret = dict(reversed(list(zip(reversed(self.args),
reversed(self.defaults or [])))))
return ret
if _IS_PY2:
def add_arg(self, arg_name, default=NO_DEFAULT):
"Add an argument with optional *default* (defaults to ``funcutils.NO_DEFAULT``)."
if arg_name in self.args:
raise ExistingArgument('arg %r already in func %s arg list' % (arg_name, self.name))
self.args.append(arg_name)
if default is not NO_DEFAULT:
self.defaults = (self.defaults or ()) + (default,)
return
else:
def add_arg(self, arg_name, default=NO_DEFAULT, kwonly=False):
"""Add an argument with optional *default* (defaults to
``funcutils.NO_DEFAULT``). Pass *kwonly=True* to add a
keyword-only argument.
"""
if arg_name in self.args:
raise ExistingArgument('arg %r already in func %s arg list' % (arg_name, self.name))
if arg_name in self.kwonlyargs:
raise ExistingArgument('arg %r already in func %s kwonly arg list' % (arg_name, self.name))
if not kwonly:
self.args.append(arg_name)
if default is not NO_DEFAULT:
self.defaults = (self.defaults or ()) + (default,)
else:
self.kwonlyargs.append(arg_name)
if default is not NO_DEFAULT:
self.kwonlydefaults[arg_name] = default
return
def remove_arg(self, arg_name):
"""Remove an argument from this FunctionBuilder's argument list. The
resulting function will have one less argument per call to
this function.
Args:
arg_name (str): The name of the argument to remove.
Raises a :exc:`ValueError` if the argument is not present.
"""
args = self.args
d_dict = self.get_defaults_dict()
try:
args.remove(arg_name)
except ValueError:
try:
self.kwonlyargs.remove(arg_name)
except (AttributeError, ValueError):
# py2, or py3 and missing from both
exc = MissingArgument('arg %r not found in %s argument list:'
' %r' % (arg_name, self.name, args))
exc.arg_name = arg_name
raise exc
else:
self.kwonlydefaults.pop(arg_name, None)
else:
d_dict.pop(arg_name, None)
self.defaults = tuple([d_dict[a] for a in args if a in d_dict])
return
def _compile(self, src, execdict):
filename = ('<%s-%d>'
% (self.filename, next(self._compile_count),))
try:
code = compile(src, filename, 'single')
exec(code, execdict)
except Exception:
raise
return execdict
class MissingArgument(ValueError):
pass
class ExistingArgument(ValueError):
pass
def _indent(text, margin, newline='\n', key=bool):
"based on boltons.strutils.indent"
indented_lines = [(margin + line if key(line) else line)
for line in text.splitlines()]
return newline.join(indented_lines)
try:
from functools import total_ordering # 2.7+
except ImportError:
# python 2.6
def total_ordering(cls):
"""Class decorator that fills in missing comparators/ordering
methods. Backport of :func:`functools.total_ordering` to work
with Python 2.6.
Code from http://code.activestate.com/recipes/576685/
"""
convert = {
'__lt__': [
('__gt__',
lambda self, other: not (self < other or self == other)),
('__le__',
lambda self, other: self < other or self == other),
('__ge__',
lambda self, other: not self < other)],
'__le__': [
('__ge__',
lambda self, other: not self <= other or self == other),
('__lt__',
lambda self, other: self <= other and not self == other),
('__gt__',
lambda self, other: not self <= other)],
'__gt__': [
('__lt__',
lambda self, other: not (self > other or self == other)),
('__ge__',
lambda self, other: self > other or self == other),
('__le__',
lambda self, other: not self > other)],
'__ge__': [
('__le__',
lambda self, other: (not self >= other) or self == other),
('__gt__',
lambda self, other: self >= other and not self == other),
('__lt__',
lambda self, other: not self >= other)]
}
roots = set(dir(cls)) & set(convert)
if not roots:
raise ValueError('must define at least one ordering operation:'
' < > <= >=')
root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
for opname, opfunc in convert[root]:
if opname not in roots:
opfunc.__name__ = opname
opfunc.__doc__ = getattr(int, opname).__doc__
setattr(cls, opname, opfunc)
return cls
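# Hedged illustration of the backport above (added comments only; the Version
# class is hypothetical): a class that defines __eq__ plus any one of < <= > >=
# gets the remaining comparison methods filled in, e.g.
#
#     @total_ordering
#     class Version(object):
#         def __init__(self, num): self.num = num
#         def __eq__(self, other): return self.num == other.num
#         def __lt__(self, other): return self.num < other.num
#
# which makes Version(1) <= Version(2) and Version(2) > Version(1) work even on
# Python 2.6, where functools.total_ordering is unavailable.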
# end funcutils.py
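# Appended usage sketch (added after the original end-of-module marker; not part
# of boltons). It exercises FunctionBuilder.from_func / add_arg / body / get_func
# as described in the docstrings above. Note that this vendored copy formats
# signatures with inspect.formatargspec, which was removed in Python 3.11, so the
# sketch assumes an older interpreter (or the _IS_PY2 branch).
if __name__ == '__main__':
    def greet(name):
        return 'hello, %s' % (name,)

    fb = FunctionBuilder.from_func(greet)       # copy greet's name, module, argspec
    fb.add_arg('punctuation', default='!')      # extend the positional argument list
    fb.body = "return 'hello, %s%s' % (name, punctuation)"
    greet2 = fb.get_func()                      # compile a fresh function from the template
    assert greet2('world') == 'hello, world!'   # default supplied via func.__defaults__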
|
py | 1a3437e759be8cdc0b97568a11c9900423b7c56b | """imw_28363 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
path("api/v1/", include("dating.api.v1.urls")),
path("dating/", include("dating.urls")),
path("home/", include("home.urls")),
]
admin.site.site_header = "IMW"
admin.site.site_title = "IMW Admin Portal"
admin.site.index_title = "IMW Admin"
# swagger
api_info = openapi.Info(
title="IMW API",
default_version="v1",
description="API documentation for IMW App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
urlpatterns += [path("", TemplateView.as_view(template_name="index.html"))]
urlpatterns += [
re_path(r"^(?:.*)/?$", TemplateView.as_view(template_name="index.html"))
]
|
py | 1a343801e1cba9cb0178db220aa35d87b49dc435 | # Generated by Django 2.1 on 2019-05-08 13:17
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0008_auto_20190419_1414'),
('api', '0007_opnfvapiconfig_opnfv_config'),
]
operations = [
]
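# (added note) No operations are listed: with two dependencies inside the same
# app, this behaves like a merge migration reconciling the 0007 and 0008 branches.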
|
py | 1a343873879aa44d89f07a92c7d7309ba53360d0 | def create_thread_by_reacted(posted_title, person):
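    # (added comment) Builds a Slack Block Kit "section" block; the Japanese
    # mrkdwn text roughly translates to: "Good evening! Your post
    # '<posted_title>' received a reaction from <person>."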
return {
"type": "section",
"text": {
"type": "mrkdwn",
"text": "こんばんは!\nあなたの投稿「"
+ posted_title
+ " 」に"
+ person
+ "さんからリアクションが届きました。",
},
} |
py | 1a3438a14aed0dfb6aaa1076be7c7712055d3abd | # SPDX-License-Identifier: Apache-2.0
from ..common._apply_operation import apply_cast
from ..common._registration import register_converter
from ..common._topology import Scope, Operator
from ..common._container import ModelComponentContainer
from .._supported_operators import sklearn_operator_name_map
def convert_sklearn_cast(scope: Scope, operator: Operator,
container: ModelComponentContainer):
inp = operator.inputs[0]
exptype = operator.outputs[0]
res = exptype.type.to_onnx_type()
et = res.tensor_type.elem_type
apply_cast(scope, inp.full_name, exptype.full_name,
container, to=et)
def convert_sklearn_cast_regressor(scope: Scope, operator: Operator,
container: ModelComponentContainer):
op = operator.raw_operator
estimator = op.estimator
op_type = sklearn_operator_name_map[type(estimator)]
this_operator = scope.declare_local_operator(op_type, estimator)
this_operator.inputs = operator.inputs
cls = operator.inputs[0].type.__class__
var_name = scope.declare_local_variable('cast_est', cls())
this_operator.outputs.append(var_name)
var_name = var_name.onnx_name
exptype = operator.outputs[0]
res = exptype.type.to_onnx_type()
et = res.tensor_type.elem_type
apply_cast(scope, var_name, exptype.full_name,
container, to=et)
register_converter('SklearnCastTransformer', convert_sklearn_cast)
register_converter('SklearnCastRegressor', convert_sklearn_cast_regressor)
register_converter('SklearnCast', convert_sklearn_cast)
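# Hedged usage note (added; the import paths below are assumptions, check the
# skl2onnx docs): once registered, these converters handle skl2onnx's cast
# wrappers during model conversion, roughly along these lines:
#
#     import numpy as np
#     from sklearn.pipeline import make_pipeline
#     from sklearn.linear_model import LinearRegression
#     from skl2onnx import to_onnx
#     from skl2onnx.sklapi import CastTransformer
#
#     pipe = make_pipeline(CastTransformer(dtype=np.float32), LinearRegression())
#     pipe.fit(X, y)
#     onx = to_onnx(pipe, X)   # convert_sklearn_cast emits the Cast node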
|
py | 1a3438fa99ded3e9a7ac66389ea637ecf246f5b5 | from hypothesis import given
from rithm import Int
from tests.utils import (IntWithBuiltin,
is_equivalent_to_builtin_int)
from . import strategies
@given(strategies.ints, strategies.ints)
def test_alternatives(first: Int, second: Int) -> None:
assert first - second == first + (-second)
@given(strategies.ints_with_builtins, strategies.ints_with_builtins)
def test_connection_with_builtin(first_with_builtin: IntWithBuiltin,
second_with_builtin: IntWithBuiltin
) -> None:
first, first_builtin = first_with_builtin
second, second_builtin = second_with_builtin
assert is_equivalent_to_builtin_int(first - second,
first_builtin - second_builtin)
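# A further property sketch (added, not in the original file): subtraction is
# anti-commutative, which follows from the alternative form checked above.
@given(strategies.ints, strategies.ints)
def test_anti_commutativity(first: Int, second: Int) -> None:
    assert first - second == -(second - first)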
|