/auterion-cli-1.3.0.tar.gz/auterion-cli-1.3.0/auterioncli/main.py
|
import os
import argparse
from auterioncli.commands import available_commands
from auterioncli.meta_util import PersistentState, check_for_updates, get_version, eprint, \
get_device_presence, get_host_device_type
def main():
this_device_type = get_host_device_type()
persistent = PersistentState(this_device_type)
selected_serial = persistent["selected_serial"]
selected_address = persistent["selected_address"]
have_selected_device = selected_serial is not None and selected_address is not None
env_address = os.getenv('AUTERION_DEVICE_ADDRESS', "127.0.0.1" if this_device_type == 'skynode' else "10.41.1.1")
config = {
"persistent": persistent,
"persistent_dir": persistent.persistent_dir,
"device_address": selected_address if have_selected_device else env_address,
"device_serial": selected_serial if have_selected_device else None,
"have_selected_device": have_selected_device,
"this_device_type": this_device_type
}
commands = available_commands(config)
main_parser = argparse.ArgumentParser()
main_parser.add_argument('--version', help='Print version of this tool', action='store_true')
main_parser.add_argument('--no-update-check', help='Disable automatic update checks', action='store_true')
command_subparsers = main_parser.add_subparsers(title="command", metavar='<command>', dest="root_command")
for name, command in commands.items():
parser = command_subparsers.add_parser(name, help=command.help())
command.setup_parser(parser)
args = main_parser.parse_args()
if args.version:
print(get_version())
exit(0)
if args.root_command is None:
main_parser.print_help()
exit(1)
# Do not check for updates on skynode, or if the user has explicitly disabled it
if not args.no_update_check and not config['this_device_type'] == 'skynode':
check_for_updates(config["persistent"])
# warn user if no device is selected
if commands[args.root_command].needs_device(args):
device_present, serial_at_address = get_device_presence(config['device_address'])
if not device_present:
eprint(f"Error: No device reachable at {config['device_address']}.\n"
f" Use 'device discover' command to show available devices.\n"
f"Aborting.\n")
exit(1)
elif not config['have_selected_device']:
if config['this_device_type'] != 'skynode':
eprint(f'Warn: No device serial selected.\n'
f' Use \'device discover\' and \'device select\' commands to specify which device to use.\n'
f' Falling back to device with serial {serial_at_address} on {config["device_address"]}\n')
elif serial_at_address != config['device_serial']:
if serial_at_address == '':
eprint(f"Warn: Could not verify serial number of device at address {config['device_address']}.\n"
f"You may be connected to the wrong device, "
f"or your device may be experiencing networking problems.\n"
f"Continuing anyways..")
else:
eprint(f"Error: Device on {config['device_address']} has serial {serial_at_address}, which is different\n"
f" from the selected device {config['device_serial']}. Re-select the device to interact with.\n"
f"Aborting.\n")
exit(1)
try:
# Run command
commands[args.root_command].run(args)
except Exception as e:
# Give command modules a chance to handle their exceptions
commands[args.root_command].handle_exception(e)
except KeyboardInterrupt:
eprint("Aborting..")
exit(1)
config["persistent"].persist()
if __name__ == "__main__":
main()
|
PypiClean
|
/moban-0.8.2.tar.gz/moban-0.8.2/docs/level-7-use-custom-jinja2-filter-test-n-global/README.rst
|
Level 7: Custom jinja filters, tests and globals
================================================================================
Level 7 example demonstrates the advanced plugin capabilities of moban. The following
moban file has `plugin_dir` specified::
configuration:
  template_dir:
    - my-templates
  plugin_dir:
    - custom-jj2-plugin
  configuration: data.yml
targets:
  - filter.output: filter.jj2
  - test.output: test.jj2
Where `custom-jj2-plugin` is a directory holding all jinja2 filters, tests
and globals. Under it, there are 4 files::
__init__.py filter.py test.py global.py
It is very important to have `__init__.py`; otherwise, it will NOT work. The other three
files are named to showcase the feature. You can choose whichever names you prefer,
as long as you and your team can make sense of them.
.. note::
If you intend to use extensions for one-off usage, please use the '-pd' cli option,
e.g. `moban -td my-templates/ -t filter.jj2 -pd custom-jj2-plugin`
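For orientation, here is a minimal, hypothetical sketch of what a filter, a test and a global
are, expressed against plain Jinja2. It deliberately uses a bare Jinja2 `Environment` rather
than moban's own plugin registration (which is what `filter.py`, `test.py` and `global.py`
actually rely on), and the names `split_string`, `is_positive` and `answer` are illustration
only:

.. code-block:: python

    from jinja2 import Environment

    def split_string(value, sep=","):
        # a filter transforms a value inside a template expression
        return value.split(sep)

    def is_positive(value):
        # a test returns True/False and is used with the `is` keyword
        return value > 0

    environment = Environment()
    environment.filters["split"] = split_string
    environment.tests["positive"] = is_positive
    environment.globals["answer"] = 42  # a global is visible in every template

    template = environment.from_string(
        "{{ 'a,b,c' | split | join('-') }} {{ 3 is positive }} {{ answer }}")
    print(template.render())  # prints: a-b-c True 42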
Evaluation
--------------------------------------------------------------------------------
Please go to the `docs/level-7-use-custom-jinja2-filter-test-n-global` directory.
Here is the command to launch it:
.. code-block:: bash
$ moban
Templating filter.jj2 to filter.output
Templating test.jj2 to test.output
Templating global.jj2 to global.output
Templated 3 files.
Everything is up to date!
Please examine individual template and its associated plugin for more details.
|
PypiClean
|
/pulumi_aws_native-0.75.1a1693503310.tar.gz/pulumi_aws_native-0.75.1a1693503310/pulumi_aws_native/apigatewayv2/get_integration.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetIntegrationResult',
'AwaitableGetIntegrationResult',
'get_integration',
'get_integration_output',
]
@pulumi.output_type
class GetIntegrationResult:
def __init__(__self__, connection_id=None, connection_type=None, content_handling_strategy=None, credentials_arn=None, description=None, id=None, integration_method=None, integration_subtype=None, integration_type=None, integration_uri=None, passthrough_behavior=None, payload_format_version=None, request_parameters=None, request_templates=None, response_parameters=None, template_selection_expression=None, timeout_in_millis=None, tls_config=None):
if connection_id and not isinstance(connection_id, str):
raise TypeError("Expected argument 'connection_id' to be a str")
pulumi.set(__self__, "connection_id", connection_id)
if connection_type and not isinstance(connection_type, str):
raise TypeError("Expected argument 'connection_type' to be a str")
pulumi.set(__self__, "connection_type", connection_type)
if content_handling_strategy and not isinstance(content_handling_strategy, str):
raise TypeError("Expected argument 'content_handling_strategy' to be a str")
pulumi.set(__self__, "content_handling_strategy", content_handling_strategy)
if credentials_arn and not isinstance(credentials_arn, str):
raise TypeError("Expected argument 'credentials_arn' to be a str")
pulumi.set(__self__, "credentials_arn", credentials_arn)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if integration_method and not isinstance(integration_method, str):
raise TypeError("Expected argument 'integration_method' to be a str")
pulumi.set(__self__, "integration_method", integration_method)
if integration_subtype and not isinstance(integration_subtype, str):
raise TypeError("Expected argument 'integration_subtype' to be a str")
pulumi.set(__self__, "integration_subtype", integration_subtype)
if integration_type and not isinstance(integration_type, str):
raise TypeError("Expected argument 'integration_type' to be a str")
pulumi.set(__self__, "integration_type", integration_type)
if integration_uri and not isinstance(integration_uri, str):
raise TypeError("Expected argument 'integration_uri' to be a str")
pulumi.set(__self__, "integration_uri", integration_uri)
if passthrough_behavior and not isinstance(passthrough_behavior, str):
raise TypeError("Expected argument 'passthrough_behavior' to be a str")
pulumi.set(__self__, "passthrough_behavior", passthrough_behavior)
if payload_format_version and not isinstance(payload_format_version, str):
raise TypeError("Expected argument 'payload_format_version' to be a str")
pulumi.set(__self__, "payload_format_version", payload_format_version)
if request_parameters and not isinstance(request_parameters, dict):
raise TypeError("Expected argument 'request_parameters' to be a dict")
pulumi.set(__self__, "request_parameters", request_parameters)
if request_templates and not isinstance(request_templates, dict):
raise TypeError("Expected argument 'request_templates' to be a dict")
pulumi.set(__self__, "request_templates", request_templates)
if response_parameters and not isinstance(response_parameters, dict):
raise TypeError("Expected argument 'response_parameters' to be a dict")
pulumi.set(__self__, "response_parameters", response_parameters)
if template_selection_expression and not isinstance(template_selection_expression, str):
raise TypeError("Expected argument 'template_selection_expression' to be a str")
pulumi.set(__self__, "template_selection_expression", template_selection_expression)
if timeout_in_millis and not isinstance(timeout_in_millis, int):
raise TypeError("Expected argument 'timeout_in_millis' to be a int")
pulumi.set(__self__, "timeout_in_millis", timeout_in_millis)
if tls_config and not isinstance(tls_config, dict):
raise TypeError("Expected argument 'tls_config' to be a dict")
pulumi.set(__self__, "tls_config", tls_config)
@property
@pulumi.getter(name="connectionId")
def connection_id(self) -> Optional[str]:
return pulumi.get(self, "connection_id")
@property
@pulumi.getter(name="connectionType")
def connection_type(self) -> Optional[str]:
return pulumi.get(self, "connection_type")
@property
@pulumi.getter(name="contentHandlingStrategy")
def content_handling_strategy(self) -> Optional[str]:
return pulumi.get(self, "content_handling_strategy")
@property
@pulumi.getter(name="credentialsArn")
def credentials_arn(self) -> Optional[str]:
return pulumi.get(self, "credentials_arn")
@property
@pulumi.getter
def description(self) -> Optional[str]:
return pulumi.get(self, "description")
@property
@pulumi.getter
def id(self) -> Optional[str]:
return pulumi.get(self, "id")
@property
@pulumi.getter(name="integrationMethod")
def integration_method(self) -> Optional[str]:
return pulumi.get(self, "integration_method")
@property
@pulumi.getter(name="integrationSubtype")
def integration_subtype(self) -> Optional[str]:
return pulumi.get(self, "integration_subtype")
@property
@pulumi.getter(name="integrationType")
def integration_type(self) -> Optional[str]:
return pulumi.get(self, "integration_type")
@property
@pulumi.getter(name="integrationUri")
def integration_uri(self) -> Optional[str]:
return pulumi.get(self, "integration_uri")
@property
@pulumi.getter(name="passthroughBehavior")
def passthrough_behavior(self) -> Optional[str]:
return pulumi.get(self, "passthrough_behavior")
@property
@pulumi.getter(name="payloadFormatVersion")
def payload_format_version(self) -> Optional[str]:
return pulumi.get(self, "payload_format_version")
@property
@pulumi.getter(name="requestParameters")
def request_parameters(self) -> Optional[Any]:
return pulumi.get(self, "request_parameters")
@property
@pulumi.getter(name="requestTemplates")
def request_templates(self) -> Optional[Any]:
return pulumi.get(self, "request_templates")
@property
@pulumi.getter(name="responseParameters")
def response_parameters(self) -> Optional[Any]:
return pulumi.get(self, "response_parameters")
@property
@pulumi.getter(name="templateSelectionExpression")
def template_selection_expression(self) -> Optional[str]:
return pulumi.get(self, "template_selection_expression")
@property
@pulumi.getter(name="timeoutInMillis")
def timeout_in_millis(self) -> Optional[int]:
return pulumi.get(self, "timeout_in_millis")
@property
@pulumi.getter(name="tlsConfig")
def tls_config(self) -> Optional['outputs.IntegrationTlsConfig']:
return pulumi.get(self, "tls_config")
class AwaitableGetIntegrationResult(GetIntegrationResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIntegrationResult(
connection_id=self.connection_id,
connection_type=self.connection_type,
content_handling_strategy=self.content_handling_strategy,
credentials_arn=self.credentials_arn,
description=self.description,
id=self.id,
integration_method=self.integration_method,
integration_subtype=self.integration_subtype,
integration_type=self.integration_type,
integration_uri=self.integration_uri,
passthrough_behavior=self.passthrough_behavior,
payload_format_version=self.payload_format_version,
request_parameters=self.request_parameters,
request_templates=self.request_templates,
response_parameters=self.response_parameters,
template_selection_expression=self.template_selection_expression,
timeout_in_millis=self.timeout_in_millis,
tls_config=self.tls_config)
def get_integration(id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIntegrationResult:
"""
Resource Type definition for AWS::ApiGatewayV2::Integration
"""
__args__ = dict()
__args__['id'] = id
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('aws-native:apigatewayv2:getIntegration', __args__, opts=opts, typ=GetIntegrationResult).value
return AwaitableGetIntegrationResult(
connection_id=pulumi.get(__ret__, 'connection_id'),
connection_type=pulumi.get(__ret__, 'connection_type'),
content_handling_strategy=pulumi.get(__ret__, 'content_handling_strategy'),
credentials_arn=pulumi.get(__ret__, 'credentials_arn'),
description=pulumi.get(__ret__, 'description'),
id=pulumi.get(__ret__, 'id'),
integration_method=pulumi.get(__ret__, 'integration_method'),
integration_subtype=pulumi.get(__ret__, 'integration_subtype'),
integration_type=pulumi.get(__ret__, 'integration_type'),
integration_uri=pulumi.get(__ret__, 'integration_uri'),
passthrough_behavior=pulumi.get(__ret__, 'passthrough_behavior'),
payload_format_version=pulumi.get(__ret__, 'payload_format_version'),
request_parameters=pulumi.get(__ret__, 'request_parameters'),
request_templates=pulumi.get(__ret__, 'request_templates'),
response_parameters=pulumi.get(__ret__, 'response_parameters'),
template_selection_expression=pulumi.get(__ret__, 'template_selection_expression'),
timeout_in_millis=pulumi.get(__ret__, 'timeout_in_millis'),
tls_config=pulumi.get(__ret__, 'tls_config'))
@_utilities.lift_output_func(get_integration)
def get_integration_output(id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetIntegrationResult]:
"""
Resource Type definition for AWS::ApiGatewayV2::Integration
"""
...
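# --- Hypothetical usage sketch (not part of the generated module) ------------
# A minimal example of calling the data source defined above from a Pulumi
# program. The integration id below is a placeholder (its exact format is
# provider-defined), and the exported properties come from
# GetIntegrationResult above.
#
# import pulumi
# from pulumi_aws_native import apigatewayv2
#
# integration = apigatewayv2.get_integration(id="<integration-id>")
# pulumi.export("integrationType", integration.integration_type)
# pulumi.export("integrationUri", integration.integration_uri)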
|
PypiClean
|
/kaf_pas-0.0.121-py3-none-any.whl/kaf_pas/ckk/views/item_document.py
|
from isc_common.http.DSResponse import DSResponseUpdate, DSResponseAdd, DSResponse, JsonResponseWithException
from isc_common.http.RPCResponse import RPCResponseConstant
from isc_common.http.response import JsonResponse
from kaf_pas.ckk.models.item_document import Item_document, Item_documentManager
@JsonResponseWithException()
def Item_document_Fetch(request):
return JsonResponse(
DSResponse(
request=request,
data=Item_document.objects.
select_related().
get_range_rows1(
request=request,
function=Item_documentManager.getRecord
),
status=RPCResponseConstant.statusSuccess).response)
@JsonResponseWithException()
def Item_document_Add(request):
return JsonResponse(DSResponseAdd(data=Item_document.objects.createFromRequest(request=request), status=RPCResponseConstant.statusSuccess).response)
@JsonResponseWithException()
def Item_document_Update(request):
return JsonResponse(DSResponseUpdate(data=Item_document.objects.updateFromRequest(request), status=RPCResponseConstant.statusSuccess).response)
@JsonResponseWithException()
def Item_document_Remove(request):
return JsonResponse(DSResponse(request=request, data=Item_document.objects.deleteFromRequest(request=request), status=RPCResponseConstant.statusSuccess).response)
@JsonResponseWithException()
def Item_document_Lookup(request):
return JsonResponse(DSResponse(request=request, data=Item_document.objects.lookupFromRequest(request=request), status=RPCResponseConstant.statusSuccess).response)
@JsonResponseWithException()
def Item_document_Info(request):
return JsonResponse(DSResponse(request=request, data=Item_document.objects.get_queryset().get_info(request=request), status=RPCResponseConstant.statusSuccess).response)
@JsonResponseWithException()
def Item_document_Copy(request):
return JsonResponse(DSResponse(request=request, data=Item_document.objects.copyFromRequest(request=request), status=RPCResponseConstant.statusSuccess).response)
|
PypiClean
|
/com.precisely.apis-16.0.3-py3-none-any.whl/com/precisely/apis/model/properties.py
|
import re # noqa: F401
import sys # noqa: F401
from com.precisely.apis.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from com.precisely.apis.exceptions import ApiAttributeError
class Properties(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'name': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'name': 'name', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501
"""Properties - a model defined in OpenAPI
Args:
name (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.name = name
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, name, *args, **kwargs): # noqa: E501
"""Properties - a model defined in OpenAPI
Args:
name (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.name = name
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/network/v20190401/get_express_route_cross_connection_peering.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetExpressRouteCrossConnectionPeeringResult',
'AwaitableGetExpressRouteCrossConnectionPeeringResult',
'get_express_route_cross_connection_peering',
]
@pulumi.output_type
class GetExpressRouteCrossConnectionPeeringResult:
"""
Peering in an ExpressRoute Cross Connection resource.
"""
def __init__(__self__, azure_asn=None, etag=None, gateway_manager_etag=None, id=None, ipv6_peering_config=None, last_modified_by=None, microsoft_peering_config=None, name=None, peer_asn=None, peering_type=None, primary_azure_port=None, primary_peer_address_prefix=None, provisioning_state=None, secondary_azure_port=None, secondary_peer_address_prefix=None, shared_key=None, state=None, vlan_id=None):
if azure_asn and not isinstance(azure_asn, int):
raise TypeError("Expected argument 'azure_asn' to be a int")
pulumi.set(__self__, "azure_asn", azure_asn)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if gateway_manager_etag and not isinstance(gateway_manager_etag, str):
raise TypeError("Expected argument 'gateway_manager_etag' to be a str")
pulumi.set(__self__, "gateway_manager_etag", gateway_manager_etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if ipv6_peering_config and not isinstance(ipv6_peering_config, dict):
raise TypeError("Expected argument 'ipv6_peering_config' to be a dict")
pulumi.set(__self__, "ipv6_peering_config", ipv6_peering_config)
if last_modified_by and not isinstance(last_modified_by, str):
raise TypeError("Expected argument 'last_modified_by' to be a str")
pulumi.set(__self__, "last_modified_by", last_modified_by)
if microsoft_peering_config and not isinstance(microsoft_peering_config, dict):
raise TypeError("Expected argument 'microsoft_peering_config' to be a dict")
pulumi.set(__self__, "microsoft_peering_config", microsoft_peering_config)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if peer_asn and not isinstance(peer_asn, float):
raise TypeError("Expected argument 'peer_asn' to be a float")
pulumi.set(__self__, "peer_asn", peer_asn)
if peering_type and not isinstance(peering_type, str):
raise TypeError("Expected argument 'peering_type' to be a str")
pulumi.set(__self__, "peering_type", peering_type)
if primary_azure_port and not isinstance(primary_azure_port, str):
raise TypeError("Expected argument 'primary_azure_port' to be a str")
pulumi.set(__self__, "primary_azure_port", primary_azure_port)
if primary_peer_address_prefix and not isinstance(primary_peer_address_prefix, str):
raise TypeError("Expected argument 'primary_peer_address_prefix' to be a str")
pulumi.set(__self__, "primary_peer_address_prefix", primary_peer_address_prefix)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if secondary_azure_port and not isinstance(secondary_azure_port, str):
raise TypeError("Expected argument 'secondary_azure_port' to be a str")
pulumi.set(__self__, "secondary_azure_port", secondary_azure_port)
if secondary_peer_address_prefix and not isinstance(secondary_peer_address_prefix, str):
raise TypeError("Expected argument 'secondary_peer_address_prefix' to be a str")
pulumi.set(__self__, "secondary_peer_address_prefix", secondary_peer_address_prefix)
if shared_key and not isinstance(shared_key, str):
raise TypeError("Expected argument 'shared_key' to be a str")
pulumi.set(__self__, "shared_key", shared_key)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if vlan_id and not isinstance(vlan_id, int):
raise TypeError("Expected argument 'vlan_id' to be a int")
pulumi.set(__self__, "vlan_id", vlan_id)
@property
@pulumi.getter(name="azureASN")
def azure_asn(self) -> int:
"""
The Azure ASN.
"""
return pulumi.get(self, "azure_asn")
@property
@pulumi.getter
def etag(self) -> str:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="gatewayManagerEtag")
def gateway_manager_etag(self) -> Optional[str]:
"""
The GatewayManager Etag.
"""
return pulumi.get(self, "gateway_manager_etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="ipv6PeeringConfig")
def ipv6_peering_config(self) -> Optional['outputs.Ipv6ExpressRouteCircuitPeeringConfigResponse']:
"""
The IPv6 peering configuration.
"""
return pulumi.get(self, "ipv6_peering_config")
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> Optional[str]:
"""
Gets whether the provider or the customer last modified the peering.
"""
return pulumi.get(self, "last_modified_by")
@property
@pulumi.getter(name="microsoftPeeringConfig")
def microsoft_peering_config(self) -> Optional['outputs.ExpressRouteCircuitPeeringConfigResponse']:
"""
The Microsoft peering configuration.
"""
return pulumi.get(self, "microsoft_peering_config")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="peerASN")
def peer_asn(self) -> Optional[float]:
"""
The peer ASN.
"""
return pulumi.get(self, "peer_asn")
@property
@pulumi.getter(name="peeringType")
def peering_type(self) -> Optional[str]:
"""
The peering type.
"""
return pulumi.get(self, "peering_type")
@property
@pulumi.getter(name="primaryAzurePort")
def primary_azure_port(self) -> str:
"""
The primary port.
"""
return pulumi.get(self, "primary_azure_port")
@property
@pulumi.getter(name="primaryPeerAddressPrefix")
def primary_peer_address_prefix(self) -> Optional[str]:
"""
The primary address prefix.
"""
return pulumi.get(self, "primary_peer_address_prefix")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="secondaryAzurePort")
def secondary_azure_port(self) -> str:
"""
The secondary port.
"""
return pulumi.get(self, "secondary_azure_port")
@property
@pulumi.getter(name="secondaryPeerAddressPrefix")
def secondary_peer_address_prefix(self) -> Optional[str]:
"""
The secondary address prefix.
"""
return pulumi.get(self, "secondary_peer_address_prefix")
@property
@pulumi.getter(name="sharedKey")
def shared_key(self) -> Optional[str]:
"""
The shared key.
"""
return pulumi.get(self, "shared_key")
@property
@pulumi.getter
def state(self) -> Optional[str]:
"""
The peering state.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="vlanId")
def vlan_id(self) -> Optional[int]:
"""
The VLAN ID.
"""
return pulumi.get(self, "vlan_id")
class AwaitableGetExpressRouteCrossConnectionPeeringResult(GetExpressRouteCrossConnectionPeeringResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetExpressRouteCrossConnectionPeeringResult(
azure_asn=self.azure_asn,
etag=self.etag,
gateway_manager_etag=self.gateway_manager_etag,
id=self.id,
ipv6_peering_config=self.ipv6_peering_config,
last_modified_by=self.last_modified_by,
microsoft_peering_config=self.microsoft_peering_config,
name=self.name,
peer_asn=self.peer_asn,
peering_type=self.peering_type,
primary_azure_port=self.primary_azure_port,
primary_peer_address_prefix=self.primary_peer_address_prefix,
provisioning_state=self.provisioning_state,
secondary_azure_port=self.secondary_azure_port,
secondary_peer_address_prefix=self.secondary_peer_address_prefix,
shared_key=self.shared_key,
state=self.state,
vlan_id=self.vlan_id)
def get_express_route_cross_connection_peering(cross_connection_name: Optional[str] = None,
peering_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExpressRouteCrossConnectionPeeringResult:
"""
Use this data source to access information about an existing resource.
:param str cross_connection_name: The name of the ExpressRouteCrossConnection.
:param str peering_name: The name of the peering.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['crossConnectionName'] = cross_connection_name
__args__['peeringName'] = peering_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:network/v20190401:getExpressRouteCrossConnectionPeering', __args__, opts=opts, typ=GetExpressRouteCrossConnectionPeeringResult).value
return AwaitableGetExpressRouteCrossConnectionPeeringResult(
azure_asn=__ret__.azure_asn,
etag=__ret__.etag,
gateway_manager_etag=__ret__.gateway_manager_etag,
id=__ret__.id,
ipv6_peering_config=__ret__.ipv6_peering_config,
last_modified_by=__ret__.last_modified_by,
microsoft_peering_config=__ret__.microsoft_peering_config,
name=__ret__.name,
peer_asn=__ret__.peer_asn,
peering_type=__ret__.peering_type,
primary_azure_port=__ret__.primary_azure_port,
primary_peer_address_prefix=__ret__.primary_peer_address_prefix,
provisioning_state=__ret__.provisioning_state,
secondary_azure_port=__ret__.secondary_azure_port,
secondary_peer_address_prefix=__ret__.secondary_peer_address_prefix,
shared_key=__ret__.shared_key,
state=__ret__.state,
vlan_id=__ret__.vlan_id)
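# --- Hypothetical usage sketch (not part of the generated module) ------------
# A minimal example of invoking the data source defined above from a Pulumi
# program. The three name arguments are placeholders, and the exported
# properties come from GetExpressRouteCrossConnectionPeeringResult above.
#
# import pulumi
# from pulumi_azure_nextgen.network import v20190401 as network
#
# peering = network.get_express_route_cross_connection_peering(
#     cross_connection_name="<cross-connection-name>",
#     peering_name="<peering-name>",
#     resource_group_name="<resource-group-name>")
# pulumi.export("azureAsn", peering.azure_asn)
# pulumi.export("vlanId", peering.vlan_id)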
|
PypiClean
|
/tensor2tensorM-2.0.11.tar.gz/tensor2tensorM-2.0.11/tensor2tensor/data_generators/bair_robot_pushing.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tarfile
import numpy as np
from tensor2tensor.data_generators import generator_utils
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import video_utils
from tensor2tensor.layers import modalities
from tensor2tensor.utils import registry
import tensorflow as tf
DATA_URL = (
"http://rail.eecs.berkeley.edu/datasets/bair_robot_pushing_dataset_v0.tar")
# Lazy load PIL.Image
def PIL_Image(): # pylint: disable=invalid-name
from PIL import Image # pylint: disable=g-import-not-at-top
return Image
@registry.register_problem
class VideoBairRobotPushing(video_utils.VideoProblem):
"""Berkeley (BAIR) robot pushing dataset."""
@property
def num_channels(self):
return 3
@property
def frame_height(self):
return 64
@property
def frame_width(self):
return 64
@property
def is_generate_per_split(self):
return True
# num_train_files * num_videos * num_frames
@property
def total_number_of_frames(self):
return 167 * 256 * 30
def max_frames_per_video(self, hparams):
return 30
@property
def random_skip(self):
return False
def eval_metrics(self):
return []
@property
def only_keep_videos_from_0th_frame(self):
return True
@property
def use_not_breaking_batching(self):
return True
@property
def dataset_splits(self):
"""Splits of data to produce and number of output shards for each."""
return [
{"split": problem.DatasetSplit.TRAIN, "shards": 10},
{"split": problem.DatasetSplit.EVAL, "shards": 1},
{"split": problem.DatasetSplit.TEST, "shards": 1}]
@property
def extra_reading_spec(self):
"""Additional data fields to store on disk and their decoders."""
data_fields = {
"frame_number": tf.FixedLenFeature([1], tf.int64),
}
decoders = {
"frame_number": tf.contrib.slim.tfexample_decoder.Tensor(
tensor_key="frame_number"),
}
return data_fields, decoders
def hparams(self, defaults, unused_model_hparams):
p = defaults
p.modality = {"inputs": modalities.ModalityType.VIDEO,
"targets": modalities.ModalityType.VIDEO}
p.vocab_size = {"inputs": 256,
"targets": 256}
def parse_frames(self, filenames):
image_key = "{}/image_aux1/encoded"
action_key = "{}/action"
state_key = "{}/endeffector_pos"
for f in filenames:
print("Parsing ", f)
for serialized_example in tf.python_io.tf_record_iterator(f):
x = tf.train.Example()
x.ParseFromString(serialized_example)
# there are 4 features per frame
# main image, aux image, actions and states
nf = len(x.features.feature.keys()) // 4
for i in range(nf):
image_name = image_key.format(i)
action_name = action_key.format(i)
state_name = state_key.format(i)
byte_str = x.features.feature[image_name].bytes_list.value[0]
img = PIL_Image().frombytes(
"RGB", (self.frame_width, self.frame_height), byte_str)
arr = np.array(img.getdata())
frame = arr.reshape(
self.frame_width, self.frame_height, self.num_channels)
state = x.features.feature[state_name].float_list.value
action = x.features.feature[action_name].float_list.value
yield i, frame, state, action
def generate_samples(self, data_dir, tmp_dir, dataset_split):
path = generator_utils.maybe_download(
tmp_dir, os.path.basename(DATA_URL), DATA_URL)
tar = tarfile.open(path)
tar.extractall(tmp_dir)
tar.close()
if dataset_split == problem.DatasetSplit.TEST:
base_dir = os.path.join(tmp_dir, "softmotion30_44k/test/*")
filenames = tf.gfile.Glob(base_dir)
else:
base_dir = os.path.join(tmp_dir, "softmotion30_44k/train/*")
filenames = tf.gfile.Glob(base_dir)
# the test-set contains just 256 videos so this should be sufficient.
if dataset_split == problem.DatasetSplit.TRAIN:
filenames = filenames[:-2]
else:
filenames = filenames[-2:]
for frame_number, frame, state, action in self.parse_frames(filenames):
yield {
"frame_number": [frame_number],
"frame": frame,
"state": state,
"action": action,
}
@registry.register_problem
class VideoBairRobotPushingWithActions(VideoBairRobotPushing):
"""Berkeley (BAIR) robot pushing dataset with actions."""
@property
def extra_reading_spec(self):
"""Additional data fields to store on disk and their decoders."""
data_fields = {
"frame_number": tf.FixedLenFeature([1], tf.int64),
"action": tf.FixedLenFeature([4], tf.float32),
}
decoders = {
"frame_number": tf.contrib.slim.tfexample_decoder.Tensor(
tensor_key="frame_number"),
"action": tf.contrib.slim.tfexample_decoder.Tensor(tensor_key="action"),
}
return data_fields, decoders
|
PypiClean
|
/pcapng3-0.1.23.tar.gz/pcapng3-0.1.23/README.rst
|
Tool for reading/writing PCAPNG network packet capture files
============================================================
Alan Thompson, Brocade
[email protected]
Please see the IETF document `PCAP Next Generation
(pcapng) Capture File Format <https://pcapng.github.io/pcapng/>`_
Please also see the project `home page on GitLab <https://gitlab.com/netdev-americas/pcapng/>`_
and `at PyPI - the Python Package Index <https://pypi.python.org/pypi/pcapng>`_
===========
Quick Start
===========
PCAPNG files must begin with a Section Header Block::
import pcapng.block
import pcapng.linktype
import pcapng.option
pcap_fp = open( 'data.pcapng', 'wb' );
shb_opts = [ pcapng.option.ShbHardware( "Dell" ),
pcapng.option.ShbOs( "Ubuntu" ),
pcapng.option.ShbUserAppl( "IntelliJ Idea" ) ]
shb_obj = pcapng.block.SectionHeaderBlock( shb_opts )
shb_packed_bytes = shb_obj.pack()
pcap_fp.write( shb_packed_bytes ) # must be 1st block
where the options list may be omitted for this or any other block type. After the SHB, one or more
Interface Description Blocks may be included::
idb_opts = [ pcapng.option.IdbName( interface_name ),
pcapng.option.IdbDescription( "primary interface on host" ),
pcapng.option.IdbSpeed( 12345 ) ]
idb_obj = pcapng.block.InterfaceDescBlock( linktype.LINKTYPE_ETHERNET, idb_opts ) # optional block
pcap_fp.write( idb_obj.pack() )
After the SHB and any optional IDBs, one may include packet information as either Simple Packet
Blocks or Enhanced Packet Blocks::
pkt_bytes = get_next_packet( socket_fd )
dbg_print( pkt_bytes )
pcap_fp.write( pcapng.block.SimplePacketBlock( pkt_bytes ).pack() )
pkt_bytes = get_next_packet( socket_fd )
dbg_print( pkt_bytes )
epb_opts = [ pcapng.option.EpbFlags( [13,14,15,16] ),
pcapng.option.EpbHash( 'just about any hash spec can go here' ),
pcapng.option.EpbDropCount( 13 ) ]
pcap_fp.write( pcapng.block.EnhancedPacketBlock( 0, pkt_bytes, len(pkt_bytes), epb_opts ).pack() )
Blocks may also be serialized & deserialized in bulk, as seen in the unit tests::
def test_blocks_lst():
blk_lst = [
# SHB must be 1st block
pcapng.block.SectionHeaderBlock( [ pcapng.option.ShbHardware( "Dell" ),
pcapng.option.ShbOs( "Ubuntu" ),
pcapng.option.ShbUserAppl( "IntelliJ Idea" ) ] ),
pcapng.block.InterfaceDescBlock( linktype.LINKTYPE_ETHERNET,
[ pcapng.option.IdbName( "Carrier Pigeon" ),
pcapng.option.IdbDescription( "Something profound here..." ),
pcapng.option.IdbIpv4Addr( [192, 168, 13, 7], [255, 255, 255, 0] ),
pcapng.option.IdbOs( 'Ubuntu Xenial 16.04.1 LTS' ) ] ),
pcapng.block.SimplePacketBlock('abc'),
pcapng.block.EnhancedPacketBlock( 0, "<<<Stand-in for actual packet data>>>" ),
pcapng.block.CustomBlockCopyable( pen.BROCADE_PEN, 'User-defined custom data' ),
]
packed_bytes = pcapng.block.pack_all( blk_lst )
if False:
pcap_fp = open( 'block_list.pcapng', 'wb' )
pcap_fp.write( packed_bytes )
pcap_fp.close()
util.assert_block32_length( packed_bytes )
blk_lst_unpacked = pcapng.block.unpack_all( packed_bytes )
assert blk_lst == blk_lst_unpacked
Installation
============
Install from the Python Package Index (PyPI)::
sudo pip install pcapng
API Documentation
=================
Point your browser to the included HTML documentation::
firefox doc/pcapng/index.html # or similar (system dependent)
Sample Programs
===============
Please see the sample programs::
isis_agent_pcapng.py # real-time packet capture from your machine into a PCAPNG file
isis_demo_mrt.py # same as above but save in Custom Block MRT format
pcapng_timing.py # capture 1M sample packets
The program isis_agent_pcapng.py creates an output file ``data.pcapng``, which is `viewable in
Wireshark. <https://www.wireshark.org/>`_
The program ``isis_demo_mrt.py`` creates two output files ``isis.mrt`` & ``isis.pcapng``. The first of
these is in raw MRT format and is not viewable by Wireshark. For the second file, each raw MRT block
is wrapped in a PCAPNG Custom Block. The file may be loaded successfully in Wireshark; however,
since Wireshark doesn't understand the custom format, it produces a blank display.
The third program ``pcapng_timing.py`` writes 1 million dummy packets to a PCAPNG file. A flag selects
either Simple Packet Block or Enhanced Packet Block output format. Execution on a representative
computer yields execution times of ~6 seconds and ~16 seconds for SPB and EPB formats, respectively.
Generating Documentation
========================
Documentation uses the ``pdoc`` tool. Note that pdoc generates documentation from the installed
``pcapng`` package, not directly from the source code. To use::
sudo pip install pdoc # install pdoc if not present
./generate-docs.bash # generate docs
Endian Convention
=================
`The PCAPNG specification <https://pcapng.github.io/pcapng/>`_ mandates that data be saved in the
native endian format of the capturing machine. This avoids the possible need for byte-swapping
during data capture, which may aid in efficiency. However, a reader of a PCAPNG file is obligated to
examine the special BYTE_ORDER_MAGIC field of the Section Header Block in order to determine the
endian convention used in generating the file. Additionally, since several PCAPNG files may be
concatenated together to form a larger, valid PCAPNG file, the reader must re-evaluate the endian
convention for each subsequent Section Header Block encountered.
Currently, this library does not implement endian-sensitive decoding logic, using native endian
encoding for both writing and reading. The library thus assumes that both the capturing machine and
the reading machine share the same endian conventions. The library may be extended in the future to
implement the endian-sensitive logic for reading PCAPNG written on foreign hosts.
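The check itself is small. Below is a hypothetical reader-side sketch (not part of this
library) of how the BYTE_ORDER_MAGIC field of a Section Header Block could be inspected to
choose the correct endian convention; the function name and the '<'/'>' struct-prefix return
values are illustration only::

    import struct

    def detect_endianness(shb_bytes):
        # SHB layout: block type (4 bytes), block total length (4 bytes),
        # byte-order magic (4 bytes), ...
        (magic,) = struct.unpack_from('<I', shb_bytes, 8)  # read as little-endian
        if magic == 0x1A2B3C4D:
            return '<'   # file was written in little-endian convention
        if magic == 0x4D3C2B1A:
            return '>'   # bytes arrive swapped: file was written big-endian
        raise ValueError('not a PCAPNG Section Header Block')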
|
PypiClean
|
/steamers_misago-0.0.2-py3-none-any.whl/misago/static/misago/momentjs/bo.js
|
(function (factory) {
if (typeof define === 'function' && define.amd) {
define(['moment'], factory); // AMD
} else if (typeof exports === 'object') {
module.exports = factory(require('../moment')); // Node
} else {
factory((typeof global !== 'undefined' ? global : this).moment); // node or other global
}
}(function (moment) {
var symbolMap = {
'1': '༡',
'2': '༢',
'3': '༣',
'4': '༤',
'5': '༥',
'6': '༦',
'7': '༧',
'8': '༨',
'9': '༩',
'0': '༠'
},
numberMap = {
'༡': '1',
'༢': '2',
'༣': '3',
'༤': '4',
'༥': '5',
'༦': '6',
'༧': '7',
'༨': '8',
'༩': '9',
'༠': '0'
};
return moment.defineLocale('bo', {
months : 'ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ'.split('_'),
monthsShort : 'ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ'.split('_'),
weekdays : 'གཟའ་ཉི་མ་_གཟའ་ཟླ་བ་_གཟའ་མིག་དམར་_གཟའ་ལྷག་པ་_གཟའ་ཕུར་བུ_གཟའ་པ་སངས་_གཟའ་སྤེན་པ་'.split('_'),
weekdaysShort : 'ཉི་མ་_ཟླ་བ་_མིག་དམར་_ལྷག་པ་_ཕུར་བུ_པ་སངས་_སྤེན་པ་'.split('_'),
weekdaysMin : 'ཉི་མ་_ཟླ་བ་_མིག་དམར་_ལྷག་པ་_ཕུར་བུ_པ་སངས་_སྤེན་པ་'.split('_'),
longDateFormat : {
LT : 'A h:mm',
LTS : 'LT:ss',
L : 'DD/MM/YYYY',
LL : 'D MMMM YYYY',
LLL : 'D MMMM YYYY, LT',
LLLL : 'dddd, D MMMM YYYY, LT'
},
calendar : {
sameDay : '[དི་རིང] LT',
nextDay : '[སང་ཉིན] LT',
nextWeek : '[བདུན་ཕྲག་རྗེས་མ], LT',
lastDay : '[ཁ་སང] LT',
lastWeek : '[བདུན་ཕྲག་མཐའ་མ] dddd, LT',
sameElse : 'L'
},
relativeTime : {
future : '%s ལ་',
past : '%s སྔན་ལ',
s : 'ལམ་སང',
m : 'སྐར་མ་གཅིག',
mm : '%d སྐར་མ',
h : 'ཆུ་ཚོད་གཅིག',
hh : '%d ཆུ་ཚོད',
d : 'ཉིན་གཅིག',
dd : '%d ཉིན་',
M : 'ཟླ་བ་གཅིག',
MM : '%d ཟླ་བ',
y : 'ལོ་གཅིག',
yy : '%d ལོ'
},
preparse: function (string) {
return string.replace(/[༡༢༣༤༥༦༧༨༩༠]/g, function (match) {
return numberMap[match];
});
},
postformat: function (string) {
return string.replace(/\d/g, function (match) {
return symbolMap[match];
});
},
meridiemParse: /མཚན་མོ|ཞོགས་ཀས|ཉིན་གུང|དགོང་དག|མཚན་མོ/,
isPM: function (input) {
return /^(ཉིན་གུང|དགོང་དག|མཚན་མོ)$/.test(input);
},
meridiem : function (hour, minute, isLower) {
if (hour < 4) {
return 'མཚན་མོ';
} else if (hour < 10) {
return 'ཞོགས་ཀས';
} else if (hour < 17) {
return 'ཉིན་གུང';
} else if (hour < 20) {
return 'དགོང་དག';
} else {
return 'མཚན་མོ';
}
},
week : {
dow : 0, // Sunday is the first day of the week.
doy : 6 // The week that contains Jan 1st is the first week of the year.
}
});
}));
|
PypiClean
|
/collective.folderishtypes-3.1.tar.gz/collective.folderishtypes-3.1/CHANGES.rst
|
Changes
=======
3.1 (2022-11-15)
----------------
- Fix problem with incomplete loaded z3c.caching which causes a "Unknown directive" ConfigurationError.
[thet]
- Add Transifex.net service integration to manage the translation process.
[macagua]
- Added Spanish translation.
[macagua]
- Updated gettext files support.
[macagua]
3.0.0 (2019-12-17)
------------------
- Moved to named behaviors.
[iham]
2.2.0 (2019-03-20)
------------------
- Add support for Python 3 and Plone 5.2.
[pbauer]
2.1 (2017-10-03)
----------------
- Use 'getAllowedSizes' from CMFPlone if available to allow AT free use
[tomgross]
- Change portlets to support z3c.form and make it Plone 5 compatible (no more formlib).
[thet]
2.0.2 (2015-06-30)
------------------
- Configure folderish types to use the ``plone.content.itemView`` caching
ruleset instead the folderView one.
[thet]
2.0.1 (2015-04-08)
------------------
- Fix redirection to parent context after editing for Archetypes. We cannot
import something from Acquisition in a Filesystem script.
[thet]
2.0 (2015-03-04)
----------------
- Plone 5 + 4 safe call of the ``@@folder_listing`` ``listing_macro``.
[thet]
- Provide a ``redirectparent.zcml`` file. When loading it the user is
redirected to the parent folder after adding or editing. This avoids
confusing behavior of creating nested content structures, while one might add
multiple content items to one folder. For consistency reasons, this is done
for all types.
For Archetypes, this behavior is enabled by default.
[thet]
- Add a browserlayer for collective.folderishtypes including the .at and .dx
sub packages together with upgrade steps. Bind relevant browser views to
these browser layers.
[thet]
- Allow "Site Administrator" to add ATContentTypes based Folderish Content
Types.
[thet]
- Better PloneArticle migration: Set content_type and filename for files and
images. Plus: migrate contentleadimage, if available.
[thet]
- Update uninstall profiles.
[thet]
2.0b2 (2014-10-17)
------------------
- Fix broken release
[rnix]
2.0b1 (2014-10-02)
------------------
- Add ISelectableConstrainTypes to behaviors of Dexterity folderish types.
[agitator]
- Add content listing viewlet.
[agitator]
- Add migration step for Products.PloneArticle objects to Archetypes based
folderish document objects.
[thet]
- Initialize permissions and roles for Archetypes folderish types.
[thet]
- Dexterity support.
[thet]
1.8 (2013-11-23)
----------------
- In folder_summary_view do a more sane check, if an image is available.
[thet]
1.7 (2013-08-23)
----------------
- Remove bobo_traverse in Folderish News Item, since plone.app.imaging does the
traversing.
[thet]
- Remove vCalendar action entry from FTI config of Folderish Event.
[thet]
- More documentation: How To create own content types based on
collective.folderishtypes, How to migrate non-folderishtypes to folderish
ones.
[thet]
1.6 (2013-04-23)
----------------
- Update translations and translate folder_listing.
[thet]
- Add z3c.autoinclude.plugin entry point.
[thet]
1.5 (2012-11-30)
----------------
- Let the folderish types derive from Products.ATContentTypes ATFolder instead
of plone.app.folder's implementation. It inherits some i18n classes. This
fixes the issue that on translation of folderish types LinguaPlone's
translation view wasn't shown.
[thet]
1.4 (2012-11-28)
----------------
- Give the original, non-folderish types another title, so that they can be
easily kept apart from the folderish ones.
[thet]
- For folderish events, move the location field back to the main schemata.
[thet]
1.3 (2012-11-26)
----------------
- Allow discussions from plone.app.discussion on Folderish Types.
[thet]
1.2.1 (2012-09-06)
------------------
- CSS fix
[rnix]
1.2 (2012-09-04)
----------------
- Removed folderish=True when finalizing the schema, which prevented
relateditems from being displayed. Updated import to plone.app.folder.
[agitator]
- Fix meta types in FTI to follow consistent conventions.
[rnixx]
- For Folderish Event, use plone.app.event's ATEvent implementation if
available.
[thet]
1.1.1 (2012-02-05)
------------------
- Add a custom style class for the contextual contents portlet based on the
portlet's name.
[thet]
1.1 (2012-02-04)
----------------
- Add a contextual contents portlet, which shows the contents of folderish
types in a portlet.
[thet]
1.0 (2012-02-02)
----------------
- No Changes, release as 1.0 final.
[thet]
1.0b5 (2011-09-07)
------------------
- Updated documentation: Missing viewlet issue.
[thet]
1.0b4 (2011-09-07)
------------------
- Locales Update.
[thet]
- Add types to image_types in portal_atct to allow image scale recreation.
[thet]
- Add HistoryAwareMixin and configure types to be versionable.
[thet]
- Remove folderish_listing_viewlet, since there is already a portlet which can
be used.
[thet]
- Include CSS via link instead of import, which can then be better processed by
XML manipulation tools like Deliverance or Diazo.
[thet]
- More explicit content-icons background styles which don't override
background-color.
[thet]
- Use plone.app.imaging scales - apply a schema patch to FolderishNewsItem.
[thet]
- Fixed traversing to image scales for FolderishNewsItem. Subclasses should
implement __bobo_traverse__ too.
[thet]
1.0b3 (2011-03-22)
------------------
- Add rolemap.xml for generic setup to have proper configured permissions.
[thet]
1.0b2 (2011-03-10)
------------------
- Make portlet registration optional and register portlets only for folderish
types.
[thet]
- Updated folder_listing.pt and folder_summary_view.pt to current Plone trunk.
[thet]
1.0b1 (2011-02-18)
------------------
- Initial release
|
PypiClean
|
/gooddata_metadata_client-1.3.0-py3-none-any.whl/gooddata_metadata_client/model/json_api_user_out_list.py
|
import re # noqa: F401
import sys # noqa: F401
from gooddata_metadata_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from gooddata_metadata_client.exceptions import ApiAttributeError
def lazy_import():
from gooddata_metadata_client.model.json_api_user_group_out_with_links import JsonApiUserGroupOutWithLinks
from gooddata_metadata_client.model.json_api_user_out_with_links import JsonApiUserOutWithLinks
from gooddata_metadata_client.model.list_links import ListLinks
globals()['JsonApiUserGroupOutWithLinks'] = JsonApiUserGroupOutWithLinks
globals()['JsonApiUserOutWithLinks'] = JsonApiUserOutWithLinks
globals()['ListLinks'] = ListLinks
class JsonApiUserOutList(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('data',): {
},
('included',): {
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'data': ([JsonApiUserOutWithLinks],), # noqa: E501
'included': ([JsonApiUserGroupOutWithLinks],), # noqa: E501
'links': (ListLinks,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'data': 'data', # noqa: E501
'included': 'included', # noqa: E501
'links': 'links', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, data, *args, **kwargs): # noqa: E501
"""JsonApiUserOutList - a model defined in OpenAPI
Args:
data ([JsonApiUserOutWithLinks]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
included ([JsonApiUserGroupOutWithLinks]): Included resources. [optional] # noqa: E501
links (ListLinks): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.data = data
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, data, *args, **kwargs): # noqa: E501
"""JsonApiUserOutList - a model defined in OpenAPI
Args:
data ([JsonApiUserOutWithLinks]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
included ([JsonApiUserGroupOutWithLinks]): Included resources. [optional] # noqa: E501
links (ListLinks): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.data = data
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/pyramid_notebook-0.3.0-py3-none-any.whl/pyramid_notebook/utils.py
|
import copy
import os
import os.path
# Courtesy of http://stackoverflow.com/questions/5884066/hashing-a-python-dictionary
def make_dict_hash(o):
"""Make a hash from a dictionary, list, tuple or set to any level, containing
only other hashable types (including any lists, tuples, sets, and dictionaries).
"""
if isinstance(o, (set, tuple, list)):
return tuple([make_dict_hash(e) for e in o])
elif not isinstance(o, dict):
return hash(o)
new_o = copy.deepcopy(o)
for k, v in new_o.items():
new_o[k] = make_dict_hash(v)
return hash(tuple(frozenset(sorted(new_o.items()))))
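# Illustrative sketch (not part of the original module): a hedged example of how
# make_dict_hash behaves. The literal values below are made up; the point is that
# the hash is insensitive to key ordering because items are sorted before hashing.
def _example_make_dict_hash():
    first = {"a": 1, "b": [1, 2], "c": {"nested": True}}
    second = {"c": {"nested": True}, "b": [1, 2], "a": 1}
    # Both orderings hash identically; changing any value changes the hash.
    return make_dict_hash(first) == make_dict_hash(second)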
class change_directory:
"""ChangeDirectory is a context manager that allows you to temporary change the working directory.
Courtesy of http://code.activestate.com/recipes/576620-changedirectory-context-manager/
"""
def __init__(self, directory):
self._dir = directory
self._cwd = os.getcwd()
self._pwd = self._cwd
@property
def current(self):
return self._cwd
@property
def previous(self):
return self._pwd
@property
def relative(self):
cwd = self._cwd.split(os.path.sep)
pwd = self._pwd.split(os.path.sep)
length = min(len(cwd), len(pwd))
idx = 0
while idx < length and cwd[idx] == pwd[idx]:
idx += 1
return os.path.normpath(os.path.join(*(['.'] + (['..'] * (len(cwd) - idx)) + pwd[idx:])))
def __enter__(self):
self._pwd = self._cwd
os.chdir(self._dir)
self._cwd = os.getcwd()
return self
def __exit__(self, *args):
os.chdir(self._pwd)
self._cwd = self._pwd
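# Illustrative sketch (not part of the original module): change_directory restores
# the previous working directory on exit. tempfile.gettempdir() is only used here as
# a convenient, always-available target directory.
def _example_change_directory():
    import tempfile
    before = os.getcwd()
    with change_directory(tempfile.gettempdir()) as cd:
        assert cd.current == os.getcwd()  # now inside the temporary directory
    return os.getcwd() == before  # True: the original directory was restored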
def route_to_alt_domain(request, url):
"""Route URL to a different subdomain.
Used to rewrite URLs to point to the websocket-serving domain.
"""
# Do we need to route IPython Notebook request from a different location
alternative_domain = request.registry.settings.get("pyramid_notebook.alternative_domain", "").strip()
if alternative_domain:
url = url.replace(request.host_url, alternative_domain)
return url
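# Illustrative sketch (not part of the original module): the request object and the
# URLs below are stand-ins built with SimpleNamespace; in a real application the
# Pyramid request and its registry settings are supplied by the framework.
def _example_route_to_alt_domain():
    from types import SimpleNamespace
    request = SimpleNamespace(
        host_url="http://example.com",
        registry=SimpleNamespace(
            settings={"pyramid_notebook.alternative_domain": "http://ws.example.com"}
        ),
    )
    # Returns "http://ws.example.com/notebook/" because the alternative domain is set
    return route_to_alt_domain(request, "http://example.com/notebook/")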
|
PypiClean
|
/crytic_compile-0.3.4-py3-none-any.whl/crytic_compile/utils/naming.py
|
import logging
import os.path
import platform
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING, Union, Callable, Optional
from crytic_compile.platform.exceptions import InvalidCompilation
# Cycle dependency
if TYPE_CHECKING:
from crytic_compile import CryticCompile
LOGGER = logging.getLogger("CryticCompile")
@dataclass
class Filename:
"""Path metadata for each file in the compilation unit"""
def __init__(self, absolute: str, used: str, relative: str, short: str):
self.absolute = absolute
self.used = used
self.relative = relative
self.short = short
def __hash__(self) -> int:
return hash(self.relative)
def __eq__(self, other: object) -> bool:
if not isinstance(other, Filename):
return NotImplemented
return self.relative == other.relative
def __repr__(self) -> str:
return f"Filename(absolute={self.absolute}, used={self.used}, relative={self.relative}, short={self.short}))"
def extract_name(name: str) -> str:
"""Convert '/path:Contract' to Contract
Args:
name (str): name to convert
Returns:
str: extracted contract name
"""
return name[name.rfind(":") + 1 :]
def extract_filename(name: str) -> str:
"""Convert '/path:Contract' to /path
Args:
name (str): name to convert
Returns:
str: extracted filename
"""
if not ":" in name:
return name
return name[: name.rfind(":")]
def combine_filename_name(filename: str, name: str) -> str:
"""Combine the filename with the contract name
Args:
filename (str): filename
name (str): contract name
Returns:
str: Combined names
"""
return filename + ":" + name
def _verify_filename_existence(filename: Path, cwd: Path) -> Path:
"""
Check if the filename exists. If it does not, try multiple heuristics to find the right filename:
- Look for contracts/FILENAME
- Look for node_modules/FILENAME
- Look for node_modules/FILENAME in all the parent directories
Args:
filename (Path): filename to check
cwd (Path): directory
Raises:
InvalidCompilation: if the filename is not found
Returns:
Path: the filename
"""
if filename.exists():
return filename
if cwd.joinpath(Path("contracts"), filename).exists():
filename = cwd.joinpath("contracts", filename)
elif cwd.joinpath(filename).exists():
filename = cwd.joinpath(filename)
# how node.js loads dependencies from node_modules:
# https://nodejs.org/api/modules.html#loading-from-node_modules-folders
elif cwd.joinpath(Path("node_modules"), filename).exists():
filename = cwd.joinpath("node_modules", filename)
else:
for parent in cwd.parents:
if parent.joinpath(Path("node_modules"), filename).exists():
filename = parent.joinpath(Path("node_modules"), filename)
break
if not filename.exists():
raise InvalidCompilation(f"Unknown file: {filename}")
return filename
# pylint: disable=too-many-branches
def convert_filename(
used_filename: Union[str, Path],
relative_to_short: Callable[[Path], Path],
crytic_compile: "CryticCompile",
working_dir: Optional[Union[str, Path]] = None,
) -> Filename:
"""Convert a filename to CryticCompile Filename object.
The used_filename can be absolute, relative, or missing the node_modules/contracts directory prefix
Args:
used_filename (Union[str, Path]): Used filename
relative_to_short (Callable[[Path], Path]): Callback to translate the relative to short
crytic_compile (CryticCompile): Associated CryticCompile object
working_dir (Optional[Union[str, Path]], optional): Working directory. Defaults to None.
Returns:
Filename: Filename converted
"""
filename_txt = used_filename
if platform.system() == "Windows":
elements = list(Path(filename_txt).parts)
if elements[0] == "/" or elements[0] == "\\":
elements = elements[1:] # remove '/'
elements[0] = elements[0] + ":/" # add :/
filename = Path(*elements)
else:
filename = Path(filename_txt)
# cwd points to the directory to be used
if working_dir is None:
cwd = Path.cwd()
else:
working_dir = Path(working_dir)
if working_dir.is_absolute():
cwd = working_dir
else:
cwd = Path.cwd().joinpath(Path(working_dir)).resolve()
if crytic_compile.package_name:
try:
filename = filename.relative_to(Path(crytic_compile.package_name))
except ValueError:
pass
filename = _verify_filename_existence(filename, cwd)
absolute = Path(os.path.abspath(filename))
# This returns original path if *path* and *start* are on different drives (for Windows platform).
try:
relative = Path(os.path.relpath(filename, Path.cwd()))
except ValueError:
relative = Path(filename)
# Build the short path
try:
if cwd.is_absolute():
short = absolute.relative_to(cwd)
else:
short = relative.relative_to(cwd)
except ValueError:
short = relative
except RuntimeError:
short = relative
short = relative_to_short(short)
return Filename(
absolute=str(absolute),
relative=relative.as_posix(),
short=short.as_posix(),
used=str(used_filename),
)
|
PypiClean
|
/antchain_ak_1fef8815252948ebb01da07898dd0fb2-1.0.2.tar.gz/antchain_ak_1fef8815252948ebb01da07898dd0fb2-1.0.2/antchain_sdk_ak_1fef8815252948ebb01da07898dd0fb2/client.py
|
import time
from Tea.exceptions import TeaException, UnretryableException
from Tea.request import TeaRequest
from Tea.core import TeaCore
from antchain_alipay_util.antchain_utils import AntchainUtils
from typing import Dict
from antchain_sdk_ak_1fef8815252948ebb01da07898dd0fb2 import models as ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_tea_util import models as util_models
from alibabacloud_rpc_util.client import Client as RPCUtilClient
class Client:
_endpoint: str = None
_region_id: str = None
_access_key_id: str = None
_access_key_secret: str = None
_protocol: str = None
_user_agent: str = None
_read_timeout: int = None
_connect_timeout: int = None
_http_proxy: str = None
_https_proxy: str = None
_socks_5proxy: str = None
_socks_5net_work: str = None
_no_proxy: str = None
_max_idle_conns: int = None
_security_token: str = None
_max_idle_time_millis: int = None
_keep_alive_duration_millis: int = None
_max_requests: int = None
_max_requests_per_host: int = None
def __init__(
self,
config: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.Config,
):
"""
Init client with Config
@param config: config contains the necessary information to create a client
"""
if UtilClient.is_unset(config):
raise TeaException({
'code': 'ParameterMissing',
'message': "'config' can not be unset"
})
self._access_key_id = config.access_key_id
self._access_key_secret = config.access_key_secret
self._security_token = config.security_token
self._endpoint = config.endpoint
self._protocol = config.protocol
self._user_agent = config.user_agent
self._read_timeout = UtilClient.default_number(config.read_timeout, 20000)
self._connect_timeout = UtilClient.default_number(config.connect_timeout, 20000)
self._http_proxy = config.http_proxy
self._https_proxy = config.https_proxy
self._no_proxy = config.no_proxy
self._socks_5proxy = config.socks_5proxy
self._socks_5net_work = config.socks_5net_work
self._max_idle_conns = UtilClient.default_number(config.max_idle_conns, 60000)
self._max_idle_time_millis = UtilClient.default_number(config.max_idle_time_millis, 5)
self._keep_alive_duration_millis = UtilClient.default_number(config.keep_alive_duration_millis, 5000)
self._max_requests = UtilClient.default_number(config.max_requests, 100)
self._max_requests_per_host = UtilClient.default_number(config.max_requests_per_host, 100)
def do_request(
self,
version: str,
action: str,
protocol: str,
method: str,
pathname: str,
request: dict,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> dict:
"""
Encapsulate the request and invoke the network
@param action: api name
@param protocol: http or https
@param method: e.g. GET
@param pathname: pathname of every api
@param request: which contains request params
@param runtime: which controls some details of call api, such as retry times
@return: the response
"""
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'maxIdleTimeMillis': self._max_idle_time_millis,
'keepAliveDurationMillis': self._keep_alive_duration_millis,
'maxRequests': self._max_requests,
'maxRequestsPerHost': self._max_requests_per_host,
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl,
# key-value pairs, kept for map compatibility
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
_request.protocol = UtilClient.default_string(self._protocol, protocol)
_request.method = method
_request.pathname = pathname
_request.query = {
'method': action,
'version': version,
'sign_type': 'HmacSHA1',
'req_time': AntchainUtils.get_timestamp(),
'req_msg_id': AntchainUtils.get_nonce(),
'access_key': self._access_key_id,
'base_sdk_version': 'TeaSDK-2.0',
'sdk_version': '1.0.2',
'_prod_code': 'ak_1fef8815252948ebb01da07898dd0fb2',
'_prod_channel': 'saas'
}
if not UtilClient.empty(self._security_token):
_request.query['security_token'] = self._security_token
_request.headers = TeaCore.merge({
'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),
'user-agent': UtilClient.get_user_agent(self._user_agent)
}, headers)
tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))
_request.body = UtilClient.to_form_string(tmp)
_request.headers['content-type'] = 'application/x-www-form-urlencoded'
signed_param = TeaCore.merge(_request.query,
RPCUtilClient.query(request))
_request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
raw = UtilClient.read_as_string(_response.body)
obj = UtilClient.parse_json(raw)
res = UtilClient.assert_as_map(obj)
resp = UtilClient.assert_as_map(res.get('response'))
if AntchainUtils.has_error(raw, self._access_key_secret):
raise TeaException({
'message': resp.get('result_msg'),
'data': resp,
'code': resp.get('result_code')
})
return resp
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
async def do_request_async(
self,
version: str,
action: str,
protocol: str,
method: str,
pathname: str,
request: dict,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> dict:
"""
Encapsulate the request and invoke the network
@param action: api name
@param protocol: http or https
@param method: e.g. GET
@param pathname: pathname of every api
@param request: which contains request params
@param runtime: which controls some details of call api, such as retry times
@return: the response
"""
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'maxIdleTimeMillis': self._max_idle_time_millis,
'keepAliveDurationMillis': self._keep_alive_duration_millis,
'maxRequests': self._max_requests,
'maxRequestsPerHost': self._max_requests_per_host,
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl,
# key-value pairs, kept for map compatibility
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
_request.protocol = UtilClient.default_string(self._protocol, protocol)
_request.method = method
_request.pathname = pathname
_request.query = {
'method': action,
'version': version,
'sign_type': 'HmacSHA1',
'req_time': AntchainUtils.get_timestamp(),
'req_msg_id': AntchainUtils.get_nonce(),
'access_key': self._access_key_id,
'base_sdk_version': 'TeaSDK-2.0',
'sdk_version': '1.0.2',
'_prod_code': 'ak_1fef8815252948ebb01da07898dd0fb2',
'_prod_channel': 'saas'
}
if not UtilClient.empty(self._security_token):
_request.query['security_token'] = self._security_token
_request.headers = TeaCore.merge({
'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),
'user-agent': UtilClient.get_user_agent(self._user_agent)
}, headers)
tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))
_request.body = UtilClient.to_form_string(tmp)
_request.headers['content-type'] = 'application/x-www-form-urlencoded'
signed_param = TeaCore.merge(_request.query,
RPCUtilClient.query(request))
_request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)
_last_request = _request
_response = await TeaCore.async_do_action(_request, _runtime)
raw = await UtilClient.read_as_string_async(_response.body)
obj = UtilClient.parse_json(raw)
res = UtilClient.assert_as_map(obj)
resp = UtilClient.assert_as_map(res.get('response'))
if AntchainUtils.has_error(raw, self._access_key_secret):
raise TeaException({
'message': resp.get('result_msg'),
'data': resp,
'code': resp.get('result_code')
})
return resp
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def auth_antchain_bbp_customer(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerResponse:
"""
Description: Authenticate the identity of an enterprise or individual
Summary: Unified customer authentication API
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.auth_antchain_bbp_customer_ex(request, headers, runtime)
async def auth_antchain_bbp_customer_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerResponse:
"""
Description: Authenticate the identity of an enterprise or individual
Summary: Unified customer authentication API
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.auth_antchain_bbp_customer_ex_async(request, headers, runtime)
def auth_antchain_bbp_customer_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerResponse:
"""
Description: Authenticate the identity of an enterprise or individual
Summary: Unified customer authentication API
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerResponse(),
self.do_request('1.0', 'antchain.bbp.customer.auth', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def auth_antchain_bbp_customer_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerResponse:
"""
Description: Authenticate the identity of an enterprise or individual
Summary: Unified customer authentication API
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.AuthAntchainBbpCustomerResponse(),
await self.do_request_async('1.0', 'antchain.bbp.customer.auth', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def create_antchain_bbp_customer(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerResponse:
"""
Description: Zhike unified customer registration
Summary: Unified customer registration API
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_antchain_bbp_customer_ex(request, headers, runtime)
async def create_antchain_bbp_customer_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerResponse:
"""
Description: Zhike unified customer registration
Summary: Unified customer registration API
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_antchain_bbp_customer_ex_async(request, headers, runtime)
def create_antchain_bbp_customer_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerResponse:
"""
Description: Zhike unified customer registration
Summary: Unified customer registration API
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerResponse(),
self.do_request('1.0', 'antchain.bbp.customer.create', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def create_antchain_bbp_customer_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerResponse:
"""
Description: Zhike unified customer registration
Summary: Unified customer registration API
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.CreateAntchainBbpCustomerResponse(),
await self.do_request_async('1.0', 'antchain.bbp.customer.create', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_antchain_bbp_customer(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerResponse:
"""
Description: Query customer information
Summary: Query customer information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_antchain_bbp_customer_ex(request, headers, runtime)
async def query_antchain_bbp_customer_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerResponse:
"""
Description: Query customer information
Summary: Query customer information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_antchain_bbp_customer_ex_async(request, headers, runtime)
def query_antchain_bbp_customer_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerResponse:
"""
Description: Query customer information
Summary: Query customer information
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerResponse(),
self.do_request('1.0', 'antchain.bbp.customer.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_antchain_bbp_customer_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerResponse:
"""
Description: Query customer information
Summary: Query customer information
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerResponse(),
await self.do_request_async('1.0', 'antchain.bbp.customer.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def apply_antchain_bbp_contract_rule(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleResponse:
"""
Description: API for defining the outsourcing salary scheme, unique by [service provider, project category, domain, role, qualification]
Summary: API for publishing the outsourcing salary rule contract
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.apply_antchain_bbp_contract_rule_ex(request, headers, runtime)
async def apply_antchain_bbp_contract_rule_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleResponse:
"""
Description: API for defining the outsourcing salary scheme, unique by [service provider, project category, domain, role, qualification]
Summary: API for publishing the outsourcing salary rule contract
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.apply_antchain_bbp_contract_rule_ex_async(request, headers, runtime)
def apply_antchain_bbp_contract_rule_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleResponse:
"""
Description: API for defining the outsourcing salary scheme, unique by [service provider, project category, domain, role, qualification]
Summary: API for publishing the outsourcing salary rule contract
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleResponse(),
self.do_request('1.0', 'antchain.bbp.contract.rule.apply', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def apply_antchain_bbp_contract_rule_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleResponse:
"""
Description: API for defining the outsourcing salary scheme, unique by [service provider, project category, domain, role, qualification]
Summary: API for publishing the outsourcing salary rule contract
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ApplyAntchainBbpContractRuleResponse(),
await self.do_request_async('1.0', 'antchain.bbp.contract.rule.apply', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def get_antchain_bbp_contract_rule(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleResponse:
"""
Description: Call the on-chain contract API to query the outsourcing salary scheme
Summary: API for querying the outsourcing salary rule contract
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_antchain_bbp_contract_rule_ex(request, headers, runtime)
async def get_antchain_bbp_contract_rule_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleResponse:
"""
Description: Call the on-chain contract API to query the outsourcing salary scheme
Summary: API for querying the outsourcing salary rule contract
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_antchain_bbp_contract_rule_ex_async(request, headers, runtime)
def get_antchain_bbp_contract_rule_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleResponse:
"""
Description: Call the on-chain contract API to query the outsourcing salary scheme
Summary: API for querying the outsourcing salary rule contract
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleResponse(),
self.do_request('1.0', 'antchain.bbp.contract.rule.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def get_antchain_bbp_contract_rule_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleResponse:
"""
Description: Call the on-chain contract API to query the outsourcing salary scheme
Summary: API for querying the outsourcing salary rule contract
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.GetAntchainBbpContractRuleResponse(),
await self.do_request_async('1.0', 'antchain.bbp.contract.rule.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def exec_antchain_bbp_contract_reconciliation(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationResponse:
"""
Description: Execute the reconciliation statement
Summary: Reconciliation statement execution API
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.exec_antchain_bbp_contract_reconciliation_ex(request, headers, runtime)
async def exec_antchain_bbp_contract_reconciliation_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationResponse:
"""
Description: Execute the reconciliation statement
Summary: Reconciliation statement execution API
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.exec_antchain_bbp_contract_reconciliation_ex_async(request, headers, runtime)
def exec_antchain_bbp_contract_reconciliation_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationResponse:
"""
Description: Execute the reconciliation statement
Summary: Reconciliation statement execution API
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationResponse(),
self.do_request('1.0', 'antchain.bbp.contract.reconciliation.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def exec_antchain_bbp_contract_reconciliation_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationResponse:
"""
Description: Execute the reconciliation statement
Summary: Reconciliation statement execution API
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ExecAntchainBbpContractReconciliationResponse(),
await self.do_request_async('1.0', 'antchain.bbp.contract.reconciliation.exec', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def confirm_antchain_bbp_contract_reconciliation(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationResponse:
"""
Description: Confirm the settlement statement
Summary: Confirm the settlement statement
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.confirm_antchain_bbp_contract_reconciliation_ex(request, headers, runtime)
async def confirm_antchain_bbp_contract_reconciliation_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationResponse:
"""
Description: Confirm the settlement statement
Summary: Confirm the settlement statement
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.confirm_antchain_bbp_contract_reconciliation_ex_async(request, headers, runtime)
def confirm_antchain_bbp_contract_reconciliation_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationResponse:
"""
Description: Confirm the settlement statement
Summary: Confirm the settlement statement
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationResponse(),
self.do_request('1.0', 'antchain.bbp.contract.reconciliation.confirm', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def confirm_antchain_bbp_contract_reconciliation_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationResponse:
"""
Description: Confirm the settlement statement
Summary: Confirm the settlement statement
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ConfirmAntchainBbpContractReconciliationResponse(),
await self.do_request_async('1.0', 'antchain.bbp.contract.reconciliation.confirm', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_antchain_bbp_contract_reconciliation(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationResponse:
"""
Description: Query the settlement statement
Summary: Query the settlement statement
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_antchain_bbp_contract_reconciliation_ex(request, headers, runtime)
async def query_antchain_bbp_contract_reconciliation_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationResponse:
"""
Description: Query the settlement statement
Summary: Query the settlement statement
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_antchain_bbp_contract_reconciliation_ex_async(request, headers, runtime)
def query_antchain_bbp_contract_reconciliation_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationResponse:
"""
Description: Query the settlement statement
Summary: Query the settlement statement
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationResponse(),
self.do_request('1.0', 'antchain.bbp.contract.reconciliation.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_antchain_bbp_contract_reconciliation_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationResponse:
"""
Description: Query the settlement statement
Summary: Query the settlement statement
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpContractReconciliationResponse(),
await self.do_request_async('1.0', 'antchain.bbp.contract.reconciliation.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_demo_saas_test_testa(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaResponse:
"""
Description: testa
Summary: API for testing
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_demo_saas_test_testa_ex(request, headers, runtime)
async def query_demo_saas_test_testa_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaResponse:
"""
Description: testa
Summary: API for testing
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_demo_saas_test_testa_ex_async(request, headers, runtime)
def query_demo_saas_test_testa_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaResponse:
"""
Description: testa
Summary: API for testing
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaResponse(),
self.do_request('1.0', 'demo.saas.test.testa.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_demo_saas_test_testa_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaResponse:
"""
Description: testa
Summary: API for testing
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryDemoSaasTestTestaResponse(),
await self.do_request_async('1.0', 'demo.saas.test.testa.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def reset_demo_saas_test_testd(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdResponse:
"""
Description: Tenant traffic test for shared capabilities
Summary: Tenant traffic test for shared capabilities
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.reset_demo_saas_test_testd_ex(request, headers, runtime)
async def reset_demo_saas_test_testd_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdRequest,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdResponse:
"""
Description: Tenant traffic test for shared capabilities
Summary: Tenant traffic test for shared capabilities
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.reset_demo_saas_test_testd_ex_async(request, headers, runtime)
def reset_demo_saas_test_testd_ex(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdResponse:
"""
Description: Tenant traffic test for shared capabilities
Summary: Tenant traffic test for shared capabilities
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdResponse(),
self.do_request('1.0', 'demo.saas.test.testd.reset', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def reset_demo_saas_test_testd_ex_async(
self,
request: ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdResponse:
"""
Description: Tenant traffic test for shared capabilities
Summary: Tenant traffic test for shared capabilities
"""
UtilClient.validate_model(request)
return TeaCore.from_map(
ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.ResetDemoSaasTestTestdResponse(),
await self.do_request_async('1.0', 'demo.saas.test.testd.reset', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
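# Illustrative sketch (not part of the generated SDK): a hypothetical end-to-end call.
# The credential values are placeholders, and QueryAntchainBbpCustomerRequest may
# require additional fields in practice; consult the models module for the schema.
def _example_query_customer():
    config = ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.Config()
    config.access_key_id = '<access-key-id>'          # placeholder
    config.access_key_secret = '<access-key-secret>'  # placeholder
    config.endpoint = 'openapi.antchain.antgroup.com'
    client = Client(config)
    request = ak__1fef_8815252948ebb_01da_07898dd_0fb_2_models.QueryAntchainBbpCustomerRequest()
    return client.query_antchain_bbp_customer(request)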
|
PypiClean
|
/dwave_preprocessing-0.6.3-cp39-cp39-win_amd64.whl/dwave/preprocessing/composites/spin_reversal_transform.py
|
import typing
import dimod
import numpy as np
from dimod import Vartype
__all__ = ['SpinReversalTransformComposite']
class SpinReversalTransformComposite(dimod.core.Sampler, dimod.core.Composite):
"""Composite for applying spin reversal transform preprocessing.
Spin reversal transforms (or "gauge transformations") are applied
by randomly flipping the spin of variables in the Ising problem. After
sampling the transformed Ising problem, the same bits are flipped in the
resulting sample [#km]_.
Args:
sampler: A `dimod` sampler object.
seed: As passed to :class:`numpy.random.default_rng`.
Examples:
This example composes a dimod ExactSolver sampler with spin transforms then
uses it to sample an Ising problem.
>>> from dimod import ExactSolver
>>> from dwave.preprocessing.composites import SpinReversalTransformComposite
>>> base_sampler = ExactSolver()
>>> composed_sampler = SpinReversalTransformComposite(base_sampler)
... # Sample an Ising problem
>>> response = composed_sampler.sample_ising({'a': -0.5, 'b': 1.0}, {('a', 'b'): -1})
>>> response.first.sample
{'a': -1, 'b': -1}
References
----------
.. [#km] Andrew D. King and Catherine C. McGeoch. Algorithm engineering
for a quantum annealing platform. https://arxiv.org/abs/1410.2628,
2014.
"""
_children: typing.List[dimod.core.Sampler]
_parameters: typing.Dict[str, typing.Sequence[str]]
_properties: typing.Dict[str, typing.Any]
def __init__(self, child: dimod.core.Sampler, *, seed=None):
self._child = child
self.rng = np.random.default_rng(seed)
@property
def children(self) -> typing.List[dimod.core.Sampler]:
try:
return self._children
except AttributeError:
pass
self._children = children = [self._child]
return children
@property
def parameters(self) -> typing.Dict[str, typing.Sequence[str]]:
try:
return self._parameters
except AttributeError:
pass
self._parameters = parameters = dict(spin_reversal_variables=tuple())
parameters.update(self._child.parameters)
return parameters
@property
def properties(self) -> typing.Dict[str, typing.Any]:
try:
return self._properties
except AttributeError:
pass
self._properties = dict(child_properties=self._child.properties)
return self._properties
class _SampleSets:
def __init__(self, samplesets: typing.List[dimod.SampleSet]):
self.samplesets = samplesets
def done(self) -> bool:
return all(ss.done() for ss in self.samplesets)
@dimod.decorators.nonblocking_sample_method
def sample(self, bqm: dimod.BinaryQuadraticModel, *,
num_spin_reversal_transforms: int = 1,
**kwargs,
):
"""Sample from the binary quadratic model.
Args:
bqm: Binary quadratic model to be sampled from.
num_spin_reversal_transforms:
Number of spin reversal transform runs.
A value of ``0`` will not transform the problem.
If you specify a nonzero value, each spin reversal transform
will result in an independent run of the child sampler.
Returns:
A sample set. Note that for a sampler that returns ``num_reads`` samples,
the sample set will contain ``num_reads*num_spin_reversal_transforms`` samples.
Examples:
This example runs 100 spin reversal transforms on a two-variable QUBO problem.
>>> from dimod import ExactSolver
>>> from dwave.preprocessing.composites import SpinReversalTransformComposite
>>> base_sampler = ExactSolver()
>>> composed_sampler = SpinReversalTransformComposite(base_sampler)
...
>>> Q = {('a', 'a'): -1, ('b', 'b'): -1, ('a', 'b'): 2}
>>> response = composed_sampler.sample_qubo(Q,
... num_spin_reversal_transforms=100)
>>> len(response)
400
"""
sampler = self._child
# No SRTs, so just pass the problem through
if not num_spin_reversal_transforms or not bqm.num_variables:
sampleset = sampler.sample(bqm, **kwargs)
# yield twice because we're using the @nonblocking_sample_method
yield sampleset # this one signals done()-ness
yield sampleset # this is the one actually used by the user
return
# we'll be modifying the BQM, so make a copy
bqm = bqm.copy()
# We maintain the Leap behavior that num_spin_reversal_transforms == 1
# corresponds to a single problem with randomly flipped variables.
# Get the SRT matrix
SRT = self.rng.random((num_spin_reversal_transforms, bqm.num_variables)) > .5
# Submit the problems
samplesets: typing.List[dimod.SampleSet] = []
flipped = np.zeros(bqm.num_variables, dtype=bool) # what variables are currently flipped
for i in range(num_spin_reversal_transforms):
# determine what needs to be flipped
transform = flipped != SRT[i, :]
# apply the transform
for v, flip in zip(bqm.variables, transform):
if flip:
bqm.flip_variable(v)
flipped[transform] = ~flipped[transform]
samplesets.append(sampler.sample(bqm, **kwargs))
# Yield a view of the samplesets that reports done()-ness
yield self._SampleSets(samplesets)
# Undo the SRTs according to vartype
if bqm.vartype is Vartype.BINARY:
for i, sampleset in enumerate(samplesets):
sampleset.record.sample[:, SRT[i, :]] = 1 - sampleset.record.sample[:, SRT[i, :]]
elif bqm.vartype is Vartype.SPIN:
for i, sampleset in enumerate(samplesets):
sampleset.record.sample[:, SRT[i, :]] *= -1
else:
raise RuntimeError("unexpected vartype")
# finally combine all samplesets together
yield dimod.concatenate(samplesets)
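# Illustrative sketch (not part of the original module): seeding the composite makes
# the randomly drawn spin reversal transforms reproducible between runs. The Ising
# problem below is a made-up two-variable example.
def _example_seeded_transforms():
    base_sampler = dimod.ExactSolver()
    sampler = SpinReversalTransformComposite(base_sampler, seed=42)
    bqm = dimod.BinaryQuadraticModel.from_ising({'a': -0.5, 'b': 1.0}, {('a', 'b'): -1})
    return sampler.sample(bqm, num_spin_reversal_transforms=2)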
|
PypiClean
|
/dbc_reader-0.1.2.tar.gz/dbc_reader-0.1.2/README.md
|
# dbc_reader for python
Python utility classes to read **DATASUS** compressed DBF files (*.dbc), which are distributed without compliance with the DBF and PKWare specifications.
## How to use
### Install
```bash
pip install dbc_reader
```
### Fast mode
```python
from dbc_reader import DbcReader
rows = [row for row in DbcReader('my.dbc')]
```
### Full control
```python
from dbc_reader import DbcReader
with open("my.dbc", 'rb') as f:
dbc_reader = DbcReader(f)
# File info
print(dbc_reader.encoding)
print(dbc_reader.actual_record)
print(dbc_reader.records)
print(dbc_reader.last_update)
print(dbc_reader.file_size)
# Table info
print(dbc_reader.definition.dbf_format)
print(dbc_reader.definition.headerlen)
print(dbc_reader.definition.numfields)
print(dbc_reader.definition.record_size)
print(dbc_reader.definition.terminator)
print(dbc_reader.definition.actual_record)
# Fields info
for field in dbc_reader.definition.fields:
print(field.order, field.name, field.type, field.size, field.decimals, field.flags)
for row in DbcReader('my.dbc'):
print(row)
```
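### Export rows to CSV
A minimal sketch, assuming each row yielded by `DbcReader` behaves like a mapping keyed by field name; `my.dbc` and `my.csv` are placeholder file names.
```python
import csv
from dbc_reader import DbcReader
rows = [dict(row) for row in DbcReader('my.dbc')]
if rows:
    with open('my.csv', 'w', newline='') as out:
        writer = csv.DictWriter(out, fieldnames=list(rows[0].keys()))
        writer.writeheader()
        writer.writerows(rows)
```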
## Development and test
```bash
docker build -t python_dbc_reader .
docker run -it --rm -v "$PWD:/app" -w /app python_dbc_reader bash -c 'flake8 . --count --max-complexity=11 --max-line-length=404 --statistics && coverage run -m unittest tests && coverage report -m'
```
|
PypiClean
|
/repl_openbb-2.1.7-py3-none-any.whl/openbb_terminal/core/log/collection/s3_sender.py
|
from pathlib import Path
from shutil import copyfile
from typing import Any, Dict
# IMPORTATION THIRDPARTY
import requests
try:
import boto3
except ImportError:
WITH_BOTO3 = False
else:
WITH_BOTO3 = True
# IMPORTATION INTERNAL
from openbb_terminal.core.log.constants import (
DEFAULT_API_URL,
DEFAULT_BUCKET,
)
from openbb_terminal.core.log.generation.settings import AWSSettings
# DO NOT USE THE FILE LOGGER IN THIS MODULE
def send_to_s3_directly(
aws_access_key_id: str,
aws_secret_access_key: str,
bucket: str,
file: Path,
object_key: str,
):
if not WITH_BOTO3:
raise ModuleNotFoundError("Library `boto3` is required to directly access S3.")
s3_client = boto3.client(
service_name="s3",
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
)
s3_client.upload_file(str(file), bucket, object_key)
def fetch_presigned_url(api_url: str, object_key: str) -> Dict[str, Any]:
raw_response = requests.put(
json={"object_key": object_key},
timeout=3,
url=api_url,
)
raw_response.raise_for_status()
response = raw_response.json()
return response
def send_to_s3_using_presigned_url(
api_url: str,
file: Path,
object_key: str,
):
presigned_info = fetch_presigned_url(api_url=api_url, object_key=object_key)
with open(file, "rb") as f:
files = {"file": f}
raw_response = requests.post(
data=presigned_info["fields"],
files=files, # type: ignore
timeout=3,
url=presigned_info["url"],
)
raw_response.raise_for_status()
# SUCCESS IF STATUS_CODE == 204
def send_to_s3(
archives_file: Path,
aws_settings: AWSSettings,
file: Path,
object_key: str,
tmp_file: Path,
last: bool = False,
):
"""Send a file into a s3 bucket.
Args:
archives_file (Path):
Destination Path after processing.
aws_settings (AWSSettings):
AWS settings.
file (Path):
Path of the file to process.
object_key (str):
File location inside the s3 bucket.
tmp_file (Path):
Temporary Path in which to put the file during processing.
last (bool, optional):
Whether or not this is the last sending before program exit.
Defaults to False.
Raises:
AttributeError:
If `file` is empty.
"""
api_url = DEFAULT_API_URL
bucket = DEFAULT_BUCKET
if file.stat().st_size <= 0:
file.unlink(missing_ok=True)
raise AttributeError(f"File is empty : {file}")
tmp_file.parent.mkdir(exist_ok=True)
if last:
copyfile(file, tmp_file)
else:
file.rename(tmp_file)
if (
not last
and aws_settings.aws_access_key_id != "REPLACE_ME"
and aws_settings.aws_secret_access_key != "REPLACE_ME"
):
send_to_s3_directly(
aws_access_key_id=aws_settings.aws_access_key_id,
aws_secret_access_key=aws_settings.aws_secret_access_key,
bucket=bucket,
file=tmp_file,
object_key=object_key,
)
else:
send_to_s3_using_presigned_url(
api_url=api_url,
file=tmp_file,
object_key=object_key,
)
archives_file.parent.mkdir(exist_ok=True)
tmp_file.rename(archives_file)
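# Illustrative sketch (not part of the original module): hypothetical paths and object
# key showing how send_to_s3 would typically be driven. With last=True the source file
# is copied to tmp_file instead of being renamed, so the original log file is kept.
def _example_send_logs(aws_settings: AWSSettings):
    send_to_s3(
        archives_file=Path("logs/archives/session.log"),  # placeholder destination
        aws_settings=aws_settings,
        file=Path("logs/session.log"),                    # placeholder source
        object_key="terminal/logs/session.log",           # placeholder S3 key
        tmp_file=Path("logs/tmp/session.log"),            # placeholder staging path
        last=True,
    )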
|
PypiClean
|
/nsxramlclient_py3-2.0.8.tar.gz/nsxramlclient_py3-2.0.8/nsxramlclient/client.py
|
__author__ = 'yfauser'
import re
import pprint
import pyraml.parser
from lxml import etree as et
from . import http_session
from . import xmloperations
class NsxClient(object):
def __init__(self, raml_file, nsxmanager, nsx_username, nsx_password, debug=None, verify=None,
suppress_warnings=None, fail_mode=None):
"""
:param raml_file: This mandatory parameter is a RAML File used as the basis of all URL compositions and
to extract body schemas and convert them into python dictionaries
:param nsxmanager: This mandatory parameter is either the hostname or IP Address of the NSX Manager
:param nsx_username: This mandatory parameter is the Username on NSX Manager used to do API Calls
:param nsx_password: This mandatory parameter is the Password of the User used to do API Calls
:param debug: Optional: If set to True, the client will print extensive HTTP session information to stdout.
Default: False
:param verify: Optional: If set to True, the client will strictly verify the certificate passed by NSX Manager.
Default: False
:param suppress_warnings: Optional: If set to True, the client will suppress the warning otherwise printed
when NSX Manager uses a self-signed certificate. Default: True
:param fail_mode: Optional: If not set, the client will exit using sys.exit when receiving any error status code
from NSX like 400, etc. If fail_mode is set to 'raise', the exception nsxramlclient.exceptions.NsxError
will be raised with status being the HTTP status code received and msg being the error message returned
by NSX in the body. If set to 'continue', no error will be raised, and the status and body is returned
like in successful cases. The default is 'exit'
:return: Returns a NsxClient Session Object
"""
self._nsx_raml_file = raml_file
self._nsxraml = NsxRaml(self._nsx_raml_file, nsxmanager)
self._nsx_username = nsx_username
self._nsx_password = nsx_password
self._debug = debug
self._verify = verify
if suppress_warnings:
self._suppress_warnings = suppress_warnings
else:
self._suppress_warnings = True
if fail_mode:
self.fail_mode = fail_mode
else:
self.fail_mode = 'exit'
self._httpsession = http_session.Session(self._nsx_username, self._nsx_password, self._debug, self._verify,
self._suppress_warnings, self.fail_mode)
def read(self, searched_resource, uri_parameters=None, request_body_dict=None, query_parameters_dict=None,
additional_headers=None):
"""
This method is used to read a resource using the GET HTTP Method
:param searched_resource: A valid display name in the RAML file matching the resource
:param uri_parameters: A dictionary with the URI Parameters expected by the resource
:param request_body_dict: A dictionary containing the body parameter in the format
{'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it
:param query_parameters_dict: A dictionary containing optional or mandatory query parameters
:param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used
with the dfw calls
:return: This method returns a dictionary containing the received header and body data
"""
return self._request(searched_resource, 'get', uri_parameters, request_body_dict, query_parameters_dict,
additional_headers)
def create(self, searched_resource, uri_parameters=None, request_body_dict=None, query_parameters_dict=None,
additional_headers=None):
"""
This method is used to create a resource using the POST HTTP Method
:param searched_resource: A valid display name in the RAML file matching the resource
:param uri_parameters: A dictionary with the URI Parameters expected by the resource
:param request_body_dict: A dictionary containing the body parameter in the format
{'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it
:param query_parameters_dict: A dictionary containing optional or mandatory query parameters
:param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used
with the dfw calls
:return: This method returns a dictionary containing the received header and body data
NOTE: The _resource_url and _request_body are constructed and passed by the decorator function
"""
return self._request(searched_resource, 'post', uri_parameters, request_body_dict, query_parameters_dict,
additional_headers)
def update(self, searched_resource, uri_parameters=None, request_body_dict=None, query_parameters_dict=None,
additional_headers=None):
"""
This method is used to update a resource using the PUT HTTP Method
:param searched_resource: A valid display name in the RAML file matching the resource
:param uri_parameters: A dictionary with the URI Parameters expected by the resource
:param request_body_dict: A dictionary containing the body parameter in the format
{'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it
:param query_parameters_dict: A dictionary containing optional or mandatory query parameters
:param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used
with the dfw calls
:return: This method returns a dictionary containing the received header and body data
NOTE: The _resource_url and _request_body are constructed and passed by the decorator function
"""
return self._request(searched_resource, 'put', uri_parameters, request_body_dict, query_parameters_dict,
additional_headers)
def delete(self, searched_resource, uri_parameters=None, request_body_dict=None, query_parameters_dict=None,
additional_headers=None):
"""
This method is used to delete a resource using the DELETE HTTP Method
:param searched_resource: A valid display name in the RAML file matching the resource
:param uri_parameters: A dictionary with the URI Parameters expected by the resource
:param request_body_dict: A dictionary containing the body parameter in the format
{'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it
:param query_parameters_dict: A dictionary containing optional or mandatory query parameters
:param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used
with the dfw calls
:return: This method returns a dictionary containing the received header and body data
NOTE: The _resource_url and _request_body are constructed and passed by the decorator function
"""
return self._request(searched_resource, 'delete', uri_parameters, request_body_dict, query_parameters_dict,
additional_headers)
def _request(self, searched_resource, method, uri_parameters=None, request_body_dict=None,
query_parameters_dict=None, additional_headers=None):
found_res_object = self._nsxraml.find_resource_recursively(searched_resource)
assert found_res_object, 'The searched displayName could not be found in RAML File'
self._nsxraml.check_resource_methods_by_displayname(searched_resource, method)
resource_url = self._nsxraml.contruct_resource_url(searched_resource, uri_parameters)
query_parameters = self._nsxraml.get_method_mandatory_query_parameters(searched_resource, method)
if request_body_dict:
request_body = xmloperations.dict_to_xml(request_body_dict)
else:
request_body = None
if query_parameters_dict:
resource_url = self._nsxraml.add_query_parameter_url(resource_url, searched_resource, method,
query_parameters_dict)
else:
assert not query_parameters, 'missing mandatory query parameters {}'.format(query_parameters)
mandatory_add_headers = self._nsxraml.get_method_mandatory_add_headers(searched_resource, method)
if additional_headers:
assert set(mandatory_add_headers).issubset(set(additional_headers.keys())), \
'missing mandatory additional headers {}'.format(mandatory_add_headers)
headers = additional_headers
else:
assert mandatory_add_headers is None, 'missing mandatory additional headers {}'.format(mandatory_add_headers)
headers = None
response = self._httpsession.do_request(method, resource_url, data=request_body, headers=headers)
# TODO: Add a check for mandatory body attributes (if needed)
return response
def view_resource_body_schema(self, searched_resource, method):
xml_schema_result = self._nsxraml.get_xml_schema_by_displayname(searched_resource, method)
print(et.tostring(xml_schema_result, pretty_print=True))
def view_resource_body_example(self, searched_resource, method, remove_content=None, remove_comments=None):
xml_schema_result = self._nsxraml.get_xml_example_by_displayname(searched_resource, method,
remove_comments=remove_comments,
remove_content=remove_content)
print(et.tostring(xml_schema_result, pretty_print=True))
def extract_resource_body_schema(self, searched_resource, method):
# NOTE: This method is deprecated and will be removed in a future version
xml_schema_result = self._nsxraml.get_xml_schema_by_displayname(searched_resource, method)
print('\033[91m' + "DEPRECATION WARNING: This method is deprecated in nsxramlclient v2.x and " \
"will be removed in future.\nPlease start using the method extract_resource_body_example " \
"instead.\nThis method does not support the NSXv 6.2.4 and later RAML specs" + '\033[0m')
return xmloperations.xml_to_dict(xml_schema_result)
def extract_resource_body_example(self, searched_resource, method, remove_content=None, remove_comments=None):
xml_schema_result = self._nsxraml.get_xml_example_by_displayname(searched_resource, method,
remove_comments=remove_comments,
remove_content=remove_content)
return xmloperations.xml_to_dict(xml_schema_result)
@staticmethod
def view_response(ordered_dict):
pretty_printer = pprint.PrettyPrinter()
print('HTTP status code:\n{}\n'.format(ordered_dict['status']))
if ordered_dict['location']:
print('HTTP location header:\n{}\n'.format(ordered_dict['location']))
if ordered_dict['objectId']:
print('NSX Object Id:\n{}\n'.format(ordered_dict['objectId']))
if ordered_dict['Etag']:
print('Etag Header:\n{}\n'.format(ordered_dict['Etag']))
if ordered_dict['body']:
print('HTTP Body Content:')
pretty_printer.pprint(ordered_dict['body'])
@staticmethod
def view_body_dict(body_dict):
pretty_printer = pprint.PrettyPrinter()
pretty_printer.pprint(body_dict)
def view_resource_display_names(self):
output_text = []
for display_name, details in sorted(self._nsxraml.list_all_resources().items()):
output_text.append('Displayname: {}\nDescription: {}\nSupports: {}\n'.format(display_name,
details[0],
details[1]))
if details[2]:
output_text.append('uriParameters: {}\n'.format(details[2]))
if details[3]:
output_text.append('queryParameters: {}\n'.format(details[3]))
if details[4]:
output_text.append('Add. Headers: {}\n'.format(details[4]))
output_text.append('\n')
print(''.join(output_text))
def read_all_pages(self, searched_resource, uri_parameters=None, request_body_dict=None,
query_parameters_dict=None, additional_headers=None):
supported_objects = ['virtualWires', 'pagedEdgeList']
first_page = self._request(searched_resource, 'get', uri_parameters, request_body_dict, query_parameters_dict,
additional_headers)['body']
first_key = list(first_page.keys())[0]
assert first_key in supported_objects, 'unsupported object {}, currently only {} ' \
'are supported'.format(first_key, supported_objects)
if first_key == 'virtualWires':
paging_info = first_page['virtualWires']['dataPage']['pagingInfo']
total_count = int(paging_info['totalCount'])
page_size = int(paging_info['pageSize'])
start_index = int(paging_info['startIndex'])
if not query_parameters_dict:
query_parameters_dict = {'pagesize': paging_info['pageSize'], 'startindex': paging_info['startIndex']}
if total_count == 0:
return []
elif total_count == 1:
return [first_page['virtualWires']['dataPage']['virtualWire']]
elif page_size >= total_count:
return first_page['virtualWires']['dataPage']['virtualWire']
collected_values = first_page['virtualWires']['dataPage']['virtualWire']
for page_start_index in range(start_index+page_size, total_count, page_size):
query_parameters_dict['startindex'] = str(page_start_index)
sub_page = self._request(searched_resource, 'get', uri_parameters, request_body_dict,
query_parameters_dict, additional_headers)['body']
if isinstance(sub_page['virtualWires']['dataPage']['virtualWire'], dict):
collected_values.append(sub_page['virtualWires']['dataPage']['virtualWire'])
if isinstance(sub_page['virtualWires']['dataPage']['virtualWire'], list):
collected_values.extend(sub_page['virtualWires']['dataPage']['virtualWire'])
return collected_values
if first_key == 'pagedEdgeList':
paging_info = first_page['pagedEdgeList']['edgePage']['pagingInfo']
total_count = int(paging_info['totalCount'])
page_size = int(paging_info['pageSize'])
start_index = int(paging_info['startIndex'])
if not query_parameters_dict:
query_parameters_dict = {'pagesize': paging_info['pageSize'], 'startindex': paging_info['startIndex']}
if total_count == 0:
return []
elif total_count == 1:
return [first_page['pagedEdgeList']['edgePage']['edgeSummary']]
elif page_size >= total_count:
return first_page['pagedEdgeList']['edgePage']['edgeSummary']
collected_values = first_page['pagedEdgeList']['edgePage']['edgeSummary']
for page_start_index in range(start_index+page_size, total_count, page_size):
query_parameters_dict['startindex'] = str(page_start_index)
sub_page = self._request(searched_resource, 'get', uri_parameters, request_body_dict,
query_parameters_dict, additional_headers)['body']
if isinstance(sub_page['pagedEdgeList']['edgePage']['edgeSummary'], dict):
collected_values.append(sub_page['pagedEdgeList']['edgePage']['edgeSummary'])
if isinstance(sub_page['pagedEdgeList']['edgePage']['edgeSummary'], list):
collected_values.extend(sub_page['pagedEdgeList']['edgePage']['edgeSummary'])
return collected_values
@staticmethod
def normalize_list_return(input_object):
if not input_object:
return []
elif isinstance(input_object, dict):
return [input_object]
elif isinstance(input_object, list):
return input_object
else:
return []
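# A minimal usage sketch for the session class defined above (named NsxClient in its
# constructor docstring). The RAML file path, manager address, credentials and the
# 'vdnScopes' display name are placeholders and not part of this module; any NSX RAML
# spec and a resource display name from it can be substituted.
def _example_session_usage():
    from nsxramlclient.exceptions import NsxError  # exception type named in the constructor docstring
    client = NsxClient(raml_file='nsxvapi.raml', nsxmanager='nsxmanager.example.local',
                       nsx_username='admin', nsx_password='secret', fail_mode='raise')
    try:
        # read() resolves the display name against the RAML spec and issues a GET
        scopes = client.read('vdnScopes')
        client.view_response(scopes)
    except NsxError as error:
        # with fail_mode='raise', API errors surface here instead of calling sys.exit
        print('NSX returned HTTP {}: {}'.format(error.status, error.msg))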
class NsxRaml(object):
def __init__(self, raml_file, nsxmanager):
self._nsxraml = pyraml.parser.load(raml_file)
self._base_uri = re.sub(r'\{nsxmanager\}', nsxmanager, self._nsxraml.baseUri)
def find_resource_recursively(self, display_name, raml_resource_root=None):
# this method runs through the base raml file recursively until it finds the first
# occurrence of the searched displayName in the resource
if raml_resource_root:
searched_tuples = list(raml_resource_root.resources.items())
else:
searched_tuples = list(self._nsxraml.resources.items())
for resource_tuple in searched_tuples:
if resource_tuple[1].displayName == str(display_name):
return resource_tuple
elif resource_tuple[1].resources:
recursive_result = self.find_resource_recursively(display_name, raml_resource_root=resource_tuple[1])
if recursive_result:
return recursive_result
def contruct_resource_url(self, display_name, uri_parameters):
found_resource = self.find_resource_recursively(display_name)
resource_url_data = self._get_resource_url_data(found_resource)
resource_url = self._base_uri + resource_url_data['constructed_url']
if len(resource_url_data['uri_parameters']) > 0:
assert uri_parameters, 'The resource requires dict uri_parameters to be passed as kwarg'
try:
resource_uri_params = [uri_parameter for uri_parameter in uri_parameters
if resource_url_data['uri_parameters'][uri_parameter].required]
except KeyError:
raise Exception('one of the passed URI parameters could not be found in the RAML file')
for uri_parameter in resource_uri_params:
assert uri_parameter in list(uri_parameters.keys()), \
'one required URI parameter is missing in the passed URI parameters, ' \
'required parameters are {}'.format(resource_uri_params)
resource_url = re.sub(r'\{' + uri_parameter + r'\}', uri_parameters[uri_parameter], resource_url)
return resource_url
def _get_resource_url_data(self, resource, res_url_data=None):
# this method runs through the base raml file backwards recursively to construct the
# url of the resource and collect all uri parameters back to the root
if not res_url_data:
res_url_data = {'constructed_url': '', 'uri_parameters': {}, 'query_parameters': {}}
if resource[1].parentResource:
try:
resource[1].parentResource.displayName
except AttributeError:
raise Exception('The parent resource of {} is missing a display '
'name in the RAML file'.format(resource[0]))
parent_display_name = resource[1].parentResource.displayName
parent_resource = self.find_resource_recursively(parent_display_name, self._nsxraml)
res_url_data['constructed_url'] = resource[0] + res_url_data['constructed_url']
if resource[1].uriParameters:
res_url_data['uri_parameters'].update(resource[1].uriParameters)
return self._get_resource_url_data(parent_resource, res_url_data)
else:
res_url_data['constructed_url'] = resource[0] + res_url_data['constructed_url']
if resource[1].uriParameters:
res_url_data['uri_parameters'].update(resource[1].uriParameters)
return res_url_data
def check_resource_methods_by_displayname(self, display_name, method):
found_res_object = self.find_resource_recursively(display_name)
assert method in found_res_object[1].methods, 'The resource does not have a {} method in the ' \
'RAML File'.format(method.upper())
def get_method_mandatory_query_parameters(self, display_name, method):
found_res_object = self.find_resource_recursively(display_name)
if found_res_object[1].methods[method].queryParameters:
return [parameter for parameter in list(found_res_object[1].methods[method].queryParameters.keys()) if
found_res_object[1].methods[method].queryParameters[parameter].required]
def get_method_mandatory_add_headers(self, display_name, method):
found_res_object = self.find_resource_recursively(display_name)
if found_res_object[1].methods[method].headers:
return [header for header in list(found_res_object[1].methods[method].headers.keys()) if
found_res_object[1].methods[method].headers[header].required]
def add_query_parameter_url(self, url, display_name, method, query_parameters_dict):
found_res_object = self.find_resource_recursively(display_name)
mandatory_query_parameters = [parameter for parameter in
list(found_res_object[1].methods[method].queryParameters.keys()) if
found_res_object[1].methods[method].queryParameters[parameter].required]
missing_mandatory_qparameters = [parameter for parameter in mandatory_query_parameters if
parameter not in list(query_parameters_dict.keys())]
assert len(missing_mandatory_qparameters) == 0, 'Missing required query ' \
'parameters : {}'.format(missing_mandatory_qparameters)
url = '{}?'.format(url)
for query_parameter in list(query_parameters_dict.keys()):
url = '{}&{}={}'.format(url, query_parameter, query_parameters_dict[query_parameter])
return url
def get_xml_schema_by_displayname(self, display_name, method):
# NOTE: This method is deprecated and will be removed in future versions
method_options = {'read': 'get', 'create': 'post', 'delete': 'delete', 'update': 'put'}
matched_resource = self.find_resource_recursively(display_name)
assert matched_resource, 'The searched displayName could not be found in RAML File'
assert method_options[method] in matched_resource[1].methods, 'the resource does not support ' \
'the {} method'.format(method)
assert matched_resource[1].methods[method_options[method]].body, 'the resource does not have a ' \
'body schema in the RAML File'
matched_resource_body = matched_resource[1].methods[method_options[method]].body
base_et_element = type(et.Element('base'))
if isinstance(matched_resource_body['application/xml'].schema, base_et_element):
return matched_resource_body['application/xml'].schema
elif isinstance(matched_resource_body['application/xml'].schema, str):
assert matched_resource_body['application/xml'].schema in list(self._nsxraml.schemas.keys()), \
'the external schema {} could not be found in the schema list of the RAML File'.format(
matched_resource_body['application/xml'].schema)
assert isinstance(self._nsxraml.schemas[matched_resource_body['application/xml'].schema],
base_et_element), 'the external schema {} is likely ' \
'misformated'.format(matched_resource_body['application/xml'].schema)
return self._nsxraml.schemas[matched_resource_body['application/xml'].schema]
def get_xml_example_by_displayname(self, display_name, method, remove_content=None, remove_comments=None):
if not remove_content:
remove_content = True
if not remove_comments:
remove_comments = True
method_options = {'read': 'get', 'create': 'post', 'delete': 'delete', 'update': 'put'}
matched_resource = self.find_resource_recursively(display_name)
assert matched_resource, 'The searched displayName could not be found in RAML File'
assert method_options[method] in matched_resource[1].methods, 'the resource does not support ' \
'the {} method'.format(method)
assert matched_resource[1].methods[method_options[method]].body, 'the resource does not have a ' \
'body schema in the RAML File'
matched_resource_body = matched_resource[1].methods[method_options[method]].body
example = matched_resource_body['application/xml'].example
try:
parser = et.XMLParser(remove_comments=remove_comments)
example_et = et.fromstring(example, parser=parser)
except et.XMLSyntaxError as e:
raise Exception('The parsing of the body example XML failed, please check the format in the RAML file, '
'the exception is:\n{}'.format(e))
if remove_content:
for parent, child in self._iterparent(example_et):
child.text = None
child.tail = None
return example_et
@staticmethod
def _collect_resource_details(resource_tuple):
method_options = {'read': 'get', 'create': 'post', 'delete': 'delete', 'update': 'put'}
if resource_tuple[1].methods:
supported_methods = [key for key in resource_tuple[1].methods]
supported_operations = [operation[0] for operation in list(method_options.items())
if operation[1] in supported_methods]
method_items = [method_item for method_item in list(resource_tuple[1].methods.items())]
try:
query_parameters = [list(rmethod[1].queryParameters.keys()) for rmethod in method_items if
rmethod[1].queryParameters][0]
except IndexError:
query_parameters = None
try:
resource_add_headers = [list(rmethod[1].headers.keys()) for rmethod in method_items if
rmethod[1].headers][0]
except IndexError:
resource_add_headers = None
else:
supported_operations = None
query_parameters = None
resource_add_headers = None
if resource_tuple[1].uriParameters:
resource_uri_parameters = [uri_parameter for uri_parameter in resource_tuple[1].uriParameters]
else:
resource_uri_parameters = None
return supported_operations, resource_uri_parameters, query_parameters, resource_add_headers
@staticmethod
def _iterparent(tree):
for parent in tree.iter():
for child in parent:
yield parent, child
def list_all_resources(self, raml_resource_root=None, display_names_dict=None):
if display_names_dict is None:
display_names_dict = {}
if raml_resource_root:
scanned_tuples = list(raml_resource_root.resources.items())
else:
scanned_tuples = list(self._nsxraml.resources.items())
for resource_tuple in scanned_tuples:
if resource_tuple[1].resources:
resources_details = self._collect_resource_details(resource_tuple)
display_names_dict[resource_tuple[1].displayName] = (resource_tuple[1].description,
resources_details[0], resources_details[1],
resources_details[2], resources_details[3])
display_names_dict = self.list_all_resources(raml_resource_root=resource_tuple[1],
display_names_dict=display_names_dict)
else:
resources_details = self._collect_resource_details(resource_tuple)
display_names_dict[resource_tuple[1].displayName] = (resource_tuple[1].description,
resources_details[0], resources_details[1],
resources_details[2], resources_details[3])
return display_names_dict
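# A short sketch of the request-body workflow described in the method docstrings above:
# extract_resource_body_example() turns the XML body example from the RAML spec into a
# nested dict, which can be edited and handed back as request_body_dict. The display
# name, URI parameter and any dict keys are placeholders; the actual keys depend
# entirely on the XML example defined in the loaded RAML file.
def _example_body_workflow(client):
    body = client.extract_resource_body_example('logicalSwitches', 'create')
    # ... edit the nested dict in `body` here before sending it ...
    return client.create('logicalSwitches', uri_parameters={'scopeId': 'vdnscope-1'},
                         request_body_dict=body)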
|
PypiClean
|
/sematic-0.0.2.alpha.1654672757-py3-none-any.whl/urllib3/poolmanager.py
|
from __future__ import absolute_import
import collections
import functools
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .exceptions import (
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
URLSchemeUnknown,
)
from .packages import six
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry
from .util.url import parse_url
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
log = logging.getLogger(__name__)
SSL_KEYWORDS = (
"key_file",
"cert_file",
"cert_reqs",
"ca_certs",
"ssl_version",
"ca_cert_dir",
"ssl_context",
"key_password",
"server_hostname",
)
# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
_key_fields = (
"key_scheme", # str
"key_host", # str
"key_port", # int
"key_timeout", # int or float or Timeout
"key_retries", # int or Retry
"key_strict", # bool
"key_block", # bool
"key_source_address", # str
"key_key_file", # str
"key_key_password", # str
"key_cert_file", # str
"key_cert_reqs", # str
"key_ca_certs", # str
"key_ssl_version", # str
"key_ca_cert_dir", # str
"key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
"key_maxsize", # int
"key_headers", # dict
"key__proxy", # parsed proxy url
"key__proxy_headers", # dict
"key__proxy_config", # class
"key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
"key__socks_options", # dict
"key_assert_hostname", # bool or string
"key_assert_fingerprint", # str
"key_server_hostname", # str
)
#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)
_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key out of a request context dictionary.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
:param key_class:
The class to use when constructing the key. This should be a namedtuple
with the ``scheme`` and ``host`` keys at a minimum.
:type key_class: namedtuple
:param request_context:
A dictionary-like object that contains the context for a request.
:type request_context: dict
:return: A namedtuple that can be used as a connection pool key.
:rtype: PoolKey
"""
# Since we mutate the dictionary, make a copy first
context = request_context.copy()
context["scheme"] = context["scheme"].lower()
context["host"] = context["host"].lower()
# These are both dictionaries and need to be transformed into frozensets
for key in ("headers", "_proxy_headers", "_socks_options"):
if key in context and context[key] is not None:
context[key] = frozenset(context[key].items())
# The socket_options key may be a list and needs to be transformed into a
# tuple.
socket_opts = context.get("socket_options")
if socket_opts is not None:
context["socket_options"] = tuple(socket_opts)
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can't have fields starting with '_'.
for key in list(context.keys()):
context["key_" + key] = context.pop(key)
# Default to ``None`` for keys missing from the context
for field in key_class._fields:
if field not in context:
context[field] = None
return key_class(**context)
#: A dictionary that maps a scheme to a callable that creates a pool key.
#: This can be used to alter the way pool keys are constructed, if desired.
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
"http": functools.partial(_default_key_normalizer, PoolKey),
"https": functools.partial(_default_key_normalizer, PoolKey),
}
pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
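# A brief sketch of the customization point described in _default_key_normalizer's
# docstring: key_fn_by_scheme can be replaced per PoolManager instance so that an
# additional context field takes part in pool selection. The field name
# "key_my_option" is illustrative only; fields missing from a request context are
# simply filled with None by _default_key_normalizer.
def _example_custom_pool_key(manager):
    CustomPoolKey = collections.namedtuple(
        "CustomPoolKey", PoolKey._fields + ("key_my_option",)
    )
    manager.key_fn_by_scheme["https"] = functools.partial(
        _default_key_normalizer, CustomPoolKey
    )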
class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \\**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example::
>>> manager = PoolManager(num_pools=2)
>>> r = manager.request('GET', 'http://google.com/')
>>> r = manager.request('GET', 'http://google.com/mail')
>>> r = manager.request('GET', 'http://yahoo.com/')
>>> len(manager.pools)
2
"""
proxy = None
proxy_config = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(self, scheme, host, port, request_context=None):
"""
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
"""
pool_cls = self.pool_classes_by_scheme[scheme]
if request_context is None:
request_context = self.connection_pool_kw.copy()
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
for key in ("scheme", "host", "port"):
request_context.pop(key, None)
if scheme == "http":
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
return pool_cls(host, port, **request_context)
def clear(self):
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
"""
if not host:
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
request_context["scheme"] = scheme or "http"
if not port:
port = port_by_scheme.get(request_context["scheme"].lower(), 80)
request_context["port"] = port
request_context["host"] = host
return self.connection_from_context(request_context)
def connection_from_context(self, request_context):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(self, pool_key, request_context=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
scheme = request_context["scheme"]
host = request_context["host"]
port = request_context["port"]
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
return pool
def connection_from_url(self, url, pool_kwargs=None):
"""
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
"""
u = parse_url(url)
return self.connection_from_host(
u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
)
def _merge_pool_kwargs(self, override):
"""
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
"""
base_pool_kwargs = self.connection_pool_kw.copy()
if override:
for key, value in override.items():
if value is None:
try:
del base_pool_kwargs[key]
except KeyError:
pass
else:
base_pool_kwargs[key] = value
return base_pool_kwargs
def _proxy_requires_url_absolute_form(self, parsed_url):
"""
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
"""
if self.proxy is None:
return False
return not connection_requires_http_tunnel(
self.proxy, self.proxy_config, parsed_url.scheme
)
def _validate_proxy_scheme_url_selection(self, url_scheme):
"""
Validates that we're not attempting to do TLS in TLS connections on
Python2 or with unsupported SSL implementations.
"""
if self.proxy is None or url_scheme != "https":
return
if self.proxy.scheme != "https":
return
if six.PY2 and not self.proxy_config.use_forwarding_for_https:
raise ProxySchemeUnsupported(
"Contacting HTTPS destinations through HTTPS proxies "
"'via CONNECT tunnels' is not supported in Python 2"
)
def urlopen(self, method, url, redirect=True, **kw):
"""
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
self._validate_proxy_scheme_url_selection(u.scheme)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
kw["redirect"] = False
if "headers" not in kw:
kw["headers"] = self.headers.copy()
if self._proxy_requires_url_absolute_form(u):
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
# RFC 7231, Section 6.4.4
if response.status == 303:
method = "GET"
retries = kw.get("retries")
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
# Strip headers marked as unsafe to forward to the redirected location.
# Check remove_headers_on_redirect to avoid a potential network call within
# conn.is_same_host() which may use socket.gethostbyname() in the future.
if retries.remove_headers_on_redirect and not conn.is_same_host(
redirect_location
):
headers = list(six.iterkeys(kw["headers"]))
for header in headers:
if header.lower() in retries.remove_headers_on_redirect:
kw["headers"].pop(header, None)
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
kw["retries"] = retries
kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
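# A small sketch of the cross-host redirect handling implemented in urlopen() above:
# a Retry object bounds how many redirects are followed and decides whether exhausting
# them raises; headers marked unsafe are stripped when a redirect leaves the original
# host. The URL below is a placeholder.
def _example_redirect_handling():
    manager = PoolManager(num_pools=4)
    response = manager.request(
        "GET",
        "http://example.com/old-location",
        retries=Retry(redirect=3, raise_on_redirect=False),
    )
    return response.status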
class ProxyManager(PoolManager):
"""
Behaves just like :class:`PoolManager`, but sends all requests through
the defined proxy, using the CONNECT method for HTTPS URLs.
:param proxy_url:
The URL of the proxy to be used.
:param proxy_headers:
A dictionary containing headers that will be sent to the proxy. In case
of HTTP they are being sent with each request, while in the
HTTPS/CONNECT case they are sent only once. Could be used for proxy
authentication.
:param proxy_ssl_context:
The proxy SSL context is used to establish the TLS connection to the
proxy when using HTTPS proxies.
:param use_forwarding_for_https:
(Defaults to False) If set to True will forward requests to the HTTPS
proxy to be made on behalf of the client instead of creating a TLS
tunnel via the CONNECT method. **Enabling this flag means that request
and response headers and content will be visible from the HTTPS proxy**
whereas tunneling keeps request and response headers and content
private. IP address, target hostname, SNI, and port are always visible
to an HTTPS proxy even when this flag is disabled.
Example:
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
>>> r1 = proxy.request('GET', 'http://google.com/')
>>> r2 = proxy.request('GET', 'http://httpbin.org/')
>>> len(proxy.pools)
1
>>> r3 = proxy.request('GET', 'https://httpbin.org/')
>>> r4 = proxy.request('GET', 'https://twitter.com/')
>>> len(proxy.pools)
3
"""
def __init__(
self,
proxy_url,
num_pools=10,
headers=None,
proxy_headers=None,
proxy_ssl_context=None,
use_forwarding_for_https=False,
**connection_pool_kw
):
if isinstance(proxy_url, HTTPConnectionPool):
proxy_url = "%s://%s:%i" % (
proxy_url.scheme,
proxy_url.host,
proxy_url.port,
)
proxy = parse_url(proxy_url)
if proxy.scheme not in ("http", "https"):
raise ProxySchemeUnknown(proxy.scheme)
if not proxy.port:
port = port_by_scheme.get(proxy.scheme, 80)
proxy = proxy._replace(port=port)
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
self.proxy_ssl_context = proxy_ssl_context
self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
connection_pool_kw["_proxy"] = self.proxy
connection_pool_kw["_proxy_headers"] = self.proxy_headers
connection_pool_kw["_proxy_config"] = self.proxy_config
super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
if scheme == "https":
return super(ProxyManager, self).connection_from_host(
host, port, scheme, pool_kwargs=pool_kwargs
)
return super(ProxyManager, self).connection_from_host(
self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
)
def _set_proxy_headers(self, url, headers=None):
"""
Sets headers needed by proxies: specifically, the Accept and Host
headers. Only sets headers not provided by the user.
"""
headers_ = {"Accept": "*/*"}
netloc = parse_url(url).netloc
if netloc:
headers_["Host"] = netloc
if headers:
headers_.update(headers)
return headers_
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
# For connections using HTTP CONNECT, httplib sets the necessary
# headers on the CONNECT to the proxy. If we're not using CONNECT,
# we'll definitely need to set 'Host' at the very least.
headers = kw.get("headers", self.headers)
kw["headers"] = self._set_proxy_headers(url, headers)
return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url, **kw):
return ProxyManager(proxy_url=url, **kw)
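# A minimal sketch of the proxy helpers above: proxy_from_url() simply constructs a
# ProxyManager, and proxy credentials are usually supplied through proxy_headers.
# The proxy address and credentials are placeholders.
def _example_proxy_usage():
    from .util.request import make_headers  # builds a Proxy-Authorization header from user:password
    proxy = proxy_from_url(
        "http://proxy.example.com:3128/",
        proxy_headers=make_headers(proxy_basic_auth="user:passwd"),
    )
    return proxy.request("GET", "http://httpbin.org/ip")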
|
PypiClean
|
/cdktf-cdktf-provider-azurerm-10.0.1.tar.gz/cdktf-cdktf-provider-azurerm-10.0.1/src/cdktf_cdktf_provider_azurerm/dns_srv_record/__init__.py
|
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class DnsSrvRecord(
_cdktf_9a9027ec.TerraformResource,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecord",
):
'''Represents a {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record azurerm_dns_srv_record}.'''
def __init__(
self,
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
name: builtins.str,
record: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DnsSrvRecordRecord", typing.Dict[builtins.str, typing.Any]]]],
resource_group_name: builtins.str,
ttl: jsii.Number,
zone_name: builtins.str,
id: typing.Optional[builtins.str] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
timeouts: typing.Optional[typing.Union["DnsSrvRecordTimeouts", typing.Dict[builtins.str, typing.Any]]] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
'''Create a new {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record azurerm_dns_srv_record} Resource.
:param scope: The scope in which to define this construct.
:param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
:param name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#name DnsSrvRecord#name}.
:param record: record block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#record DnsSrvRecord#record}
:param resource_group_name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#resource_group_name DnsSrvRecord#resource_group_name}.
:param ttl: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#ttl DnsSrvRecord#ttl}.
:param zone_name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#zone_name DnsSrvRecord#zone_name}.
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#id DnsSrvRecord#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param tags: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#tags DnsSrvRecord#tags}.
:param timeouts: timeouts block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#timeouts DnsSrvRecord#timeouts}
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__fc2ee3708abad1f619c7cf711d5c10d42e7b1d7f4152b0d930d36d9d4a98fa0d)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
config = DnsSrvRecordConfig(
name=name,
record=record,
resource_group_name=resource_group_name,
ttl=ttl,
zone_name=zone_name,
id=id,
tags=tags,
timeouts=timeouts,
connection=connection,
count=count,
depends_on=depends_on,
for_each=for_each,
lifecycle=lifecycle,
provider=provider,
provisioners=provisioners,
)
jsii.create(self.__class__, self, [scope, id_, config])
@jsii.member(jsii_name="putRecord")
def put_record(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DnsSrvRecordRecord", typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3c52fed5666c34ea1a3f02d7b08721d96884e031b9f14664cdd39ca492aa3d08)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putRecord", [value]))
@jsii.member(jsii_name="putTimeouts")
def put_timeouts(
self,
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
read: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
'''
:param create: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#create DnsSrvRecord#create}.
:param delete: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#delete DnsSrvRecord#delete}.
:param read: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#read DnsSrvRecord#read}.
:param update: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#update DnsSrvRecord#update}.
'''
value = DnsSrvRecordTimeouts(
create=create, delete=delete, read=read, update=update
)
return typing.cast(None, jsii.invoke(self, "putTimeouts", [value]))
@jsii.member(jsii_name="resetId")
def reset_id(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetId", []))
@jsii.member(jsii_name="resetTags")
def reset_tags(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetTags", []))
@jsii.member(jsii_name="resetTimeouts")
def reset_timeouts(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetTimeouts", []))
@jsii.member(jsii_name="synthesizeAttributes")
def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
@jsii.python.classproperty
@jsii.member(jsii_name="tfResourceType")
def TF_RESOURCE_TYPE(cls) -> builtins.str:
return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
@builtins.property
@jsii.member(jsii_name="fqdn")
def fqdn(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "fqdn"))
@builtins.property
@jsii.member(jsii_name="record")
def record(self) -> "DnsSrvRecordRecordList":
return typing.cast("DnsSrvRecordRecordList", jsii.get(self, "record"))
@builtins.property
@jsii.member(jsii_name="timeouts")
def timeouts(self) -> "DnsSrvRecordTimeoutsOutputReference":
return typing.cast("DnsSrvRecordTimeoutsOutputReference", jsii.get(self, "timeouts"))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="nameInput")
def name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))
@builtins.property
@jsii.member(jsii_name="recordInput")
def record_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DnsSrvRecordRecord"]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DnsSrvRecordRecord"]]], jsii.get(self, "recordInput"))
@builtins.property
@jsii.member(jsii_name="resourceGroupNameInput")
def resource_group_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "resourceGroupNameInput"))
@builtins.property
@jsii.member(jsii_name="tagsInput")
def tags_input(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], jsii.get(self, "tagsInput"))
@builtins.property
@jsii.member(jsii_name="timeoutsInput")
def timeouts_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, "DnsSrvRecordTimeouts"]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, "DnsSrvRecordTimeouts"]], jsii.get(self, "timeoutsInput"))
@builtins.property
@jsii.member(jsii_name="ttlInput")
def ttl_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "ttlInput"))
@builtins.property
@jsii.member(jsii_name="zoneNameInput")
def zone_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "zoneNameInput"))
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__54959a5e093766d41fcae3ac553ca35516ecb7b771b7bd96e2e2f7d94543e9b6)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="name")
def name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "name"))
@name.setter
def name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5ad7d109554e0b96fa2d272cc53709e0a63b837433f57ae1717376d871e4e45f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "name", value)
@builtins.property
@jsii.member(jsii_name="resourceGroupName")
def resource_group_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "resourceGroupName"))
@resource_group_name.setter
def resource_group_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5a6bac5e1aeab1861cfeea6c12cb2600e958d1c75159742e512c822007021873)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "resourceGroupName", value)
@builtins.property
@jsii.member(jsii_name="tags")
def tags(self) -> typing.Mapping[builtins.str, builtins.str]:
return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "tags"))
@tags.setter
def tags(self, value: typing.Mapping[builtins.str, builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d0fdd4d6c41004f0a7d868c67702f2bf6346d9e3f2b82f17f53041522d39a2d5)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "tags", value)
@builtins.property
@jsii.member(jsii_name="ttl")
def ttl(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "ttl"))
@ttl.setter
def ttl(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7708b67908b36a3dc61b994dfe122c1a632a03433ff85e8b789662eae693325e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "ttl", value)
@builtins.property
@jsii.member(jsii_name="zoneName")
def zone_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "zoneName"))
@zone_name.setter
def zone_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__223e2d877622c6388840ae90ba7d88563b77ea4db5f44ce14f232ac3daad9bb0)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "zoneName", value)
@jsii.data_type(
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecordConfig",
jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
name_mapping={
"connection": "connection",
"count": "count",
"depends_on": "dependsOn",
"for_each": "forEach",
"lifecycle": "lifecycle",
"provider": "provider",
"provisioners": "provisioners",
"name": "name",
"record": "record",
"resource_group_name": "resourceGroupName",
"ttl": "ttl",
"zone_name": "zoneName",
"id": "id",
"tags": "tags",
"timeouts": "timeouts",
},
)
class DnsSrvRecordConfig(_cdktf_9a9027ec.TerraformMetaArguments):
def __init__(
self,
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
name: builtins.str,
record: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DnsSrvRecordRecord", typing.Dict[builtins.str, typing.Any]]]],
resource_group_name: builtins.str,
ttl: jsii.Number,
zone_name: builtins.str,
id: typing.Optional[builtins.str] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
timeouts: typing.Optional[typing.Union["DnsSrvRecordTimeouts", typing.Dict[builtins.str, typing.Any]]] = None,
) -> None:
'''
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
:param name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#name DnsSrvRecord#name}.
:param record: record block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#record DnsSrvRecord#record}
:param resource_group_name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#resource_group_name DnsSrvRecord#resource_group_name}.
:param ttl: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#ttl DnsSrvRecord#ttl}.
:param zone_name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#zone_name DnsSrvRecord#zone_name}.
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#id DnsSrvRecord#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param tags: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#tags DnsSrvRecord#tags}.
:param timeouts: timeouts block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#timeouts DnsSrvRecord#timeouts}
'''
if isinstance(lifecycle, dict):
lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
if isinstance(timeouts, dict):
timeouts = DnsSrvRecordTimeouts(**timeouts)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__49b8e9951efb4a478ca45c1f321717ea7404dd763e7e1df092365a726663d082)
check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
check_type(argname="argument count", value=count, expected_type=type_hints["count"])
check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
check_type(argname="argument name", value=name, expected_type=type_hints["name"])
check_type(argname="argument record", value=record, expected_type=type_hints["record"])
check_type(argname="argument resource_group_name", value=resource_group_name, expected_type=type_hints["resource_group_name"])
check_type(argname="argument ttl", value=ttl, expected_type=type_hints["ttl"])
check_type(argname="argument zone_name", value=zone_name, expected_type=type_hints["zone_name"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument tags", value=tags, expected_type=type_hints["tags"])
check_type(argname="argument timeouts", value=timeouts, expected_type=type_hints["timeouts"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"name": name,
"record": record,
"resource_group_name": resource_group_name,
"ttl": ttl,
"zone_name": zone_name,
}
if connection is not None:
self._values["connection"] = connection
if count is not None:
self._values["count"] = count
if depends_on is not None:
self._values["depends_on"] = depends_on
if for_each is not None:
self._values["for_each"] = for_each
if lifecycle is not None:
self._values["lifecycle"] = lifecycle
if provider is not None:
self._values["provider"] = provider
if provisioners is not None:
self._values["provisioners"] = provisioners
if id is not None:
self._values["id"] = id
if tags is not None:
self._values["tags"] = tags
if timeouts is not None:
self._values["timeouts"] = timeouts
@builtins.property
def connection(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
'''
:stability: experimental
'''
result = self._values.get("connection")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)
@builtins.property
def count(
self,
) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
'''
:stability: experimental
'''
result = self._values.get("count")
return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)
@builtins.property
def depends_on(
self,
) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
'''
:stability: experimental
'''
result = self._values.get("depends_on")
return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)
@builtins.property
def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
'''
:stability: experimental
'''
result = self._values.get("for_each")
return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)
@builtins.property
def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
'''
:stability: experimental
'''
result = self._values.get("lifecycle")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)
@builtins.property
def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
'''
:stability: experimental
'''
result = self._values.get("provider")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)
@builtins.property
def provisioners(
self,
) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
'''
:stability: experimental
'''
result = self._values.get("provisioners")
return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)
@builtins.property
def name(self) -> builtins.str:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#name DnsSrvRecord#name}.'''
result = self._values.get("name")
assert result is not None, "Required property 'name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def record(
self,
) -> typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DnsSrvRecordRecord"]]:
'''record block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#record DnsSrvRecord#record}
'''
result = self._values.get("record")
assert result is not None, "Required property 'record' is missing"
return typing.cast(typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DnsSrvRecordRecord"]], result)
@builtins.property
def resource_group_name(self) -> builtins.str:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#resource_group_name DnsSrvRecord#resource_group_name}.'''
result = self._values.get("resource_group_name")
assert result is not None, "Required property 'resource_group_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def ttl(self) -> jsii.Number:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#ttl DnsSrvRecord#ttl}.'''
result = self._values.get("ttl")
assert result is not None, "Required property 'ttl' is missing"
return typing.cast(jsii.Number, result)
@builtins.property
def zone_name(self) -> builtins.str:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#zone_name DnsSrvRecord#zone_name}.'''
result = self._values.get("zone_name")
assert result is not None, "Required property 'zone_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def id(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#id DnsSrvRecord#id}.
Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
'''
result = self._values.get("id")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def tags(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#tags DnsSrvRecord#tags}.'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], result)
@builtins.property
def timeouts(self) -> typing.Optional["DnsSrvRecordTimeouts"]:
'''timeouts block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#timeouts DnsSrvRecord#timeouts}
'''
result = self._values.get("timeouts")
return typing.cast(typing.Optional["DnsSrvRecordTimeouts"], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DnsSrvRecordConfig(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecordRecord",
jsii_struct_bases=[],
name_mapping={
"port": "port",
"priority": "priority",
"target": "target",
"weight": "weight",
},
)
class DnsSrvRecordRecord:
def __init__(
self,
*,
port: jsii.Number,
priority: jsii.Number,
target: builtins.str,
weight: jsii.Number,
) -> None:
'''
:param port: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#port DnsSrvRecord#port}.
:param priority: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#priority DnsSrvRecord#priority}.
:param target: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#target DnsSrvRecord#target}.
:param weight: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#weight DnsSrvRecord#weight}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6230293f860547f3c04403242d3423c4b5ad2f5dae282f731ccdf93ba7dd23fb)
check_type(argname="argument port", value=port, expected_type=type_hints["port"])
check_type(argname="argument priority", value=priority, expected_type=type_hints["priority"])
check_type(argname="argument target", value=target, expected_type=type_hints["target"])
check_type(argname="argument weight", value=weight, expected_type=type_hints["weight"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"port": port,
"priority": priority,
"target": target,
"weight": weight,
}
@builtins.property
def port(self) -> jsii.Number:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#port DnsSrvRecord#port}.'''
result = self._values.get("port")
assert result is not None, "Required property 'port' is missing"
return typing.cast(jsii.Number, result)
@builtins.property
def priority(self) -> jsii.Number:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#priority DnsSrvRecord#priority}.'''
result = self._values.get("priority")
assert result is not None, "Required property 'priority' is missing"
return typing.cast(jsii.Number, result)
@builtins.property
def target(self) -> builtins.str:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#target DnsSrvRecord#target}.'''
result = self._values.get("target")
assert result is not None, "Required property 'target' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def weight(self) -> jsii.Number:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#weight DnsSrvRecord#weight}.'''
result = self._values.get("weight")
assert result is not None, "Required property 'weight' is missing"
return typing.cast(jsii.Number, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DnsSrvRecordRecord(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class DnsSrvRecordRecordList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecordRecordList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__326d84049d9eac1dec7071be969a08ea344c002d5ab2df8da37c85f2f333516c)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(self, index: jsii.Number) -> "DnsSrvRecordRecordOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d1895959b1759839f3589f9b7f427defe6fba72ceab2a4b436a59b3427fd5967)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DnsSrvRecordRecordOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1d1795828cb118584c645310fc5e4001f2965898ed73bd21c87572f6cae4cae3)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2aa8ea63b4ee8a4cbb4d242a9ba345ca973657391a624061e816198e4b98fbb1)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a3ffdbc17da6bbf40b0b8205d2c028dadd50a4043e0f18f7eb5929c12ec89a91)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DnsSrvRecordRecord]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DnsSrvRecordRecord]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DnsSrvRecordRecord]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__83cfaef9dde31d23db4c98d16a160ca02ed1a286e2a8b77321f194b78f6ba9a1)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class DnsSrvRecordRecordOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecordRecordOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param complex_object_index: the index of this item in the list.
:param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__cf96d89dcf119ea99ac83aca0a5da512835f82f93fd4166f2aca9ae5324b043f)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@builtins.property
@jsii.member(jsii_name="portInput")
def port_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "portInput"))
@builtins.property
@jsii.member(jsii_name="priorityInput")
def priority_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "priorityInput"))
@builtins.property
@jsii.member(jsii_name="targetInput")
def target_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "targetInput"))
@builtins.property
@jsii.member(jsii_name="weightInput")
def weight_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "weightInput"))
@builtins.property
@jsii.member(jsii_name="port")
def port(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "port"))
@port.setter
def port(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__01561ac0cd2537cdb9deb354c6852683f7fe7568474c055c882bcaf0ceb9a976)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "port", value)
@builtins.property
@jsii.member(jsii_name="priority")
def priority(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "priority"))
@priority.setter
def priority(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a30cd1be16731e1687c86a1d33fff70ed630a02b814daa0928bd89492cce8d08)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "priority", value)
@builtins.property
@jsii.member(jsii_name="target")
def target(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "target"))
@target.setter
def target(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8629ebf92cb6490d2f1c275ee8301e40845de153a359f1ac5ba4c4f5b72c5caf)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "target", value)
@builtins.property
@jsii.member(jsii_name="weight")
def weight(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "weight"))
@weight.setter
def weight(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__bed0164f260b0483dfa5149157b364d45f115069bd56a5a301bc4b7e7b72a3de)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "weight", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordRecord]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordRecord]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordRecord]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__80ee8d55c19a6c6ba7673420a9876bc28561ba03c180ee6c3658ad97a42ea0d7)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecordTimeouts",
jsii_struct_bases=[],
name_mapping={
"create": "create",
"delete": "delete",
"read": "read",
"update": "update",
},
)
class DnsSrvRecordTimeouts:
def __init__(
self,
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
read: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
'''
:param create: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#create DnsSrvRecord#create}.
:param delete: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#delete DnsSrvRecord#delete}.
:param read: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#read DnsSrvRecord#read}.
:param update: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#update DnsSrvRecord#update}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__35c68ed5afd65a955fd7ac3bf9b0725026e6d9e994a94568f3f45ecb3e19a1d5)
check_type(argname="argument create", value=create, expected_type=type_hints["create"])
check_type(argname="argument delete", value=delete, expected_type=type_hints["delete"])
check_type(argname="argument read", value=read, expected_type=type_hints["read"])
check_type(argname="argument update", value=update, expected_type=type_hints["update"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if create is not None:
self._values["create"] = create
if delete is not None:
self._values["delete"] = delete
if read is not None:
self._values["read"] = read
if update is not None:
self._values["update"] = update
@builtins.property
def create(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#create DnsSrvRecord#create}.'''
result = self._values.get("create")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def delete(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#delete DnsSrvRecord#delete}.'''
result = self._values.get("delete")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def read(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#read DnsSrvRecord#read}.'''
result = self._values.get("read")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def update(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/azurerm/3.71.0/docs/resources/dns_srv_record#update DnsSrvRecord#update}.'''
result = self._values.get("update")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DnsSrvRecordTimeouts(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class DnsSrvRecordTimeoutsOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-azurerm.dnsSrvRecord.DnsSrvRecordTimeoutsOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__bc7080202fc80e3c2425dc51fc41e6fd789439a0f105afbddef775f3985101a8)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetCreate")
def reset_create(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCreate", []))
@jsii.member(jsii_name="resetDelete")
def reset_delete(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDelete", []))
@jsii.member(jsii_name="resetRead")
def reset_read(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRead", []))
@jsii.member(jsii_name="resetUpdate")
def reset_update(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetUpdate", []))
@builtins.property
@jsii.member(jsii_name="createInput")
def create_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "createInput"))
@builtins.property
@jsii.member(jsii_name="deleteInput")
def delete_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "deleteInput"))
@builtins.property
@jsii.member(jsii_name="readInput")
def read_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "readInput"))
@builtins.property
@jsii.member(jsii_name="updateInput")
def update_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "updateInput"))
@builtins.property
@jsii.member(jsii_name="create")
def create(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "create"))
@create.setter
def create(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3894080e7c4205d9b7a43bd95178b01860542bcb6e44ef5785ae65cc929ec3a9)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "create", value)
@builtins.property
@jsii.member(jsii_name="delete")
def delete(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "delete"))
@delete.setter
def delete(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0a8d35f0d27d296079b2a7e82e2b635a112e8c85bc5ba247b2596d863443b06e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "delete", value)
@builtins.property
@jsii.member(jsii_name="read")
def read(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "read"))
@read.setter
def read(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3449f90daea14df68c61ed96803ba03c205384ace223e6907f5ac3faebc29a9b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "read", value)
@builtins.property
@jsii.member(jsii_name="update")
def update(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "update"))
@update.setter
def update(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__599fac5c44ebf9ab96413e768e7224312124c5bad67654ff5feb6cf5025f50ae)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "update", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordTimeouts]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordTimeouts]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordTimeouts]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f8a968d92c7856fe15b8d0c3a63adc88b09255f6c0088d664dcf44c36443f0dd)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
__all__ = [
"DnsSrvRecord",
"DnsSrvRecordConfig",
"DnsSrvRecordRecord",
"DnsSrvRecordRecordList",
"DnsSrvRecordRecordOutputReference",
"DnsSrvRecordTimeouts",
"DnsSrvRecordTimeoutsOutputReference",
]
publication.publish()
def _typecheckingstub__fc2ee3708abad1f619c7cf711d5c10d42e7b1d7f4152b0d930d36d9d4a98fa0d(
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
name: builtins.str,
record: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DnsSrvRecordRecord, typing.Dict[builtins.str, typing.Any]]]],
resource_group_name: builtins.str,
ttl: jsii.Number,
zone_name: builtins.str,
id: typing.Optional[builtins.str] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
timeouts: typing.Optional[typing.Union[DnsSrvRecordTimeouts, typing.Dict[builtins.str, typing.Any]]] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3c52fed5666c34ea1a3f02d7b08721d96884e031b9f14664cdd39ca492aa3d08(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DnsSrvRecordRecord, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__54959a5e093766d41fcae3ac553ca35516ecb7b771b7bd96e2e2f7d94543e9b6(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5ad7d109554e0b96fa2d272cc53709e0a63b837433f57ae1717376d871e4e45f(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5a6bac5e1aeab1861cfeea6c12cb2600e958d1c75159742e512c822007021873(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d0fdd4d6c41004f0a7d868c67702f2bf6346d9e3f2b82f17f53041522d39a2d5(
value: typing.Mapping[builtins.str, builtins.str],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7708b67908b36a3dc61b994dfe122c1a632a03433ff85e8b789662eae693325e(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__223e2d877622c6388840ae90ba7d88563b77ea4db5f44ce14f232ac3daad9bb0(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__49b8e9951efb4a478ca45c1f321717ea7404dd763e7e1df092365a726663d082(
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
name: builtins.str,
record: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DnsSrvRecordRecord, typing.Dict[builtins.str, typing.Any]]]],
resource_group_name: builtins.str,
ttl: jsii.Number,
zone_name: builtins.str,
id: typing.Optional[builtins.str] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
timeouts: typing.Optional[typing.Union[DnsSrvRecordTimeouts, typing.Dict[builtins.str, typing.Any]]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6230293f860547f3c04403242d3423c4b5ad2f5dae282f731ccdf93ba7dd23fb(
*,
port: jsii.Number,
priority: jsii.Number,
target: builtins.str,
weight: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__326d84049d9eac1dec7071be969a08ea344c002d5ab2df8da37c85f2f333516c(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d1895959b1759839f3589f9b7f427defe6fba72ceab2a4b436a59b3427fd5967(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1d1795828cb118584c645310fc5e4001f2965898ed73bd21c87572f6cae4cae3(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2aa8ea63b4ee8a4cbb4d242a9ba345ca973657391a624061e816198e4b98fbb1(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a3ffdbc17da6bbf40b0b8205d2c028dadd50a4043e0f18f7eb5929c12ec89a91(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__83cfaef9dde31d23db4c98d16a160ca02ed1a286e2a8b77321f194b78f6ba9a1(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DnsSrvRecordRecord]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cf96d89dcf119ea99ac83aca0a5da512835f82f93fd4166f2aca9ae5324b043f(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__01561ac0cd2537cdb9deb354c6852683f7fe7568474c055c882bcaf0ceb9a976(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a30cd1be16731e1687c86a1d33fff70ed630a02b814daa0928bd89492cce8d08(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8629ebf92cb6490d2f1c275ee8301e40845de153a359f1ac5ba4c4f5b72c5caf(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__bed0164f260b0483dfa5149157b364d45f115069bd56a5a301bc4b7e7b72a3de(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__80ee8d55c19a6c6ba7673420a9876bc28561ba03c180ee6c3658ad97a42ea0d7(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordRecord]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__35c68ed5afd65a955fd7ac3bf9b0725026e6d9e994a94568f3f45ecb3e19a1d5(
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
read: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__bc7080202fc80e3c2425dc51fc41e6fd789439a0f105afbddef775f3985101a8(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3894080e7c4205d9b7a43bd95178b01860542bcb6e44ef5785ae65cc929ec3a9(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0a8d35f0d27d296079b2a7e82e2b635a112e8c85bc5ba247b2596d863443b06e(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3449f90daea14df68c61ed96803ba03c205384ace223e6907f5ac3faebc29a9b(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__599fac5c44ebf9ab96413e768e7224312124c5bad67654ff5feb6cf5025f50ae(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f8a968d92c7856fe15b8d0c3a63adc88b09255f6c0088d664dcf44c36443f0dd(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DnsSrvRecordTimeouts]],
) -> None:
"""Type checking stubs"""
pass
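# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated bindings): how these classes are
# typically wired into a CDKTF stack.  The import path, resource group, zone
# name and provider setup below are assumptions chosen for illustration only.
#
#     from constructs import Construct
#     from cdktf import App, TerraformStack
#     from cdktf_cdktf_provider_azurerm.dns_srv_record import (   # assumed module path
#         DnsSrvRecord,
#         DnsSrvRecordRecord,
#     )
#
#     class DnsStack(TerraformStack):
#         def __init__(self, scope: Construct, id_: str):
#             super().__init__(scope, id_)
#             # AzurermProvider registration omitted for brevity
#             DnsSrvRecord(
#                 self,
#                 "sip_srv",
#                 name="_sip._tcp",
#                 resource_group_name="example-rg",
#                 zone_name="example.com",
#                 ttl=300,
#                 record=[
#                     DnsSrvRecordRecord(port=5060, priority=10,
#                                        target="sip.example.com", weight=5),
#                 ],
#             )
#
#     app = App()
#     DnsStack(app, "dns")
#     app.synth()
# ---------------------------------------------------------------------------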
|
PypiClean
|
/at_serial_can-1.0.4-py3-none-any.whl/at_serial_can/at_serial_can.py
|
from __future__ import absolute_import
import time
import logging
from can import BusABC, Message
logger = logging.getLogger(__name__)
try:
import serial
except ImportError:
logger.warning("You won't be able to use the Serial AT-Style can backend without "
"the serial module installed!")
serial = None
class ATSerialBus(BusABC):
"""
AT Command Style Serial CAN BUS
"""
_SLEEP_AFTER_SERIAL_OPEN = 1 # in seconds
LINE_TERMINATOR = b'\r\n'
def __init__(self, channel, ttyBaudrate=9600, bitrate=None,
btr=None, sleep_after_open=_SLEEP_AFTER_SERIAL_OPEN,
rtscts=False, **kwargs):
"""
:param str channel:
port of underlying serial or usb device (e.g. /dev/ttyUSB0, COM8, ...)
Must not be empty.
:param int ttyBaudrate:
baudrate of underlying serial or usb device
:param int bitrate:
Bitrate in bit/s
        :param float sleep_after_open:
            Time to wait in seconds after opening serial connection
        :param bool rtscts:
            turn hardware handshake (RTS/CTS) on or off for the serial port
        """
if not channel: # if None or empty
raise TypeError("Must specify a serial port.")
if '@' in channel:
(channel, ttyBaudrate) = channel.split('@')
self.serialPortOrig = serial.serial_for_url(
channel, baudrate=ttyBaudrate, rtscts=rtscts)
self._buffer = bytearray()
time.sleep(sleep_after_open)
self.enter_config_mode()
self.set_loopback(False)
self.set_can_baudrate(bitrate)
self.enter_running_mode()
        super(ATSerialBus, self).__init__(channel, ttyBaudrate=ttyBaudrate,
                                          bitrate=bitrate, rtscts=rtscts, **kwargs)
def enter_config_mode(self):
self._config_mode = True
self.send_command('CG')
def enter_running_mode(self):
self._config_mode = False
self.send_command('AT')
def set_loopback(self, loopback: bool):
self._send_config_command('CAN_MODE', 1 if loopback else 0)
def set_can_baudrate(self, bitrate):
self._send_config_command("CAN_BAUD", bitrate)
def _send_config_command(self, param, value):
if not self._config_mode:
raise RuntimeError("ATSerialBus: Config device only allowed in Config Mode, try enter_config_mode() before calling")
self.send_command(f'{param}={value}')
def send_command(self, command):
self.write(f'AT+{command}'.encode())
result = self.read()
        if result != b'OK\r\n':
raise RuntimeError(f"ATSerialBus: Error in sending Command {command}")
def write(self, data):
self.serialPortOrig.write(data + self.LINE_TERMINATOR)
self.serialPortOrig.flush()
def read(self):
return self.serialPortOrig.read_until(self.LINE_TERMINATOR)
def _recv_internal(self, timeout):
if timeout != self.serialPortOrig.timeout:
self.serialPortOrig.timeout = timeout
canId = None
remote = False
extended = False
frame = []
# First read what is already in the receive buffer
while (self.serialPortOrig.in_waiting and
self.LINE_TERMINATOR not in self._buffer):
self._buffer += self.serialPortOrig.read(1)
# If we still don't have a complete message, do a blocking read
if self.LINE_TERMINATOR not in self._buffer:
self._buffer += self.serialPortOrig.read_until(self.LINE_TERMINATOR)
if self.LINE_TERMINATOR not in self._buffer:
# Timed out
return None, False
# byte 0:1 AT
# byte 2:5 ID
# byte 6 DLC
# byte 7:7+DLC Data
# byte -2:-1 \r\n
b = self._buffer[:]
extended = (b[5] & 0x04) > 0
remote = (b[5] & 0x02) > 0
canId = b[2] << 24 | b[3] << 16 | b[4] << 8 | b[5] & 0xFF
if extended:
canId = canId >> 3
else:
canId = canId >> 21
dlc = b[6]
frame = b[7:-2]
del self._buffer[:]
if canId is not None:
msg = Message(arbitration_id=canId,
is_extended_id=extended,
timestamp=time.time(), # Better than nothing...
is_remote_frame=remote,
dlc=dlc,
data=frame)
return msg, False
return None, False
def send(self, msg, timeout=None):
if timeout != self.serialPortOrig.write_timeout:
self.serialPortOrig.write_timeout = timeout
cid = 0
if msg.is_extended_id:
cid = msg.arbitration_id << 3 | 0x04
else:
cid = msg.arbitration_id << 21
if msg.is_remote_frame:
cid = cid | 0x02
id_dlc = bytes([cid>>24, cid>>16&0xff, cid>>8&0xff, cid&0xff, len(msg.data)])
w = b"AT" + id_dlc + msg.data
self.write(w)
def shutdown(self):
self.serialPortOrig.close()
def fileno(self):
if hasattr(self.serialPortOrig, 'fileno'):
return self.serialPortOrig.fileno()
# Return an invalid file descriptor on Windows
return -1
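# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the module): the serial port
# name, bitrate and CAN IDs below are assumptions, and hardware speaking the
# AT framing is required for this to do anything useful.
#
#     import can
#     from at_serial_can.at_serial_can import ATSerialBus
#
#     bus = ATSerialBus(channel="/dev/ttyUSB0", ttyBaudrate=115200, bitrate=500000)
#     bus.send(can.Message(arbitration_id=0x123,
#                          data=[0x01, 0x02],
#                          is_extended_id=False))
#     reply = bus.recv(timeout=1.0)   # recv() comes from can.BusABC on top of _recv_internal
#     bus.shutdown()
# ---------------------------------------------------------------------------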
|
PypiClean
|
/jesseapp123-3.0.0.tar.gz/jesseapp123-3.0.0/src/timecard/interface/systray.py
|
from PySide6.QtGui import QAction, QIcon
from PySide6.QtWidgets import QMenu, QSystemTrayIcon
from timecard.interface.app import App
from timecard.interface.timecontrols import TimeControls
from timecard.interface.timedisplay import TimeDisplay
class SysTray:
systray = QSystemTrayIcon()
menu = QMenu()
act_status = QAction()
act_time = QAction()
act_toggle = QAction()
act_quit = QAction()
toggle_callback = []
quit_callback = []
@classmethod
def build(cls):
"""Construct the system tray"""
cls.systray.setIcon(App.icon)
cls.act_status.setText("00:00:00")
cls.menu.addAction(cls.act_status)
cls.menu.addAction(cls.act_time)
cls.set_mode_stopped()
cls.menu.addSeparator()
cls.act_toggle.setIcon(QIcon.fromTheme("view-restore"))
cls.act_toggle.setText("Show/Hide Window")
cls.act_toggle.triggered.connect(cls.toggle_window)
cls.menu.addAction(cls.act_toggle)
cls.menu.addSeparator()
cls.act_quit.setIcon(QIcon.fromTheme("application-exit"))
cls.act_quit.setText("Quit Timecard")
cls.act_quit.triggered.connect(cls.quit_app)
cls.menu.addAction(cls.act_quit)
cls.systray.setContextMenu(cls.menu)
cls.systray.show()
TimeDisplay.connect(on_tick=cls.update_time)
TimeControls.connect(
on_start=cls.set_mode_running,
on_resume=cls.set_mode_running,
on_pause=cls.set_mode_paused,
on_stop=cls.set_mode_save,
on_save=cls.set_mode_stopped,
on_reset=cls.set_mode_stopped,
)
App.connect(on_hide=cls.popup)
return cls.systray
@classmethod
def popup(cls, message, callback=None):
cls.systray.showMessage("Timecard", message, App.icon)
@classmethod
def update_time(cls, hours=0, minutes=0, seconds=0):
cls.act_status.setText(f"{hours:02}:{minutes:02}:{seconds:02}")
@classmethod
def _disconnect_actions(cls):
"""Disconnect signals for any actions that are modified by timer."""
try:
cls.act_time.triggered.disconnect()
except RuntimeError:
pass
@classmethod
def set_mode_stopped(cls):
cls._disconnect_actions()
cls.act_quit.setEnabled(True)
cls.act_time.setEnabled(True)
cls.act_time.setText("Start")
cls.act_time.setIcon(QIcon.fromTheme("media-playback-start"))
cls.act_time.triggered.connect(TimeControls.start)
@classmethod
def set_mode_running(cls):
cls._disconnect_actions()
cls.act_quit.setEnabled(False)
cls.act_time.setEnabled(True)
cls.act_time.setText("Pause")
cls.act_time.setIcon(QIcon.fromTheme("media-playback-pause"))
cls.act_time.triggered.connect(TimeControls.pause)
@classmethod
def set_mode_paused(cls):
cls._disconnect_actions()
cls.act_quit.setEnabled(False)
cls.act_time.setEnabled(True)
cls.act_time.setText("Resume")
cls.act_time.setIcon(QIcon.fromTheme("media-playback-start"))
cls.act_time.triggered.connect(TimeControls.resume)
@classmethod
def set_mode_save(cls):
cls._disconnect_actions()
cls.act_quit.setEnabled(False)
cls.act_time.setEnabled(False)
cls.act_time.setText("Awaiting Save")
@classmethod
def toggle_window(cls):
App.toggle_window()
@classmethod
def quit_app(cls):
App.quit()
@classmethod
def connect(cls, on_toggle=None, on_quit=None):
if on_toggle and on_toggle not in cls.toggle_callback:
cls.toggle_callback.append(on_toggle)
        if on_quit and on_quit not in cls.quit_callback:
            cls.quit_callback.append(on_quit)
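# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the module): SysTray.build() is
# meant to be called once, after the Qt application and the other interface
# singletons (App, TimeDisplay, TimeControls) exist.  The App.build()/App.run()
# calls below are assumed entry points of the App facade and may differ in the
# real code base.
#
#     from timecard.interface.app import App
#     from timecard.interface.systray import SysTray
#
#     App.build()                                   # assumed: create QApplication + window
#     tray_icon = SysTray.build()                   # returns the QSystemTrayIcon instance
#     SysTray.connect(on_quit=lambda: print("bye"))
#     App.run()                                     # assumed: start the Qt event loop
# ---------------------------------------------------------------------------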
|
PypiClean
|
/iCypress-0.8.tar.gz/iCypress-0.8/graphgym/models/layer.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch_geometric as pyg
import graphgym.register as register
from graphgym.config import cfg
from graphgym.contrib.layer.generalconv import (GeneralConvLayer,
GeneralEdgeConvLayer)
from graphgym.models.act import act_dict
# General classes
class GeneralLayer(nn.Module):
'''General wrapper for layers'''
def __init__(self,
name,
dim_in,
dim_out,
has_act=True,
has_bn=True,
has_l2norm=False,
**kwargs):
super(GeneralLayer, self).__init__()
self.has_l2norm = has_l2norm
has_bn = has_bn and cfg.gnn.batchnorm
self.layer = layer_dict[name](dim_in,
dim_out,
bias=not has_bn,
**kwargs)
layer_wrapper = []
if has_bn:
layer_wrapper.append(
nn.BatchNorm1d(dim_out, eps=cfg.bn.eps, momentum=cfg.bn.mom))
if cfg.gnn.dropout > 0:
layer_wrapper.append(
nn.Dropout(p=cfg.gnn.dropout, inplace=cfg.mem.inplace))
if has_act:
layer_wrapper.append(act_dict[cfg.gnn.act])
self.post_layer = nn.Sequential(*layer_wrapper)
def forward(self, batch):
batch = self.layer(batch)
if isinstance(batch, torch.Tensor):
batch = self.post_layer(batch)
if self.has_l2norm:
batch = F.normalize(batch, p=2, dim=1)
else:
batch.node_feature = self.post_layer(batch.node_feature)
if self.has_l2norm:
batch.node_feature = F.normalize(batch.node_feature,
p=2,
dim=1)
return batch
class GeneralMultiLayer(nn.Module):
'''General wrapper for stack of layers'''
def __init__(self,
name,
num_layers,
dim_in,
dim_out,
dim_inner=None,
final_act=True,
**kwargs):
super(GeneralMultiLayer, self).__init__()
dim_inner = dim_in if dim_inner is None else dim_inner
for i in range(num_layers):
d_in = dim_in if i == 0 else dim_inner
d_out = dim_out if i == num_layers - 1 else dim_inner
has_act = final_act if i == num_layers - 1 else True
layer = GeneralLayer(name, d_in, d_out, has_act, **kwargs)
self.add_module('Layer_{}'.format(i), layer)
def forward(self, batch):
for layer in self.children():
batch = layer(batch)
return batch
# Core basic layers
# Input: batch; Output: batch
class Linear(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(Linear, self).__init__()
self.model = nn.Linear(dim_in, dim_out, bias=bias)
def forward(self, batch):
if isinstance(batch, torch.Tensor):
batch = self.model(batch)
else:
batch.node_feature = self.model(batch.node_feature)
return batch
class BatchNorm1dNode(nn.Module):
    '''BatchNorm1d wrapper applied to node features'''
def __init__(self, dim_in):
super(BatchNorm1dNode, self).__init__()
self.bn = nn.BatchNorm1d(dim_in, eps=cfg.bn.eps, momentum=cfg.bn.mom)
def forward(self, batch):
batch.node_feature = self.bn(batch.node_feature)
return batch
class BatchNorm1dEdge(nn.Module):
    '''BatchNorm1d wrapper applied to edge features'''
def __init__(self, dim_in):
super(BatchNorm1dEdge, self).__init__()
self.bn = nn.BatchNorm1d(dim_in, eps=cfg.bn.eps, momentum=cfg.bn.mom)
def forward(self, batch):
batch.edge_feature = self.bn(batch.edge_feature)
return batch
class MLP(nn.Module):
def __init__(self,
dim_in,
dim_out,
bias=True,
dim_inner=None,
num_layers=2,
**kwargs):
'''
Note: MLP works for 0 layers
'''
super(MLP, self).__init__()
dim_inner = dim_in if dim_inner is None else dim_inner
layers = []
if num_layers > 1:
layers.append(
GeneralMultiLayer('linear',
num_layers - 1,
dim_in,
dim_inner,
dim_inner,
final_act=True))
layers.append(Linear(dim_inner, dim_out, bias))
else:
layers.append(Linear(dim_in, dim_out, bias))
self.model = nn.Sequential(*layers)
def forward(self, batch):
if isinstance(batch, torch.Tensor):
batch = self.model(batch)
else:
batch.node_feature = self.model(batch.node_feature)
return batch
class GCNConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(GCNConv, self).__init__()
self.model = pyg.nn.GCNConv(dim_in, dim_out, bias=bias)
def forward(self, batch):
batch.node_feature = self.model(batch.node_feature, batch.edge_index)
return batch
class SAGEConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(SAGEConv, self).__init__()
self.model = pyg.nn.SAGEConv(dim_in, dim_out, bias=bias)
def forward(self, batch):
batch.node_feature = self.model(batch.node_feature, batch.edge_index)
return batch
class GATConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(GATConv, self).__init__()
self.model = pyg.nn.GATConv(dim_in, dim_out, bias=bias)
def forward(self, batch):
batch.node_feature = self.model(batch.node_feature, batch.edge_index)
return batch
class GINConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(GINConv, self).__init__()
gin_nn = nn.Sequential(nn.Linear(dim_in, dim_out), nn.ReLU(),
nn.Linear(dim_out, dim_out))
self.model = pyg.nn.GINConv(gin_nn)
def forward(self, batch):
batch.node_feature = self.model(batch.node_feature, batch.edge_index)
return batch
class SplineConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(SplineConv, self).__init__()
self.model = pyg.nn.SplineConv(dim_in,
dim_out,
dim=1,
kernel_size=2,
bias=bias)
def forward(self, batch):
batch.node_feature = self.model(batch.node_feature, batch.edge_index,
batch.edge_feature)
return batch
class GeneralConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(GeneralConv, self).__init__()
self.model = GeneralConvLayer(dim_in, dim_out, bias=bias)
def forward(self, batch):
        batch.node_feature = self.model(batch.node_feature, batch.edge_index,
                                        edge_weight=batch.edge_weights)
return batch
class GeneralEdgeConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(GeneralEdgeConv, self).__init__()
self.model = GeneralEdgeConvLayer(dim_in, dim_out, bias=bias)
def forward(self, batch):
batch.node_feature = self.model(batch.node_feature,
batch.edge_index,
edge_feature=batch.edge_feature)
return batch
class GeneralSampleEdgeConv(nn.Module):
def __init__(self, dim_in, dim_out, bias=False, **kwargs):
super(GeneralSampleEdgeConv, self).__init__()
self.model = GeneralEdgeConvLayer(dim_in, dim_out, bias=bias)
def forward(self, batch):
edge_mask = torch.rand(batch.edge_index.shape[1]) < cfg.gnn.keep_edge
edge_index = batch.edge_index[:, edge_mask]
edge_feature = batch.edge_feature[edge_mask, :]
batch.node_feature = self.model(batch.node_feature,
edge_index,
edge_feature=edge_feature)
return batch
layer_dict = {
'linear': Linear,
'mlp': MLP,
'gcnconv': GCNConv,
'sageconv': SAGEConv,
'gatconv': GATConv,
'splineconv': SplineConv,
'ginconv': GINConv,
'generalconv': GeneralConv,
'generaledgeconv': GeneralEdgeConv,
'generalsampleedgeconv': GeneralSampleEdgeConv,
}
# register additional convs
layer_dict = {**register.layer_dict, **layer_dict}
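if __name__ == "__main__":
    # Minimal usage sketch (not part of the library): the plain 'linear' layer
    # wraps nn.Linear and works on raw tensors without any global config.  The
    # graph conv layers above additionally expect graphgym's global `cfg` to be
    # initialised and a batch object carrying node_feature/edge_index, so they
    # are omitted here.  The dimensions below are arbitrary illustration values.
    x = torch.randn(8, 16)                  # 8 nodes with 16-dim features
    linear = layer_dict['linear'](16, 4)    # equivalent to Linear(dim_in=16, dim_out=4)
    print(linear(x).shape)                  # torch.Size([8, 4])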
|
PypiClean
|
/alipay-sdk-python-pycryptodome-3.3.202.tar.gz/alipay-sdk-python-pycryptodome-3.3.202/alipay/aop/api/request/AlipaySecurityProdDesQueryRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipaySecurityProdDesQueryModel import AlipaySecurityProdDesQueryModel
class AlipaySecurityProdDesQueryRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipaySecurityProdDesQueryModel):
self._biz_content = value
else:
self._biz_content = AlipaySecurityProdDesQueryModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.security.prod.des.query'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
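if __name__ == "__main__":
    # Minimal usage sketch (not part of the SDK): build the request object and
    # inspect the wire parameters.  The notify URL and the extra text parameter
    # below are placeholder values chosen for illustration only.
    request = AlipaySecurityProdDesQueryRequest()
    request.notify_url = "https://example.com/alipay/notify"
    request.add_other_text_param("app_auth_token", "sample-token")
    params = request.get_params()
    print(params[P_METHOD], params[P_VERSION])   # 'alipay.security.prod.des.query', '1.0'
    print(params)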
|
PypiClean
|
/kicost-1.1.18.tar.gz/kicost-1.1.18/docs/index.rst
|
.. kicost documentation master file, created by
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to kicost's documentation!
======================================
Contents:
.. toctree::
:maxdepth: 2
readme
installation
usage
configuration
contributing
authors
history
modules
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
|
PypiClean
|
/pserialization-0.0.6.tar.gz/pserialization-0.0.6/src/pserialize/deserialize.py
|
import dataclasses
from enum import Enum
# https://docs.python.org/3/library/types.html#:~:text=class%20types.-,GenericAlias,-(t_origin%2C%20t_args)%C2%B6
# GenericAlias is the type used for parameterized lists and dicts
# ie: list[int], dict[str,object], etc
from types import GenericAlias
from typing import Any, Callable, Type, Union, get_type_hints
from .serialization_utils import (get_attributes, is_enum, is_optional,
is_primitive, is_union)
def type_args_string(type: type):
if is_union(type):
name = "Union"
elif hasattr(type, "__name__"):
name = type.__name__
else:
name = str(type)
if not hasattr(type, "__args__") or len(type.__args__) == 0:
return name
return f"{name}[{', '.join([type_args_string(arg) for arg in type.__args__])}]"
@dataclasses.dataclass
class BaseDeserializationException(Exception):
error: Exception
value: Any
def __repr__(self):
s = ""
if isinstance(self.error, BaseDeserializationException):
s += " -> " + str(self.error)
else:
s += f"'{self.value}' |{str(self.error)}|"
return s
def __str__(self):
return self.__repr__()
@dataclasses.dataclass
class DeserializeDictKeyException(BaseDeserializationException):
keyType: type
valueType: type
def __repr__(self):
return f"dict[{type_args_string(self.keyType)},{type_args_string(self.valueType)}].key" + super().__repr__()
@dataclasses.dataclass
class DeserializeDictValueException(BaseDeserializationException):
keyType: type
valueType: type
key: Any
def __repr__(self):
return f"dict[{type_args_string(self.keyType)},{type_args_string(self.valueType)}].value" + super().__repr__()
@dataclasses.dataclass
class DeserializeListException(BaseDeserializationException):
itemType: type
index: int
def __repr__(self):
return f"{type_args_string(self.itemType)}[{self.index}]" + super().__repr__()
@dataclasses.dataclass
class DeserializeClassException(BaseDeserializationException):
field_type: type
field_name: str
def __repr__(self):
# Get the type of the field
s = ""
if self.field_name:
s += self.field_name + ":"
if isinstance(self.error, (DeserializeListException, DeserializeDictKeyException, DeserializeDictValueException)):
# list errors report a more complete type
s += self.error.__repr__()
else:
s += type_args_string(self.field_type) + super().__repr__()
return s
type_of = type
def __deserialize_simple_object(dict: dict, classType: type, middleware: dict[type, Callable[[object], type]] = {}, strict: bool = False):
"""
Constructs an instance of the given type from the supplied dictionary
Args:
dict (dict): Dictionary with object structure
classType (type): The type to deserialize into
strict (bool, optional): Determines if extra fields in the dictionary that are not found in the type should be inserted into the object. Defaults to False.
Returns:
classType: An instance of classType
"""
attributes = get_attributes(classType)
type_hints = get_type_hints(classType.__init__)
if dataclasses.is_dataclass(classType):
type_hints.pop("return", None)
# Create an empty instance of classType
# Some types (ex: datetime.datetime) prohibit this call
# and will need a custom deserializer
cls = object.__new__(classType)
# Serialize any field that we can find a type hint for,
# otherwise set it to the raw primitive value
for name, value in dict.items():
# Check if an attribute with the given name exists, but overwrite
# the type if it exists in the constructor type_hints
type = attributes.pop(name) if name in attributes.keys() else None
type = type_hints.pop(name) if name in type_hints.keys() else type
# if strict deserialization is set, then we want to skip deserializing
# fields that we can't find a type for
if strict and type is None:
continue
try:
cls.__dict__[name] = __deserialize_inner(value, type, middleware, strict) if type else value
except Exception as e:
raise DeserializeClassException(e, value, type, name)
if len(attributes.keys()) + len(type_hints.keys()) > 0:
# There are attributes or init_parameters that weren't found in the dictionary
pass
return cls
def __deserialize_list(lst: list, listType: list[type], middleware: dict[type, Callable[[object], type]] = {}, strict: bool = False):
"""
Deserializes a list[classType]
Args:
lst (list): The list to deserialize
classType (type): The type of the list elements
strict (bool, optional): Determines if extra fields will/won't be added to the deserialized value. Defaults to False.
Returns:
list[classType]: Deserialized list of elements
"""
typeArgs = listType.__args__
typeArg = typeArgs[0] # List parameterization only takes 1 argument
deserializedList = []
for index in range(len(lst)):
value = lst[index]
try:
deserializedList.append(__deserialize_inner(value, typeArg, middleware, strict))
except Exception as e:
raise DeserializeListException(e, value, listType, index)
return deserializedList
def __deserialize_dict(dict: dict, keyType: type, valueType: type, middleware: dict[type, Callable[[object], type]] = {}, strict: bool = False):
"""
Deserializes the keys and values of the input dictionary
Args:
dict (dict): The dictionary to deserialize
keyType (type): The type of the keys of the dictionary
valueType (type): The type of the values of the dictionary
strict (bool, optional): Determines if extra fields will/won't be added to the deserialized value. Defaults to False.
Returns:
dict[keyType, valueType]: Deserialized dictionary
"""
deserializedDict = {}
for key, value in dict.items():
try:
deserializedKey = __deserialize_inner(key, keyType, middleware, strict)
except Exception as e:
raise DeserializeDictKeyException(e, key, keyType, valueType)
try:
deserializedValue = __deserialize_inner(value, valueType, middleware, strict)
except Exception as e:
raise DeserializeDictValueException(e, value, keyType, valueType, key)
deserializedDict[deserializedKey] = deserializedValue
return deserializedDict
def __deserialize_union(value: Any, allowed_types: list[type], middleware: dict[type, Callable[[object], type]] = {}, strict: bool = False):
"""
Deserializes a value into the first allowed type that succeeds
Args:
value (Any): The value to deserialize
allowed_types (list[type]): The list of possible types the value represents
strict (bool, optional): Determines if extra fields will/won't be added to the deserialized value. Defaults to False.
Returns:
type: An instance of one of the allowed types
"""
for type in allowed_types:
try:
# Check each type from left to right and return
# the first deserialized value that works
return __deserialize_inner(value, type, middleware, strict)
except Exception:
pass
raise BaseDeserializationException(Exception("Could not deserialize union"), value)
def deserialize(value: Any, classType: type, middleware: dict[type, Callable[[object], type]] = {}, strict: bool = False):
"""
Deserializes an arbitrary value into the supplied class type
Default support for:
Primitives (int, float, str, None)
Enums
Lists
Dicts
Basic objects
Any custom deserialization logic can be added using middleware
Args:
value (Any): The value to deserialize
classType (type): The type the value represents
strict (bool, optional): Determines if extra fields will/won't be added to the deserialized value. Defaults to False.
Returns:
classType: An instance of classType
"""
try:
return __deserialize_inner(value, classType, middleware, strict)
except Exception as e:
raise DeserializeClassException(e, value, classType, None)
def __deserialize_inner(value: Any, classType: type, middleware: dict[type, Callable[[object], type]] = {}, strict: bool = False):
def deserialize_primitive(classType: type, value: Any):
try:
return classType(value)
except Exception as e:
raise BaseDeserializationException(e, value)
#raise f" = '{value}' |{str(e)}|"
if (deserializer := middleware.get(classType, None)) is not None:
return deserializer(value, middleware)
if value is None:
# Allow None values
return None
if is_primitive(classType):
return deserialize_primitive(classType, value)
if is_enum(classType):
return deserialize_primitive(classType, value)
if is_optional(classType):
# If the parameter is optional, unpack the optional type and deserialize that type
realType = classType.__args__[0]
return __deserialize_inner(value, realType, middleware, strict)
if isinstance(classType, GenericAlias):
typeArgs = classType.__args__
originType = classType.__origin__
if originType is list: # list of some type
typeArg = typeArgs[0] # List parameterization only takes 1 argument
return __deserialize_list(value, classType, middleware, strict)
else:
keyType = typeArgs[0]
valueType = typeArgs[1]
return __deserialize_dict(value, keyType, valueType, middleware, strict)
if is_union(classType):
allowed_types = classType.__args__
return __deserialize_union(value, allowed_types, middleware, strict)
return __deserialize_simple_object(value, classType, middleware, strict)
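# ---------------------------------------------------------------------------
# Minimal usage sketch (hypothetical types, not part of the library API):
# a nested structure flowing through the public deserialize() entry point.
# Relies on the module's existing dataclasses import.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    @dataclasses.dataclass
    class _Address:
        city: str
    @dataclasses.dataclass
    class _Person:
        name: str
        addresses: list[_Address]
    # dict -> _Person, including the nested list[_Address]
    _person = deserialize({"name": "Ada", "addresses": [{"city": "London"}]}, _Person)
    assert _person.name == "Ada"
    assert _person.addresses[0].city == "London"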
|
PypiClean
|
/django-cache-extension-1.1.10.tar.gz/django-cache-extension-1.1.10/cache_extension/backends/redis.py
|
from __future__ import absolute_import
from cache_extension.cache import ExtensionCache
from django_redis.cache import RedisCache, omit_exception
from django_redis.client.default import _main_exceptions, DefaultClient
from django_redis.exceptions import ConnectionInterrupted
from redis.client import StrictPipeline
from redis.exceptions import ResponseError
KEY_CMDS = [
'exists', 'expire', 'expireat', 'rename', 'ttl', 'incrby', 'incrbyfloat',
]
# not support hstrlen
HASH_CMDS = [
'hdel', 'hexists', 'hget', 'hgetall', 'hincrby', 'hincrbyfloat', 'hkeys',
'hlen', 'hmget', 'hmset', 'hset', 'hsetnx', 'hvals', 'hscan',
]
# first arg must be key, so this cmds not support: brpoplpush, rpoplpush
LIST_CMDS = [
'blpop', 'brpop', 'lindex', 'linsert', 'llen',
'lpop', 'lpush', 'lpushx', 'lrange', 'lrem', 'lset',
'ltrim', 'rpop', 'rpush', 'rpushx',
]
SET_CMDS = [
'sadd', 'scard', 'sismember', 'smembers', 'spop', 'srem',
'srandmember', 'sinter', 'sinterstore', 'sdiff', 'sdiffstore',
'sunion', 'sunionstore',
]
ZSET_CMDS = [
'zadd', 'zcard', 'zcount', 'zincrby', 'zrange', 'zrem',
'zrevrange', 'zscore', 'zremrangebyrank'
]
SUPPORT_CMDS = KEY_CMDS + HASH_CMDS + LIST_CMDS + SET_CMDS + ZSET_CMDS
class ExtensionPipeline(StrictPipeline):
def __init__(self, connection_pool, response_callbacks, transaction,
shard_hint, make_key_func):
super(ExtensionPipeline, self).__init__(
connection_pool,
response_callbacks,
transaction,
shard_hint
)
self.make_key = make_key_func
def pipeline_execute_command(self, *args, **options):
args = list(args)
key = args[1]
args[1] = str(self.make_key(key))
# print("pipeline exec", args)
# print("pipeline exec opt", options)
return super(
ExtensionPipeline, self
).pipeline_execute_command(*args, **options)
class ExtensionClient(DefaultClient):
def pipeline(self, transaction=True, shard_hint=None):
client = self.get_client()
return ExtensionPipeline(
client.connection_pool,
client.response_callbacks,
transaction,
shard_hint,
self.make_key)
def __getattr__(self, cmd):
client = self.get_client()
return getattr(client, cmd)
class ExtensionRedisBackend(ExtensionCache, RedisCache):
def __init__(self, server, params):
options = params.get("OPTIONS", {})
default_client = "django_redis.client.DefaultClient"
client_class = options.get('CLIENT_CLASS', default_client)
options['CLIENT_CLASS'] = client_class
        params['OPTIONS'] = options
super(ExtensionRedisBackend, self).__init__(server, params)
@omit_exception
def incr(self, key, delta=1, version=None, client=None):
if not client:
client = self.client.get_client(write=True)
key = self.make_key(key, version=version)
try:
try:
value = client.incr(key, delta)
except ResponseError:
                # Redis raises ResponseError if the cached value or the total
                # value is greater than a 64 bit signed integer, or if the int
                # was encoded (e.g. pickled) so redis sees the data as a string.
                # In those situations fall back to get/set, trying to keep the
                # TTL of the key.
timeout = client.ttl(key)
value = self.get(key, version=version, client=client) + delta
self.set(key, value, version=version, timeout=timeout,
client=client)
except _main_exceptions as e:
raise ConnectionInterrupted(connection=client, parent=e)
return value
def pipeline(self, transaction=True, shard_hint=None):
return self.client.pipeline(transaction=transaction,
shard_hint=shard_hint)
def __getattr__(self, cmd):
if cmd not in SUPPORT_CMDS:
            raise KeyError("unsupported redis command: %s" % cmd)
func = getattr(self.client, cmd)
def redis_cmd(key, *args, **kwargs):
key = self.make_key(key)
if cmd == 'rename':
dest = args[0]
dest = self.make_key(dest)
args = list(args)
args[0] = dest
# print("exec", cmd, key, args, kwargs)
return func(key, *args, **kwargs)
return redis_cmd
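# ---------------------------------------------------------------------------
# Minimal usage sketch (hypothetical settings, not part of this module): the
# backend is wired in through Django's CACHES setting, after which the proxied
# redis commands become available directly on the cache object.
#
#   CACHES = {
#       "default": {
#           "BACKEND": "cache_extension.backends.redis.ExtensionRedisBackend",
#           "LOCATION": "redis://127.0.0.1:6379/0",
#       }
#   }
#
#   from django.core.cache import cache
#   cache.hset("profile:1", "name", "Ada")   # forwarded through __getattr__
#   cache.hget("profile:1", "name")
#   pipe = cache.pipeline()                  # keys get prefixed via make_key
#   pipe.lpush("queue", "job-1")
#   pipe.execute()
# ---------------------------------------------------------------------------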
|
PypiClean
|
/ingestor_module-1.17.4.tar.gz/ingestor_module-1.17.4/ingestor/common/read_write_from_s3.py
|
import gzip
import os
import pickle
from io import StringIO, BytesIO
import boto3
from pandas import DataFrame, read_csv, read_pickle
from ingestor.utils import custom_exception, class_custom_exception, Logging
class ConnectS3:
@staticmethod
@custom_exception()
def create_connection(
aws_access_key_id=None, aws_secret_access_key=None, region_name=None
):
"""
Create boto connection object
:return: Connection object
"""
return boto3.resource(
"s3",
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name,
)
@class_custom_exception()
def read_csv_from_S3(
self, bucket_name=None, object_name=None, resource=None
) -> DataFrame:
"""
This function returns dataframe object of csv file stored in S3
:param bucket_name: Name of the bucket where csv is stored
:param object_name: Path of the object in S3
:param resource: Connection object
:return: dataframe object pandas
"""
content_object = resource.Object(bucket_name, object_name)
        csv_string = content_object.get()["Body"].read().decode("utf-8")
df = read_csv(StringIO(csv_string))
return df
@class_custom_exception()
def write_csv_to_S3(
self, bucket_name=None, object_name=None, df_to_upload=None, resource=None
) -> None:
"""
Function to write csv in S3
:param bucket_name: Name of the bucket where csv shall be stored
:param object_name: Path of the object in S3
:param df_to_upload: dataframe to be stored as csv
:param resource: Connection object
:return:
"""
csv_buffer = StringIO()
df_to_upload.to_csv(csv_buffer, index=False)
content_object = resource.Object(bucket_name, object_name)
content_object.put(Body=csv_buffer.getvalue())
csv_name = os.path.split(object_name)[1]
Logging.info("Successfully dumped " + csv_name + " data into s3")
@class_custom_exception()
def read_pkl_from_S3(self, bucket_name=None, object_name=None, resource=None):
"""
Function to write pkl in S3
:param bucket_name: Name of the bucket where pkl shall be stored
:param object_name: Path of the object in S3
:param resource: Connection object
:return: pkl object
"""
try:
response = resource.Bucket(bucket_name).Object(object_name).get()
body_string = response["Body"].read()
loaded_pickle = pickle.loads(body_string)
return loaded_pickle
except:
Logging.info(
"Unable to find file {}. No such file exists".format(object_name)
)
@class_custom_exception()
def write_pkl_to_S3(
self, bucket_name=None, object_name=None, data=None, resource=None
) -> None:
"""
Function to write pkl in S3
:param bucket_name: Name of the bucket where pkl shall be stored
:param object_name: Path of the object in S3
:param data: file to be stored as pkl, like dataframe, dict, list
:param resource: Connection object
:return: None
"""
try:
pkl_obj = pickle.dumps(data)
resource.Object(bucket_name, object_name).put(Body=pkl_obj)
pkl_name = os.path.split(object_name)[1]
Logging.info("Successfully dumped " + pkl_name + " data into s3")
except Exception as e:
Logging.info(f"Error while dumping {object_name} to S3, Exception: {e}")
@class_custom_exception()
def write_compress_pickles_to_S3(
self, bucket_name=None, object_name=None, data=None, resource=None
) -> None:
"""Upload pickle as compressed file
:param bucket_name: bucket name
:param object_name:Where to upload
:param data : local path of pickle file
:param resource: Connection object
:return:None
"""
try:
pkl_name = os.path.split(object_name)[1]
Logging.info("Start dumping " + pkl_name + " data into s3")
d = read_pickle(data)
with gzip.open(data, "wb") as f:
pickle.dump(d, f)
resource.meta.client.upload_file(
data,
bucket_name,
object_name,
)
Logging.info("Successfully dumped " + pkl_name + " data into s3")
except Exception as e:
Logging.error(f"Error while dumping {object_name} to S3, Exception: {e}")
@class_custom_exception()
def read_compress_pickles_from_S3(
self,
bucket_name=None,
object_name=None,
resource=None,
) -> DataFrame:
"""Read pickle as compressed file
:param bucket_name: bucket name
:param object_name: S3 Path of Pickle file
:param resource: Connection object
:return:dataframe
"""
try:
content_object = resource.Object(bucket_name, object_name)
read_file = content_object.get()["Body"].read()
zipfile = BytesIO(read_file)
with gzip.GzipFile(fileobj=zipfile) as gzipfile:
content = gzipfile.read()
loaded_pickle = pickle.loads(content)
print("File {} has been read successfully".format(object_name))
return loaded_pickle
except Exception as e:
Logging.error(f"Error while dumping {object_name} to S3, Exception: {e}")
@class_custom_exception()
def write_df_to_pkl_S3(
self, bucket_name=None, object_name=None, data=None, resource=None
) -> None:
"""Upload csv as compressed pickle file
:param bucket_name: bucket name
:param object_name:Where to upload
:param data : dataframe
:param resource: Connection object
:return:None
"""
try:
file_name = os.path.split(object_name)[1]
Logging.info("Start dumping " + file_name + " data into s3")
pickle_buffer = BytesIO()
data.to_pickle(pickle_buffer, compression="gzip")
resource.Object(bucket_name, object_name).put(Body=pickle_buffer.getvalue())
Logging.info("Successfully dumped " + file_name + " data into s3")
except Exception as e:
Logging.error(f"Error while dumping {object_name} to S3, Exception: {e}")
|
PypiClean
|
/alipay-python-3.3.17.tar.gz/alipay-python-3.3.17/alipay/aop/api/domain/AlipayUserInviteAwardReceiveModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayUserInviteAwardReceiveModel(object):
def __init__(self):
self._mobile = None
self._out_biz_code = None
self._out_biz_no = None
self._user_id = None
@property
def mobile(self):
return self._mobile
@mobile.setter
def mobile(self, value):
self._mobile = value
@property
def out_biz_code(self):
return self._out_biz_code
@out_biz_code.setter
def out_biz_code(self, value):
self._out_biz_code = value
@property
def out_biz_no(self):
return self._out_biz_no
@out_biz_no.setter
def out_biz_no(self, value):
self._out_biz_no = value
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, value):
self._user_id = value
def to_alipay_dict(self):
params = dict()
if self.mobile:
if hasattr(self.mobile, 'to_alipay_dict'):
params['mobile'] = self.mobile.to_alipay_dict()
else:
params['mobile'] = self.mobile
if self.out_biz_code:
if hasattr(self.out_biz_code, 'to_alipay_dict'):
params['out_biz_code'] = self.out_biz_code.to_alipay_dict()
else:
params['out_biz_code'] = self.out_biz_code
if self.out_biz_no:
if hasattr(self.out_biz_no, 'to_alipay_dict'):
params['out_biz_no'] = self.out_biz_no.to_alipay_dict()
else:
params['out_biz_no'] = self.out_biz_no
if self.user_id:
if hasattr(self.user_id, 'to_alipay_dict'):
params['user_id'] = self.user_id.to_alipay_dict()
else:
params['user_id'] = self.user_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayUserInviteAwardReceiveModel()
if 'mobile' in d:
o.mobile = d['mobile']
if 'out_biz_code' in d:
o.out_biz_code = d['out_biz_code']
if 'out_biz_no' in d:
o.out_biz_no = d['out_biz_no']
if 'user_id' in d:
o.user_id = d['user_id']
return o
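if __name__ == "__main__":
    # Minimal round-trip sketch (hypothetical field values): build the model,
    # dump it to the dict shape sent to the gateway, then rebuild it from
    # that dict with from_alipay_dict.
    model = AlipayUserInviteAwardReceiveModel()
    model.mobile = "13800000000"
    model.out_biz_code = "INVITE_AWARD"
    model.out_biz_no = "20200101000001"
    model.user_id = "2088000000000001"
    params = model.to_alipay_dict()
    restored = AlipayUserInviteAwardReceiveModel.from_alipay_dict(params)
    assert restored.user_id == model.user_id
    print(json.dumps(params, ensure_ascii=False))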
|
PypiClean
|
/monk_gluon_cuda92_test-0.0.1.tar.gz/monk_gluon_cuda92_test-0.0.1/monk/tf_keras_1/finetune/level_5_state_base.py
|
from monk.tf_keras_1.finetune.imports import *
from monk.system.imports import *
from monk.tf_keras_1.finetune.level_4_evaluation_base import finetune_evaluation
class finetune_state(finetune_evaluation):
'''
Base class for Monk states - train, eval_infer, resume, copy_from, pseudo_copy_from (for running sub-experiments)
Args:
verbose (int): Set verbosity levels
0 - Print Nothing
1 - Print desired details
'''
@accepts("self", verbose=int, post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def __init__(self, verbose=1):
super().__init__(verbose=verbose);
###############################################################################################################################################
@accepts("self", post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def set_system_state_eval_infer(self):
'''
Set system for eval_infer state
Args:
None
Returns:
None
'''
self.system_dict = read_json(self.system_dict["fname"], verbose=self.system_dict["verbose"]);
self.system_dict["states"]["eval_infer"] = True;
tmp = {};
tmp["featurewise_center"] = False;
tmp["featurewise_std_normalization"] = False;
tmp["rotation_range"] = 0;
tmp["width_shift_range"] = 0;
tmp["height_shift_range"] = 0;
tmp["shear_range"] = 0;
tmp["zoom_range"] = 0;
tmp["brightness_range"] = None;
tmp["horizontal_flip"] = False;
tmp["vertical_flip"] = False;
tmp["mean"] = False;
tmp["std"] = False;
self.system_dict["local"]["transforms_train"] = tmp;
self.system_dict["local"]["transforms_val"] = tmp;
self.system_dict["local"]["transforms_test"] = tmp;
if(self.system_dict["training"]["status"]):
if(len(self.system_dict["dataset"]["transforms"]["test"])):
self.system_dict = retrieve_test_transforms(self.system_dict);
else:
self.custom_print("Test transforms not found.");
self.custom_print("Add test transforms");
self.custom_print("");
self.set_model_final();
else:
msg = "Model in {} not trained. Cannot perform testing or inferencing".format(self.system_dict["experiment_name"]);
raise ConstraintError(msg);
###############################################################################################################################################
###############################################################################################################################################
@accepts("self", post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def set_system_state_resume_train(self):
'''
Set system for resume training state
Args:
None
Returns:
None
'''
self.system_dict = read_json(self.system_dict["fname"], verbose=self.system_dict["verbose"]);
self.system_dict["states"]["resume_train"] = True;
tmp = {};
tmp["featurewise_center"] = False;
tmp["featurewise_std_normalization"] = False;
tmp["rotation_range"] = 0;
tmp["width_shift_range"] = 0;
tmp["height_shift_range"] = 0;
tmp["shear_range"] = 0;
tmp["zoom_range"] = 0;
tmp["brightness_range"] = None;
tmp["horizontal_flip"] = False;
tmp["vertical_flip"] = False;
tmp["mean"] = False;
tmp["std"] = False;
self.system_dict["local"]["transforms_train"] = tmp;
self.system_dict["local"]["transforms_val"] = tmp;
self.system_dict["local"]["transforms_test"] = tmp;
if(self.system_dict["dataset"]["status"]):
self.system_dict = retrieve_trainval_transforms(self.system_dict);
self.set_dataset_final();
else:
msg = "Dataset not set.\n";
msg += "Training not started. Cannot Run resume Mode";
raise ConstraintError(msg);
if(self.system_dict["model"]["status"]):
self.set_model_final();
else:
msg = "Model not set.\n";
msg += "Training not started. Cannot Run resume Mode";
raise ConstraintError(msg);
if(self.system_dict["hyper-parameters"]["status"]):
self.system_dict = retrieve_optimizer(self.system_dict);
self.system_dict = retrieve_scheduler(self.system_dict);
self.system_dict = retrieve_loss(self.system_dict);
else:
msg = "hyper-parameters not set.\n";
msg += "Training not started. Cannot Run resume Mode";
raise ConstraintError(msg);
###############################################################################################################################################
###############################################################################################################################################
@accepts("self", list, post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def set_system_state_copy_from(self, copy_from):
'''
Set system for copied state
Args:
None
Returns:
None
'''
fname = self.system_dict["master_systems_dir_relative"] + copy_from[0] + "/" + copy_from[1] + "/experiment_state.json";
system_dict_tmp = read_json(fname, verbose=self.system_dict["verbose"]);
if(not system_dict_tmp["training"]["status"]):
self.custom_print("Project - {}, Experiment - {}, has incomplete training.".format(copy_from[0], copy_from[1]));
self.custom_print("Complete Previous training before copying from it.");
self.custom_print("");
elif(copy_from[0] == self.system_dict["project_name"] and copy_from[1] == self.system_dict["experiment_name"]):
self.custom_print("Cannot copy same experiment. Use a different experiment to copy and load a previous experiment");
self.custom_print("");
else:
self.system_dict["dataset"] = system_dict_tmp["dataset"];
self.system_dict["model"] = system_dict_tmp["model"];
self.system_dict["hyper-parameters"] = system_dict_tmp["hyper-parameters"];
self.system_dict["training"] = system_dict_tmp["training"];
self.system_dict["origin"] = [copy_from[0], copy_from[1]];
self.system_dict["training"]["outputs"] = {};
self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] = 0;
self.system_dict["training"]["outputs"]["best_val_acc"] = 0;
self.system_dict["training"]["outputs"]["best_val_acc_epoch_num"] = 0;
self.system_dict["training"]["outputs"]["epochs_completed"] = 0;
self.system_dict["training"]["status"] = False;
self.system_dict["training"]["enabled"] = True;
self.system_dict["testing"] = {};
self.system_dict["testing"]["status"] = False;
save(self.system_dict);
self.system_dict = read_json(self.system_dict["fname_relative"], verbose=self.system_dict["verbose"]);
self.system_dict["states"]["copy_from"] = True;
tmp = {};
tmp["featurewise_center"] = False;
tmp["featurewise_std_normalization"] = False;
tmp["rotation_range"] = 0;
tmp["width_shift_range"] = 0;
tmp["height_shift_range"] = 0;
tmp["shear_range"] = 0;
tmp["zoom_range"] = 0;
tmp["brightness_range"] = None;
tmp["horizontal_flip"] = False;
tmp["vertical_flip"] = False;
tmp["mean"] = False;
tmp["std"] = False;
self.system_dict["local"]["transforms_train"] = tmp;
self.system_dict["local"]["transforms_val"] = tmp;
self.system_dict["local"]["transforms_test"] = tmp;
self.system_dict = retrieve_trainval_transforms(self.system_dict);
self.Dataset();
self.set_model_final();
self.system_dict = retrieve_optimizer(self.system_dict);
self.system_dict = retrieve_scheduler(self.system_dict);
self.system_dict = retrieve_loss(self.system_dict);
###############################################################################################################################################
###############################################################################################################################################
@accepts("self", list, post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def set_system_state_pseudo_copy_from(self, pseudo_copy_from):
'''
Set system for copied state in hyper-parameter analysis mode
Args:
None
Returns:
None
'''
fname = self.system_dict["master_systems_dir_relative"] + pseudo_copy_from[0] + "/" + pseudo_copy_from[1] + "/experiment_state.json";
system_dict_tmp = read_json(fname, verbose=self.system_dict["verbose"]);
self.system_dict["dataset"] = system_dict_tmp["dataset"];
self.system_dict["model"] = system_dict_tmp["model"];
self.system_dict["hyper-parameters"] = system_dict_tmp["hyper-parameters"];
self.system_dict["training"] = system_dict_tmp["training"];
self.system_dict["origin"] = [pseudo_copy_from[0], pseudo_copy_from[1]];
self.system_dict["training"]["outputs"] = {};
self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] = 0;
self.system_dict["training"]["outputs"]["best_val_acc"] = 0;
self.system_dict["training"]["outputs"]["best_val_acc_epoch_num"] = 0;
self.system_dict["training"]["outputs"]["epochs_completed"] = 0;
self.system_dict["training"]["status"] = False;
self.system_dict["training"]["enabled"] = True;
self.system_dict["testing"] = {};
self.system_dict["testing"]["status"] = False;
save(self.system_dict);
self.system_dict = read_json(self.system_dict["fname_relative"], verbose=self.system_dict["verbose"]);
self.system_dict["states"]["pseudo_copy_from"] = True;
tmp = {};
tmp["featurewise_center"] = False;
tmp["featurewise_std_normalization"] = False;
tmp["rotation_range"] = 0;
tmp["width_shift_range"] = 0;
tmp["height_shift_range"] = 0;
tmp["shear_range"] = 0;
tmp["zoom_range"] = 0;
tmp["brightness_range"] = None;
tmp["horizontal_flip"] = False;
tmp["vertical_flip"] = False;
tmp["mean"] = False;
tmp["std"] = False;
self.system_dict["local"]["transforms_train"] = tmp;
self.system_dict["local"]["transforms_val"] = tmp;
self.system_dict["local"]["transforms_test"] = tmp;
self.system_dict = retrieve_trainval_transforms(self.system_dict);
self.Dataset();
self.set_model_final();
self.system_dict = retrieve_optimizer(self.system_dict);
self.system_dict = retrieve_scheduler(self.system_dict);
self.system_dict = retrieve_loss(self.system_dict);
###############################################################################################################################################
|
PypiClean
|
/pretrainedmodels-0.7.4.tar.gz/pretrainedmodels-0.7.4/README.md
|
# Pretrained models for Pytorch (Work in progress)
The goal of this repo is:
- to help to reproduce research papers results (transfer learning setups for instance),
- to access pretrained ConvNets with a unique interface/API inspired by torchvision.
<a href="https://travis-ci.org/Cadene/pretrained-models.pytorch"><img src="https://api.travis-ci.org/Cadene/pretrained-models.pytorch.svg?branch=master"/></a>
News:
- 27/10/2018: Fix compatibility issues, Add tests, Add travis
- 04/06/2018: [PolyNet](https://github.com/CUHK-MMLAB/polynet) and [PNASNet-5-Large](https://arxiv.org/abs/1712.00559) thanks to [Alex Parinov](https://github.com/creafz)
- 16/04/2018: [SE-ResNet* and SE-ResNeXt*](https://github.com/hujie-frank/SENet) thanks to [Alex Parinov](https://github.com/creafz)
- 09/04/2018: [SENet154](https://github.com/hujie-frank/SENet) thanks to [Alex Parinov](https://github.com/creafz)
- 22/03/2018: CaffeResNet101 (good for localization with FasterRCNN)
- 21/03/2018: NASNet Mobile thanks to [Veronika Yurchuk](https://github.com/veronikayurchuk) and [Anastasiia](https://github.com/DagnyT)
- 25/01/2018: DualPathNetworks thanks to [Ross Wightman](https://github.com/rwightman/pytorch-dpn-pretrained), Xception thanks to [T Standley](https://github.com/tstandley/Xception-PyTorch), improved TransformImage API
- 13/01/2018: `pip install pretrainedmodels`, `pretrainedmodels.model_names`, `pretrainedmodels.pretrained_settings`
- 12/01/2018: `python setup.py install`
- 08/12/2017: update data url (/!\ `git pull` is needed)
- 30/11/2017: improve API (`model.features(input)`, `model.logits(features)`, `model.forward(input)`, `model.last_linear`)
- 16/11/2017: nasnet-a-large pretrained model ported by T. Durand and R. Cadene
- 22/07/2017: torchvision pretrained models
- 22/07/2017: momentum in inceptionv4 and inceptionresnetv2 to 0.1
- 17/07/2017: model.input_range attribute
- 17/07/2017: BNInception pretrained on Imagenet
## Summary
- [Installation](https://github.com/Cadene/pretrained-models.pytorch#installation)
- [Quick examples](https://github.com/Cadene/pretrained-models.pytorch#quick-examples)
- [Few use cases](https://github.com/Cadene/pretrained-models.pytorch#few-use-cases)
- [Compute imagenet logits](https://github.com/Cadene/pretrained-models.pytorch#compute-imagenet-logits)
- [Compute imagenet validation metrics](https://github.com/Cadene/pretrained-models.pytorch#compute-imagenet-validation-metrics)
- [Evaluation on ImageNet](https://github.com/Cadene/pretrained-models.pytorch#evaluation-on-imagenet)
- [Accuracy on valset](https://github.com/Cadene/pretrained-models.pytorch#accuracy-on-validation-set)
- [Reproducing results](https://github.com/Cadene/pretrained-models.pytorch#reproducing-results)
- [Documentation](https://github.com/Cadene/pretrained-models.pytorch#documentation)
- [Available models](https://github.com/Cadene/pretrained-models.pytorch#available-models)
- [AlexNet](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [BNInception](https://github.com/Cadene/pretrained-models.pytorch#bninception)
- [CaffeResNet101](https://github.com/Cadene/pretrained-models.pytorch#caffe-resnet)
- [DenseNet121](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [DenseNet161](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [DenseNet169](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [DenseNet201](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [DualPathNet68](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks)
- [DualPathNet92](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks)
- [DualPathNet98](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks)
- [DualPathNet107](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks)
    - [DualPathNet131](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks)
- [FBResNet152](https://github.com/Cadene/pretrained-models.pytorch#facebook-resnet)
- [InceptionResNetV2](https://github.com/Cadene/pretrained-models.pytorch#inception)
- [InceptionV3](https://github.com/Cadene/pretrained-models.pytorch#inception)
- [InceptionV4](https://github.com/Cadene/pretrained-models.pytorch#inception)
- [NASNet-A-Large](https://github.com/Cadene/pretrained-models.pytorch#nasnet)
- [NASNet-A-Mobile](https://github.com/Cadene/pretrained-models.pytorch#nasnet)
- [PNASNet-5-Large](https://github.com/Cadene/pretrained-models.pytorch#pnasnet)
- [PolyNet](https://github.com/Cadene/pretrained-models.pytorch#polynet)
- [ResNeXt101_32x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext)
- [ResNeXt101_64x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext)
- [ResNet101](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [ResNet152](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [ResNet18](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [ResNet34](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [ResNet50](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [SENet154](https://github.com/Cadene/pretrained-models.pytorch#senet)
- [SE-ResNet50](https://github.com/Cadene/pretrained-models.pytorch#senet)
- [SE-ResNet101](https://github.com/Cadene/pretrained-models.pytorch#senet)
- [SE-ResNet152](https://github.com/Cadene/pretrained-models.pytorch#senet)
- [SE-ResNeXt50_32x4d](https://github.com/Cadene/pretrained-models.pytorch#senet)
- [SE-ResNeXt101_32x4d](https://github.com/Cadene/pretrained-models.pytorch#senet)
- [SqueezeNet1_0](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [SqueezeNet1_1](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG11](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG13](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG16](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG19](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG11_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG13_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG16_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [VGG19_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)
- [Xception](https://github.com/Cadene/pretrained-models.pytorch#xception)
- [Model API](https://github.com/Cadene/pretrained-models.pytorch#model-api)
- [model.input_size](https://github.com/Cadene/pretrained-models.pytorch#modelinput_size)
- [model.input_space](https://github.com/Cadene/pretrained-models.pytorch#modelinput_space)
- [model.input_range](https://github.com/Cadene/pretrained-models.pytorch#modelinput_range)
- [model.mean](https://github.com/Cadene/pretrained-models.pytorch#modelmean)
- [model.std](https://github.com/Cadene/pretrained-models.pytorch#modelstd)
- [model.features](https://github.com/Cadene/pretrained-models.pytorch#modelfeatures)
- [model.logits](https://github.com/Cadene/pretrained-models.pytorch#modellogits)
- [model.forward](https://github.com/Cadene/pretrained-models.pytorch#modelforward)
- [Reproducing porting](https://github.com/Cadene/pretrained-models.pytorch#reproducing)
- [ResNet*](https://github.com/Cadene/pretrained-models.pytorch#hand-porting-of-resnet152)
- [ResNeXt*](https://github.com/Cadene/pretrained-models.pytorch#automatic-porting-of-resnext)
- [Inception*](https://github.com/Cadene/pretrained-models.pytorch#hand-porting-of-inceptionv4-and-inceptionresnetv2)
## Installation
1. [python3 with anaconda](https://www.continuum.io/downloads)
2. [pytorch with/out CUDA](http://pytorch.org)
### Install from pip
3. `pip install pretrainedmodels`
### Install from repo
3. `git clone https://github.com/Cadene/pretrained-models.pytorch.git`
4. `cd pretrained-models.pytorch`
5. `python setup.py install`
## Quick examples
- To import `pretrainedmodels`:
```python
import pretrainedmodels
```
- To print the available pretrained models:
```python
print(pretrainedmodels.model_names)
> ['fbresnet152', 'bninception', 'resnext101_32x4d', 'resnext101_64x4d', 'inceptionv4', 'inceptionresnetv2', 'alexnet', 'densenet121', 'densenet169', 'densenet201', 'densenet161', 'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152', 'inceptionv3', 'squeezenet1_0', 'squeezenet1_1', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn', 'vgg19_bn', 'vgg19', 'nasnetalarge', 'nasnetamobile', 'cafferesnet101', 'senet154', 'se_resnet50', 'se_resnet101', 'se_resnet152', 'se_resnext50_32x4d', 'se_resnext101_32x4d', 'cafferesnet101', 'polynet', 'pnasnet5large']
```
- To print the available pretrained settings for a chosen model:
```python
print(pretrainedmodels.pretrained_settings['nasnetalarge'])
> {'imagenet': {'url': 'http://data.lip6.fr/cadene/pretrainedmodels/nasnetalarge-a1897284.pth', 'input_space': 'RGB', 'input_size': [3, 331, 331], 'input_range': [0, 1], 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'num_classes': 1000}, 'imagenet+background': {'url': 'http://data.lip6.fr/cadene/pretrainedmodels/nasnetalarge-a1897284.pth', 'input_space': 'RGB', 'input_size': [3, 331, 331], 'input_range': [0, 1], 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'num_classes': 1001}}
```
- To load a pretrained models from imagenet:
```python
model_name = 'nasnetalarge' # could be fbresnet152 or inceptionresnetv2
model = pretrainedmodels.__dict__[model_name](num_classes=1000, pretrained='imagenet')
model.eval()
```
**Note**: By default, models will be downloaded to your `$HOME/.torch` folder. You can modify this behavior using the `$TORCH_MODEL_ZOO` variable as follows: `export TORCH_MODEL_ZOO="/local/pretrainedmodels"`
- To load an image and do a complete forward pass:
```python
import torch
import pretrainedmodels.utils as utils
load_img = utils.LoadImage()
# transformations depending on the model
# rescale, center crop, normalize, and others (ex: ToBGR, ToRange255)
tf_img = utils.TransformImage(model)
path_img = 'data/cat.jpg'
input_img = load_img(path_img)
input_tensor = tf_img(input_img) # 3x400x225 -> 3x299x299 size may differ
input_tensor = input_tensor.unsqueeze(0) # 3x299x299 -> 1x3x299x299
input = torch.autograd.Variable(input_tensor,
requires_grad=False)
output_logits = model(input) # 1x1000
```
- To extract features (beware this API is not available for all networks):
```python
output_features = model.features(input) # 1x14x14x2048 size may differ
output_logits = model.logits(output_features) # 1x1000
```
## Few use cases
### Compute imagenet logits
- See [examples/imagenet_logits.py](https://github.com/Cadene/pretrained-models.pytorch/blob/master/examples/imagenet_logits.py) to compute logits of classes appearance over a single image with a pretrained model on imagenet.
```
$ python examples/imagenet_logits.py -h
> nasnetalarge, resnet152, inceptionresnetv2, inceptionv4, ...
```
```
$ python examples/imagenet_logits.py -a nasnetalarge --path_img data/cat.png
> 'nasnetalarge': data/cat.png' is a 'tiger cat'
```
### Compute imagenet validation metrics
- See [examples/imagenet_eval.py](https://github.com/Cadene/pretrained-models.pytorch/blob/master/examples/imagenet_eval.py) to evaluate pretrained models on imagenet valset.
```
$ python examples/imagenet_eval.py /local/common-data/imagenet_2012/images -a nasnetalarge -b 20 -e
> * Acc@1 92.693, Acc@5 96.13
```
## Evaluation on imagenet
### Accuracy on validation set (single model)
Results were obtained using (center cropped) images of the same size as during the training process.
Model | Version | Acc@1 | Acc@5
--- | --- | --- | ---
PNASNet-5-Large | [Tensorflow](https://github.com/tensorflow/models/tree/master/research/slim) | 82.858 | 96.182
[PNASNet-5-Large](https://github.com/Cadene/pretrained-models.pytorch#pnasnet) | Our porting | 82.736 | 95.992
NASNet-A-Large | [Tensorflow](https://github.com/tensorflow/models/tree/master/research/slim) | 82.693 | 96.163
[NASNet-A-Large](https://github.com/Cadene/pretrained-models.pytorch#nasnet) | Our porting | 82.566 | 96.086
SENet154 | [Caffe](https://github.com/hujie-frank/SENet) | 81.32 | 95.53
[SENet154](https://github.com/Cadene/pretrained-models.pytorch#senet) | Our porting | 81.304 | 95.498
PolyNet | [Caffe](https://github.com/CUHK-MMLAB/polynet) | 81.29 | 95.75
[PolyNet](https://github.com/Cadene/pretrained-models.pytorch#polynet) | Our porting | 81.002 | 95.624
InceptionResNetV2 | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 80.4 | 95.3
InceptionV4 | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 80.2 | 95.3
[SE-ResNeXt101_32x4d](https://github.com/Cadene/pretrained-models.pytorch#senet) | Our porting | 80.236 | 95.028
SE-ResNeXt101_32x4d | [Caffe](https://github.com/hujie-frank/SENet) | 80.19 | 95.04
[InceptionResNetV2](https://github.com/Cadene/pretrained-models.pytorch#inception) | Our porting | 80.170 | 95.234
[InceptionV4](https://github.com/Cadene/pretrained-models.pytorch#inception) | Our porting | 80.062 | 94.926
[DualPathNet107_5k](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks) | Our porting | 79.746 | 94.684
ResNeXt101_64x4d | [Torch7](https://github.com/facebookresearch/ResNeXt) | 79.6 | 94.7
[DualPathNet131](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks) | Our porting | 79.432 | 94.574
[DualPathNet92_5k](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks) | Our porting | 79.400 | 94.620
[DualPathNet98](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks) | Our porting | 79.224 | 94.488
[SE-ResNeXt50_32x4d](https://github.com/Cadene/pretrained-models.pytorch#senet) | Our porting | 79.076 | 94.434
SE-ResNeXt50_32x4d | [Caffe](https://github.com/hujie-frank/SENet) | 79.03 | 94.46
[Xception](https://github.com/Cadene/pretrained-models.pytorch#xception) | [Keras](https://github.com/keras-team/keras/blob/master/keras/applications/xception.py) | 79.000 | 94.500
[ResNeXt101_64x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext) | Our porting | 78.956 | 94.252
[Xception](https://github.com/Cadene/pretrained-models.pytorch#xception) | Our porting | 78.888 | 94.292
ResNeXt101_32x4d | [Torch7](https://github.com/facebookresearch/ResNeXt) | 78.8 | 94.4
SE-ResNet152 | [Caffe](https://github.com/hujie-frank/SENet) | 78.66 | 94.46
[SE-ResNet152](https://github.com/Cadene/pretrained-models.pytorch#senet) | Our porting | 78.658 | 94.374
ResNet152 | [Pytorch](https://github.com/pytorch/vision#models) | 78.428 | 94.110
[SE-ResNet101](https://github.com/Cadene/pretrained-models.pytorch#senet) | Our porting | 78.396 | 94.258
SE-ResNet101 | [Caffe](https://github.com/hujie-frank/SENet) | 78.25 | 94.28
[ResNeXt101_32x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext) | Our porting | 78.188 | 93.886
FBResNet152 | [Torch7](https://github.com/facebook/fb.resnet.torch) | 77.84 | 93.84
SE-ResNet50 | [Caffe](https://github.com/hujie-frank/SENet) | 77.63 | 93.64
[SE-ResNet50](https://github.com/Cadene/pretrained-models.pytorch#senet) | Our porting | 77.636 | 93.752
[DenseNet161](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 77.560 | 93.798
[ResNet101](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 77.438 | 93.672
[FBResNet152](https://github.com/Cadene/pretrained-models.pytorch#facebook-resnet) | Our porting | 77.386 | 93.594
[InceptionV3](https://github.com/Cadene/pretrained-models.pytorch#inception) | [Pytorch](https://github.com/pytorch/vision#models) | 77.294 | 93.454
[DenseNet201](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 77.152 | 93.548
[DualPathNet68b_5k](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks) | Our porting | 77.034 | 93.590
[CaffeResnet101](https://github.com/Cadene/pretrained-models.pytorch#caffe-resnet) | [Caffe](https://github.com/KaimingHe/deep-residual-networks) | 76.400 | 92.900
[CaffeResnet101](https://github.com/Cadene/pretrained-models.pytorch#caffe-resnet) | Our porting | 76.200 | 92.766
[DenseNet169](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 76.026 | 92.992
[ResNet50](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 76.002 | 92.980
[DualPathNet68](https://github.com/Cadene/pretrained-models.pytorch#dualpathnetworks) | Our porting | 75.868 | 92.774
[DenseNet121](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 74.646 | 92.136
[VGG19_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 74.266 | 92.066
NASNet-A-Mobile | [Tensorflow](https://github.com/tensorflow/models/tree/master/research/slim) | 74.0 | 91.6
[NASNet-A-Mobile](https://github.com/veronikayurchuk/pretrained-models.pytorch/blob/master/pretrainedmodels/models/nasnet_mobile.py) | Our porting | 74.080 | 91.740
[ResNet34](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 73.554 | 91.456
[BNInception](https://github.com/Cadene/pretrained-models.pytorch#bninception) | Our porting | 73.524 | 91.562
[VGG16_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 73.518 | 91.608
[VGG19](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 72.080 | 90.822
[VGG16](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 71.636 | 90.354
[VGG13_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 71.508 | 90.494
[VGG11_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 70.452 | 89.818
[ResNet18](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 70.142 | 89.274
[VGG13](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 69.662 | 89.264
[VGG11](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 68.970 | 88.746
[SqueezeNet1_1](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 58.250 | 80.800
[SqueezeNet1_0](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 58.108 | 80.428
[Alexnet](https://github.com/Cadene/pretrained-models.pytorch#torchvision) | [Pytorch](https://github.com/pytorch/vision#models) | 56.432 | 79.194
Notes:
- the Pytorch version of ResNet152 is not a porting of the Torch7 but has been retrained by facebook.
- For the PolyNet evaluation each image was resized to 378x378 without preserving the aspect ratio and then the central 331×331 patch from the resulting image was used.
Beware, the accuracy reported here is not always representative of the transferable capacity of the network on other tasks and datasets. You must try them all! :P
### Reproducing results
Please see [Compute imagenet validation metrics](https://github.com/Cadene/pretrained-models.pytorch#compute-imagenet-validation-metrics)
## Documentation
### Available models
#### NASNet*
Source: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/research/slim)
- `nasnetalarge(num_classes=1000, pretrained='imagenet')`
- `nasnetalarge(num_classes=1001, pretrained='imagenet+background')`
- `nasnetamobile(num_classes=1000, pretrained='imagenet')`
#### FaceBook ResNet*
Source: [Torch7 repo of FaceBook](https://github.com/facebook/fb.resnet.torch)
They are a bit different from the ResNet* of torchvision. ResNet152 is currently the only one available.
- `fbresnet152(num_classes=1000, pretrained='imagenet')`
#### Caffe ResNet*
Source: [Caffe repo of KaimingHe](https://github.com/KaimingHe/deep-residual-networks)
- `cafferesnet101(num_classes=1000, pretrained='imagenet')`
#### Inception*
Source: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/slim) and [Pytorch/Vision repo](https://github.com/pytorch/vision/tree/master/torchvision) for `inceptionv3`
- `inceptionresnetv2(num_classes=1000, pretrained='imagenet')`
- `inceptionresnetv2(num_classes=1001, pretrained='imagenet+background')`
- `inceptionv4(num_classes=1000, pretrained='imagenet')`
- `inceptionv4(num_classes=1001, pretrained='imagenet+background')`
- `inceptionv3(num_classes=1000, pretrained='imagenet')`
#### BNInception
Source: [Trained with Caffe](https://github.com/Cadene/tensorflow-model-zoo.torch/pull/2) by [Xiong Yuanjun](http://yjxiong.me)
- `bninception(num_classes=1000, pretrained='imagenet')`
#### ResNeXt*
Source: [ResNeXt repo of FaceBook](https://github.com/facebookresearch/ResNeXt)
- `resnext101_32x4d(num_classes=1000, pretrained='imagenet')`
- `resnext101_64x4d(num_classes=1000, pretrained='imagenet')`
#### DualPathNetworks
Source: [MXNET repo of Chen Yunpeng](https://github.com/cypw/DPNs)
The porting has been made possible by [Ross Wightman](http://rwightman.com) in his [PyTorch repo](https://github.com/rwightman/pytorch-dpn-pretrained).
As you can see [here](https://github.com/rwightman/pytorch-dpn-pretrained), DualPathNetworks allow you to try different scales. The default one in this repo is 0.875, meaning that the original input size is 256 before cropping to 224.
- `dpn68(num_classes=1000, pretrained='imagenet')`
- `dpn98(num_classes=1000, pretrained='imagenet')`
- `dpn131(num_classes=1000, pretrained='imagenet')`
- `dpn68b(num_classes=1000, pretrained='imagenet+5k')`
- `dpn92(num_classes=1000, pretrained='imagenet+5k')`
- `dpn107(num_classes=1000, pretrained='imagenet+5k')`
`'imagenet+5k'` means that the network has been pretrained on imagenet5k before being finetuned on imagenet1k.
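The `scale` mentioned above is exposed through the image transform rather than the model itself. A minimal sketch (assuming `utils.TransformImage` accepts a `scale` keyword, as described in the linked repo):
```python
import pretrainedmodels
import pretrainedmodels.utils as utils
model = pretrainedmodels.__dict__['dpn92'](num_classes=1000, pretrained='imagenet+5k')
model.eval()
# scale=0.875 resizes the shorter side to 224/0.875 = 256 before the 224 center crop
tf_img = utils.TransformImage(model, scale=0.875)
```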
#### Xception
Source: [Keras repo](https://github.com/keras-team/keras/blob/master/keras/applications/xception.py)
The porting has been made possible by [T Standley](https://github.com/tstandley/Xception-PyTorch).
- `xception(num_classes=1000, pretrained='imagenet')`
#### SENet*
Source: [Caffe repo of Jie Hu](https://github.com/hujie-frank/SENet)
- `senet154(num_classes=1000, pretrained='imagenet')`
- `se_resnet50(num_classes=1000, pretrained='imagenet')`
- `se_resnet101(num_classes=1000, pretrained='imagenet')`
- `se_resnet152(num_classes=1000, pretrained='imagenet')`
- `se_resnext50_32x4d(num_classes=1000, pretrained='imagenet')`
- `se_resnext101_32x4d(num_classes=1000, pretrained='imagenet')`
#### PNASNet*
Source: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/research/slim)
- `pnasnet5large(num_classes=1000, pretrained='imagenet')`
- `pnasnet5large(num_classes=1001, pretrained='imagenet+background')`
#### PolyNet
Source: [Caffe repo of the CUHK Multimedia Lab](https://github.com/CUHK-MMLAB/polynet)
- `polynet(num_classes=1000, pretrained='imagenet')`
#### TorchVision
Source: [Pytorch/Vision repo](https://github.com/pytorch/vision/tree/master/torchvision)
(`inceptionv3` included in [Inception*](https://github.com/Cadene/pretrained-models.pytorch#inception))
- `resnet18(num_classes=1000, pretrained='imagenet')`
- `resnet34(num_classes=1000, pretrained='imagenet')`
- `resnet50(num_classes=1000, pretrained='imagenet')`
- `resnet101(num_classes=1000, pretrained='imagenet')`
- `resnet152(num_classes=1000, pretrained='imagenet')`
- `densenet121(num_classes=1000, pretrained='imagenet')`
- `densenet161(num_classes=1000, pretrained='imagenet')`
- `densenet169(num_classes=1000, pretrained='imagenet')`
- `densenet201(num_classes=1000, pretrained='imagenet')`
- `squeezenet1_0(num_classes=1000, pretrained='imagenet')`
- `squeezenet1_1(num_classes=1000, pretrained='imagenet')`
- `alexnet(num_classes=1000, pretrained='imagenet')`
- `vgg11(num_classes=1000, pretrained='imagenet')`
- `vgg13(num_classes=1000, pretrained='imagenet')`
- `vgg16(num_classes=1000, pretrained='imagenet')`
- `vgg19(num_classes=1000, pretrained='imagenet')`
- `vgg11_bn(num_classes=1000, pretrained='imagenet')`
- `vgg13_bn(num_classes=1000, pretrained='imagenet')`
- `vgg16_bn(num_classes=1000, pretrained='imagenet')`
- `vgg19_bn(num_classes=1000, pretrained='imagenet')`
### Model API
Once a pretrained model has been loaded, you can use it as described below.
**Important note**: All images must be loaded using `PIL`, which scales the pixel values between 0 and 1.
#### `model.input_size`
Attribute of type `list` composed of 3 numbers:
- number of color channels,
- height of the input image,
- width of the input image.
Example:
- `[3, 299, 299]` for inception* networks,
- `[3, 224, 224]` for resnet* networks.
#### `model.input_space`
Attribute of type `str` representing the color space of the image. Can be `RGB` or `BGR`.
#### `model.input_range`
Attribute of type `list` composed of 2 numbers:
- min pixel value,
- max pixel value.
Example:
- `[0, 1]` for resnet* and inception* networks,
- `[0, 255]` for bninception network.
#### `model.mean`
Attribute of type `list` composed of 3 numbers which are used to normalize the input image (subtracted "color-channel-wise").
Example:
- `[0.5, 0.5, 0.5]` for inception* networks,
- `[0.485, 0.456, 0.406]` for resnet* networks.
#### `model.std`
Attribute of type `list` composed of 3 numbers which are used to normalize the input image (divided "color-channel-wise").
Example:
- `[0.5, 0.5, 0.5]` for inception* networks,
- `[0.229, 0.224, 0.225]` for resnet* networks.
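Taken together, these attributes describe the preprocessing a given model expects. A quick way to inspect them on a loaded model (here the `nasnetalarge` model from the quick examples, whose values come from its pretrained settings shown above):
```python
print(model.input_size)   # [3, 331, 331]
print(model.input_space)  # 'RGB'
print(model.input_range)  # [0, 1]
print(model.mean)         # [0.5, 0.5, 0.5]
print(model.std)          # [0.5, 0.5, 0.5]
```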
#### `model.features`
/!\ work in progress (may not be available)
Method which is used to extract the features from the image.
Example when the model is loaded using `fbresnet152`:
```python
print(input_224.size()) # (1,3,224,224)
output = model.features(input_224)
print(output.size()) # (1,2048,1,1)
# print(input_448.size()) # (1,3,448,448)
output = model.features(input_448)
# print(output.size()) # (1,2048,7,7)
```
#### `model.logits`
/!\ work in progress (may not be available)
Method which is used to classify the features from the image.
Example when the model is loaded using `fbresnet152`:
```python
output = model.features(input_224)
print(output.size()) # (1,2048, 1, 1)
output = model.logits(output)
print(output.size()) # (1,1000)
```
#### `model.forward`
Method used to call `model.features` and `model.logits`. It can be overwritten as desired.
**Note**: A good practice is to use `model.__call__` as your function of choice to forward an input to your model. See the example below.
```python
# Without model.__call__
output = model.forward(input_224)
print(output.size()) # (1,1000)
# With model.__call__
output = model(input_224)
print(output.size()) # (1,1000)
```
#### `model.last_linear`
Attribute of type `nn.Linear`. This module is the last one to be called during the forward pass.
- Can be replaced by an adapted `nn.Linear` for fine tuning.
- Can be replaced by `pretrainedmodels.utils.Identity` for feature extraction.
Example when the model is loaded using `fbresnet152`:
```python
print(input_224.size()) # (1,3,224,224)
output = model.features(input_224)
print(output.size()) # (1,2048,1,1)
output = model.logits(output)
print(output.size()) # (1,1000)
# fine tuning
dim_feats = model.last_linear.in_features # =2048
nb_classes = 4
model.last_linear = nn.Linear(dim_feats, nb_classes)
output = model(input_224)
print(output.size()) # (1,4)
# feature extraction
model.last_linear = pretrainedmodels.utils.Identity()
output = model(input_224)
print(output.size()) # (1,2048)
```
## Reproducing
### Hand porting of ResNet152
```
th pretrainedmodels/fbresnet/resnet152_dump.lua
python pretrainedmodels/fbresnet/resnet152_load.py
```
### Automatic porting of ResNeXt
https://github.com/clcarwin/convert_torch_to_pytorch
### Hand porting of NASNet, InceptionV4 and InceptionResNetV2
https://github.com/Cadene/tensorflow-model-zoo.torch
## Acknowledgement
Thanks to the deep learning community and especially to the contributors of the pytorch ecosystem.
|
PypiClean
|
/apache_superset_iteco-2.1.1.4-py3-none-any.whl/superset/static/assets/cffb467b416baf7fcea0.chunk.js
|
"use strict";(globalThis.webpackChunksuperset=globalThis.webpackChunksuperset||[]).push([[9452],{29848:(e,t,l)=>{l.d(t,{Z:()=>d}),l(67294);var a=l(51995),n=l(58593),s=l(70163),r=l(11965);const i=a.iK.span`
white-space: nowrap;
min-width: 100px;
svg,
i {
margin-right: 8px;
&:hover {
path {
fill: ${e=>{let{theme:t}=e;return t.colors.primary.base}};
}
}
}
`,o=a.iK.span`
color: ${e=>{let{theme:t}=e;return t.colors.grayscale.base}};
`;function d(e){let{actions:t}=e;return(0,r.tZ)(i,{className:"actions"},t.map(((e,t)=>{const l=s.Z[e.icon];return e.tooltip?(0,r.tZ)(n.u,{id:`${e.label}-tooltip`,title:e.tooltip,placement:e.placement,key:t},(0,r.tZ)(o,{role:"button",tabIndex:0,className:"action-button",onClick:e.onClick},(0,r.tZ)(l,null))):(0,r.tZ)(o,{role:"button",tabIndex:0,className:"action-button",onClick:e.onClick,key:t},(0,r.tZ)(l,null))})))}},45395:(e,t,l)=>{l.r(t),l.d(t,{default:()=>T});var a=l(67294),n=l(55867),s=l(31069),r=l(15926),i=l.n(r),o=l(30381),d=l.n(o),c=l(34858),u=l(40768),m=l(14114),p=l(20755),h=l(17198),g=l(58593),b=l(19259),Z=l(29848),_=l(18782),y=l(51995),S=l(70163),f=l(74069),C=l(94670),w=l(11965);const v=y.iK.div`
margin: ${e=>{let{theme:t}=e;return 2*t.gridUnit}}px auto
${e=>{let{theme:t}=e;return 4*t.gridUnit}}px auto;
`,$=(0,y.iK)(C.ry)`
border-radius: ${e=>{let{theme:t}=e;return t.borderRadius}}px;
border: 1px solid ${e=>{let{theme:t}=e;return t.colors.secondary.light2}};
`,k=y.iK.div`
margin-bottom: ${e=>{let{theme:t}=e;return 10*t.gridUnit}}px;
.control-label {
margin-bottom: ${e=>{let{theme:t}=e;return 2*t.gridUnit}}px;
}
.required {
margin-left: ${e=>{let{theme:t}=e;return t.gridUnit/2}}px;
color: ${e=>{let{theme:t}=e;return t.colors.error.base}};
}
input[type='text'] {
padding: ${e=>{let{theme:t}=e;return 1.5*t.gridUnit}}px
${e=>{let{theme:t}=e;return 2*t.gridUnit}}px;
border: 1px solid ${e=>{let{theme:t}=e;return t.colors.grayscale.light2}};
border-radius: ${e=>{let{theme:t}=e;return t.gridUnit}}px;
width: 50%;
}
`,x=(0,m.ZP)((e=>{let{addDangerToast:t,onCssTemplateAdd:l,onHide:s,show:r,cssTemplate:i=null}=e;const[o,d]=(0,a.useState)(!0),[m,p]=(0,a.useState)(null),[h,g]=(0,a.useState)(!0),b=null!==i,{state:{loading:Z,resource:_},fetchResource:y,createResource:C,updateResource:x}=(0,c.LE)("css_template",(0,n.t)("css_template"),t),T=()=>{g(!0),s()};return(0,a.useEffect)((()=>{if(b&&(!m||!m.id||i&&(null==i?void 0:i.id)!==m.id||h&&r)){if(null!==(null==i?void 0:i.id)&&!Z){const e=i.id||0;y(e)}}else!b&&(!m||m.id||h&&r)&&p({template_name:"",css:""})}),[i]),(0,a.useEffect)((()=>{_&&p(_)}),[_]),(0,a.useEffect)((()=>{var e;null!=m&&m.template_name.length&&null!=m&&null!=(e=m.css)&&e.length?d(!1):d(!0)}),[m?m.template_name:"",m?m.css:""]),h&&r&&g(!1),(0,w.tZ)(f.Z,{disablePrimaryButton:o,onHandledPrimaryAction:()=>{if(b){if(null!=m&&m.id){const e=m.id;delete m.id,delete m.created_by,x(e,m).then((e=>{e&&(l&&l(),T())}))}}else m&&C(m).then((e=>{e&&(l&&l(),T())}))},onHide:T,primaryButtonName:b?(0,n.t)("Save"):(0,n.t)("Add"),show:r,width:"55%",title:(0,w.tZ)("h4",null,b?(0,w.tZ)(S.Z.EditAlt,{css:u.xL}):(0,w.tZ)(S.Z.PlusLarge,{css:u.xL}),b?(0,n.t)("Edit CSS template properties"):(0,n.t)("Add CSS template"))},(0,w.tZ)(v,null,(0,w.tZ)("h4",null,(0,n.t)("Basic information"))),(0,w.tZ)(k,null,(0,w.tZ)("div",{className:"control-label"},(0,n.t)("CSS template name"),(0,w.tZ)("span",{className:"required"},"*")),(0,w.tZ)("input",{name:"template_name",onChange:e=>{const{target:t}=e,l={...m,template_name:m?m.template_name:"",css:m?m.css:""};l[t.name]=t.value,p(l)},type:"text",value:null==m?void 0:m.template_name})),(0,w.tZ)(k,null,(0,w.tZ)("div",{className:"control-label"},(0,n.t)("css"),(0,w.tZ)("span",{className:"required"},"*")),(0,w.tZ)($,{onChange:e=>{const t={...m,template_name:m?m.template_name:"",css:e};p(t)},value:null==m?void 0:m.css,width:"100%"})))})),T=(0,m.ZP)((function(e){let{addDangerToast:t,addSuccessToast:l,user:r}=e;const{state:{loading:o,resourceCount:m,resourceCollection:y,bulkSelectEnabled:S},hasPerm:f,fetchData:C,refreshData:v,toggleBulkSelect:$}=(0,c.Yi)("css_template",(0,n.t)("CSS templates"),t),[k,T]=(0,a.useState)(!1),[D,H]=(0,a.useState)(null),N=f("can_write"),A=f("can_write"),B=f("can_write"),[E,U]=(0,a.useState)(null),z=[{id:"template_name",desc:!0}],L=(0,a.useMemo)((()=>[{accessor:"template_name",Header:(0,n.t)("Name")},{Cell:e=>{let{row:{original:{changed_on_delta_humanized:t,changed_by:l}}}=e,a="null";return l&&(a=`${l.first_name} ${l.last_name}`),(0,w.tZ)(g.u,{id:"allow-run-async-header-tooltip",title:(0,n.t)("Last modified by %s",a),placement:"right"},(0,w.tZ)("span",null,t))},Header:(0,n.t)("Last modified"),accessor:"changed_on_delta_humanized",size:"xl",disableSortBy:!0},{Cell:e=>{let{row:{original:{created_on:t}}}=e;const l=new Date(t),a=new Date(Date.UTC(l.getFullYear(),l.getMonth(),l.getDate(),l.getHours(),l.getMinutes(),l.getSeconds(),l.getMilliseconds()));return d()(a).fromNow()},Header:(0,n.t)("Created on"),accessor:"created_on",size:"xl",disableSortBy:!0},{accessor:"created_by",disableSortBy:!0,Header:(0,n.t)("Created by"),Cell:e=>{let{row:{original:{created_by:t}}}=e;return t?`${t.first_name} ${t.last_name}`:""},size:"xl"},{Cell:e=>{let{row:{original:t}}=e;const l=[A?{label:"edit-action",tooltip:(0,n.t)("Edit template"),placement:"bottom",icon:"Edit",onClick:()=>(H(t),void T(!0))}:null,B?{label:"delete-action",tooltip:(0,n.t)("Delete 
template"),placement:"bottom",icon:"Trash",onClick:()=>U(t)}:null].filter((e=>!!e));return(0,w.tZ)(Z.Z,{actions:l})},Header:(0,n.t)("Actions"),id:"actions",disableSortBy:!0,hidden:!A&&!B,size:"xl"}]),[B,N]),P={name:(0,n.t)("CSS templates")},M=[];N&&M.push({name:(0,w.tZ)(a.Fragment,null,(0,w.tZ)("i",{className:"fa fa-plus"})," ",(0,n.t)("CSS template")),buttonStyle:"primary",onClick:()=>{H(null),T(!0)}}),B&&M.push({name:(0,n.t)("Bulk select"),onClick:$,buttonStyle:"secondary"}),P.buttons=M;const K=(0,a.useMemo)((()=>[{Header:(0,n.t)("Created by"),key:"created_by",id:"created_by",input:"select",operator:_.p.relationOneMany,unfilteredLabel:(0,n.t)("All"),fetchSelects:(0,u.tm)("css_template","created_by",(0,u.v$)((e=>(0,n.t)("An error occurred while fetching dataset datasource values: %s",e))),r),paginate:!0},{Header:(0,n.t)("Search"),key:"search",id:"template_name",input:"search",operator:_.p.contains}]),[]);return(0,w.tZ)(a.Fragment,null,(0,w.tZ)(p.Z,P),(0,w.tZ)(x,{addDangerToast:t,cssTemplate:D,onCssTemplateAdd:()=>v(),onHide:()=>T(!1),show:k}),E&&(0,w.tZ)(h.Z,{description:(0,n.t)("This action will permanently delete the template."),onConfirm:()=>{E&&(e=>{let{id:a,template_name:r}=e;s.Z.delete({endpoint:`/api/v1/css_template/${a}`}).then((()=>{v(),U(null),l((0,n.t)("Deleted: %s",r))}),(0,u.v$)((e=>t((0,n.t)("There was an issue deleting %s: %s",r,e)))))})(E)},onHide:()=>U(null),open:!0,title:(0,n.t)("Delete Template?")}),(0,w.tZ)(b.Z,{title:(0,n.t)("Please confirm"),description:(0,n.t)("Are you sure you want to delete the selected templates?"),onConfirm:e=>{s.Z.delete({endpoint:`/api/v1/css_template/?q=${i().encode(e.map((e=>{let{id:t}=e;return t})))}`}).then((e=>{let{json:t={}}=e;v(),l(t.message)}),(0,u.v$)((e=>t((0,n.t)("There was an issue deleting the selected templates: %s",e)))))}},(e=>{const t=B?[{key:"delete",name:(0,n.t)("Delete"),onSelect:e,type:"danger"}]:[];return(0,w.tZ)(_.Z,{className:"css-templates-list-view",columns:L,count:m,data:y,fetchData:C,filters:K,initialSort:z,loading:o,pageSize:25,bulkActions:t,bulkSelectEnabled:S,disableBulkSelect:$})})))}))}}]);
//# sourceMappingURL=cffb467b416baf7fcea0.chunk.js.map
|
PypiClean
|
/dappy-3.0.3.tar.gz/dappy-3.0.3/docs/usage.rst
|
=====
Usage
=====
To use dappy in a project::
from dappy import API, Endpoint
ItunesAPI = API('itunes.apple.com', [
Endpoint(
'search', '/search',
query_map={ 'search_string': 'term' }, # Map input query params to what the API actually expects
default_query={ 'entity': 'podcast' } # Default query params to send with every request
# we could also pass default_params={} or default_headers={} here
),
Endpoint('lookup', '/lookup')
], scheme='https') # scheme defaults to 'https'
ItunesAPI.search(query={
'search_string': 'Hello, World' # 'search_string' will get mapped to 'term' before we send the request
}) # 'entity=podcast' gets added to the query without us providing it here
ItunesAPI.lookup(query={ 'id': '656270845' })
Return Values
=============
All dappy functions return either a dict parsed from JSON or the raw requests request object, depending on how the Endpoint was set up. The default formatter returns the requests request object; use json_formatter to get a dict parsed from JSON. ::
from dappy import API, Endpoint
from dappy.formatters import default_formatter, json_formatter
ItunesAPI = API('itunes.apple.com', [
Endpoint('search', '/search'),
])
ItunesAPI.search() # Returns requests request object
ItunesAPI = API('itunes.apple.com', [
Endpoint('search', '/search'),
], formatter=json_formatter)
ItunesAPI.search() # Returns dict parsed from JSON
ItunesAPI = API('itunes.apple.com', [
Endpoint('search', '/search', formatter=json_formatter),
Endpoint('lookup', '/lookup'),
])
ItunesAPI.search() # Returns dict parsed from JSON
ItunesAPI.lookup() # Returns requests request object
ItunesAPI = API('itunes.apple.com', [
Endpoint('search', '/search', formatter=default_formatter),
Endpoint('lookup', '/lookup'),
], formatter=json_formatter)
ItunesAPI.search() # Returns requests request object
ItunesAPI.lookup() # Returns dict parsed from JSON
Mocking
=======
Using the requests-mock module, we can mock specific URLs ::
import requests_mock
from dappy import API, Endpoint
ItunesAPI = API('itunes.apple.com', [
Endpoint('search', '/search'),
])
with requests_mock.Mocker() as mock:
mock.get('https://itunes.apple.com/search', json={'results': []})
ItunesAPI.search().json() # returns {'results': []}
GET, HEAD, POST, PUT, DELETE, CONNECT, OPTIONS, PATCH, TRACE
============================================================
Dappy supports all HTTP methods; each method accepts the keyword args 'query', 'params', and 'headers' ::
from dappy import API, Endpoint
ItunesAPI = API('itunes.apple.com', [
Endpoint('search', '/search'),
])
ItunesAPI.search() # sends a GET request
ItunesAPI.search.get(query={}) # also sends a GET request
ItunesAPI.search.head()
ItunesAPI.search.post(params={}, headers={'Authorization': 'Basic ...'})
ItunesAPI.search.put()
ItunesAPI.search.delete()
ItunesAPI.search.connect()
ItunesAPI.search.options()
ItunesAPI.search.patch()
ItunesAPI.search.trace()
...
|
PypiClean
|
/trafilatura-1.6.1.tar.gz/trafilatura-1.6.1/docs/usage-r.rst
|
With R
======
Introduction
------------
`R <https://www.r-project.org/>`_ is a free software environment for statistical computing and graphics. The `reticulate <https://rstudio.github.io/reticulate>`_ package provides a comprehensive set of tools for seamless interoperability between Python and R. It basically allows for execution of Python code inside an R session, so that Python packages can be used with minimal adaptations, which is ideal for those who would rather operate from R than having to go back and forth between languages and environments.
The package provides several ways to integrate Python code into R projects:
- Python in R Markdown
- Importing Python modules
- Sourcing Python scripts
- An interactive Python console within R.
Complete vignette: `Calling Python from R <https://rstudio.github.io/reticulate/articles/calling_python.html>`_.
This tutorial shows how to import a Python scraper straight from R and use the results directly with the usual R syntax: `Web scraping with R: Text and metadata extraction <https://adrien.barbaresi.eu/blog/web-scraping-text-metadata-r.html>`_.
Installation
------------
The reticulate package can be easily installed from CRAN as follows:
.. code-block:: R
> install.packages("reticulate")
A recent version of Python 3 is necessary. Some systems already have such an environment installed; to check, just run the following command in a terminal window:
.. code-block:: bash
$ python3 --version
Python 3.8.6 # version 3.6 or higher is fine
In case Python is not installed, please refer to the excellent `Djangogirls tutorial: Python installation <https://tutorial.djangogirls.org/en/python_installation/>`_.
``Trafilatura`` has to be installed with `pip <installation.html>`_, `conda <https://docs.conda.io/en/latest/>`_, or `py_install <https://rstudio.github.io/reticulate/reference/py_install.html>`_. Skip the installation of Miniconda if it doesn't seem necessary (you should only be prompted once), or see `Installing Python Packages <https://rstudio.github.io/reticulate/articles/python_packages.html>`_.
Here is a simple example using the ``py_install()`` function included in ``reticulate``:
.. code-block:: R
> library(reticulate)
> py_install("trafilatura")
Download and extraction
-----------------------
Text extraction from HTML documents (including downloads) is available in a straightforward way:
.. code-block:: R
# getting started
> install.packages("reticulate")
> library(reticulate)
> trafilatura <- import("trafilatura")
# get a HTML document as string
> url <- "https://example.org/"
> downloaded <- trafilatura$fetch_url(url)
# extraction
> trafilatura$extract(downloaded)
[1] "This domain is for use in illustrative examples in documents. You may use this domain in literature without prior coordination or asking for permission.\nMore information..."
# extraction with arguments
> trafilatura$extract(downloaded, output_format="xml", url=url)
[1] "<doc sitename=\"example.org\" title=\"Example Domain\" source=\"https://example.org/\" hostname=\"example.org\" categories=\"\" tags=\"\" fingerprint=\"lxZaiIwoxp80+AXA2PtCBnJJDok=\">\n <main>\n <div>\n <head>Example Domain</head>\n <p>This domain is for use in illustrative examples in documents. You may use this\ndomain in literature without prior coordination or asking for permission.</p>\n <p>More information...</p>\n </div>\n </main>\n <comments/>\n</doc>"
For a full list of arguments see `extraction documentation <corefunctions.html#extraction>`_.
Already stored documents can also be read directly from R, for example with CSV/TSV output and ``read_delim()``, see information on `data import in R <https://r4ds.had.co.nz/data-import.html>`_.
The ``html2txt`` function extracts all possible text on the webpage; it can be used as follows:
.. code-block:: R
> trafilatura$html2txt(downloaded)
Other functions
---------------
Specific parts of the package can also be imported on demand, which provides access to functions not directly exported by the package. For a list of relevant functions and arguments see `core functions <corefunctions.html>`_.
.. code-block:: R
# using the code for link discovery in sitemaps
> sitemapsfunc <- py_run_string("from trafilatura.sitemaps import sitemap_search")
> sitemapsfunc$sitemap_search("https://www.sitemaps.org/")
[1] "https://www.sitemaps.org"
[2] "https://www.sitemaps.org/protocol.html"
[3] "https://www.sitemaps.org/faq.html"
[4] "https://www.sitemaps.org/terms.html"
# and so on...
# import the metadata part of the package as a function
> metadatafunc <- py_run_string("from trafilatura.metadata import extract_metadata")
> downloaded <- trafilatura$fetch_url("https://github.com/rstudio/reticulate")
> metadatafunc$extract_metadata(downloaded)
$title
[1] "rstudio/reticulate"
$author
[1] "Rstudio"
$url
[1] "https://github.com/rstudio/reticulate"
$hostname
[1] "github.com"
# and so on...
Going further
-------------
- `Basic Text Processing in R <https://programminghistorian.org/en/lessons/basic-text-processing-in-r>`_
- `Quanteda <https://quanteda.io>`_ is an R package for managing and analyzing text:
- `Quickstart <https://quanteda.io/articles/pkgdown/quickstart.html>`_
- `Quanteda tutorials <https://tutorials.quanteda.io/>`_
- `Advancing Text Mining with R and quanteda <https://www.r-bloggers.com/2019/10/advancing-text-mining-with-r-and-quanteda/>`_
|
PypiClean
|
/ehelply_python_sdk-1.1.88-py3-none-any.whl/ehelply_python_sdk/model/participant_user_return.py
|
import re # noqa: F401
import sys # noqa: F401
import typing # noqa: F401
import functools # noqa: F401
from frozendict import frozendict # noqa: F401
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from ehelply_python_sdk.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
UUIDSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
Configuration,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
Int32Base,
Int64Base,
Float32Base,
Float64Base,
NumberBase,
UUIDBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
class ParticipantUserReturn(
DictSchema
):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Contains all fields required when doing a Participant GET but also has user fields (name, location, etc.). This is
what is returned from all participant endpoints.
"""
uuid = StrSchema
user_uuid = StrSchema
participant_meta = DictSchema
first_name = StrSchema
last_name = StrSchema
class email(
ComposedSchema
):
@classmethod
@property
@functools.cache
def _composed_schemas(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
'allOf': [
UserEmail,
],
'oneOf': [
],
'anyOf': [
],
'not':
None
}
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[Configuration] = None,
**kwargs: typing.Type[Schema],
) -> 'email':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
phone_number = StrSchema
country = StrSchema
gps_location = DictSchema
picture = StrSchema
last_login = DateTimeSchema
verified_legal_terms = BoolSchema
date_created = DateTimeSchema
date_updated = DateTimeSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
uuid: typing.Union[uuid, Unset] = unset,
user_uuid: typing.Union[user_uuid, Unset] = unset,
participant_meta: typing.Union[participant_meta, Unset] = unset,
first_name: typing.Union[first_name, Unset] = unset,
last_name: typing.Union[last_name, Unset] = unset,
email: typing.Union[email, Unset] = unset,
phone_number: typing.Union[phone_number, Unset] = unset,
country: typing.Union[country, Unset] = unset,
gps_location: typing.Union[gps_location, Unset] = unset,
picture: typing.Union[picture, Unset] = unset,
last_login: typing.Union[last_login, Unset] = unset,
verified_legal_terms: typing.Union[verified_legal_terms, Unset] = unset,
date_created: typing.Union[date_created, Unset] = unset,
date_updated: typing.Union[date_updated, Unset] = unset,
_configuration: typing.Optional[Configuration] = None,
**kwargs: typing.Type[Schema],
) -> 'ParticipantUserReturn':
return super().__new__(
cls,
*args,
uuid=uuid,
user_uuid=user_uuid,
participant_meta=participant_meta,
first_name=first_name,
last_name=last_name,
email=email,
phone_number=phone_number,
country=country,
gps_location=gps_location,
picture=picture,
last_login=last_login,
verified_legal_terms=verified_legal_terms,
date_created=date_created,
date_updated=date_updated,
_configuration=_configuration,
**kwargs,
)
from ehelply_python_sdk.model.user_email import UserEmail
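# --- Hypothetical usage sketch (not part of the generated module) ---
# Field values are illustrative only; dict-style access mirrors the
# frozendict-based DictSchema instances this generator produces.
if __name__ == "__main__":
    participant = ParticipantUserReturn(
        first_name="Ada",
        last_name="Lovelace",
        country="CA",
    )
    print(participant["first_name"], participant["last_name"])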
|
PypiClean
|
/wildcard.cleanprint-1.0a3.tar.gz/wildcard.cleanprint-1.0a3/README.txt
|
Provides integration with Plone for the Clean Print product
(http://www.formatdynamics.com/diypub/).
To use clean print, you must agree to some terms and generate some
html/javascript:
1. Enable the Clean Print product in the portal_quickinstaller
2. Go to http://www.formatdynamics.com/diypub/
3. Fill out the form and click the 'Generate CleanPrint Tag' button
4. Copy the value of the SRC parameter of the <script> element and
paste it into the Script URL configuration field in the CleanPrint
settings (Site Setup -> CleanPrint Settings) and save the form
5. Then there are two options for including CleanPrint buttons on
your site:
a. Make use of the defined portal_actions
b. Use the generated Button HTML (or some derivation thereof) from
step 2 in a custom template of some sort
|
PypiClean
|
/cppyy-cling-6.28.0.tar.gz/cppyy-cling-6.28.0/src/interpreter/llvm/src/docs/tutorial/MyFirstLanguageFrontend/LangImpl09.rst
|
======================================
Kaleidoscope: Adding Debug Information
======================================
.. contents::
:local:
Chapter 9 Introduction
======================
Welcome to Chapter 9 of the "`Implementing a language with
LLVM <index.html>`_" tutorial. In chapters 1 through 8, we've built a
decent little programming language with functions and variables.
What happens if something goes wrong though, how do you debug your
program?
Source level debugging uses formatted data that helps a debugger
translate from binary and the state of the machine back to the
source that the programmer wrote. In LLVM we generally use a format
called `DWARF <http://dwarfstd.org>`_. DWARF is a compact encoding
that represents types, source locations, and variable locations.
The short summary of this chapter is that we'll go through the
various things you have to add to a programming language to
support debug info, and how you translate that into DWARF.
Caveat: For now we can't debug via the JIT, so we'll need to compile
our program down to something small and standalone. As part of this
we'll make a few modifications to the running of the language and
how programs are compiled. This means that we'll have a source file
with a simple program written in Kaleidoscope rather than the
interactive JIT. It does involve a limitation that we can only
have one "top level" command at a time to reduce the number of
changes necessary.
Here's the sample program we'll be compiling:
.. code-block:: python
def fib(x)
if x < 3 then
1
else
fib(x-1)+fib(x-2);
fib(10)
Why is this a hard problem?
===========================
Debug information is a hard problem for a few different reasons - mostly
centered around optimized code. First, optimization makes keeping source
locations more difficult. In LLVM IR we keep the original source location
for each IR level instruction on the instruction. Optimization passes
should keep the source locations for newly created instructions, but merged
instructions only get to keep a single location - this can cause jumping
around when stepping through optimized programs. Secondly, optimization
can move variables in ways that are either optimized out, shared in memory
with other variables, or difficult to track. For the purposes of this
tutorial we're going to avoid optimization (as you'll see with one of the
next sets of patches).
Ahead-of-Time Compilation Mode
==============================
To highlight only the aspects of adding debug information to a source
language without needing to worry about the complexities of JIT debugging
we're going to make a few changes to Kaleidoscope to support compiling
the IR emitted by the front end into a simple standalone program that
you can execute, debug, and see results.
First we make our anonymous function that contains our top level
statement be our "main":
.. code-block:: udiff
- auto Proto = std::make_unique<PrototypeAST>("", std::vector<std::string>());
+ auto Proto = std::make_unique<PrototypeAST>("main", std::vector<std::string>());
just with the simple change of giving it a name.
Then we're going to remove the command line code wherever it exists:
.. code-block:: udiff
@@ -1129,7 +1129,6 @@ static void HandleTopLevelExpression() {
/// top ::= definition | external | expression | ';'
static void MainLoop() {
while (1) {
- fprintf(stderr, "ready> ");
switch (CurTok) {
case tok_eof:
return;
@@ -1184,7 +1183,6 @@ int main() {
BinopPrecedence['*'] = 40; // highest.
// Prime the first token.
- fprintf(stderr, "ready> ");
getNextToken();
Lastly we're going to disable all of the optimization passes and the JIT so
that the only thing that happens after we're done parsing and generating
code is that the LLVM IR goes to standard error:
.. code-block:: udiff
@@ -1108,17 +1108,8 @@ static void HandleExtern() {
static void HandleTopLevelExpression() {
// Evaluate a top-level expression into an anonymous function.
if (auto FnAST = ParseTopLevelExpr()) {
- if (auto *FnIR = FnAST->codegen()) {
- // We're just doing this to make sure it executes.
- TheExecutionEngine->finalizeObject();
- // JIT the function, returning a function pointer.
- void *FPtr = TheExecutionEngine->getPointerToFunction(FnIR);
-
- // Cast it to the right type (takes no arguments, returns a double) so we
- // can call it as a native function.
- double (*FP)() = (double (*)())(intptr_t)FPtr;
- // Ignore the return value for this.
- (void)FP;
+      if (!FnAST->codegen()) {
+ fprintf(stderr, "Error generating code for top level expr");
}
} else {
// Skip token for error recovery.
@@ -1439,11 +1459,11 @@ int main() {
// target lays out data structures.
TheModule->setDataLayout(TheExecutionEngine->getDataLayout());
OurFPM.add(new DataLayoutPass());
+#if 0
OurFPM.add(createBasicAliasAnalysisPass());
// Promote allocas to registers.
OurFPM.add(createPromoteMemoryToRegisterPass());
@@ -1218,7 +1210,7 @@ int main() {
OurFPM.add(createGVNPass());
// Simplify the control flow graph (deleting unreachable blocks, etc).
OurFPM.add(createCFGSimplificationPass());
-
+ #endif
OurFPM.doInitialization();
// Set the global so the code gen can use this.
This relatively small set of changes get us to the point that we can compile
our piece of Kaleidoscope language down to an executable program via this
command line:
.. code-block:: bash
Kaleidoscope-Ch9 < fib.ks | & clang -x ir -
which gives an a.out/a.exe in the current working directory.
Compile Unit
============
The top level container for a section of code in DWARF is a compile unit.
This contains the type and function data for an individual translation unit
(read: one file of source code). So the first thing we need to do is
construct one for our fib.ks file.
DWARF Emission Setup
====================
Similar to the ``IRBuilder`` class we have a
`DIBuilder <https://llvm.org/doxygen/classllvm_1_1DIBuilder.html>`_ class
that helps in constructing debug metadata for an LLVM IR file. It
corresponds 1:1 similarly to ``IRBuilder`` and LLVM IR, but with nicer names.
Using it does require that you be more familiar with DWARF terminology than
you needed to be with ``IRBuilder`` and ``Instruction`` names, but if you
read through the general documentation on the
`Metadata Format <https://llvm.org/docs/SourceLevelDebugging.html>`_ it
should be a little more clear. We'll be using this class to construct all
of our IR level descriptions. Construction for it takes a module so we
need to construct it shortly after we construct our module. We've left it
as a global static variable to make it a bit easier to use.
Next we're going to create a small container to cache some of our frequent
data. The first will be our compile unit, but we'll also write a bit of
code for our one type since we won't have to worry about multiple typed
expressions:
.. code-block:: c++
static DIBuilder *DBuilder;
struct DebugInfo {
DICompileUnit *TheCU;
DIType *DblTy;
DIType *getDoubleTy();
} KSDbgInfo;
DIType *DebugInfo::getDoubleTy() {
if (DblTy)
return DblTy;
DblTy = DBuilder->createBasicType("double", 64, dwarf::DW_ATE_float);
return DblTy;
}
And then later on in ``main`` when we're constructing our module:
.. code-block:: c++
DBuilder = new DIBuilder(*TheModule);
KSDbgInfo.TheCU = DBuilder->createCompileUnit(
dwarf::DW_LANG_C, DBuilder->createFile("fib.ks", "."),
"Kaleidoscope Compiler", 0, "", 0);
There are a couple of things to note here. First, while we're producing a
compile unit for a language called Kaleidoscope we used the language
constant for C. This is because a debugger wouldn't necessarily understand
the calling conventions or default ABI for a language it doesn't recognize
and we follow the C ABI in our LLVM code generation so it's the closest
thing to accurate. This ensures we can actually call functions from the
debugger and have them execute. Secondly, you'll see the "fib.ks" in the
call to ``createCompileUnit``. This is a default hard coded value since
we're using shell redirection to put our source into the Kaleidoscope
compiler. In a usual front end you'd have an input file name and it would
go there.
One last thing as part of emitting debug information via DIBuilder is that
we need to "finalize" the debug information. The reasons are part of the
underlying API for DIBuilder, but make sure you do this near the end of
main:
.. code-block:: c++
DBuilder->finalize();
before you dump out the module.
Functions
=========
Now that we have our ``Compile Unit`` and our source locations, we can add
function definitions to the debug info. So in ``PrototypeAST::codegen()`` we
add a few lines of code to describe a context for our subprogram, in this
case the "File", and the actual definition of the function itself.
So the context:
.. code-block:: c++
DIFile *Unit = DBuilder->createFile(KSDbgInfo.TheCU->getFilename(),
KSDbgInfo.TheCU->getDirectory());
giving us a DIFile and asking the ``Compile Unit`` we created above for the
directory and filename where we are currently. Then, for now, we use some
source locations of 0 (since our AST doesn't currently have source location
information) and construct our function definition:
.. code-block:: c++
DIScope *FContext = Unit;
unsigned LineNo = 0;
unsigned ScopeLine = 0;
DISubprogram *SP = DBuilder->createFunction(
FContext, P.getName(), StringRef(), Unit, LineNo,
CreateFunctionType(TheFunction->arg_size(), Unit),
false /* internal linkage */, true /* definition */, ScopeLine,
DINode::FlagPrototyped, false);
TheFunction->setSubprogram(SP);
and we now have a DISubprogram that contains a reference to all of our
metadata for the function.
Source Locations
================
The most important thing for debug information is accurate source location -
this makes it possible to map your source code back. We have a problem though,
Kaleidoscope really doesn't have any source location information in the lexer
or parser so we'll need to add it.
.. code-block:: c++
struct SourceLocation {
int Line;
int Col;
};
static SourceLocation CurLoc;
static SourceLocation LexLoc = {1, 0};
static int advance() {
int LastChar = getchar();
if (LastChar == '\n' || LastChar == '\r') {
LexLoc.Line++;
LexLoc.Col = 0;
} else
LexLoc.Col++;
return LastChar;
}
In this set of code we've added some functionality on how to keep track of the
line and column of the "source file". As we lex every token we set our
current "lexical location" to the assorted line and column for the beginning
of the token. We do this by overriding all of the previous calls to
``getchar()`` with our new ``advance()`` that keeps track of the information
and then we have added to all of our AST classes a source location:
.. code-block:: c++
class ExprAST {
SourceLocation Loc;
public:
ExprAST(SourceLocation Loc = CurLoc) : Loc(Loc) {}
virtual ~ExprAST() {}
virtual Value* codegen() = 0;
int getLine() const { return Loc.Line; }
int getCol() const { return Loc.Col; }
virtual raw_ostream &dump(raw_ostream &out, int ind) {
return out << ':' << getLine() << ':' << getCol() << '\n';
}
that we pass down through when we create a new expression:
.. code-block:: c++
LHS = std::make_unique<BinaryExprAST>(BinLoc, BinOp, std::move(LHS),
std::move(RHS));
giving us locations for each of our expressions and variables.
To make sure that every instruction gets proper source location information,
we have to tell ``Builder`` whenever we're at a new source location.
We use a small helper function for this:
.. code-block:: c++
void DebugInfo::emitLocation(ExprAST *AST) {
DIScope *Scope;
if (LexicalBlocks.empty())
Scope = TheCU;
else
Scope = LexicalBlocks.back();
Builder.SetCurrentDebugLocation(
DILocation::get(Scope->getContext(), AST->getLine(), AST->getCol(), Scope));
}
This both tells the main ``IRBuilder`` where we are, but also what scope
we're in. The scope can either be on compile-unit level or be the nearest
enclosing lexical block like the current function.
To represent this we create a stack of scopes:
.. code-block:: c++
std::vector<DIScope *> LexicalBlocks;
and push the scope (function) to the top of the stack when we start
generating the code for each function:
.. code-block:: c++
KSDbgInfo.LexicalBlocks.push_back(SP);
Also, we must not forget to pop the scope back off of the scope stack at the
end of the code generation for the function:
.. code-block:: c++
// Pop off the lexical block for the function since we added it
// unconditionally.
KSDbgInfo.LexicalBlocks.pop_back();
Then we make sure to emit the location every time we start to generate code
for a new AST object:
.. code-block:: c++
KSDbgInfo.emitLocation(this);
Variables
=========
Now that we have functions, we need to be able to print out the variables
we have in scope. Let's get our function arguments set up so we can get
decent backtraces and see how our functions are being called. It isn't
a lot of code, and we generally handle it when we're creating the
argument allocas in ``FunctionAST::codegen``.
.. code-block:: c++
// Record the function arguments in the NamedValues map.
NamedValues.clear();
unsigned ArgIdx = 0;
for (auto &Arg : TheFunction->args()) {
// Create an alloca for this variable.
AllocaInst *Alloca = CreateEntryBlockAlloca(TheFunction, Arg.getName());
// Create a debug descriptor for the variable.
DILocalVariable *D = DBuilder->createParameterVariable(
SP, Arg.getName(), ++ArgIdx, Unit, LineNo, KSDbgInfo.getDoubleTy(),
true);
DBuilder->insertDeclare(Alloca, D, DBuilder->createExpression(),
DILocation::get(SP->getContext(), LineNo, 0, SP),
Builder.GetInsertBlock());
// Store the initial value into the alloca.
Builder.CreateStore(&Arg, Alloca);
// Add arguments to variable symbol table.
NamedValues[Arg.getName()] = Alloca;
}
Here we're first creating the variable, giving it the scope (``SP``),
the name, source location, type, and since it's an argument, the argument
index. Next, we create an ``llvm.dbg.declare`` call to indicate at the IR
level that we've got a variable in an alloca (and it gives a starting
location for the variable), and setting a source location for the
beginning of the scope on the declare.
One interesting thing to note at this point is that various debuggers have
assumptions based on how code and debug information was generated for them
in the past. In this case we need to do a little bit of a hack to avoid
generating line information for the function prologue so that the debugger
knows to skip over those instructions when setting a breakpoint. So in
``FunctionAST::CodeGen`` we add some more lines:
.. code-block:: c++
// Unset the location for the prologue emission (leading instructions with no
// location in a function are considered part of the prologue and the debugger
// will run past them when breaking on a function)
KSDbgInfo.emitLocation(nullptr);
and then emit a new location when we actually start generating code for the
body of the function:
.. code-block:: c++
KSDbgInfo.emitLocation(Body.get());
With this we have enough debug information to set breakpoints in functions,
print out argument variables, and call functions. Not too bad for just a
few simple lines of code!
Full Code Listing
=================
Here is the complete code listing for our running example, enhanced with
debug information. To build this example, use:
.. code-block:: bash
# Compile
clang++ -g toy.cpp `llvm-config --cxxflags --ldflags --system-libs --libs core orcjit native` -O3 -o toy
# Run
./toy
Here is the code:
.. literalinclude:: ../../../examples/Kaleidoscope/Chapter9/toy.cpp
:language: c++
`Next: Conclusion and other useful LLVM tidbits <LangImpl10.html>`_
|
PypiClean
|
/effect-edc-0.1.37.tar.gz/effect-edc-0.1.37/effect_ae/list_data.py
|
from edc_constants.constants import (
DEAD,
MALIGNANCY,
NOT_APPLICABLE,
OTHER,
OTHER_PLEASE_SPECIFY_TEXT,
TUBERCULOSIS,
UNKNOWN,
)
from edc_constants.disease_constants import (
ANAEMIA,
BACTERAEMIA_SEPSIS,
BACTERIAL_PNEUMONIA,
CM_RELAPSE_IRIS,
COVID_19,
CRYPTOCOCCAL_MENINGITIS,
NEUTROPAENIA,
PNEUMONIA,
RENAL_IMPAIRMENT,
TB_PULMONARY,
THROMBOCYTOPENIA,
)
list_data = {
"edc_adverse_event.aeclassification": [
("cm_possible", "Possible cryptococcal meningitis"),
("cm_confirmed", "Confirmed cryptococcal meningitis"),
(ANAEMIA, "Anaemia"),
(NEUTROPAENIA, "Neutropaenia"),
(THROMBOCYTOPENIA, "Thrombocytopenia"),
(RENAL_IMPAIRMENT, "Renal impairment"),
(BACTERAEMIA_SEPSIS, "Bacteraemia/sepsis"),
(TUBERCULOSIS, "TB"),
(PNEUMONIA, "Pneumonia"),
("gastroenteritis", "Gastroenteritis"),
("liver_injury_drug", "Drug-induced liver injury"),
(OTHER, "Other"),
(NOT_APPLICABLE, "Not applicable"),
],
"edc_adverse_event.saereason": [
(NOT_APPLICABLE, "Not applicable"),
(DEAD, "Death"),
("life_threatening", "Life-threatening"),
("significant_disability", "Significant disability"),
("in-patient_hospitalization", "In-patient hospitalization or prolongation"),
(
"medically_important_event",
"Medically important event (e.g. Bacteraemia, "
"recurrence of symptoms not requiring admission)",
),
],
"edc_adverse_event.causeofdeath": [
(CRYPTOCOCCAL_MENINGITIS, "Cryptococcal meningitis"),
(BACTERAEMIA_SEPSIS, "Bacteraemia/sepsis"),
(BACTERIAL_PNEUMONIA, "Bacterial pneumonia"),
(COVID_19, "COVID-19"),
(CM_RELAPSE_IRIS, "Cryptococcal meningitis relapse/IRIS"),
("iris_non_cm", "IRIS non-CM"),
(TB_PULMONARY, "TB - Pulmonary"),
("tb_meningitis", "TB - Meningitis"),
("tb_disseminated", "TB - Disseminated"),
("art_toxicity", "ART toxicity"),
(MALIGNANCY, "Malignancy"),
("diarrhea_wasting", "Diarrhea/wasting"),
("pcp", "PCP"),
("toxoplasmosis", "Toxoplasmosis"),
(UNKNOWN, "Unknown"),
(OTHER, OTHER_PLEASE_SPECIFY_TEXT),
],
}
|
PypiClean
|
/datapipe_core-0.13.0b4-py3-none-any.whl/datapipe/migrations/v013.py
|
from rich import print as rprint
from sqlalchemy import insert, literal
from sqlalchemy.sql import and_, func, select
from datapipe.step.batch_transform import BaseBatchTransformStep
def migrate_transform_tables(app, steps):
for batch_transform in steps:
if not isinstance(batch_transform, BaseBatchTransformStep):
continue
rprint(f"Migrate '{batch_transform.get_name()}': ")
size = batch_transform.meta_table.get_metadata_size()
if size > 0:
print(f"Skipping -- size of metadata is greater than 0: {size=}")
continue
output_tbls = [
output_dt.meta_table.sql_table for output_dt in batch_transform.output_dts
]
def make_ids_cte():
ids_cte = (
select(
*[
func.coalesce(*[tbl.c[k] for tbl in output_tbls]).label(k)
for k in batch_transform.transform_keys
],
)
.distinct()
.select_from(output_tbls[0])
.where(and_(*[tbl.c.delete_ts.is_(None) for tbl in output_tbls]))
)
prev_tbl = output_tbls[0]
for tbl in output_tbls[1:]:
ids_cte = ids_cte.outerjoin(
tbl,
and_(
*[
prev_tbl.c[k] == tbl.c[k]
for k in batch_transform.transform_keys
]
),
full=True,
)
return ids_cte.cte(name="ids")
ids_cte = make_ids_cte()
sql = (
select(
*[ids_cte.c[k] for k in batch_transform.transform_keys],
func.max(
app.ds.meta_dbconn.func_greatest(
*[tbl.c["process_ts"] for tbl in output_tbls]
)
).label("process_ts"),
)
.select_from(ids_cte)
.where(and_(*[tbl.c.delete_ts.is_(None) for tbl in output_tbls]))
)
for tbl in output_tbls:
sql = sql.join(
tbl,
and_(
*[ids_cte.c[k] == tbl.c[k] for k in batch_transform.transform_keys]
),
isouter=True,
)
sql = sql.group_by(*[ids_cte.c[k] for k in batch_transform.transform_keys])
insert_stmt = insert(batch_transform.meta_table.sql_table).from_select(
batch_transform.transform_keys
+ ["process_ts", "is_success", "error", "priority"],
select(
*[sql.c[k] for k in batch_transform.transform_keys],
sql.c["process_ts"],
literal(True),
literal(None),
literal(0),
),
)
app.ds.meta_dbconn.con.execute(insert_stmt)
rprint(" [green] ok[/green]")
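# --- Hypothetical invocation sketch (not part of the module) ---
# `app` is assumed to be a datapipe application object exposing `.steps` and
# `.ds.meta_dbconn`, which is what `migrate_transform_tables` relies on above.
def run_v013_migration(app) -> None:
    """Run the v0.13 transform-table migration over every step of `app`."""
    migrate_transform_tables(app, app.steps)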
|
PypiClean
|
/nanolp-1.0g.zip/nanolp-1.0g/bin/nlp.py
|
# Main module (to run from command line)
# Author: Balkansoft.BlogSpot.com
# GNU GPL licensed
import getopt
import sys
import os
from nanolp import lp
################################################################################
class AppError(Exception): pass
class App:
input_file = None # input file name
cfgfile = '' # path to config. file
tb = False # show traceback on exceptions
refs = False # flush references file
def print_usage(self):
def parser_info(cls):
ext = '%s' % ', '.join(cls.ext)
return ' %s - %s: %s'%(cls.__name__, cls.descr or 'Unknown', ext)
formats = [parser_info(p) for p in lp.Parser.parsers]
formats = '\n'.join(formats)
if self.cfgfile:
cfginfo = "Setup from: '%s'" % self.cfgfile
else:
cfginfo = ''
USAGE = '''\
Syntax: -i FILE [-x] [-r] [-h]
-i FILE Input file
-x Detailed stack-trace on errors
-r Flush references file
-h This help
Supported formats:
%s
%s
'''%(formats, cfginfo)
sys.stdout.write(USAGE)
def parse_cmdline(self):
"""Returns True, if app may continue execution, False otherwise
(only help printing is needed)
"""
try:
opts, args = getopt.getopt(sys.argv[1:], 'rxhi:', [])
except getopt.GetoptError as x:
sys.stderr.write('Syntax error! See help (-h)\n')
sys.exit(1)
self.input_file = None
for o, v in opts:
if o == '-h':
return False
elif o == '-i':
self.input_file = v
elif o == '-x':
self.tb = True
elif o == '-r':
self.refs = True
if not self.input_file:
sys.stderr.write('Input file is mandatory. See help (-h)\n')
sys.exit(1)
return True
# XXX first call parse_cmdline() to determine input dir (as possible place
# of cfg. file)
def findcfgfile(self):
"""Return path to cfg file or raise exception, if not found. Priority
of search:
- folder of input file
- current working directory
- script directory
"""
dirs = [os.getcwd(), os.path.dirname(os.path.realpath(__file__))]
if self.input_file:
# input dir has higher priority for search of cfgfile
absp = os.path.abspath(self.input_file)
dirs.insert(0, os.path.dirname(absp))
for indir in dirs:
cfgfile = os.path.join(indir, 'lprc')
if os.path.exists(cfgfile):
return cfgfile
raise AppError('Cannot find configuration file')
def main(self):
sys.stderr.write(lp.__ABOUT__+'\n')
onlyhelp = not self.parse_cmdline()
def _do():
"""real action"""
self.cfgfile = self.findcfgfile()
if onlyhelp:
self.print_usage()
sys.exit(0)
lp.Lp(cfgfile=self.cfgfile)
parserclass = lp.Parser.fileparser(self.input_file)
parser = parserclass.parsefile(self.input_file)
if self.refs:
# if need to flush references, do it
fn = os.path.split(self.input_file)[1]
fn = fn.upper()
refsfile = lp.RefsFile(parser, "%s: references"%fn)
refsfile.save()
if self.tb:
_do()
else:
try:
_do()
except Exception as x:
sys.stderr.write("ERROR '%s': %s\n"%(x.__class__.__name__,str(x)))
################################################################################
if __name__ == "__main__":
app = App()
app.main()
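# Example invocation from a shell (input file name is illustrative):
#   python nlp.py -i example.md -r
# -i selects the input file, -r also flushes the references file, and -x adds a
# detailed stack trace on errors (see print_usage above).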
|
PypiClean
|
/infoworkssdk-3.2.tar.gz/infoworkssdk-3.2/infoworks/sdk/cicd/upload_configurations/cdata_source.py
|
import copy
import json
import requests
import yaml
import configparser
from infoworks.sdk.url_builder import get_source_details_url
from infoworks.sdk.utils import IWUtils
from infoworks.sdk.cicd.upload_configurations.update_configurations import InfoworksDynamicAccessNestedDict
from infoworks.sdk.cicd.upload_configurations.local_configurations import PRE_DEFINED_MAPPINGS
class CdataSource:
def __init__(self):
self.configuration_obj = None
self.source_config_path = None
self.environment_id = None
self.storage_id = None
self.secrets = None
def set_variables(self, environment_id, storage_id, source_config_path, secrets=None, replace_words=""):
self.storage_id = storage_id
self.environment_id = environment_id
self.source_config_path = source_config_path
with open(self.source_config_path, 'r') as file:
json_string = file.read()
if replace_words != "":
for key, value in [item.split("->") for item in replace_words.split(";")]:
json_string = json_string.replace(key, value)
self.configuration_obj = IWUtils.ejson_deserialize(json_string)
self.secrets = secrets
def update_mappings_for_configurations(self, mappings):
config = configparser.ConfigParser()
config.read_dict(mappings)
d = InfoworksDynamicAccessNestedDict(self.configuration_obj)
for section in config.sections():
if section in PRE_DEFINED_MAPPINGS:
continue
print("section:", section)
try:
final = d.setval(section.split("$"), dict(config.items(section)))
except KeyError as e:
pass
self.configuration_obj = d.data
if "configuration$source_configs$data_lake_schema" in config.sections():
self.update_table_schema_and_database("target_schema",dict(config.items("configuration$source_configs$data_lake_schema")))
if "configuration$source_configs$staging_schema_name" in config.sections():
self.update_table_schema_and_database("stage_schema",dict(config.items("configuration$source_configs$staging_schema_name")))
if "configuration$source_configs$target_database_name" in config.sections():
self.update_table_schema_and_database("database",dict(config.items("configuration$source_configs$target_database_name")))
def create_cdata_source(self, src_client_obj):
data = self.configuration_obj["configuration"]["source_configs"]
create_cdata_source_payload = data.copy()
create_cdata_source_payload["data_lake_schema"]= data["data_lake_schema"] if "data_lake_schema" in data else ""
create_cdata_source_payload["environment_id"] = self.environment_id
create_cdata_source_payload["storage_id"] = self.storage_id
create_cdata_source_payload["is_source_ingested"]= True
src_create_response = src_client_obj.create_source(source_config=create_cdata_source_payload)
if src_create_response["result"]["status"].upper() == "SUCCESS":
source_id_created = src_create_response["result"]["source_id"]
return source_id_created
else:
src_client_obj.logger.info('Cannot create source {}'.format(data["name"]))
src_client_obj.logger.info(f"Getting the existing SourceId with name {data['name']} if exists")
filter_condition = IWUtils.ejson_serialize({"name": data['name']})
source_detail_url = get_source_details_url(
src_client_obj.client_config) + f"?filter={{filter_condition}}".format(
filter_condition=filter_condition)
response = requests.get(source_detail_url,
headers={'Authorization': 'Bearer ' + src_client_obj.client_config['bearer_token'],
'Content-Type': 'application/json'}, verify=False)
if response.status_code == 406:
headers = src_client_obj.regenerate_bearer_token_if_needed(
{'Authorization': 'Bearer ' + src_client_obj.client_config['bearer_token'],
'Content-Type': 'application/json'})
response = requests.get(source_detail_url, headers=headers, verify=False)
response = IWUtils.ejson_deserialize(response.content)
if not response.get('result', None):
src_client_obj.logger.error("Failed to make an api call to get source details")
src_client_obj.logger.info(response)
else:
src_client_obj.logger.info(
f"Source Id with the same Source name {data['name']} : {response['result'][0]['id']}")
return response['result'][0]['id']
def configure_cdata_source_connection(self, src_client_obj, source_id, override_config_file=None,
read_passwords_from_secrets=False, env_tag="", secret_type=""):
source_configs = self.configuration_obj["configuration"]["source_configs"]
src_name = str(source_configs["name"])
connection_object = source_configs["connection"]
connection_object["connection_mode"]="jdbc"
if override_config_file is not None:
with open(override_config_file) as file:
information = yaml.load(file, Loader=yaml.FullLoader)
if information["source_details"].get(src_name, None) is not None:
override_keys = information["source_details"].get(src_name).keys()
for key in override_keys:
connection_object[key] = information["source_details"][src_name][key]
response = src_client_obj.configure_source_connection(source_id, connection_object=connection_object)
if response["result"]["status"].upper() != "SUCCESS":
src_client_obj.logger.info(f"Failed to configure the source {source_id} connection")
src_client_obj.logger.info(response["result"])
return "FAILED"
else:
src_client_obj.logger.info(response["result"])
return "SUCCESS"
def test_source_connection(self, src_client_obj, source_id):
response = src_client_obj.source_test_connection_job_poll(source_id, poll_timeout=300,
polling_frequency=15, retries=1)
return response["result"]["status"].upper()
def browse_source_tables(self, src_client_obj, source_id):
filter_tables_properties = self.configuration_obj["filter_tables_properties"]
response = src_client_obj.browse_source_tables(source_id, filter_tables_properties=filter_tables_properties,
poll_timeout=300, polling_frequency=15, retries=1)
return response["result"]["status"].upper()
def add_tables_to_source(self, src_client_obj, source_id):
tables_already_added_in_source = src_client_obj.list_tables_in_source(source_id)["result"]["response"]
tables_list = []
tables = self.configuration_obj["configuration"]["table_configs"]
if len(tables_already_added_in_source) > 0:
for table in tables:
if table["configuration"]["schema_name_at_source"] + "." + table["configuration"][
"name"] not in tables_already_added_in_source:
temp = {"table_name": table["configuration"]["name"],
"schema_name": table["configuration"]["schema_name_at_source"],
"table_type": table["entity_type"].upper(),
"target_table_name": table["configuration"]["configuration"]["target_table_name"],
"target_schema_name": table["configuration"]["configuration"]["target_schema_name"]}
if table["configuration"].get("catalog_name","")!="":
temp["catalog_name"]=table["configuration"]["catalog_name"]
tables_list.append(copy.deepcopy(temp))
src_client_obj.logger.info(
f"Adding table {temp['table_name']} to source {source_id} config payload")
else:
for table in tables:
temp = {"table_name": table["configuration"]["name"],
"schema_name": table["configuration"]["schema_name_at_source"],
"table_type": table["entity_type"].upper(),
"target_table_name": table["configuration"]["configuration"]["target_table_name"],
"target_schema_name": table["configuration"]["configuration"]["target_schema_name"]}
if table["configuration"].get("catalog_name", "") != "":
temp["catalog_name"] = table["configuration"]["catalog_name"]
tables_list.append(copy.deepcopy(temp))
src_client_obj.logger.info(f"Adding table {temp['table_name']} to source {source_id} config payload")
response = src_client_obj.add_tables_to_source(source_id, tables_list)
print(tables_list)
return response["result"]["status"].upper()
def configure_tables_and_tablegroups(self, src_client_obj, source_id, export_configuration_file=None,
export_config_lookup=True, mappings=None, read_passwords_from_secrets=False,
env_tag="", secret_type=""):
if mappings is None:
mappings = {}
iw_mappings = self.configuration_obj["configuration"]["iw_mappings"]
table_group_compute_mappings = mappings.get("table_group_compute_mappings", {})
source_configs = self.configuration_obj["configuration"]["source_configs"]
src_name = str(source_configs["name"])
for item in iw_mappings:
if item.get("entity_type", "") == "environment_compute_template":
item["recommendation"]["compute_name"] = table_group_compute_mappings.get(
item["recommendation"]["compute_name"], item["recommendation"]["compute_name"])
self.configuration_obj["configuration"]["iw_mappings"] = iw_mappings
# Update the service account json file mappings if any
if export_config_lookup and (export_configuration_file is not None or read_passwords_from_secrets):
for table in self.configuration_obj["configuration"]["table_configs"]:
# Check if there are any export configurations and passwords to replace
if table.get("configuration", {}).get("export_configuration", None) is not None:
export_configs = table.get("configuration", {}).get("export_configuration")
target_type = export_configs.get("target_type", "").upper()
table_name = table["configuration"]["name"].upper()
override_keys = []
if export_configuration_file is not None:
with open(export_configuration_file) as file:
information = yaml.load(file, Loader=yaml.FullLoader)
if information["src_export_details"].get(src_name + "_" + table_name, None) is not None:
info_key = src_name + "_" + table_name
override_keys = information["src_export_details"].get(src_name + "_" + table_name,
{}).keys()
else:
info_key = src_name
override_keys = information["src_export_details"].get(src_name, {}).keys()
for key in override_keys:
table["configuration"]["export_configuration"]["connection"][key] = \
information["src_export_details"][info_key][key]
if target_type.upper() in ["SNOWFLAKE", "POSTGRES"]:
pass
elif target_type.upper() == "BIGQUERY":
if "server_path" not in override_keys:
server_path = table["configuration"]["export_configuration"].get("connection", {}).get(
"server_path", "")
if "gcp_details" in mappings:
server_path = mappings["gcp_details"].get("service_json_path")
if "service_json_mappings" in mappings:
server_path = mappings["service_json_mappings"].get(
server_path.split("/")[-1],
server_path)
table["configuration"]["export_configuration"]["connection"][
"server_path"] = server_path if server_path != "" else table["configuration"][
"export_configuration"].get("connection", {}).get(
"server_path", "")
table["configuration"]["export_configuration"]["connection"][
"upload_option"] = "serverLocation"
response = src_client_obj.configure_tables_and_tablegroups(source_id, configuration_obj=self.configuration_obj[
"configuration"])
if response["result"]["status"].upper() != "SUCCESS":
src_client_obj.logger.error("Failed to import the source {} (source config path : {})"
.format(source_id, self.source_config_path))
src_client_obj.logger.error(response.get("message", "") + "(source config path : {})"
.format(self.source_config_path))
return "FAILED"
else:
src_client_obj.logger.info(f"Successfully imported source configurations to {source_id}")
return "SUCCESS"
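# --- Hypothetical end-to-end sketch (not part of the module) ---
# `src_client` is assumed to be an authenticated Infoworks source client from
# this SDK; the environment/storage ids and config path are illustrative.
def run_cdata_onboarding(src_client, environment_id, storage_id, config_path):
    cdata = CdataSource()
    cdata.set_variables(environment_id, storage_id, config_path)
    source_id = cdata.create_cdata_source(src_client)
    cdata.configure_cdata_source_connection(src_client, source_id)
    cdata.test_source_connection(src_client, source_id)
    cdata.browse_source_tables(src_client, source_id)
    cdata.add_tables_to_source(src_client, source_id)
    return cdata.configure_tables_and_tablegroups(src_client, source_id)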
|
PypiClean
|
/tensorflow_ascend-1.15.0-cp37-cp37m-manylinux2014_aarch64.whl/tensorflow_core/contrib/input_pipeline/python/ops/input_pipeline_ops.py
|
"""Python wrapper for input_pipeline_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
from tensorflow.contrib.input_pipeline.ops import gen_input_pipeline_ops
from tensorflow.contrib.util import loader
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import resource_loader
_input_pipeline_ops = loader.load_op_library(
resource_loader.get_path_to_datafile("_input_pipeline_ops.so"))
def obtain_next(string_list_tensor, counter):
"""Basic wrapper for the ObtainNextOp.
Args:
string_list_tensor: A tensor that is a list of strings
counter: an int64 ref tensor to keep track of which element is returned.
Returns:
An op that produces the element at counter + 1 in the list, round
robin style.
"""
return gen_input_pipeline_ops.obtain_next(string_list_tensor, counter)
def _maybe_randomize_list(string_list, shuffle):
if shuffle:
random.shuffle(string_list)
return string_list
def _create_list(string_list, shuffle, seed, num_epochs):
if shuffle and seed:
random.seed(seed)
expanded_list = _maybe_randomize_list(string_list, shuffle)[:]
if num_epochs:
for _ in range(num_epochs - 1):
expanded_list.extend(_maybe_randomize_list(string_list, shuffle))
return expanded_list
def seek_next(string_list, shuffle=False, seed=None, num_epochs=None):
"""Returns an op that seeks the next element in a list of strings.
Seeking happens in a round robin fashion. This op creates a variable called
obtain_next_counter that is initialized to -1 and is used to keep track of
which element in the list was returned, and a variable
obtain_next_expanded_list to hold the list. If num_epochs is not None, then we
limit the number of times we go around the string_list before OutOfRangeError
is thrown. It creates a variable to keep track of this.
Args:
string_list: A list of strings.
shuffle: If true, we shuffle the string_list differently for each epoch.
seed: Seed used for shuffling.
num_epochs: Returns OutOfRangeError once string_list has been repeated
num_epoch times. If unspecified then keeps on looping.
Returns:
An op that produces the next element in the provided list.
"""
expanded_list = _create_list(string_list, shuffle, seed, num_epochs)
with variable_scope.variable_scope("obtain_next"):
counter = variable_scope.get_variable(
name="obtain_next_counter",
initializer=constant_op.constant(
-1, dtype=dtypes.int64),
dtype=dtypes.int64,
trainable=False)
with ops.colocate_with(counter):
string_tensor = variable_scope.get_variable(
name="obtain_next_expanded_list",
initializer=constant_op.constant(expanded_list),
dtype=dtypes.string,
trainable=False)
if num_epochs:
filename_counter = variable_scope.get_variable(
name="obtain_next_filename_counter",
initializer=constant_op.constant(
0, dtype=dtypes.int64),
dtype=dtypes.int64,
trainable=False)
c = filename_counter.count_up_to(len(expanded_list))
with ops.control_dependencies([c]):
return obtain_next(string_tensor, counter)
else:
return obtain_next(string_tensor, counter)
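# --- Hypothetical usage sketch (not part of the module) ---
# File names are illustrative; in TF1 the returned op is evaluated inside a
# session after initializing the variables that `seek_next` creates.
def _seek_next_example():
    filenames = ["part-00000.tfrecord", "part-00001.tfrecord"]
    # Round-robin over `filenames`, reshuffled each epoch, for two epochs.
    return seek_next(filenames, shuffle=True, seed=42, num_epochs=2)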
|
PypiClean
|
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/storagecache/v20210301/outputs.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'BlobNfsTargetResponse',
'CacheActiveDirectorySettingsResponse',
'CacheActiveDirectorySettingsResponseCredentials',
'CacheDirectorySettingsResponse',
'CacheEncryptionSettingsResponse',
'CacheHealthResponse',
'CacheIdentityResponse',
'CacheNetworkSettingsResponse',
'CacheResponseSku',
'CacheSecuritySettingsResponse',
'CacheUpgradeStatusResponse',
'CacheUsernameDownloadSettingsResponse',
'CacheUsernameDownloadSettingsResponseCredentials',
'ClfsTargetResponse',
'ConditionResponse',
'KeyVaultKeyReferenceResponse',
'KeyVaultKeyReferenceResponseSourceVault',
'NamespaceJunctionResponse',
'Nfs3TargetResponse',
'NfsAccessPolicyResponse',
'NfsAccessRuleResponse',
'SystemDataResponse',
'UnknownTargetResponse',
]
@pulumi.output_type
class BlobNfsTargetResponse(dict):
"""
Properties pertaining to the BlobNfsTarget.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "usageModel":
suggest = "usage_model"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in BlobNfsTargetResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
BlobNfsTargetResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
BlobNfsTargetResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
target: Optional[str] = None,
usage_model: Optional[str] = None):
"""
Properties pertaining to the BlobNfsTarget.
:param str target: Resource ID of the storage container.
:param str usage_model: Identifies the StorageCache usage model to be used for this storage target.
"""
if target is not None:
pulumi.set(__self__, "target", target)
if usage_model is not None:
pulumi.set(__self__, "usage_model", usage_model)
@property
@pulumi.getter
def target(self) -> Optional[str]:
"""
Resource ID of the storage container.
"""
return pulumi.get(self, "target")
@property
@pulumi.getter(name="usageModel")
def usage_model(self) -> Optional[str]:
"""
Identifies the StorageCache usage model to be used for this storage target.
"""
return pulumi.get(self, "usage_model")
@pulumi.output_type
class CacheActiveDirectorySettingsResponse(dict):
"""
Active Directory settings used to join a cache to a domain.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cacheNetBiosName":
suggest = "cache_net_bios_name"
elif key == "domainJoined":
suggest = "domain_joined"
elif key == "domainName":
suggest = "domain_name"
elif key == "domainNetBiosName":
suggest = "domain_net_bios_name"
elif key == "primaryDnsIpAddress":
suggest = "primary_dns_ip_address"
elif key == "secondaryDnsIpAddress":
suggest = "secondary_dns_ip_address"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheActiveDirectorySettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheActiveDirectorySettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheActiveDirectorySettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cache_net_bios_name: str,
domain_joined: str,
domain_name: str,
domain_net_bios_name: str,
primary_dns_ip_address: str,
credentials: Optional['outputs.CacheActiveDirectorySettingsResponseCredentials'] = None,
secondary_dns_ip_address: Optional[str] = None):
"""
Active Directory settings used to join a cache to a domain.
:param str cache_net_bios_name: The NetBIOS name to assign to the HPC Cache when it joins the Active Directory domain as a server. Length must 1-15 characters from the class [-0-9a-zA-Z].
:param str domain_joined: True if the HPC Cache is joined to the Active Directory domain.
:param str domain_name: The fully qualified domain name of the Active Directory domain controller.
:param str domain_net_bios_name: The Active Directory domain's NetBIOS name.
:param str primary_dns_ip_address: Primary DNS IP address used to resolve the Active Directory domain controller's fully qualified domain name.
:param 'CacheActiveDirectorySettingsResponseCredentials' credentials: Active Directory admin credentials used to join the HPC Cache to a domain.
:param str secondary_dns_ip_address: Secondary DNS IP address used to resolve the Active Directory domain controller's fully qualified domain name.
"""
pulumi.set(__self__, "cache_net_bios_name", cache_net_bios_name)
pulumi.set(__self__, "domain_joined", domain_joined)
pulumi.set(__self__, "domain_name", domain_name)
pulumi.set(__self__, "domain_net_bios_name", domain_net_bios_name)
pulumi.set(__self__, "primary_dns_ip_address", primary_dns_ip_address)
if credentials is not None:
pulumi.set(__self__, "credentials", credentials)
if secondary_dns_ip_address is not None:
pulumi.set(__self__, "secondary_dns_ip_address", secondary_dns_ip_address)
@property
@pulumi.getter(name="cacheNetBiosName")
def cache_net_bios_name(self) -> str:
"""
        The NetBIOS name to assign to the HPC Cache when it joins the Active Directory domain as a server. Length must be 1-15 characters from the class [-0-9a-zA-Z].
"""
return pulumi.get(self, "cache_net_bios_name")
@property
@pulumi.getter(name="domainJoined")
def domain_joined(self) -> str:
"""
True if the HPC Cache is joined to the Active Directory domain.
"""
return pulumi.get(self, "domain_joined")
@property
@pulumi.getter(name="domainName")
def domain_name(self) -> str:
"""
The fully qualified domain name of the Active Directory domain controller.
"""
return pulumi.get(self, "domain_name")
@property
@pulumi.getter(name="domainNetBiosName")
def domain_net_bios_name(self) -> str:
"""
The Active Directory domain's NetBIOS name.
"""
return pulumi.get(self, "domain_net_bios_name")
@property
@pulumi.getter(name="primaryDnsIpAddress")
def primary_dns_ip_address(self) -> str:
"""
Primary DNS IP address used to resolve the Active Directory domain controller's fully qualified domain name.
"""
return pulumi.get(self, "primary_dns_ip_address")
@property
@pulumi.getter
def credentials(self) -> Optional['outputs.CacheActiveDirectorySettingsResponseCredentials']:
"""
Active Directory admin credentials used to join the HPC Cache to a domain.
"""
return pulumi.get(self, "credentials")
@property
@pulumi.getter(name="secondaryDnsIpAddress")
def secondary_dns_ip_address(self) -> Optional[str]:
"""
Secondary DNS IP address used to resolve the Active Directory domain controller's fully qualified domain name.
"""
return pulumi.get(self, "secondary_dns_ip_address")
@pulumi.output_type
class CacheActiveDirectorySettingsResponseCredentials(dict):
"""
Active Directory admin credentials used to join the HPC Cache to a domain.
"""
def __init__(__self__, *,
password: str,
username: str):
"""
Active Directory admin credentials used to join the HPC Cache to a domain.
:param str password: Plain text password of the Active Directory domain administrator. This value is stored encrypted and not returned on response.
:param str username: Username of the Active Directory domain administrator. This value is stored encrypted and not returned on response.
"""
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def password(self) -> str:
"""
Plain text password of the Active Directory domain administrator. This value is stored encrypted and not returned on response.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def username(self) -> str:
"""
Username of the Active Directory domain administrator. This value is stored encrypted and not returned on response.
"""
return pulumi.get(self, "username")
@pulumi.output_type
class CacheDirectorySettingsResponse(dict):
"""
Cache Directory Services settings.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "activeDirectory":
suggest = "active_directory"
elif key == "usernameDownload":
suggest = "username_download"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheDirectorySettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheDirectorySettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheDirectorySettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
active_directory: Optional['outputs.CacheActiveDirectorySettingsResponse'] = None,
username_download: Optional['outputs.CacheUsernameDownloadSettingsResponse'] = None):
"""
Cache Directory Services settings.
:param 'CacheActiveDirectorySettingsResponse' active_directory: Specifies settings for joining the HPC Cache to an Active Directory domain.
:param 'CacheUsernameDownloadSettingsResponse' username_download: Specifies settings for Extended Groups. Extended Groups allows users to be members of more than 16 groups.
"""
if active_directory is not None:
pulumi.set(__self__, "active_directory", active_directory)
if username_download is not None:
pulumi.set(__self__, "username_download", username_download)
@property
@pulumi.getter(name="activeDirectory")
def active_directory(self) -> Optional['outputs.CacheActiveDirectorySettingsResponse']:
"""
Specifies settings for joining the HPC Cache to an Active Directory domain.
"""
return pulumi.get(self, "active_directory")
@property
@pulumi.getter(name="usernameDownload")
def username_download(self) -> Optional['outputs.CacheUsernameDownloadSettingsResponse']:
"""
Specifies settings for Extended Groups. Extended Groups allows users to be members of more than 16 groups.
"""
return pulumi.get(self, "username_download")
@pulumi.output_type
class CacheEncryptionSettingsResponse(dict):
"""
Cache encryption settings.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "keyEncryptionKey":
suggest = "key_encryption_key"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheEncryptionSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheEncryptionSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheEncryptionSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
key_encryption_key: Optional['outputs.KeyVaultKeyReferenceResponse'] = None):
"""
Cache encryption settings.
:param 'KeyVaultKeyReferenceResponse' key_encryption_key: Specifies the location of the key encryption key in Key Vault.
"""
if key_encryption_key is not None:
pulumi.set(__self__, "key_encryption_key", key_encryption_key)
@property
@pulumi.getter(name="keyEncryptionKey")
def key_encryption_key(self) -> Optional['outputs.KeyVaultKeyReferenceResponse']:
"""
Specifies the location of the key encryption key in Key Vault.
"""
return pulumi.get(self, "key_encryption_key")
@pulumi.output_type
class CacheHealthResponse(dict):
"""
An indication of Cache health. Gives more information about health than just that related to provisioning.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "statusDescription":
suggest = "status_description"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheHealthResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheHealthResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheHealthResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
conditions: Sequence['outputs.ConditionResponse'],
state: Optional[str] = None,
status_description: Optional[str] = None):
"""
An indication of Cache health. Gives more information about health than just that related to provisioning.
:param Sequence['ConditionResponse'] conditions: Outstanding conditions that need to be investigated and resolved.
:param str state: List of Cache health states.
:param str status_description: Describes explanation of state.
"""
pulumi.set(__self__, "conditions", conditions)
if state is not None:
pulumi.set(__self__, "state", state)
if status_description is not None:
pulumi.set(__self__, "status_description", status_description)
@property
@pulumi.getter
def conditions(self) -> Sequence['outputs.ConditionResponse']:
"""
Outstanding conditions that need to be investigated and resolved.
"""
return pulumi.get(self, "conditions")
@property
@pulumi.getter
def state(self) -> Optional[str]:
"""
List of Cache health states.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="statusDescription")
def status_description(self) -> Optional[str]:
"""
Describes explanation of state.
"""
return pulumi.get(self, "status_description")
@pulumi.output_type
class CacheIdentityResponse(dict):
"""
Cache identity properties.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "principalId":
suggest = "principal_id"
elif key == "tenantId":
suggest = "tenant_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheIdentityResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheIdentityResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheIdentityResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
principal_id: str,
tenant_id: str,
type: Optional[str] = None):
"""
Cache identity properties.
:param str principal_id: The principal id of the cache.
:param str tenant_id: The tenant id associated with the cache.
:param str type: The type of identity used for the cache
"""
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "tenant_id", tenant_id)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of the cache.
"""
return pulumi.get(self, "principal_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
The tenant id associated with the cache.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The type of identity used for the cache
"""
return pulumi.get(self, "type")
@pulumi.output_type
class CacheNetworkSettingsResponse(dict):
"""
Cache network settings.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "utilityAddresses":
suggest = "utility_addresses"
elif key == "dnsSearchDomain":
suggest = "dns_search_domain"
elif key == "dnsServers":
suggest = "dns_servers"
elif key == "ntpServer":
suggest = "ntp_server"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheNetworkSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheNetworkSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheNetworkSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
utility_addresses: Sequence[str],
dns_search_domain: Optional[str] = None,
dns_servers: Optional[Sequence[str]] = None,
mtu: Optional[int] = None,
ntp_server: Optional[str] = None):
"""
Cache network settings.
:param Sequence[str] utility_addresses: Array of additional IP addresses used by this Cache.
:param str dns_search_domain: DNS search domain
:param Sequence[str] dns_servers: DNS servers for the cache to use. It will be set from the network configuration if no value is provided.
:param int mtu: The IPv4 maximum transmission unit configured for the subnet.
:param str ntp_server: NTP server IP Address or FQDN for the cache to use. The default is time.windows.com.
"""
pulumi.set(__self__, "utility_addresses", utility_addresses)
if dns_search_domain is not None:
pulumi.set(__self__, "dns_search_domain", dns_search_domain)
if dns_servers is not None:
pulumi.set(__self__, "dns_servers", dns_servers)
if mtu is None:
mtu = 1500
if mtu is not None:
pulumi.set(__self__, "mtu", mtu)
if ntp_server is None:
ntp_server = 'time.windows.com'
if ntp_server is not None:
pulumi.set(__self__, "ntp_server", ntp_server)
@property
@pulumi.getter(name="utilityAddresses")
def utility_addresses(self) -> Sequence[str]:
"""
Array of additional IP addresses used by this Cache.
"""
return pulumi.get(self, "utility_addresses")
@property
@pulumi.getter(name="dnsSearchDomain")
def dns_search_domain(self) -> Optional[str]:
"""
DNS search domain
"""
return pulumi.get(self, "dns_search_domain")
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> Optional[Sequence[str]]:
"""
DNS servers for the cache to use. It will be set from the network configuration if no value is provided.
"""
return pulumi.get(self, "dns_servers")
@property
@pulumi.getter
def mtu(self) -> Optional[int]:
"""
The IPv4 maximum transmission unit configured for the subnet.
"""
return pulumi.get(self, "mtu")
@property
@pulumi.getter(name="ntpServer")
def ntp_server(self) -> Optional[str]:
"""
NTP server IP Address or FQDN for the cache to use. The default is time.windows.com.
"""
return pulumi.get(self, "ntp_server")
@pulumi.output_type
class CacheResponseSku(dict):
"""
SKU for the Cache.
"""
def __init__(__self__, *,
name: Optional[str] = None):
"""
SKU for the Cache.
:param str name: SKU name for this Cache.
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
SKU name for this Cache.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class CacheSecuritySettingsResponse(dict):
"""
Cache security settings.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "accessPolicies":
suggest = "access_policies"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheSecuritySettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheSecuritySettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheSecuritySettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
access_policies: Optional[Sequence['outputs.NfsAccessPolicyResponse']] = None):
"""
Cache security settings.
:param Sequence['NfsAccessPolicyResponse'] access_policies: NFS access policies defined for this cache.
"""
if access_policies is not None:
pulumi.set(__self__, "access_policies", access_policies)
@property
@pulumi.getter(name="accessPolicies")
def access_policies(self) -> Optional[Sequence['outputs.NfsAccessPolicyResponse']]:
"""
NFS access policies defined for this cache.
"""
return pulumi.get(self, "access_policies")
@pulumi.output_type
class CacheUpgradeStatusResponse(dict):
"""
Properties describing the software upgrade state of the Cache.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "currentFirmwareVersion":
suggest = "current_firmware_version"
elif key == "firmwareUpdateDeadline":
suggest = "firmware_update_deadline"
elif key == "firmwareUpdateStatus":
suggest = "firmware_update_status"
elif key == "lastFirmwareUpdate":
suggest = "last_firmware_update"
elif key == "pendingFirmwareVersion":
suggest = "pending_firmware_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheUpgradeStatusResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheUpgradeStatusResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheUpgradeStatusResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
current_firmware_version: str,
firmware_update_deadline: str,
firmware_update_status: str,
last_firmware_update: str,
pending_firmware_version: str):
"""
Properties describing the software upgrade state of the Cache.
:param str current_firmware_version: Version string of the firmware currently installed on this Cache.
:param str firmware_update_deadline: Time at which the pending firmware update will automatically be installed on the Cache.
:param str firmware_update_status: True if there is a firmware update ready to install on this Cache. The firmware will automatically be installed after firmwareUpdateDeadline if not triggered earlier via the upgrade operation.
:param str last_firmware_update: Time of the last successful firmware update.
:param str pending_firmware_version: When firmwareUpdateAvailable is true, this field holds the version string for the update.
"""
pulumi.set(__self__, "current_firmware_version", current_firmware_version)
pulumi.set(__self__, "firmware_update_deadline", firmware_update_deadline)
pulumi.set(__self__, "firmware_update_status", firmware_update_status)
pulumi.set(__self__, "last_firmware_update", last_firmware_update)
pulumi.set(__self__, "pending_firmware_version", pending_firmware_version)
@property
@pulumi.getter(name="currentFirmwareVersion")
def current_firmware_version(self) -> str:
"""
Version string of the firmware currently installed on this Cache.
"""
return pulumi.get(self, "current_firmware_version")
@property
@pulumi.getter(name="firmwareUpdateDeadline")
def firmware_update_deadline(self) -> str:
"""
Time at which the pending firmware update will automatically be installed on the Cache.
"""
return pulumi.get(self, "firmware_update_deadline")
@property
@pulumi.getter(name="firmwareUpdateStatus")
def firmware_update_status(self) -> str:
"""
True if there is a firmware update ready to install on this Cache. The firmware will automatically be installed after firmwareUpdateDeadline if not triggered earlier via the upgrade operation.
"""
return pulumi.get(self, "firmware_update_status")
@property
@pulumi.getter(name="lastFirmwareUpdate")
def last_firmware_update(self) -> str:
"""
Time of the last successful firmware update.
"""
return pulumi.get(self, "last_firmware_update")
@property
@pulumi.getter(name="pendingFirmwareVersion")
def pending_firmware_version(self) -> str:
"""
When firmwareUpdateAvailable is true, this field holds the version string for the update.
"""
return pulumi.get(self, "pending_firmware_version")
@pulumi.output_type
class CacheUsernameDownloadSettingsResponse(dict):
"""
Settings for Extended Groups username and group download.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "usernameDownloaded":
suggest = "username_downloaded"
elif key == "autoDownloadCertificate":
suggest = "auto_download_certificate"
elif key == "caCertificateURI":
suggest = "ca_certificate_uri"
elif key == "encryptLdapConnection":
suggest = "encrypt_ldap_connection"
elif key == "extendedGroups":
suggest = "extended_groups"
elif key == "groupFileURI":
suggest = "group_file_uri"
elif key == "ldapBaseDN":
suggest = "ldap_base_dn"
elif key == "ldapServer":
suggest = "ldap_server"
elif key == "requireValidCertificate":
suggest = "require_valid_certificate"
elif key == "userFileURI":
suggest = "user_file_uri"
elif key == "usernameSource":
suggest = "username_source"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheUsernameDownloadSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheUsernameDownloadSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheUsernameDownloadSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
username_downloaded: str,
auto_download_certificate: Optional[bool] = None,
ca_certificate_uri: Optional[str] = None,
credentials: Optional['outputs.CacheUsernameDownloadSettingsResponseCredentials'] = None,
encrypt_ldap_connection: Optional[bool] = None,
extended_groups: Optional[bool] = None,
group_file_uri: Optional[str] = None,
ldap_base_dn: Optional[str] = None,
ldap_server: Optional[str] = None,
require_valid_certificate: Optional[bool] = None,
user_file_uri: Optional[str] = None,
username_source: Optional[str] = None):
"""
Settings for Extended Groups username and group download.
:param str username_downloaded: Indicates whether or not the HPC Cache has performed the username download successfully.
:param bool auto_download_certificate: Determines if the certificate should be automatically downloaded. This applies to 'caCertificateURI' only if 'requireValidCertificate' is true.
:param str ca_certificate_uri: The URI of the CA certificate to validate the LDAP secure connection. This field must be populated when 'requireValidCertificate' is set to true.
:param 'CacheUsernameDownloadSettingsResponseCredentials' credentials: When present, these are the credentials for the secure LDAP connection.
:param bool encrypt_ldap_connection: Whether or not the LDAP connection should be encrypted.
:param bool extended_groups: Whether or not Extended Groups is enabled.
:param str group_file_uri: The URI of the file containing group information (in /etc/group file format). This field must be populated when 'usernameSource' is set to 'File'.
:param str ldap_base_dn: The base distinguished name for the LDAP domain.
:param str ldap_server: The fully qualified domain name or IP address of the LDAP server to use.
:param bool require_valid_certificate: Determines if the certificates must be validated by a certificate authority. When true, caCertificateURI must be provided.
:param str user_file_uri: The URI of the file containing user information (in /etc/passwd file format). This field must be populated when 'usernameSource' is set to 'File'.
:param str username_source: This setting determines how the cache gets username and group names for clients.
"""
pulumi.set(__self__, "username_downloaded", username_downloaded)
if auto_download_certificate is not None:
pulumi.set(__self__, "auto_download_certificate", auto_download_certificate)
if ca_certificate_uri is not None:
pulumi.set(__self__, "ca_certificate_uri", ca_certificate_uri)
if credentials is not None:
pulumi.set(__self__, "credentials", credentials)
if encrypt_ldap_connection is not None:
pulumi.set(__self__, "encrypt_ldap_connection", encrypt_ldap_connection)
if extended_groups is not None:
pulumi.set(__self__, "extended_groups", extended_groups)
if group_file_uri is not None:
pulumi.set(__self__, "group_file_uri", group_file_uri)
if ldap_base_dn is not None:
pulumi.set(__self__, "ldap_base_dn", ldap_base_dn)
if ldap_server is not None:
pulumi.set(__self__, "ldap_server", ldap_server)
if require_valid_certificate is not None:
pulumi.set(__self__, "require_valid_certificate", require_valid_certificate)
if user_file_uri is not None:
pulumi.set(__self__, "user_file_uri", user_file_uri)
if username_source is None:
username_source = 'None'
if username_source is not None:
pulumi.set(__self__, "username_source", username_source)
@property
@pulumi.getter(name="usernameDownloaded")
def username_downloaded(self) -> str:
"""
Indicates whether or not the HPC Cache has performed the username download successfully.
"""
return pulumi.get(self, "username_downloaded")
@property
@pulumi.getter(name="autoDownloadCertificate")
def auto_download_certificate(self) -> Optional[bool]:
"""
Determines if the certificate should be automatically downloaded. This applies to 'caCertificateURI' only if 'requireValidCertificate' is true.
"""
return pulumi.get(self, "auto_download_certificate")
@property
@pulumi.getter(name="caCertificateURI")
def ca_certificate_uri(self) -> Optional[str]:
"""
The URI of the CA certificate to validate the LDAP secure connection. This field must be populated when 'requireValidCertificate' is set to true.
"""
return pulumi.get(self, "ca_certificate_uri")
@property
@pulumi.getter
def credentials(self) -> Optional['outputs.CacheUsernameDownloadSettingsResponseCredentials']:
"""
When present, these are the credentials for the secure LDAP connection.
"""
return pulumi.get(self, "credentials")
@property
@pulumi.getter(name="encryptLdapConnection")
def encrypt_ldap_connection(self) -> Optional[bool]:
"""
Whether or not the LDAP connection should be encrypted.
"""
return pulumi.get(self, "encrypt_ldap_connection")
@property
@pulumi.getter(name="extendedGroups")
def extended_groups(self) -> Optional[bool]:
"""
Whether or not Extended Groups is enabled.
"""
return pulumi.get(self, "extended_groups")
@property
@pulumi.getter(name="groupFileURI")
def group_file_uri(self) -> Optional[str]:
"""
The URI of the file containing group information (in /etc/group file format). This field must be populated when 'usernameSource' is set to 'File'.
"""
return pulumi.get(self, "group_file_uri")
@property
@pulumi.getter(name="ldapBaseDN")
def ldap_base_dn(self) -> Optional[str]:
"""
The base distinguished name for the LDAP domain.
"""
return pulumi.get(self, "ldap_base_dn")
@property
@pulumi.getter(name="ldapServer")
def ldap_server(self) -> Optional[str]:
"""
The fully qualified domain name or IP address of the LDAP server to use.
"""
return pulumi.get(self, "ldap_server")
@property
@pulumi.getter(name="requireValidCertificate")
def require_valid_certificate(self) -> Optional[bool]:
"""
Determines if the certificates must be validated by a certificate authority. When true, caCertificateURI must be provided.
"""
return pulumi.get(self, "require_valid_certificate")
@property
@pulumi.getter(name="userFileURI")
def user_file_uri(self) -> Optional[str]:
"""
The URI of the file containing user information (in /etc/passwd file format). This field must be populated when 'usernameSource' is set to 'File'.
"""
return pulumi.get(self, "user_file_uri")
@property
@pulumi.getter(name="usernameSource")
def username_source(self) -> Optional[str]:
"""
This setting determines how the cache gets username and group names for clients.
"""
return pulumi.get(self, "username_source")
@pulumi.output_type
class CacheUsernameDownloadSettingsResponseCredentials(dict):
"""
When present, these are the credentials for the secure LDAP connection.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "bindDn":
suggest = "bind_dn"
elif key == "bindPassword":
suggest = "bind_password"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CacheUsernameDownloadSettingsResponseCredentials. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CacheUsernameDownloadSettingsResponseCredentials.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CacheUsernameDownloadSettingsResponseCredentials.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
bind_dn: Optional[str] = None,
bind_password: Optional[str] = None):
"""
When present, these are the credentials for the secure LDAP connection.
:param str bind_dn: The Bind Distinguished Name identity to be used in the secure LDAP connection. This value is stored encrypted and not returned on response.
:param str bind_password: The Bind password to be used in the secure LDAP connection. This value is stored encrypted and not returned on response.
"""
if bind_dn is not None:
pulumi.set(__self__, "bind_dn", bind_dn)
if bind_password is not None:
pulumi.set(__self__, "bind_password", bind_password)
@property
@pulumi.getter(name="bindDn")
def bind_dn(self) -> Optional[str]:
"""
The Bind Distinguished Name identity to be used in the secure LDAP connection. This value is stored encrypted and not returned on response.
"""
return pulumi.get(self, "bind_dn")
@property
@pulumi.getter(name="bindPassword")
def bind_password(self) -> Optional[str]:
"""
The Bind password to be used in the secure LDAP connection. This value is stored encrypted and not returned on response.
"""
return pulumi.get(self, "bind_password")
@pulumi.output_type
class ClfsTargetResponse(dict):
"""
Properties pertaining to the ClfsTarget
"""
def __init__(__self__, *,
target: Optional[str] = None):
"""
Properties pertaining to the ClfsTarget
:param str target: Resource ID of storage container.
"""
if target is not None:
pulumi.set(__self__, "target", target)
@property
@pulumi.getter
def target(self) -> Optional[str]:
"""
Resource ID of storage container.
"""
return pulumi.get(self, "target")
@pulumi.output_type
class ConditionResponse(dict):
"""
Outstanding conditions that will need to be resolved.
"""
def __init__(__self__, *,
message: str,
timestamp: str):
"""
Outstanding conditions that will need to be resolved.
:param str message: The issue requiring attention.
:param str timestamp: The time when the condition was raised.
"""
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "timestamp", timestamp)
@property
@pulumi.getter
def message(self) -> str:
"""
The issue requiring attention.
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def timestamp(self) -> str:
"""
The time when the condition was raised.
"""
return pulumi.get(self, "timestamp")
@pulumi.output_type
class KeyVaultKeyReferenceResponse(dict):
"""
Describes a reference to Key Vault Key.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "keyUrl":
suggest = "key_url"
elif key == "sourceVault":
suggest = "source_vault"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in KeyVaultKeyReferenceResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
KeyVaultKeyReferenceResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
KeyVaultKeyReferenceResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
key_url: str,
source_vault: 'outputs.KeyVaultKeyReferenceResponseSourceVault'):
"""
Describes a reference to Key Vault Key.
:param str key_url: The URL referencing a key encryption key in Key Vault.
:param 'KeyVaultKeyReferenceResponseSourceVault' source_vault: Describes a resource Id to source Key Vault.
"""
pulumi.set(__self__, "key_url", key_url)
pulumi.set(__self__, "source_vault", source_vault)
@property
@pulumi.getter(name="keyUrl")
def key_url(self) -> str:
"""
The URL referencing a key encryption key in Key Vault.
"""
return pulumi.get(self, "key_url")
@property
@pulumi.getter(name="sourceVault")
def source_vault(self) -> 'outputs.KeyVaultKeyReferenceResponseSourceVault':
"""
Describes a resource Id to source Key Vault.
"""
return pulumi.get(self, "source_vault")
@pulumi.output_type
class KeyVaultKeyReferenceResponseSourceVault(dict):
"""
Describes a resource Id to source Key Vault.
"""
def __init__(__self__, *,
id: Optional[str] = None):
"""
Describes a resource Id to source Key Vault.
:param str id: Resource Id.
"""
if id is not None:
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@pulumi.output_type
class NamespaceJunctionResponse(dict):
"""
A namespace junction.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "namespacePath":
suggest = "namespace_path"
elif key == "nfsAccessPolicy":
suggest = "nfs_access_policy"
elif key == "nfsExport":
suggest = "nfs_export"
elif key == "targetPath":
suggest = "target_path"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NamespaceJunctionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NamespaceJunctionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NamespaceJunctionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
namespace_path: Optional[str] = None,
nfs_access_policy: Optional[str] = None,
nfs_export: Optional[str] = None,
target_path: Optional[str] = None):
"""
A namespace junction.
:param str namespace_path: Namespace path on a Cache for a Storage Target.
:param str nfs_access_policy: Name of the access policy applied to this junction.
:param str nfs_export: NFS export where targetPath exists.
:param str target_path: Path in Storage Target to which namespacePath points.
"""
if namespace_path is not None:
pulumi.set(__self__, "namespace_path", namespace_path)
if nfs_access_policy is None:
nfs_access_policy = 'default'
if nfs_access_policy is not None:
pulumi.set(__self__, "nfs_access_policy", nfs_access_policy)
if nfs_export is not None:
pulumi.set(__self__, "nfs_export", nfs_export)
if target_path is not None:
pulumi.set(__self__, "target_path", target_path)
@property
@pulumi.getter(name="namespacePath")
def namespace_path(self) -> Optional[str]:
"""
Namespace path on a Cache for a Storage Target.
"""
return pulumi.get(self, "namespace_path")
@property
@pulumi.getter(name="nfsAccessPolicy")
def nfs_access_policy(self) -> Optional[str]:
"""
Name of the access policy applied to this junction.
"""
return pulumi.get(self, "nfs_access_policy")
@property
@pulumi.getter(name="nfsExport")
def nfs_export(self) -> Optional[str]:
"""
NFS export where targetPath exists.
"""
return pulumi.get(self, "nfs_export")
@property
@pulumi.getter(name="targetPath")
def target_path(self) -> Optional[str]:
"""
Path in Storage Target to which namespacePath points.
"""
return pulumi.get(self, "target_path")
@pulumi.output_type
class Nfs3TargetResponse(dict):
"""
Properties pertaining to the Nfs3Target
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "usageModel":
suggest = "usage_model"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in Nfs3TargetResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
Nfs3TargetResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
Nfs3TargetResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
target: Optional[str] = None,
usage_model: Optional[str] = None):
"""
Properties pertaining to the Nfs3Target
:param str target: IP address or host name of an NFSv3 host (e.g., 10.0.44.44).
:param str usage_model: Identifies the StorageCache usage model to be used for this storage target.
"""
if target is not None:
pulumi.set(__self__, "target", target)
if usage_model is not None:
pulumi.set(__self__, "usage_model", usage_model)
@property
@pulumi.getter
def target(self) -> Optional[str]:
"""
IP address or host name of an NFSv3 host (e.g., 10.0.44.44).
"""
return pulumi.get(self, "target")
@property
@pulumi.getter(name="usageModel")
def usage_model(self) -> Optional[str]:
"""
Identifies the StorageCache usage model to be used for this storage target.
"""
return pulumi.get(self, "usage_model")
@pulumi.output_type
class NfsAccessPolicyResponse(dict):
"""
A set of rules describing access policies applied to NFSv3 clients of the cache.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "accessRules":
suggest = "access_rules"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NfsAccessPolicyResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NfsAccessPolicyResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NfsAccessPolicyResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
access_rules: Sequence['outputs.NfsAccessRuleResponse'],
name: str):
"""
A set of rules describing access policies applied to NFSv3 clients of the cache.
:param Sequence['NfsAccessRuleResponse'] access_rules: The set of rules describing client accesses allowed under this policy.
:param str name: Name identifying this policy. Access Policy names are not case sensitive.
"""
pulumi.set(__self__, "access_rules", access_rules)
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="accessRules")
def access_rules(self) -> Sequence['outputs.NfsAccessRuleResponse']:
"""
The set of rules describing client accesses allowed under this policy.
"""
return pulumi.get(self, "access_rules")
@property
@pulumi.getter
def name(self) -> str:
"""
Name identifying this policy. Access Policy names are not case sensitive.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class NfsAccessRuleResponse(dict):
"""
Rule to place restrictions on portions of the cache namespace being presented to clients.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "anonymousGID":
suggest = "anonymous_gid"
elif key == "anonymousUID":
suggest = "anonymous_uid"
elif key == "rootSquash":
suggest = "root_squash"
elif key == "submountAccess":
suggest = "submount_access"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NfsAccessRuleResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NfsAccessRuleResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NfsAccessRuleResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
access: str,
scope: str,
anonymous_gid: Optional[str] = None,
anonymous_uid: Optional[str] = None,
filter: Optional[str] = None,
root_squash: Optional[bool] = None,
submount_access: Optional[bool] = None,
suid: Optional[bool] = None):
"""
Rule to place restrictions on portions of the cache namespace being presented to clients.
:param str access: Access allowed by this rule.
:param str scope: Scope for this rule. The scope and filter determine which clients match the rule.
:param str anonymous_gid: GID value that replaces 0 when rootSquash is true. This will use the value of anonymousUID if not provided.
:param str anonymous_uid: UID value that replaces 0 when rootSquash is true. 65534 will be used if not provided.
:param str filter: Filter applied to the scope for this rule. The filter's format depends on its scope. 'default' scope matches all clients and has no filter value. 'network' scope takes a filter in CIDR format (for example, 10.99.1.0/24). 'host' takes an IP address or fully qualified domain name as filter. If a client does not match any filter rule and there is no default rule, access is denied.
:param bool root_squash: Map root accesses to anonymousUID and anonymousGID.
:param bool submount_access: For the default policy, allow access to subdirectories under the root export. If this is set to no, clients can only mount the path '/'. If set to yes, clients can mount a deeper path, like '/a/b'.
:param bool suid: Allow SUID semantics.
"""
pulumi.set(__self__, "access", access)
pulumi.set(__self__, "scope", scope)
if anonymous_gid is not None:
pulumi.set(__self__, "anonymous_gid", anonymous_gid)
if anonymous_uid is not None:
pulumi.set(__self__, "anonymous_uid", anonymous_uid)
if filter is not None:
pulumi.set(__self__, "filter", filter)
if root_squash is not None:
pulumi.set(__self__, "root_squash", root_squash)
if submount_access is not None:
pulumi.set(__self__, "submount_access", submount_access)
if suid is not None:
pulumi.set(__self__, "suid", suid)
@property
@pulumi.getter
def access(self) -> str:
"""
Access allowed by this rule.
"""
return pulumi.get(self, "access")
@property
@pulumi.getter
def scope(self) -> str:
"""
Scope for this rule. The scope and filter determine which clients match the rule.
"""
return pulumi.get(self, "scope")
@property
@pulumi.getter(name="anonymousGID")
def anonymous_gid(self) -> Optional[str]:
"""
GID value that replaces 0 when rootSquash is true. This will use the value of anonymousUID if not provided.
"""
return pulumi.get(self, "anonymous_gid")
@property
@pulumi.getter(name="anonymousUID")
def anonymous_uid(self) -> Optional[str]:
"""
UID value that replaces 0 when rootSquash is true. 65534 will be used if not provided.
"""
return pulumi.get(self, "anonymous_uid")
@property
@pulumi.getter
def filter(self) -> Optional[str]:
"""
Filter applied to the scope for this rule. The filter's format depends on its scope. 'default' scope matches all clients and has no filter value. 'network' scope takes a filter in CIDR format (for example, 10.99.1.0/24). 'host' takes an IP address or fully qualified domain name as filter. If a client does not match any filter rule and there is no default rule, access is denied.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter(name="rootSquash")
def root_squash(self) -> Optional[bool]:
"""
Map root accesses to anonymousUID and anonymousGID.
"""
return pulumi.get(self, "root_squash")
@property
@pulumi.getter(name="submountAccess")
def submount_access(self) -> Optional[bool]:
"""
For the default policy, allow access to subdirectories under the root export. If this is set to no, clients can only mount the path '/'. If set to yes, clients can mount a deeper path, like '/a/b'.
"""
return pulumi.get(self, "submount_access")
@property
@pulumi.getter
def suid(self) -> Optional[bool]:
"""
Allow SUID semantics.
"""
return pulumi.get(self, "suid")
@pulumi.output_type
class SystemDataResponse(dict):
"""
Metadata pertaining to creation and last modification of the resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "createdAt":
suggest = "created_at"
elif key == "createdBy":
suggest = "created_by"
elif key == "createdByType":
suggest = "created_by_type"
elif key == "lastModifiedAt":
suggest = "last_modified_at"
elif key == "lastModifiedBy":
suggest = "last_modified_by"
elif key == "lastModifiedByType":
suggest = "last_modified_by_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SystemDataResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SystemDataResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SystemDataResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
created_at: Optional[str] = None,
created_by: Optional[str] = None,
created_by_type: Optional[str] = None,
last_modified_at: Optional[str] = None,
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[str] = None):
"""
Metadata pertaining to creation and last modification of the resource.
:param str created_at: The timestamp of resource creation (UTC).
:param str created_by: The identity that created the resource.
:param str created_by_type: The type of identity that created the resource.
:param str last_modified_at: The timestamp of resource last modification (UTC)
:param str last_modified_by: The identity that last modified the resource.
:param str last_modified_by_type: The type of identity that last modified the resource.
"""
if created_at is not None:
pulumi.set(__self__, "created_at", created_at)
if created_by is not None:
pulumi.set(__self__, "created_by", created_by)
if created_by_type is not None:
pulumi.set(__self__, "created_by_type", created_by_type)
if last_modified_at is not None:
pulumi.set(__self__, "last_modified_at", last_modified_at)
if last_modified_by is not None:
pulumi.set(__self__, "last_modified_by", last_modified_by)
if last_modified_by_type is not None:
pulumi.set(__self__, "last_modified_by_type", last_modified_by_type)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> Optional[str]:
"""
The timestamp of resource creation (UTC).
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="createdBy")
def created_by(self) -> Optional[str]:
"""
The identity that created the resource.
"""
return pulumi.get(self, "created_by")
@property
@pulumi.getter(name="createdByType")
def created_by_type(self) -> Optional[str]:
"""
The type of identity that created the resource.
"""
return pulumi.get(self, "created_by_type")
@property
@pulumi.getter(name="lastModifiedAt")
def last_modified_at(self) -> Optional[str]:
"""
The timestamp of resource last modification (UTC)
"""
return pulumi.get(self, "last_modified_at")
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> Optional[str]:
"""
The identity that last modified the resource.
"""
return pulumi.get(self, "last_modified_by")
@property
@pulumi.getter(name="lastModifiedByType")
def last_modified_by_type(self) -> Optional[str]:
"""
The type of identity that last modified the resource.
"""
return pulumi.get(self, "last_modified_by_type")
@pulumi.output_type
class UnknownTargetResponse(dict):
"""
Properties pertaining to the UnknownTarget
"""
def __init__(__self__, *,
attributes: Optional[Mapping[str, str]] = None):
"""
Properties pertaining to the UnknownTarget
:param Mapping[str, str] attributes: Dictionary of string->string pairs containing information about the Storage Target.
"""
if attributes is not None:
pulumi.set(__self__, "attributes", attributes)
@property
@pulumi.getter
def attributes(self) -> Optional[Mapping[str, str]]:
"""
Dictionary of string->string pairs containing information about the Storage Target.
"""
return pulumi.get(self, "attributes")
/azure_mgmt_cdn-12.1.0b1-py3-none-any.whl/azure/mgmt/cdn/aio/operations/_origins_operations.py
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._origins_operations import (
build_create_request,
build_delete_request,
build_get_request,
build_list_by_endpoint_request,
build_update_request,
)
from .._vendor import MixinABC
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class OriginsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.cdn.aio.CdnManagementClient`'s
:attr:`origins` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_endpoint(
self, resource_group_name: str, profile_name: str, endpoint_name: str, **kwargs: Any
) -> AsyncIterable["_models.Origin"]:
"""Lists all of the existing origins within an endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Origin or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.OriginListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_endpoint_request(
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_endpoint.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OriginListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_endpoint.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins"} # type: ignore
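    # Illustrative sketch (not part of the SDK module): typical async usage of
    # the paging operation above. The resource names are placeholders, and in
    # application code this operation is reached through
    # azure.mgmt.cdn.aio.CdnManagementClient's 'origins' attribute rather than
    # by instantiating this class. Never called here.
    async def _example_list_origins(self) -> None:
        async for origin in self.list_by_endpoint("my-rg", "my-profile", "my-endpoint"):
            print(origin.name)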
@distributed_trace_async
async def get(
self, resource_group_name: str, profile_name: str, endpoint_name: str, origin_name: str, **kwargs: Any
) -> _models.Origin:
"""Gets an existing origin within an endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin which is unique within the endpoint. Required.
:type origin_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Origin or the result of cls(response)
:rtype: ~azure.mgmt.cdn.models.Origin
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.Origin]
request = build_get_request(
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("Origin", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
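    # Illustrative sketch (not part of the SDK module): fetching a single
    # origin. Names are placeholders; 'host_name' is a standard property of
    # the Origin model. Never called here.
    async def _example_get_origin(self) -> None:
        origin = await self.get("my-rg", "my-profile", "my-endpoint", "my-origin")
        print(origin.host_name)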
async def _create_initial(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin: Union[_models.Origin, IO],
**kwargs: Any
) -> _models.Origin:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.Origin]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(origin, (IO, bytes)):
_content = origin
else:
_json = self._serialize.body(origin, "Origin")
request = build_create_request(
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("Origin", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("Origin", pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize("Origin", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
@overload
async def begin_create(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin: _models.Origin,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Origin]:
"""Creates a new origin within the specified endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin that is unique within the endpoint. Required.
:type origin_name: str
:param origin: Origin properties. Required.
:type origin: ~azure.mgmt.cdn.models.Origin
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Origin or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_create(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Origin]:
"""Creates a new origin within the specified endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin that is unique within the endpoint. Required.
:type origin_name: str
:param origin: Origin properties. Required.
:type origin: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Origin or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_create(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin: Union[_models.Origin, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.Origin]:
"""Creates a new origin within the specified endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin that is unique within the endpoint. Required.
:type origin_name: str
        :param origin: Origin properties. Is either a model type or an IO type. Required.
:type origin: ~azure.mgmt.cdn.models.Origin or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Origin or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.Origin]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial( # type: ignore
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
origin=origin,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("Origin", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
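    # Usage sketch (not generated code): assuming an authenticated async CDN
    # management client named `client` that exposes this operations group as
    # `client.origins`, and a populated Origin model named `origin`, the
    # long-running create could be driven like this:
    #
    #     poller = await client.origins.begin_create(
    #         resource_group_name="my-rg",        # hypothetical names
    #         profile_name="my-profile",
    #         endpoint_name="my-endpoint",
    #         origin_name="my-origin",
    #         origin=origin,
    #     )
    #     created = await poller.result()         # Origin model on success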
async def _update_initial(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin_update_properties: Union[_models.OriginUpdateParameters, IO],
**kwargs: Any
) -> _models.Origin:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.Origin]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(origin_update_properties, (IO, bytes)):
_content = origin_update_properties
else:
_json = self._serialize.body(origin_update_properties, "OriginUpdateParameters")
request = build_update_request(
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("Origin", pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize("Origin", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
@overload
async def begin_update(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin_update_properties: _models.OriginUpdateParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Origin]:
"""Updates an existing origin within an endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin which is unique within the endpoint. Required.
:type origin_name: str
:param origin_update_properties: Origin properties. Required.
:type origin_update_properties: ~azure.mgmt.cdn.models.OriginUpdateParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Origin or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_update(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin_update_properties: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Origin]:
"""Updates an existing origin within an endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin which is unique within the endpoint. Required.
:type origin_name: str
:param origin_update_properties: Origin properties. Required.
:type origin_update_properties: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Origin or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
profile_name: str,
endpoint_name: str,
origin_name: str,
origin_update_properties: Union[_models.OriginUpdateParameters, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.Origin]:
"""Updates an existing origin within an endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin which is unique within the endpoint. Required.
:type origin_name: str
        :param origin_update_properties: Origin properties. Is either a model type or an IO type.
Required.
:type origin_update_properties: ~azure.mgmt.cdn.models.OriginUpdateParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Origin or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.cdn.models.Origin]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.Origin]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial( # type: ignore
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
origin_update_properties=origin_update_properties,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("Origin", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
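    # Usage sketch (not generated code): with the same hypothetical `client`
    # as above and an OriginUpdateParameters model `update_params`, an update
    # is polled the same way:
    #
    #     poller = await client.origins.begin_update(
    #         resource_group_name="my-rg",
    #         profile_name="my-profile",
    #         endpoint_name="my-endpoint",
    #         origin_name="my-origin",
    #         origin_update_properties=update_params,
    #     )
    #     updated = await poller.result()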
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, profile_name: str, endpoint_name: str, origin_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
@distributed_trace_async
async def begin_delete(
self, resource_group_name: str, profile_name: str, endpoint_name: str, origin_name: str, **kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes an existing origin within an endpoint.
:param resource_group_name: Name of the Resource group within the Azure subscription. Required.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within the resource group.
Required.
:type profile_name: str
:param endpoint_name: Name of the endpoint under the profile which is unique globally.
Required.
:type endpoint_name: str
:param origin_name: Name of the origin which is unique within the endpoint. Required.
:type origin_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
profile_name=profile_name,
endpoint_name=endpoint_name,
origin_name=origin_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}"} # type: ignore
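    # Usage sketch (not generated code): deletion returns an AsyncLROPoller
    # whose result is None; awaiting it simply blocks until the origin is
    # removed:
    #
    #     poller = await client.origins.begin_delete(
    #         resource_group_name="my-rg",
    #         profile_name="my-profile",
    #         endpoint_name="my-endpoint",
    #         origin_name="my-origin",
    #     )
    #     await poller.result()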
|
PypiClean
|
/DragonPyEmulator-0.9.0-py3-none-any.whl/dragonpy/cli/cli_app.py
|
import inspect
import locale
import logging
import sys
from pathlib import Path
import rich_click as click
from bx_py_utils.path import assert_is_file
from rich import print # noqa
from rich_click import RichGroup
import dragonpy
from basic_editor.editor import run_basic_editor
from dragonpy import __version__, constants
from dragonpy.components.rom import ROMFileError
from dragonpy.constants import VERBOSITY_DEFAULT_VALUE, VERBOSITY_DICT
from dragonpy.core.configs import machine_dict
from dragonpy.core.gui_starter import gui_mainloop
logger = logging.getLogger(__name__)
# use user's preferred locale
# e.g.: for formatting cycles/sec number
locale.setlocale(locale.LC_ALL, '')
PACKAGE_ROOT = Path(dragonpy.__file__).parent.parent
assert_is_file(PACKAGE_ROOT / 'pyproject.toml')
OPTION_ARGS_DEFAULT_TRUE = dict(is_flag=True, show_default=True, default=True)
OPTION_ARGS_DEFAULT_FALSE = dict(is_flag=True, show_default=True, default=False)
ARGUMENT_EXISTING_DIR = dict(
type=click.Path(exists=True, file_okay=False, dir_okay=True, readable=True, path_type=Path)
)
ARGUMENT_NOT_EXISTING_DIR = dict(
type=click.Path(exists=False, file_okay=False, dir_okay=True, readable=False, writable=True, path_type=Path)
)
ARGUMENT_EXISTING_FILE = dict(
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True, path_type=Path)
)
OPTION_KWARGS_VERBOSITY = dict(
type=click.Choice([str(number) for number in sorted(VERBOSITY_DICT.keys())]),
default=str(VERBOSITY_DEFAULT_VALUE),
show_default=True,
help=", ".join(f'{number}:{text}' for number, text in VERBOSITY_DICT.items()),
)
OPTION_KWARGS_MACHINE = dict(
type=click.Choice(sorted(machine_dict.keys())),
default=machine_dict.DEFAULT,
show_default=True,
help='Used machine configuration',
)
class ClickGroup(RichGroup): # FIXME: How to set the "info_name" easier?
def make_context(self, info_name, *args, **kwargs):
info_name = './cli.py'
return super().make_context(info_name, *args, **kwargs)
@click.group(
cls=ClickGroup,
epilog=constants.CLI_EPILOG,
)
def cli():
pass
@click.command()
def version():
"""Print version and exit"""
    # Pseudo command, because the version is always printed on every CLI call ;)
sys.exit(0)
cli.add_command(version)
@click.command()
def gui():
"""Start the DragonPy tkinter starter GUI"""
gui_mainloop(confirm_exit=False)
cli.add_command(gui)
@click.command()
@click.option('--verbosity', **OPTION_KWARGS_VERBOSITY)
@click.option('--trace/--no-trace', **OPTION_ARGS_DEFAULT_FALSE, help='Create trace lines')
@click.option(
'--max-ops',
type=int,
default=None,
show_default=True,
    help='If given, stop the CPU after the given number of cycles; otherwise run forever',
)
@click.option('--machine', **OPTION_KWARGS_MACHINE)
def run(machine: str, trace: bool, max_ops: int | None, verbosity: str):
"""Run a machine emulation"""
machine_run_func, MachineConfigClass = machine_dict[machine]
print(f'Use machine func: {machine_run_func.__name__}')
cfg_dict = {
'verbosity': int(verbosity),
'trace': trace,
'max_ops': max_ops,
}
print(cfg_dict)
machine_run_func(cfg_dict)
cli.add_command(run)
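# Usage sketch (illustrative values, not part of the CLI itself): from a
# source checkout the `run` command could be invoked as
#
#   ./cli.py run --machine Dragon32 --max-ops 10000 --trace
#
# where the machine name must be one of the keys of `machine_dict`;
# "Dragon32" is only an illustrative guess.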
@click.command()
@click.option('--verbosity', **OPTION_KWARGS_VERBOSITY)
@click.option('--machine', **OPTION_KWARGS_MACHINE)
def editor(machine: str, verbosity: str):
"""
Run only the BASIC editor
"""
machine_run_func, MachineConfigClass = machine_dict[machine]
cfg_dict = {
'verbosity': int(verbosity),
'trace': False,
'max_ops': None,
}
machine_cfg = MachineConfigClass(cfg_dict)
run_basic_editor(machine_cfg)
cli.add_command(editor)
@click.command()
@click.option('--verbose/--no-verbose', **OPTION_ARGS_DEFAULT_FALSE)
@click.option(
'--machines',
'-m',
multiple=True,
type=click.Choice(sorted(machine_dict.keys())),
default=None,
help='Download ROM only for given machine(s). Leave empty to download all known ROMs',
)
def download_roms(machines: tuple[str] | None, verbose: bool = True):
"""
Download/Test only ROM files
"""
if not machines:
machines = sorted(machine_dict.keys())
print(f'Download ROMs for {machines}')
success = 0
for machine_name in machines:
machine_run_func, machine_cfg = machine_dict[machine_name]
print(f'Download / test ROM for {machine_name}:')
for rom in machine_cfg.DEFAULT_ROMS:
print(f"\tROM file: {rom.FILENAME}")
try:
content = rom.get_data()
except ROMFileError as err:
print(f'[red]{err}')
continue
size = len(content)
print(f"\tfile size is ${size:04x} (dez.: {size:d}) Bytes\n")
success += 1
print(f'{success} ROMs succeed.')
cli.add_command(download_roms)
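# Usage sketch (illustrative machine names): download all known ROMs, or only
# the ROMs of selected machines. Click derives the command name from the
# function name, typically registering it as `download-roms`:
#
#   ./cli.py download-roms
#   ./cli.py download-roms -m Dragon32 -m Dragon64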
@click.command()
def log_list():
"""
    List all existing loggers and exit.
"""
print("A list of all loggers:")
for log_name in sorted(logging.Logger.manager.loggerDict):
print(f"\t{log_name}")
cli.add_command(log_list)
def main():
print(f'[bold][green]dragonpy[/green] v[cyan]{__version__}')
print(
inspect.cleandoc(
"""
********************************************************
* DragonPy is a Open source (GPL v3 or later) emulator *
* for the 30 years old homecomputer Dragon 32 *
* and Tandy TRS-80 Color Computer (CoCo)... *
********************************************************
* Homepage: https://github.com/jedie/DragonPy *
********************************************************
"""
)
)
# Execute Click CLI:
cli.name = './cli.py'
cli()
|
PypiClean
|
/abdo-2.0.0.tar.gz/abdo-2.0.0/convo/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py
|
import logging
from collections import defaultdict, OrderedDict
from pathlib import Path
import numpy as np
from typing import Any, Dict, Optional, Text, List, Type, Union
from convo.nlu.tokenizers.spacy_tokenizer import POS_TAG_KEY
from convo.shared.constants import DOCS_URL_COMPONENTS
from convo.nlu.components import Component
from convo.nlu.tokenizers.tokenizer import Token
from convo.nlu.tokenizers.tokenizer import Tokenizer
from convo.nlu.featurizers.featurizer import SparseFeaturizer
from convo.shared.nlu.training_data.features import Features
from convo.nlu.config import ConvoNLUModelConfig
from convo.shared.nlu.training_data.training_data import TrainingData
from convo.shared.nlu.training_data.message import Message
from convo.nlu.constants import TOKENS_NAMES, FEATURIZER_CLASS_ALIAS
from convo.shared.nlu.constants import TEXT, FEATURE_TYPE_SEQUENCE
from convo.nlu.model import Metadata
import convo.utils.io as io_utils
logger = logging.getLogger(__name__)
END_OF_SENTENCE = "EOS"
BEGIN_OF_SENTENCE = "BOS"
class LexicalSyntacticFeaturizer(SparseFeaturizer):
"""Creates features for entity extraction.
Moves with a sliding window over every token in the user message and creates
features according to the configuration.
"""
@classmethod
def required_components(cls) -> List[Type[Component]]:
return [Tokenizer]
defaults = {
# 'features' is [before, word, after] array with before, word,
# after holding keys about which features to use for each word,
# for example, 'title' in array before will have the feature
# "is the preceding word in title case?"
# POS features require 'SpacyTokenizer'.
"features": [
["low", "title", "upper"],
["BOS", "EOS", "low", "upper", "title", "digit"],
["low", "title", "upper"],
]
}
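    # For example, with the default window above (size 3, i.e. relative
    # positions -1, 0 and +1) every token produces feature keys such as
    # "-1:low", "0:BOS", "0:title" or "1:upper" -- the prefix is the relative
    # position within the window and the suffix is the configured feature name
    # (see `_tokens_to_features` below).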
function_dict = {
"low": lambda token: token.text.islower(),
"title": lambda token: token.text.istitle(),
"prefix5": lambda token: token.text[:5],
"prefix2": lambda token: token.text[:2],
"suffix5": lambda token: token.text[-5:],
"suffix3": lambda token: token.text[-3:],
"suffix2": lambda token: token.text[-2:],
"suffix1": lambda token: token.text[-1:],
"pos": lambda token: token.data.get(POS_TAG_KEY)
if POS_TAG_KEY in token.data
else None,
"pos2": lambda token: token.data.get(POS_TAG_KEY)[:2]
if "pos" in token.data
else None,
"upper": lambda token: token.text.isupper(),
"digit": lambda token: token.text.isdigit(),
}
def __init__(
self,
component_config: Dict[Text, Any],
feature_to_idx_dict: Optional[Dict[Text, Any]] = None,
):
super().__init__(component_config)
self.feature_to_idx_dict = feature_to_idx_dict or {}
self.number_of_features = self._calculate_number_of_features()
def _calculate_number_of_features(self) -> int:
return sum(
[
len(feature_values.values())
for feature_values in self.feature_to_idx_dict.values()
]
)
def train(
self,
training_data: TrainingData,
config: Optional[ConvoNLUModelConfig] = None,
**kwargs: Any,
) -> None:
self.feature_to_idx_dict = self._create_feature_to_idx_dict(training_data)
self.number_of_features = self._calculate_number_of_features()
for example in training_data.training_examples:
self._create_sparse_features(example)
def process(self, message: Message, **kwargs: Any) -> None:
self._create_sparse_features(message)
def _create_feature_to_idx_dict(
self, training_data: TrainingData
) -> Dict[Text, Dict[Text, int]]:
"""Create dictionary of all feature values.
Each feature key, defined in the component configuration, points to
different feature values and their indices in the overall resulting
feature vector.
"""
# get all possible feature values
all_features = []
for example in training_data.training_examples:
tokens = example.get(TOKENS_NAMES[TEXT])
if tokens:
all_features.append(self._tokens_to_features(tokens))
# build vocabulary of features
feature_vocabulary = self._build_feature_vocabulary(all_features)
# assign a unique index to each feature value
return self._map_features_to_indices(feature_vocabulary)
@staticmethod
def _map_features_to_indices(
feature_vocabulary: Dict[Text, List[Text]]
) -> Dict[Text, Dict[Text, int]]:
feature_to_idx_dict = {}
offset = 0
for feature_name, feature_values in feature_vocabulary.items():
feature_to_idx_dict[feature_name] = {
str(feature_value): feature_idx
for feature_idx, feature_value in enumerate(
sorted(feature_values), start=offset
)
}
offset += len(feature_values)
return feature_to_idx_dict
@staticmethod
def _build_feature_vocabulary(
features: List[List[Dict[Text, Any]]]
) -> Dict[Text, List[Text]]:
feature_vocabulary = defaultdict(set)
for sentence_features in features:
for token_features in sentence_features:
for feature_name, feature_value in token_features.items():
feature_vocabulary[feature_name].add(feature_value)
# sort items to ensure same order every time (for tests)
feature_vocabulary = OrderedDict(sorted(feature_vocabulary.items()))
return feature_vocabulary
def _create_sparse_features(self, message: Message) -> None:
"""Convert incoming messages into sparse features using the configured
features."""
import scipy.sparse
tokens = message.get(TOKENS_NAMES[TEXT])
# this check is required because there might be training data examples without TEXT,
# e.g., `Message("", {action_name: "action_listen"})`
if tokens:
sentence_features = self._tokens_to_features(tokens)
one_hot_seq_feature_vector = self._features_to_one_hot(sentence_features)
sequence_features = scipy.sparse.coo_matrix(one_hot_seq_feature_vector)
final_sequence_features = Features(
sequence_features,
FEATURE_TYPE_SEQUENCE,
TEXT,
self.component_config[FEATURIZER_CLASS_ALIAS],
)
message.add_features(final_sequence_features)
def _tokens_to_features(self, tokens: List[Token]) -> List[Dict[Text, Any]]:
"""Convert words into discrete features."""
configured_features = self.component_config["features"]
sentence_features = []
for token_idx in range(len(tokens)):
# get the window size (e.g. before, word, after) of the configured features
# in case of an even number we will look at one more word before,
# e.g. window size 4 will result in a window range of
# [-2, -1, 0, 1] (0 = current word in sentence)
window_size = len(configured_features)
half_window_size = window_size // 2
window_range = range(-half_window_size, half_window_size + window_size % 2)
prefixes = [str(i) for i in window_range]
token_features = {}
for pointer_position in window_range:
current_idx = token_idx + pointer_position
# skip, if current_idx is pointing to a non-existing token
if current_idx < 0 or current_idx >= len(tokens):
continue
token = tokens[token_idx + pointer_position]
current_feature_idx = pointer_position + half_window_size
prefix = prefixes[current_feature_idx]
for feature in configured_features[current_feature_idx]:
token_features[f"{prefix}:{feature}"] = self._get_feature_value(
feature, token, token_idx, pointer_position, len(tokens)
)
sentence_features.append(token_features)
return sentence_features
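    # Worked example (sketch): for the tokens ["Hello", "world"] and the
    # default configuration, the first token yields entries such as
    #   {"0:BOS": True, "0:EOS": False, "0:low": False, "0:title": True,
    #    "0:upper": False, "0:digit": False, "1:low": True, "1:title": False,
    #    "1:upper": False}
    # i.e. features of the token itself plus features of its right neighbour;
    # the left neighbour is skipped because it does not exist.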
def _features_to_one_hot(
self, sentence_features: List[Dict[Text, Any]]
) -> np.ndarray:
"""Convert the word features into a one-hot presentation using the indices
in the feature-to-idx dictionary."""
one_hot_seq_feature_vector = np.zeros(
[len(sentence_features), self.number_of_features]
)
for token_idx, token_features in enumerate(sentence_features):
for feature_name, feature_value in token_features.items():
feature_value_str = str(feature_value)
if (
feature_name in self.feature_to_idx_dict
and feature_value_str in self.feature_to_idx_dict[feature_name]
):
feature_idx = self.feature_to_idx_dict[feature_name][
feature_value_str
]
one_hot_seq_feature_vector[token_idx][feature_idx] = 1
return one_hot_seq_feature_vector
def _get_feature_value(
self,
feature: Text,
token: Token,
token_idx: int,
pointer_position: int,
token_length: int,
) -> Union[bool, int, Text]:
if feature == END_OF_SENTENCE:
return token_idx + pointer_position == token_length - 1
if feature == BEGIN_OF_SENTENCE:
return token_idx + pointer_position == 0
if feature not in self.function_dict:
raise ValueError(
f"Configured feature '{feature}' not valid. Please check "
f"'{DOCS_URL_COMPONENTS}' for valid configuration parameters."
)
value = self.function_dict[feature](token)
if value is None:
logger.debug(
f"Invalid value '{value}' for feature '{feature}'."
f" Feature is ignored."
)
return value
@classmethod
def load(
cls,
meta: Dict[Text, Any],
model_dir: Optional[Text] = None,
model_metadata: Optional[Metadata] = None,
cached_component: Optional["LexicalSyntacticFeaturizer"] = None,
**kwargs: Any,
) -> "LexicalSyntacticFeaturizer":
file_name = meta.get("file")
feature_to_idx_file = Path(model_dir) / f"{file_name}.feature_to_idx_dict.pkl"
feature_to_idx_dict = io_utils.json_unpickle(feature_to_idx_file)
return LexicalSyntacticFeaturizer(meta, feature_to_idx_dict=feature_to_idx_dict)
def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
"""Persist this model into the passed directory.
Return the metadata necessary to load the model again."""
feature_to_idx_file = Path(model_dir) / f"{file_name}.feature_to_idx_dict.pkl"
io_utils.json_pickle(feature_to_idx_file, self.feature_to_idx_dict)
return {"file": file_name}
|
PypiClean
|
/getnovel_khangit-1.4.0.tar.gz/getnovel_khangit-1.4.0/src/getnovel/app/spiders/bachngocsach.py
|
from scrapy import Spider
from scrapy.exceptions import CloseSpider
from scrapy.http import Response, Request
from getnovel.app.itemloaders import InfoLoader, ChapterLoader
from getnovel.app.items import Info, Chapter
class BachNgocSachSpider(Spider):
"""Define spider for domain: bachngocsach.
Attributes
----------
name : str
Name of the spider.
start_urls : list
        List of URLs to start crawling from.
    sa : int
        The index of the chapter at which crawling starts.
    so : int
        The index of the chapter after which crawling stops.
    c : str
        Language code of the novel.
"""
name = "bachngocsach"
def __init__(self, u: str, start: int, stop: int, *args, **kwargs):
"""Initialize attributes.
Parameters
----------
u : str
            URL of the novel information page.
        start : int
Start crawling from this chapter.
stop : int
Stop crawling after this chapter, input -1 to get all chapters.
"""
super().__init__(*args, **kwargs)
self.start_urls = [u]
self.sa = int(start)
self.so = int(stop)
self.c = "vi" # language code
def parse(self, res: Response, *args, **kwargs):
"""Extract info and send request to the table of content.
Parameters
----------
res : Response
The response to parse.
Yields
------
Info
Info item.
Request
Request to the table of content.
"""
yield get_info(res)
yield Request(
url=f"{res.url}/muc-luc?page=all",
callback=self.parse_toc,
)
def parse_toc(self, res: Response):
"""Extract link of the start chapter.
Parameters
----------
res : Response
The response to parse.
Yields
------
Request
Request to the start chapter.
"""
yield res.follow(
url=res.xpath(f'(//*[@class="chuong-link"]/@href)[{self.sa}]').get(),
meta={"id": self.sa},
callback=self.parse_content,
)
def parse_content(self, res: Response):
"""Extract content.
Parameters
----------
res : Response
The response to parse.
Yields
------
Chapter
Chapter item.
Request
Request to the next chapter.
"""
yield get_content(res)
neu = res.xpath('//a[contains(@class,"page-next")]/@href').get()
if (neu is None) or (res.meta["id"] == self.so):
raise CloseSpider(reason="done")
yield res.follow(
url=neu,
meta={"id": res.meta["id"] + 1},
callback=self.parse_content,
)
def get_info(res: Response) -> Info:
"""Get novel information.
Parameters
----------
res : Response
The response to parse.
Returns
-------
Info
Populated Info item.
"""
r = InfoLoader(item=Info(), response=res)
r.add_xpath("title", '//*[@id="truyen-title"]/text()')
r.add_xpath("author", '//div[@id="tacgia"]/a/text()')
r.add_xpath("types", '//div[@id="theloai"]/a/text()')
r.add_xpath("foreword", '//div[@id="gioithieu"]/div/p/text()')
r.add_xpath("image_urls", '//div[@id="anhbia"]/img/@src')
r.add_value("url", res.request.url)
return r.load_item()
def get_content(res: Response) -> Chapter:
"""Get chapter content.
Parameters
----------
res : Response
The response to parse.
Returns
-------
Chapter
Populated Chapter item.
"""
r = ChapterLoader(item=Chapter(), response=res)
r.add_value("id", str(res.meta["id"]))
r.add_value("url", res.url)
r.add_xpath("title", '//h1[@id="chuong-title"]/text()')
r.add_xpath("content", '//div[@id="noi-dung"]/p/text()')
return r.load_item()
|
PypiClean
|
/Camelot-13.04.13-gpl-pyqt.tar.gz/Camelot-13.04.13-gpl-pyqt/camelot/view/mainwindow.py
|
import logging
logger = logging.getLogger('camelot.view.mainwindow')
from PyQt4.QtCore import Qt
from PyQt4 import QtGui, QtCore
from camelot.view.controls.busy_widget import BusyWidget
from camelot.view.controls.navpane2 import NavigationPane
from camelot.view.model_thread import post
from camelot.core.utils import ugettext as _
class MainWindow(QtGui.QMainWindow):
"""Main window of a Desktop Camelot application
:param gui_context: an :class:`camelot.admin.action.application_action.ApplicationActionGuiContext`
object
:param parent: a :class:`QtGui.QWidget` object or :class:`None`
.. attribute:: splash_screen
a :class:`QtGui.QWidget` that needs to be closed when
the main window is shown.
"""
def __init__(self, gui_context, parent=None):
from workspace import DesktopWorkspace
logger.debug('initializing main window')
QtGui.QMainWindow.__init__(self, parent)
self.splash_screen = None
self.toolbars = []
self.nav_pane = None
self.app_admin = gui_context.admin.get_application_admin()
logger.debug('setting up workspace')
self.workspace = DesktopWorkspace( self.app_admin, self )
self.gui_context = gui_context
self.gui_context.workspace = self.workspace
logger.debug('setting child windows dictionary')
logger.debug('setting central widget to our workspace')
self.setCentralWidget( self.workspace )
self.workspace.change_view_mode_signal.connect( self.change_view_mode )
self.workspace.last_view_closed_signal.connect( self.unmaximize_view )
self.workspace.view_activated_signal.connect( self.view_activated )
logger.debug('creating navigation pane')
post( self.app_admin.get_sections, self.set_sections )
logger.debug('creating the menus')
post( self.app_admin.get_main_menu, self.set_main_menu )
logger.debug('creating the toolbars')
post( self.app_admin.get_toolbar_actions,
self.set_left_toolbar_actions,
args = (Qt.LeftToolBarArea,) )
post( self.app_admin.get_toolbar_actions,
self.set_right_toolbar_actions,
args = (Qt.RightToolBarArea,) )
post( self.app_admin.get_toolbar_actions,
self.set_top_toolbar_actions,
args = (Qt.TopToolBarArea,) )
post( self.app_admin.get_toolbar_actions,
self.set_bottom_toolbar_actions,
args = (Qt.BottomToolBarArea,) )
post( self.app_admin.get_hidden_actions,
self.set_hidden_actions )
logger.debug('reading saved settings')
self.read_settings()
windowtitle = self.app_admin.get_name()
logger.debug( u'setting up window title: %s'%windowtitle )
self.setWindowTitle( windowtitle )
self.app_admin.title_changed_signal.connect( self.setWindowTitle )
logger.debug('initialization complete')
@QtCore.pyqtSlot()
def show( self ):
"""This method wait until the main window is completely set up, and
only then shows it. This is a workaround for a bug in Qt on OS X
https://bugreports.qt.nokia.com/browse/QTBUG-18567
"""
post( lambda:None, self._delayed_show )
@QtCore.pyqtSlot(object)
def _delayed_show( self, _o ):
"""Call to the underlying :meth:`QMainWindow.show`, to be used in
:meth:`MainWindow.show`
"""
super( MainWindow, self ).show()
if self.splash_screen:
self.splash_screen.close()
@QtCore.pyqtSlot()
def unmaximize_view( self ):
"""Show the navigation pane and the menu bar if they exist """
if self.navpane:
self.navpane.show()
if self.menuBar():
self.menuBar().show()
@QtCore.pyqtSlot()
def change_view_mode( self ):
"""Switch between hidden or shown menubar and navigation pane"""
if self.menuBar().isHidden():
if self.navpane:
self.navpane.show()
self.menuBar().show()
else:
if self.navpane:
self.navpane.hide()
self.menuBar().hide()
def read_settings( self ):
"""Restore the geometry of the main window to its last saved state"""
settings = QtCore.QSettings()
self.restoreGeometry(settings.value('geometry').toByteArray())
def write_settings(self):
"""Store the current geometry of the main window"""
logger.debug('writing application settings')
settings = QtCore.QSettings()
settings.setValue('geometry', QtCore.QVariant(self.saveGeometry()))
logger.debug('settings written')
@QtCore.pyqtSlot( object )
def set_main_menu( self, main_menu ):
"""Set the main menu
:param main_menu: a list of :class:`camelot.admin.menu.Menu` objects,
as returned by the :meth:`camelot.admin.application_admin.ApplicationAdmin.get_main_menu`
method.
"""
from camelot.view.controls.action_widget import ActionAction
if main_menu == None:
return
menu_bar = self.menuBar()
for menu in main_menu:
menu_bar.addMenu( menu.render( self.gui_context, menu_bar ) )
for qaction in menu_bar.findChildren( ActionAction ):
qaction.triggered.connect( self.action_triggered )
def get_gui_context( self ):
"""Get the :class:`GuiContext` of the active view in the mainwindow,
or the :class:`GuiContext` of the application.
:return: a :class:`camelot.admin.action.base.GuiContext`
"""
active_view = self.gui_context.workspace.active_view()
if active_view:
return active_view.gui_context
return self.gui_context
@QtCore.pyqtSlot( object, object )
def set_toolbar_actions( self, toolbar_area, toolbar_actions ):
"""Set the toolbar for a specific area
:param toolbar_area: the area on which to put the toolbar, from
:class:`Qt.LeftToolBarArea` through :class:`Qt.BottomToolBarArea`
:param toolbar_actions: a list of :class:`camelot.admin.action..base.Action` objects,
as returned by the :meth:`camelot.admin.application_admin.ApplicationAdmin.get_toolbar_actions`
method.
"""
from camelot.view.controls.action_widget import ActionAction
if toolbar_actions != None:
#
# gather menu bar actions to prevent duplication of QActions
#
qactions = dict()
menu_bar = self.menuBar()
if menu_bar:
for qaction in menu_bar.findChildren( ActionAction ):
qactions[qaction.action] = qaction
toolbar = QtGui.QToolBar( _('Toolbar') )
self.addToolBar( toolbar_area, toolbar )
toolbar.setObjectName( 'MainWindowToolBar_%i'%toolbar_area )
toolbar.setMovable( False )
toolbar.setFloatable( False )
for action in toolbar_actions:
qaction = qactions.get( action, None )
if qaction == None:
qaction = action.render( self.gui_context, toolbar )
qaction.triggered.connect( self.action_triggered )
toolbar.addAction( qaction )
self.toolbars.append( toolbar )
toolbar.addWidget( BusyWidget() )
@QtCore.pyqtSlot( object )
def set_left_toolbar_actions( self, toolbar_actions ):
self.set_toolbar_actions( Qt.LeftToolBarArea, toolbar_actions )
@QtCore.pyqtSlot( object )
def set_right_toolbar_actions( self, toolbar_actions ):
self.set_toolbar_actions( Qt.RightToolBarArea, toolbar_actions )
@QtCore.pyqtSlot( object )
def set_top_toolbar_actions( self, toolbar_actions ):
self.set_toolbar_actions( Qt.TopToolBarArea, toolbar_actions )
@QtCore.pyqtSlot( object )
def set_bottom_toolbar_actions( self, toolbar_actions ):
self.set_toolbar_actions( Qt.BottomToolBarArea, toolbar_actions )
@QtCore.pyqtSlot( object )
def set_hidden_actions( self, hidden_actions ):
from camelot.view.controls.action_widget import ActionAction
for action in hidden_actions:
action_action = ActionAction( action, self.gui_context, self )
action_action.triggered.connect( self.action_triggered )
self.addAction( action_action )
@QtCore.pyqtSlot()
def view_activated( self ):
"""Update the state of the actions when the active tab in the
desktop widget has changed"""
from camelot.view.controls.action_widget import ActionAction
gui_context = self.get_gui_context()
model_context = gui_context.create_model_context()
for toolbar in self.toolbars:
for qaction in toolbar.actions():
if isinstance( qaction, ActionAction ):
post( qaction.action.get_state,
qaction.set_state,
args = ( model_context, ) )
menu_bar = self.menuBar()
if menu_bar:
for qaction in menu_bar.findChildren( ActionAction ):
post( qaction.action.get_state,
qaction.set_state,
args = ( model_context, ) )
@QtCore.pyqtSlot( bool )
def action_triggered( self, _checked = False ):
"""Execute an action that was triggered somewhere in the main window,
such as the toolbar or the main menu"""
action_action = self.sender()
gui_context = self.get_gui_context()
action_action.action.gui_run( gui_context )
@QtCore.pyqtSlot( object )
def set_sections( self, sections ):
"""Set the sections of the navigation pane
        :param sections: a list of :class:`camelot.admin.section.Section` objects,
as returned by the :meth:`camelot.admin.application_admin.ApplicationAdmin.get_sections`
method.
"""
if sections != None:
self.navpane = NavigationPane(
self.app_admin,
workspace=self.workspace,
parent=self
)
self.addDockWidget( Qt.LeftDockWidgetArea, self.navpane )
else:
self.navpane = None
def closeEvent( self, event ):
from camelot.view.model_thread import get_model_thread
model_thread = get_model_thread()
self.workspace.close_all_views()
self.write_settings()
logger.info( 'closing mainwindow' )
model_thread.stop()
super( MainWindow, self ).closeEvent( event )
QtCore.QCoreApplication.exit(0)
|
PypiClean
|
/marshmallow_aiohttp-0.1.0-py3-none-any.whl/marshmallow_aiohttp/_schemas.py
|
from typing import Final
import aiohttp
import ujson
from marshmallow import Schema, fields, post_load
from ._trasers import make_default_trace_configs
DEFAULT_CLIENT_TIMEOUT: Final = aiohttp.ClientTimeout(total=60)
class TCPConnectorSchema(Schema):
# Timout for connection reusing after releasing (optional).
# For disabling keep-alive feature use force_close=True flag.
keepalive_timeout = fields.Float(missing=None)
    # Total number of simultaneous connections.
# If limit is None the connector has no limit.
limit = fields.Int(missing=100)
# Limit simultaneous connections to the same endpoint.
    # Endpoints are the same if they have equal host, port and is_ssl.
# If limit is 0 the connector has no limit.
limit_per_host = fields.Int(missing=0)
    # Some SSL servers do not properly complete the SSL shutdown process;
    # in that case asyncio leaks SSL connections.
    # If this parameter is set to True, aiohttp additionally aborts the
    # underlying transport after 2 seconds.
enable_cleanup_closed = fields.Bool(missing=False)
# Use internal cache for DNS lookups, True by default.
use_dns_cache = fields.Bool(missing=True)
# Expire after some seconds the DNS entries, None forever.
ttl_dns_cache = fields.Int(missing=60 * 60)
# Custom resolver instance to use.
resolver = fields.Raw(missing=aiohttp.AsyncResolver)
@post_load
def make_connector(self, data: dict, **kwargs) -> aiohttp.TCPConnector:
return aiohttp.TCPConnector(**data)
class ClientTimeoutSchema(Schema):
# Total timeout for the whole request.
total = fields.Float(
missing=DEFAULT_CLIENT_TIMEOUT.total,
allow_none=True,
)
# Total timeout for acquiring a connection from pool.
# The time consists connection establishment for a new connection
# or waiting for a free connection from a pool if pool connection
# limits are exceeded.
connect = fields.Float(
missing=DEFAULT_CLIENT_TIMEOUT.connect,
allow_none=True,
)
    # A timeout for connecting to a peer for a new connection,
    # not given from a pool.
sock_connect = fields.Float(
missing=DEFAULT_CLIENT_TIMEOUT.sock_connect,
allow_none=True,
)
    # A timeout for reading a portion of data from a peer.
sock_read = fields.Float(
missing=DEFAULT_CLIENT_TIMEOUT.sock_read,
allow_none=True,
)
@post_load
def make_timeout(self, data: dict, **kwargs) -> aiohttp.ClientTimeout:
return aiohttp.ClientTimeout(**data)
class ClientSessionSchema(Schema):
# TCP connector instance to support connection pooling (optional).
connector = fields.Nested(
TCPConnectorSchema,
missing=lambda: TCPConnectorSchema().load(data={}),
)
# Cookies to send with the request (optional).
cookies = fields.Dict(fields.Str, fields.Str, missing=None)
# HTTP Headers to send with every request (optional).
headers = fields.Dict(fields.Str, fields.Str, missing=None)
# Set of headers for which autogenerated should be skipped (optional).
skip_auto_headers = fields.List(fields.Str, missing=None)
# Json serializer callable.
json_serialize = fields.Raw(missing=lambda: ujson.dumps)
    # By default every session instance has its own private cookie jar
    # for automatic cookie processing, but the user may redefine
    # this behavior by providing their own jar implementation.
#
# If no cookie processing is needed, a aiohttp.DummyCookieJar
# instance can be provided.
cookie_jar = fields.Raw(missing=aiohttp.DummyCookieJar)
# Close connector instance on session closing.
connector_owner = fields.Bool(missing=True)
# Raise an aiohttp.ClientResponseError if the response failed.
# Do nothing for success responses (less than 400).
raise_for_status = fields.Bool(missing=False)
# ClientTimeout settings structure, DEFAULT_TIMEOUT by default.
timeout = fields.Nested(
ClientTimeoutSchema,
missing=DEFAULT_CLIENT_TIMEOUT,
)
# Automatically decompress response body.
auto_decompress = fields.Bool(missing=True)
# Get proxies information from HTTP_PROXY / HTTPS_PROXY environment
# variables if the parameter is True (False by default).
trust_env = fields.Bool(missing=False)
# A list of TraceConfig instances used for client tracing.
# None (default) is used for request tracing disabling.
trace_configs = fields.List(fields.Raw, missing=make_default_trace_configs)
@post_load
def make_session(self, data: dict, **kwargs) -> aiohttp.ClientSession:
return aiohttp.ClientSession(**data)
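# Usage sketch: loading an empty mapping builds an aiohttp.ClientSession from
# the defaults above, and nested keys override individual settings. Because a
# ClientSession needs a running event loop, this should happen inside a
# coroutine, e.g.
#
#     session = ClientSessionSchema().load({"timeout": {"total": 30.0}})
#     try:
#         ...
#     finally:
#         await session.close()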
|
PypiClean
|
/gitone-0.1.3.tar.gz/gitone-0.1.3/docs/readme.html
|
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
    <title>Gitone: Combine multiple git version control steps into one — Gitone 0.0.1 documentation</title>
<link rel="stylesheet" href="_static/alabaster.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<script type="text/javascript" src="_static/language_data.js"></script>
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="next" title="Gitone command-line interface (CLI)" href="cli.html" />
<link rel="prev" title="Welcome to Gitone’s documentation!" href="index.html" />
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
</head><body>
<div class="document">
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h1 class="logo"><a href="index.html">Gitone</a></h1>
<p class="blurb">A Python package for programmatic R markdown workflows</p>
<p>
<iframe src="https://ghbtns.com/github-btn.html?user=marskar&repo=gitone&type=star&count=true&size=large&v=2"
allowtransparency="true" frameborder="0" scrolling="0" width="200px" height="35px"></iframe>
</p>
<h3>Navigation</h3>
<p class="caption"><span class="caption-text">Contents:</span></p>
<ul class="current">
<li class="toctree-l1 current"><a class="current reference internal" href="#">Project overview</a><ul>
<li class="toctree-l2"><a class="reference internal" href="#introduction">Introduction</a></li>
<li class="toctree-l2"><a class="reference internal" href="#documentation-and-code">Documentation and Code</a></li>
<li class="toctree-l2"><a class="reference internal" href="#installation">Installation</a></li>
<li class="toctree-l2"><a class="reference internal" href="#usage">Usage</a></li>
<li class="toctree-l2"><a class="reference internal" href="#next-steps">Next Steps</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="cli.html">Command-line interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="modules.html">Module reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="tests.html">Test reference</a></li>
</ul>
<div class="relations">
<h3>Related Topics</h3>
<ul>
<li><a href="index.html">Documentation overview</a><ul>
<li>Previous: <a href="index.html" title="previous chapter">Welcome to Gitone’s documentation!</a></li>
<li>Next: <a href="cli.html" title="next chapter">Gitone command-line interface (CLI)</a></li>
</ul></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<div class="searchformwrapper">
<form class="search" action="search.html" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
</form>
</div>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="gitone-combine-multiple-git-version-controls-steps-into-one">
<h1>Gitone: Combine multiple <strong>git</strong> version control steps into <strong>one</strong><a class="headerlink" href="#gitone-combine-multiple-git-version-controls-steps-into-one" title="Permalink to this headline">¶</a></h1>
<p><a class="reference external" href="https://travis-ci.org/marskar/gitone"><img alt="Build" src="https://travis-ci.org/marskar/gitone.svg?branch=master" /></a> <a class="reference external" href="https://opensource.org/licenses/MIT"><img alt="License" src="https://img.shields.io/badge/License-MIT-brightgreen.svg" /></a> <a class="reference external" href="https://pypi.python.org/pypi/gitone"><img alt="PyPI" src="https://img.shields.io/pypi/v/gitone.svg" /></a> <a class="reference external" href="https://www.repostatus.org/#active"><img alt="Project Status: Active – The project has reached a stable, usable state and is being actively developed." src="https://www.repostatus.org/badges/latest/active.svg" /></a> <a class="reference external" href="https://pyup.io/repos/github/marskar/gitone/"><img alt="Updates" src="https://pyup.io/repos/github/marskar/gitone/shield.svg" /></a> <a class="reference external" href="https://www.python.org/downloads/"><img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/gitone.svg" /></a></p>
<div class="section" id="introduction">
<h2>Introduction<a class="headerlink" href="#introduction" title="Permalink to this headline">¶</a></h2>
<p>The <code class="docutils literal notranslate"><span class="pre">gitone</span></code> Python package takes some of the tedium out of <a class="reference external" href="https://git-scm.com/">git</a> version control by rolling multiple git shell commands into one shell command or Python function.</p>
<p>Unlike git shell commands, <code class="docutils literal notranslate"><span class="pre">gitone</span></code> shell commands and Python functions can automatically generate commit messages if a commit message is not provided!</p>
<p>You can use <code class="docutils literal notranslate"><span class="pre">gitone</span></code> in</p>
<ul class="simple">
<li><p>your terminal (e.g. <code class="docutils literal notranslate"><span class="pre">bash</span></code>, <code class="docutils literal notranslate"><span class="pre">zsh</span></code>, <code class="docutils literal notranslate"><span class="pre">fish</span></code>, etc.) or</p></li>
<li><p>your favorite Python environment (e.g. <a class="reference external" href="https://www.jetbrains.com/pycharm/">PyCharm</a> or <a class="reference external" href="https://code.visualstudio.com/docs/python/python-tutorial">Visual Studio Code</a>).</p></li>
</ul>
<p>The <code class="docutils literal notranslate"><span class="pre">gitone</span></code> Python package consists of 8 shell commands and Python functions:</p>
<ul class="simple">
<li><p><code class="docutils literal notranslate"><span class="pre">cam</span></code>, which stands for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">commit</span> <span class="pre">-am</span></code>, will add and commit all changes made to tracked files.</p></li>
<li><p><code class="docutils literal notranslate"><span class="pre">camp</span></code>, which stands for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">commit</span> <span class="pre">-am</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">push</span></code>, will add and commit all changes made to tracked files and push the commit to the remote repository.</p></li>
<li><p><code class="docutils literal notranslate"><span class="pre">acm</span></code>, which stands for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">add</span> <span class="pre">--all</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">commit</span> <span class="pre">-m</span></code>, will add and commit all changes made to all files and push the changes to the remote repository.</p></li>
<li><p><code class="docutils literal notranslate"><span class="pre">acmp</span></code>, which stands for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">add</span> <span class="pre">--all</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">commit</span> <span class="pre">-m</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">push</span></code>, will add and commit all changes made to all files and push the changes to the remote repository.</p></li>
</ul>
<p>In summary, <code class="docutils literal notranslate"><span class="pre">cam</span></code> and <code class="docutils literal notranslate"><span class="pre">camp</span></code> work on only tracked files (those that have previously been added to git’s index),
while <code class="docutils literal notranslate"><span class="pre">acm</span></code> and <code class="docutils literal notranslate"><span class="pre">acmp</span></code> work on all files by adding untracked files to git’s index.</p>
<p>There are also the <code class="docutils literal notranslate"><span class="pre">--amend</span></code> versions of the above:</p>
<ul class="simple">
<li><p><code class="docutils literal notranslate"><span class="pre">amend</span></code>, which is short for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">commit</span> <span class="pre">--amend</span> <span class="pre">-am</span></code>, will overwrite the previous commit by adding and committing all changes made to tracked files.</p></li>
<li><p><code class="docutils literal notranslate"><span class="pre">amendp</span></code>, which is short for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">commit</span> <span class="pre">--amend</span> <span class="pre">-am</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">push</span> <span class="pre">--force</span></code>, will overwrite the previous commit by adding and committing all changes made to tracked files and then force push the overwritten commit to the remote repository.</p></li>
<li><p><code class="docutils literal notranslate"><span class="pre">aamend</span></code>, which is short for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">add</span> <span class="pre">--all</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">commit</span> <span class="pre">--amend</span> <span class="pre">-m</span></code>, will overwrite the previous commit by adding and committing all changes made to all files.</p></li>
<li><p><code class="docutils literal notranslate"><span class="pre">aamendp</span></code>, which is short for <code class="docutils literal notranslate"><span class="pre">git</span> <span class="pre">add</span> <span class="pre">--all</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">commit</span> <span class="pre">--amend</span> <span class="pre">-m</span> <span class="pre">&&</span> <span class="pre">git</span> <span class="pre">push</span> <span class="pre">--force</span></code>, will overwrite the previous commit by adding and committing all changes made to all files and then force push the overwritten commit to the remote repository.</p></li>
</ul>
<p>Similarly to the first four, <code class="docutils literal notranslate"><span class="pre">amend</span></code> and <code class="docutils literal notranslate"><span class="pre">amendp</span></code> work on only tracked files (those that have previously been added to git’s index),
while <code class="docutils literal notranslate"><span class="pre">aamend</span></code> and <code class="docutils literal notranslate"><span class="pre">aamendp</span></code> work on all files by adding untracked files to git’s index.</p>
<p>All <code class="docutils literal notranslate"><span class="pre">gitone</span></code> functions and commands rely on the <a class="reference external" href="https://gitpython.readthedocs.io/">GitPython</a> Python library.
The command line interface relies on the <a class="reference external" href="https://click.palletsprojects.com/">click</a> Python library.</p>
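The package's own implementation is not shown here, but as a rough illustration of the pattern these commands wrap, an `acm`-style helper could be written with GitPython along the following lines. The auto-generated message format is an assumption, not gitone's actual behavior.

```python
# Sketch only: the GitPython pattern an acm-style command could follow.
import datetime

import git  # GitPython


def acm(message=None):
    """Add all files and commit, generating a message if none is given."""
    repo = git.Repo(".", search_parent_directories=True)
    repo.git.add(all=True)  # git add --all
    if message is None:
        # Assumed message format; the real package derives its own summary.
        message = "Auto-commit at " + datetime.datetime.now().isoformat()
    repo.index.commit(message)  # git commit -m "<message>"
    return message
```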
## Documentation and Code

The documentation is hosted at https://marskar.github.io/gitone/.

The code is hosted at https://github.com/marskar/gitone.

## Installation

```shell
$ pip install gitone
```

## Usage

Run any of the available shell commands or Python functions without arguments and a commit message will be automatically generated.

```python
>>> camp()
>>> acmp()
>>> cam()
>>> acm()
```

```shell
$ camp
$ acmp
$ cam
$ acm
```

You can also pass a commit message to any of the functions or shell commands.

```python
>>> camp(message="Made some changes.")
>>> acmp("Lemme try something.")
>>> cam("Not sure what changed.")
>>> acm("Should be OK now.")
```

When using the shell commands, do not wrap the commit message in quotes (`""`), or the quotes will be included in the commit message.

```shell
$ camp Made some changes.
$ acmp Lemme try something.
$ cam Not sure what changed.
$ acm Should be OK now.
```

To overwrite the previous commit, you can use the amend functions. If you do not provide a commit message, the previous commit message will be reused.

```python
>>> amend()
>>> amendp()
>>> aamend()
>>> aamendp()
```

```shell
$ amend
$ amendp
$ aamend
$ aamendp
```

## Next Steps

Setting up a repo can be a pain.

* Write an `init` function and command to handle all of the repo setup steps like in [this Makefile](https://github.com/marskar/cookiecutter/blob/master/%7B%7Bcookiecutter.repo%7D%7D/Makefile#L21). Inspired by the [usethis](https://usethis.r-lib.org/reference/use_github.html) R package. A rough sketch of such an `init` is given below.
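As a starting point only, an `init` built on GitPython might combine the setup steps like this. The argument names, the use of `origin`, and the first-push refspec are assumptions rather than anything gitone ships.

```python
# Sketch only: a possible shape for a gitone-style init command.
import git  # GitPython


def init(path=".", remote_url=None, message="Initial commit"):
    """Initialize a repository, make a first commit, and optionally add and push a remote."""
    repo = git.Repo.init(path)  # git init
    repo.git.add(all=True)      # git add --all
    repo.index.commit(message)  # git commit -m "Initial commit"
    if remote_url is not None:
        origin = repo.create_remote("origin", remote_url)  # git remote add origin <url>
        origin.push(refspec="HEAD")                        # push the current branch
    return repo
```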
|
PypiClean
|
/m3-ZSI-2.1.tar.gz/m3-ZSI-2.1/ZSI/wstools/TimeoutSocket.py
|
ident = "$Id: TimeoutSocket.py 237 2003-05-20 21:10:14Z warnes $"
import string, socket, select, errno
WSAEINVAL = getattr(errno, 'WSAEINVAL', 10022)
class TimeoutSocket:
"""A socket imposter that supports timeout limits."""
def __init__(self, timeout=20, sock=None):
self.timeout = float(timeout)
self.inbuf = ''
if sock is None:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock = sock
self.sock.setblocking(0)
self._rbuf = ''
self._wbuf = ''
def __getattr__(self, name):
# Delegate to real socket attributes.
return getattr(self.sock, name)
def connect(self, *addr):
timeout = self.timeout
sock = self.sock
try:
# Non-blocking mode
sock.setblocking(0)
apply(sock.connect, addr)
sock.setblocking(timeout != 0)
return 1
except socket.error,why:
if not timeout:
raise
sock.setblocking(1)
if len(why.args) == 1:
code = 0
else:
code, why = why
if code not in (
errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK
):
raise
r,w,e = select.select([],[sock],[],timeout)
if w:
try:
apply(sock.connect, addr)
return 1
except socket.error,why:
if len(why.args) == 1:
code = 0
else:
code, why = why
if code in (errno.EISCONN, WSAEINVAL):
return 1
raise
raise TimeoutError('socket connect() timeout.')
def send(self, data, flags=0):
total = len(data)
next = 0
while 1:
r, w, e = select.select([],[self.sock], [], self.timeout)
if w:
buff = data[next:next + 8192]
sent = self.sock.send(buff, flags)
next = next + sent
if next == total:
return total
continue
raise TimeoutError('socket send() timeout.')
def recv(self, amt, flags=0):
if select.select([self.sock], [], [], self.timeout)[0]:
return self.sock.recv(amt, flags)
raise TimeoutError('socket recv() timeout.')
buffsize = 4096
handles = 1
def makefile(self, mode="r", buffsize=-1):
self.handles = self.handles + 1
self.mode = mode
return self
def close(self):
self.handles = self.handles - 1
if self.handles == 0 and self.sock.fileno() >= 0:
self.sock.close()
def read(self, n=-1):
if not isinstance(n, type(1)):
n = -1
if n >= 0:
k = len(self._rbuf)
if n <= k:
data = self._rbuf[:n]
self._rbuf = self._rbuf[n:]
return data
n = n - k
L = [self._rbuf]
self._rbuf = ""
while n > 0:
new = self.recv(max(n, self.buffsize))
if not new: break
k = len(new)
if k > n:
L.append(new[:n])
self._rbuf = new[n:]
break
L.append(new)
n = n - k
return "".join(L)
k = max(4096, self.buffsize)
L = [self._rbuf]
self._rbuf = ""
while 1:
new = self.recv(k)
if not new: break
L.append(new)
k = min(k*2, 1024**2)
return "".join(L)
def readline(self, limit=-1):
data = ""
i = self._rbuf.find('\n')
while i < 0 and not (0 < limit <= len(self._rbuf)):
new = self.recv(self.buffsize)
if not new: break
i = new.find('\n')
if i >= 0: i = i + len(self._rbuf)
self._rbuf = self._rbuf + new
if i < 0: i = len(self._rbuf)
else: i = i+1
if 0 <= limit < len(self._rbuf): i = limit
data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
return data
def readlines(self, sizehint = 0):
total = 0
list = []
while 1:
line = self.readline()
if not line: break
list.append(line)
total += len(line)
if sizehint and total >= sizehint:
break
return list
def writelines(self, list):
self.send(''.join(list))
def write(self, data):
self.send(data)
def flush(self):
pass
class TimeoutError(Exception):
pass
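# Editor's usage sketch (not part of the original module). A TimeoutSocket can
# stand in for a blocking socket while enforcing a time limit on connect(),
# send() and recv(); the host, port and timeout below are arbitrary examples.
#
#   sock = TimeoutSocket(timeout=5)
#   sock.connect(('www.example.com', 80))
#   sock.send('HEAD / HTTP/1.0\r\nHost: www.example.com\r\n\r\n')
#   reply = sock.recv(4096)
#   sock.close()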
|
PypiClean
|
/TDY_PKG_saquibquddus-1.1.1-py3-none-any.whl/tf2_webapp/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import functools
from six.moves import range
import tensorflow.compat.v1 as tf
import tf_slim as slim
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import feature_map_generators
from object_detection.utils import context_manager
from object_detection.utils import ops
from object_detection.utils import shape_utils
from nets.mobilenet import mobilenet
from nets.mobilenet import mobilenet_v2
# A modified config of mobilenet v2 that makes it more detection friendly.
def _create_modified_mobilenet_config():
conv_defs = copy.deepcopy(mobilenet_v2.V2_DEF)
conv_defs['spec'][-1] = mobilenet.op(
slim.conv2d, stride=1, kernel_size=[1, 1], num_outputs=256)
return conv_defs
class SSDMobileNetV2FpnFeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
"""SSD Feature Extractor using MobilenetV2 FPN features."""
def __init__(self,
is_training,
depth_multiplier,
min_depth,
pad_to_multiple,
conv_hyperparams_fn,
fpn_min_level=3,
fpn_max_level=7,
additional_layer_depth=256,
reuse_weights=None,
use_explicit_padding=False,
use_depthwise=False,
use_native_resize_op=False,
override_base_feature_extractor_hyperparams=False):
"""SSD FPN feature extractor based on Mobilenet v2 architecture.
Args:
is_training: whether the network is in training mode.
depth_multiplier: float depth multiplier for feature extractor.
min_depth: minimum feature extractor depth.
pad_to_multiple: the nearest multiple to zero pad the input height and
width dimensions to.
conv_hyperparams_fn: A function to construct tf slim arg_scope for conv2d
and separable_conv2d ops in the layers that are added on top of the base
feature extractor.
fpn_min_level: the highest resolution feature map to use in FPN. The valid
values are {2, 3, 4, 5} which map to MobileNet v2 layers
{layer_4, layer_7, layer_14, layer_19}, respectively.
fpn_max_level: the smallest resolution feature map to construct or use in
FPN. FPN constructions uses features maps starting from fpn_min_level
upto the fpn_max_level. In the case that there are not enough feature
maps in the backbone network, additional feature maps are created by
applying stride 2 convolutions until we get the desired number of fpn
levels.
additional_layer_depth: additional feature map layer channel depth.
reuse_weights: whether to reuse variables. Default is None.
use_explicit_padding: Whether to use explicit padding when extracting
features. Default is False.
use_depthwise: Whether to use depthwise convolutions. Default is False.
use_native_resize_op: Whether to use tf.image.nearest_neighbor_resize
to do upsampling in FPN. Default is false.
override_base_feature_extractor_hyperparams: Whether to override
hyperparameters of the base feature extractor with the one from
`conv_hyperparams_fn`.
"""
super(SSDMobileNetV2FpnFeatureExtractor, self).__init__(
is_training=is_training,
depth_multiplier=depth_multiplier,
min_depth=min_depth,
pad_to_multiple=pad_to_multiple,
conv_hyperparams_fn=conv_hyperparams_fn,
reuse_weights=reuse_weights,
use_explicit_padding=use_explicit_padding,
use_depthwise=use_depthwise,
override_base_feature_extractor_hyperparams=
override_base_feature_extractor_hyperparams)
self._fpn_min_level = fpn_min_level
self._fpn_max_level = fpn_max_level
self._additional_layer_depth = additional_layer_depth
self._conv_defs = None
if self._use_depthwise:
self._conv_defs = _create_modified_mobilenet_config()
self._use_native_resize_op = use_native_resize_op
def preprocess(self, resized_inputs):
"""SSD preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
def extract_features(self, preprocessed_inputs):
"""Extract features from preprocessed inputs.
Args:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
feature_maps: a list of tensors where the ith tensor has shape
[batch, height_i, width_i, depth_i]
"""
preprocessed_inputs = shape_utils.check_min_image_dim(
33, preprocessed_inputs)
with tf.variable_scope('MobilenetV2', reuse=self._reuse_weights) as scope:
with slim.arg_scope(
mobilenet_v2.training_scope(is_training=None, bn_decay=0.9997)), \
slim.arg_scope(
[mobilenet.depth_multiplier], min_depth=self._min_depth):
with (slim.arg_scope(self._conv_hyperparams_fn())
if self._override_base_feature_extractor_hyperparams else
context_manager.IdentityContextManager()):
_, image_features = mobilenet_v2.mobilenet_base(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple),
final_endpoint='layer_19',
depth_multiplier=self._depth_multiplier,
conv_defs=self._conv_defs,
use_explicit_padding=self._use_explicit_padding,
scope=scope)
depth_fn = lambda d: max(int(d * self._depth_multiplier), self._min_depth)
with slim.arg_scope(self._conv_hyperparams_fn()):
with tf.variable_scope('fpn', reuse=self._reuse_weights):
feature_blocks = [
'layer_4', 'layer_7', 'layer_14', 'layer_19'
]
base_fpn_max_level = min(self._fpn_max_level, 5)
feature_block_list = []
for level in range(self._fpn_min_level, base_fpn_max_level + 1):
feature_block_list.append(feature_blocks[level - 2])
fpn_features = feature_map_generators.fpn_top_down_feature_maps(
[(key, image_features[key]) for key in feature_block_list],
depth=depth_fn(self._additional_layer_depth),
use_depthwise=self._use_depthwise,
use_explicit_padding=self._use_explicit_padding,
use_native_resize_op=self._use_native_resize_op)
feature_maps = []
for level in range(self._fpn_min_level, base_fpn_max_level + 1):
feature_maps.append(fpn_features['top_down_{}'.format(
feature_blocks[level - 2])])
last_feature_map = fpn_features['top_down_{}'.format(
feature_blocks[base_fpn_max_level - 2])]
# Construct coarse features
padding = 'VALID' if self._use_explicit_padding else 'SAME'
kernel_size = 3
for i in range(base_fpn_max_level + 1, self._fpn_max_level + 1):
if self._use_depthwise:
conv_op = functools.partial(
slim.separable_conv2d, depth_multiplier=1)
else:
conv_op = slim.conv2d
if self._use_explicit_padding:
last_feature_map = ops.fixed_padding(
last_feature_map, kernel_size)
last_feature_map = conv_op(
last_feature_map,
num_outputs=depth_fn(self._additional_layer_depth),
kernel_size=[kernel_size, kernel_size],
stride=2,
padding=padding,
scope='bottom_up_Conv2d_{}'.format(i - base_fpn_max_level + 19))
feature_maps.append(last_feature_map)
return feature_maps
|
PypiClean
|
/nengolib-0.5.2.tar.gz/nengolib-0.5.2/docs/notebooks/examples/_decoding_derivative.ipynb
|
# Differentiation using Heterogeneous Synapses
The following model shows how to solve for decoders over time in order to compute functions that depend on past inputs (i.e. a filter). To do this accurately in some cases without Principle 3, it becomes necessary to introduce heterogeneous synapses, so that each neuron's "augmented encoder" extracts different "temporal features" from the input.
Note that this won't be as good as just using `LinearNetwork` to build the desired filter, because that approach can use a passthrough. A passthrough is what's needed to compute the highpass transfer function exactly; if a passthrough is allowed, this problem becomes trivial.
```
%pylab inline
import pylab
try:
import seaborn as sns # optional; prettier graphs
except ImportError:
pass
import numpy as np
from scipy.linalg import svd
import nengo
import nengolib
from nengolib.synapses import HeteroSynapse
def make_model(synapses, encoders, decoders, n_neurons, tau_derivative,
seed, stim_seed, dt, T, stim_cutoff_freq=15, stim_rms=0.5,
neuron_type=nengo.LIF()):
with nengolib.Network(seed=seed) as model:
# White noise input
stim = nengo.Node(output=nengo.processes.WhiteSignal(
T, high=stim_cutoff_freq, rms=stim_rms, seed=stim_seed))
# Heterogeneous synapses (one per neuron)
x_synapses = nengo.Node(
size_in=1, output=HeteroSynapse(synapses, dt=dt))
# Ensemble that encodes the signal
x = nengo.Ensemble(
n_neurons, 1, encoders=encoders, neuron_type=neuron_type)
# Optional decoding (linear readout)
if decoders is None:
decoders = np.zeros((n_neurons, 1))
y = nengo.Node(size_in=1)
# Connections
nengo.Connection(stim, x_synapses, synapse=None)
nengo.Connection(x_synapses, x.neurons,
function=lambda x: x*encoders[:, 0], synapse=None)
nengo.Connection(x.neurons, y, transform=decoders.T,
synapse=tau_derivative)
# Probes
p_input = nengo.Probe(stim, synapse=None)
#p_synapses = nengo.Probe(x_synapses, synapse=tau_derivative)
p_x = nengo.Probe(x.neurons, synapse=tau_derivative)
p_y = nengo.Probe(y, synapse=None)
return model, (p_input, p_x, p_y)
n_neurons = 1000
tau_derivative = 0.005
seed = 0
rng = np.random.RandomState(seed)
taus = rng.uniform(0.0005, 0.01, size=n_neurons) # encoding filters
synapses = [nengolib.Lowpass(tau) for tau in taus]
encoders = nengolib.stats.sphere.sample(n_neurons, rng=rng)
```
We will approximate the filter $H(s) = 2\tau s / (\tau s + 1)$. This is the derivative $s$ multiplied by a lowpass $1 / (\tau s + 1)$ (to make it causal) and scaled by $2\tau$ (for normalization).
```
H = 2 * nengolib.synapses.Highpass(tau_derivative)
h_size = 1000
h_dt = 0.001
freqs = np.fft.rfftfreq(h_size, d=h_dt)
desired = H.impulse(h_size, dt=h_dt)
pylab.figure()
pylab.title("Derivative Filter (Fourier Domain)")
pylab.plot(freqs, abs(np.fft.rfft(desired)), label="Desired")
pylab.xlabel("Frequency ($Hz$)")
#pylab.legend(loc='center right')
pylab.show()
```
### Training
To solve for the decoders, we first simulate the network on a $15\,Hz$ training signal and collect the spike data filtered by $\tau$. Then use a decoder solver (e.g. LstsqL2) where the target points are $y = (x \ast h)(t)$ given the filtered spikes $x$.
```
dt = 0.0002
T = 1.0
model, (p_input, p_x, p_y) = make_model(
synapses, encoders, decoders=None, n_neurons=n_neurons,
tau_derivative=tau_derivative, seed=seed, stim_seed=0, dt=dt, T=T)
sim = nengo.Simulator(model, dt=dt)
sim.run(T, progress_bar=False)
# AD = Y
X = sim.data[p_input]
A = sim.data[p_x]
Y = H.filt(X, dt=dt)
decoders, info = nengo.solvers.LstsqL2()(A, Y)
Y_hat = np.dot(A, decoders)
gamma = np.dot(A.T, A)
U, S, V = svd(gamma)
chi = np.dot(A, U)
pylab.figure(figsize=(12, 7))
pylab.title("SVD of Gamma Matrix")
pylab.plot(sim.trange(), X, label="Input")
for i in range(3):
pylab.plot(sim.trange(), chi[:, i] / len(chi), label=r"$\chi_%d$" % i)
pylab.legend(loc='best')
pylab.show()
def plot_signals(X, Y, Y_hat, offset=100):
pylab.figure(figsize=(12, 7))
pylab.title("Derivative of Signal (RMSE: %.3f)" % nengo.utils.numpy.rmse(Y, Y_hat))
pylab.plot(sim.trange()[offset:], X[offset:], label="Input")
pylab.plot(sim.trange()[offset:], Y[offset:], label="Ideal")
pylab.plot(sim.trange()[offset:], Y_hat[offset:], label="Approximation")
pylab.legend(loc='best')
pylab.xlabel("Time ($s$)")
#pylab.ylim(-1, 1)
pylab.show()
plot_signals(X, Y, Y_hat)
```
### Validation
Now we demonstrate that these same decoders generalize to other signals, even at a different frequency (e.g. $10\,Hz$)!
```
model, (p_input, p_x, p_y) = make_model(
synapses, encoders, decoders=decoders, n_neurons=n_neurons,
tau_derivative=tau_derivative, seed=seed, stim_seed=1, dt=dt, T=T,
stim_cutoff_freq=10)
sim = nengo.Simulator(model, dt=dt)
sim.run(T, progress_bar=False)
X = sim.data[p_input]
Y = H.filt(X, dt=dt)
Y_hat = sim.data[p_y]
plot_signals(X, Y, Y_hat)
```
Lastly it is natural to wonder how crucial it is to have heterogeneous synapses. We can answer that question by repeating this with a number of different sampling widths, i.e. $\tau \sim \mathcal{U}[L, L + width]$ for various $width$ parameters.
```
num_samples = 5
plot_x = []
plot_y = []
T = 1
for width in np.linspace(0, 0.03, num_samples):
L, U = 0.0005, 0.0005 + width
rng = np.random.RandomState(seed)
taus = rng.uniform(L, U, size=n_neurons)
synapses = [nengolib.Lowpass(tau) for tau in taus]
model, (p_input, p_x, p_y) = make_model(
synapses, encoders, decoders=None, n_neurons=n_neurons,
tau_derivative=tau_derivative, seed=seed, stim_seed=0, dt=dt, T=T)
sim = nengo.Simulator(model, dt=dt)
sim.run(T, progress_bar=False)
X = sim.data[p_input]
A = sim.data[p_x]
Y = H.filt(X, dt=dt)
decoders, info = nengo.solvers.LstsqL2()(A, Y)
Y_hat = np.dot(A, decoders)
plot_x.append(width)
plot_y.append(nengo.utils.numpy.rmse(Y, Y_hat))
from scipy.optimize import curve_fit
def fit(x, a, b, c, d, e):
return a*np.exp(-b*x) + c*np.exp(-d*x) + e
popt, pcov = curve_fit(fit, plot_x, plot_y, bounds=(0, [1., 10000., 1., 10000., 1.]))
pylab.figure()
pylab.title("Effect of Increasing Heterogeneity")
pylab.plot(plot_x, plot_y)
#pylab.plot(plot_x, fit(np.asarray(plot_x), *popt), color='green', linewidth=1, alpha=0.8)
pylab.xlabel(r"$\tau$ width ($ms$)")
pylab.ylabel("RMSE")
pylab.show()
```
|
PypiClean
|
/imbibe-0.0.4.tar.gz/imbibe-0.0.4/imbibe.py
|
import collections
import ujson
import zmq
class Imbibe(object):
def __init__(self, servers):
if not isinstance(servers, list):
self.servers = [servers]
else:
self.servers = servers
self.context = zmq.Context()
self.sub_socket = self.context.socket(zmq.SUB)
        for server in self.servers:
print "Connect to {0}".format(server)
self.sub_socket.connect('tcp://{0}'.format(server))
self.sub_socket.setsockopt(zmq.SUBSCRIBE, '')
self.poller = zmq.Poller()
self.poller.register(self.sub_socket, zmq.POLLIN)
self.counters = collections.defaultdict(dict)
def imbibe(self):
""" Yield metrics """
self.running = True
while self.running:
socks = dict(self.poller.poll(1000))
if self.sub_socket in socks and socks[self.sub_socket] == zmq.POLLIN:
metrics = ujson.loads(self.sub_socket.recv())
for m in metrics:
yield self.__process_metric(m)
def stop(self):
self.running = False
def __process_metric(self, metric):
hostname, app_name, metric_name, metric_type, value, metric_time = metric
value = float(value)
metric_time = float(metric_time)
ret_val = value
if metric_type == 'COUNTER':
# Calculate a rate
full_name = '{0}/{1}'.format(app_name, metric_name)
if full_name in self.counters[hostname]:
last_val, last_ts = self.counters[hostname][full_name]
if value > last_val:
ret_val = (value - last_val) / (metric_time - last_ts)
else:
ret_val = None
else:
ret_val = None
self.counters[hostname][full_name] = (value, metric_time)
return (hostname, app_name, metric_name, ret_val, metric_time)
if __name__=='__main__':
i = Imbibe(['127.0.0.1:5002'])
try:
for m in i.imbibe():
print m
except Exception, e:
print "Exception... stop imbibing - {0}".format(e)
i.stop()
|
PypiClean
|
/anthill-common-0.2.5.tar.gz/anthill-common-0.2.5/anthill/common/login.py
|
from tornado.gen import Task
from . import cached
from . validate import validate
from . internal import Internal, InternalError
from . import singleton
import ujson
class GamespaceAdapter(object):
def __init__(self, data):
self.gamespace_id = data.get("id")
self.name = data.get("name")
self.title = data.get("title")
def dump(self):
return {
"id": self.gamespace_id,
"name": self.name,
"title": self.title
}
class LoginClientError(Exception):
def __init__(self, code, message):
self.code = code
self.message = message
class LoginClient(object, metaclass=singleton.Singleton):
def __init__(self, cache):
self.cache = cache
self.internal = Internal()
@validate(gamespace_name="str_name", gamespace_info=GamespaceAdapter)
async def set_gamespace(self, gamespace_name, gamespace_info):
"""
        Do not use this method for any purpose except testing,
        as it affects the cache permanently
"""
async with self.cache.acquire() as db:
await db.set("gamespace_info:" + gamespace_name, ujson.dumps(gamespace_info.dump()))
async def find_gamespace(self, gamespace_name):
@cached(kv=self.cache,
h=lambda: "gamespace_info:" + gamespace_name,
ttl=300,
json=True)
async def get():
try:
response = await self.internal.request(
"login",
"get_gamespace",
name=gamespace_name)
except InternalError as e:
raise LoginClientError(e.code, str(e))
else:
return response
gamespace_info = await get()
if gamespace_info is None:
return None
return GamespaceAdapter(gamespace_info)
async def get_gamespaces(self):
@cached(kv=self.cache,
h=lambda: "gamespaces_list",
ttl=30,
json=True)
async def get():
try:
response = await self.internal.request("login", "get_gamespaces")
except InternalError as e:
raise LoginClientError(e.code, str(e))
else:
return response
gamespace_list = await get()
if gamespace_list is None:
return None
return list(map(GamespaceAdapter, gamespace_list))
|
PypiClean
|
/txt_to_html-0.0.1.tar.gz/txt_to_html-0.0.1/resources/MathJax-2.7.2/unpacked/localization/kn/kn.js
|
MathJax.Localization.addTranslation("kn",null,{
menuTitle: "\u0C95\u0CA8\u0CCD\u0CA8\u0CA1",
version: "2.7.2",
isLoaded: true,
domains: {
"_": {
version: "2.7.2",
isLoaded: true,
strings: {
CookieConfig: "\u0CAE\u0CA5\u0CCD\u0C9C\u0C95\u0CCD\u0CB7\u0CCD \u0C87\u0C97\u0CC6 \u0C92\u0C82\u0CA6\u0CC1 \u0CB8\u0CA6\u0CB8\u0CCD\u0CAF \u0C9A\u0CCA\u0CA8\u0CCD\u0CAB\u0CBC\u0CBF\u0C97\u0CC1\u0CB0\u0CA4\u0CBF\u0C92\u0CA8\u0CCD \u0C95\u0CC1\u0C95\u0CC0 \u0CB8\u0CBF\u0C95\u0CCD\u0C95\u0CBF\u0CA6\u0CC6. \u0C85\u0CA6\u0CCD\u0CA6\u0CA8\u0CCD\u0CA8 \u0CB0\u0CA8\u0CCD \u0CAE\u0CBE\u0CA1\u0CAC\u0CC7\u0C95? \n\n(\u0CA8\u0CC0\u0CB5\u0CC7 \u0C85\u0CA6\u0CA8\u0CCD\u0CA8 \u0CB8\u0CC6\u0C9F\u0CCD \u0C85\u0CAA\u0CCD \u0CAE\u0CBE\u0CA1\u0CBF\u0CA6\u0CC1 \u0C85\u0CB2\u0CCD\u0CB2 \u0C86\u0CA6\u0CB0\u0CC6 \"Cancel\" \u0C85\u0CA5\u0CB5\u0CBE \"\u0CB0\u0CA6\u0CCD\u0CA6\u0CC1\" \u0CB5\u0CA4\u0CCD\u0CA4\u0CBF.)",
MathProcessingError: "\u0C97\u0CA3\u0CBF\u0CA4 \u0CAA\u0CCD\u0CB0\u0C95\u0CCD\u0CB0\u0CBF\u0CAF\u0CC6\u0CAF\u0CB2\u0CCD\u0CB2\u0CBF \u0CA6\u0CCB\u0CB7",
MathError: "\u0C97\u0CA3\u0CBF\u0CA4 \u0CA6\u0CCB\u0CB7",
LoadFile: "%1 \u0CB2\u0CCB\u0CA1\u0CCD \u0C86\u0C97\u0CC1\u0CA4\u0CCD\u0CA4\u0CBF\u0CA6\u0CC6",
Loading: "\u0CB2\u0CCB\u0CA1\u0CCD \u0C86\u0C97\u0CC1\u0CA4\u0CCD\u0CA4\u0CBF\u0CA6\u0CC6",
LoadFailed: "%1 \u0CAB\u0CC8\u0CB2\u0CCD \u0CB2\u0CCB\u0CA1\u0CCD \u0C86\u0C97\u0CB2\u0CBF\u0CB2\u0CCD\u0CB2",
ProcessMath: "\u0C97\u0CA3\u0CBF\u0CA4 \u0CAA\u0CCD\u0CB0\u0C95\u0CCD\u0CB0\u0CBF\u0CAF\u0CC6 \u0C86\u0C97\u0CC1\u0CA4\u0CCD\u0CA4\u0CBF\u0CA6\u0CC6: %1%%",
Processing: "\u0CAA\u0CCD\u0CB0\u0C95\u0CCD\u0CB0\u0CBF\u0CAF\u0CC6 \u0C86\u0C97\u0CC1\u0CA4\u0CCD\u0CA4\u0CBF\u0CA6\u0CC6",
TypesetMath: "\u0C97\u0CA3\u0CBF\u0CA4 \u0C9F\u0CC8\u0CAA\u0CCD\u200D\u0CB8\u0CC6\u0C9F\u0CCD\u0C9F\u0CBF\u0C82\u0C97\u0CCD: %1%%",
Typesetting: "\u0C9F\u0CC8\u0CAA\u0CCD\u200D\u0CB8\u0CC6\u0C9F\u0CCD\u0C9F\u0CBF\u0C82\u0C97\u0CCD",
MathJaxNotSupported: "\u0CA8\u0CBF\u0CAE\u0CCD\u0CAE \u0CAC\u0CCD\u0CB0\u0CCC\u0CB8\u0CB0\u0CCD MathJax\u0C85\u0CA8\u0CCD\u0CA8\u0CC1 \u0CAC\u0CC6\u0C82\u0CAC\u0CB2\u0CBF\u0CB8\u0CC1\u0CB5\u0CC1\u0CA6\u0CBF\u0CB2\u0CCD\u0CB2"
}
},
"FontWarnings": {},
"HTML-CSS": {},
"HelpDialog": {},
"MathML": {},
"MathMenu": {},
"TeX": {}
},
plural: function (n) {
return 1; // other
},
number: function (n) {
return n;
}
});
MathJax.Ajax.loadComplete("[MathJax]/localization/kn/kn.js");
|
PypiClean
|
/sila2lib_implementations-0.1.1-py3-none-any.whl/sila2lib_implementations/Presens/PresensService/CalibrationService/gRPC/CalibrationService_pb2_grpc.py
|
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import CalibrationService_pb2 as CalibrationService__pb2
class CalibrationServiceStub(object):
"""Feature: Calibration Service
Calibrate the sensor bars by adjusting the temperature compensation and the dynamic averaging value.
By Lukas Bromig, Institute of Biochemical Engineering, Technical University of Munich, 14.02.2020
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetO2CalLow = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetO2CalLow',
request_serializer=CalibrationService__pb2.GetO2CalLow_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetO2CalLow_Responses.FromString,
)
self.SetO2CalLow = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetO2CalLow',
request_serializer=CalibrationService__pb2.SetO2CalLow_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetO2CalLow_Responses.FromString,
)
self.GetO2CalHigh = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetO2CalHigh',
request_serializer=CalibrationService__pb2.GetO2CalHigh_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetO2CalHigh_Responses.FromString,
)
self.SetO2CalHigh = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetO2CalHigh',
request_serializer=CalibrationService__pb2.SetO2CalHigh_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetO2CalHigh_Responses.FromString,
)
self.GetO2CalTemp = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetO2CalTemp',
request_serializer=CalibrationService__pb2.GetO2CalTemp_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetO2CalTemp_Responses.FromString,
)
self.SetO2CalTemp = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetO2CalTemp',
request_serializer=CalibrationService__pb2.SetO2CalTemp_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetO2CalTemp_Responses.FromString,
)
self.GetPHImax = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHImax',
request_serializer=CalibrationService__pb2.GetPHImax_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetPHImax_Responses.FromString,
)
self.SetPHImax = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHImax',
request_serializer=CalibrationService__pb2.SetPHImax_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetPHImax_Responses.FromString,
)
self.GetPHImin = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHImin',
request_serializer=CalibrationService__pb2.GetPHImin_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetPHImin_Responses.FromString,
)
self.SetPHImin = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHImin',
request_serializer=CalibrationService__pb2.SetPHImin_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetPHImin_Responses.FromString,
)
self.GetPHpH0 = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHpH0',
request_serializer=CalibrationService__pb2.GetPHpH0_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetPHpH0_Responses.FromString,
)
self.SetPHpH0 = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHpH0',
request_serializer=CalibrationService__pb2.SetPHpH0_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetPHpH0_Responses.FromString,
)
self.GetPHdpH = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHdpH',
request_serializer=CalibrationService__pb2.GetPHdpH_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetPHdpH_Responses.FromString,
)
self.SetPHdpH = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHdpH',
request_serializer=CalibrationService__pb2.SetPHdpH_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetPHdpH_Responses.FromString,
)
self.GetPHCalTemp = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHCalTemp',
request_serializer=CalibrationService__pb2.GetPHCalTemp_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.GetPHCalTemp_Responses.FromString,
)
self.SetPHCalTemp = channel.unary_unary(
'/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHCalTemp',
request_serializer=CalibrationService__pb2.SetPHCalTemp_Parameters.SerializeToString,
response_deserializer=CalibrationService__pb2.SetPHCalTemp_Responses.FromString,
)
class CalibrationServiceServicer(object):
"""Feature: Calibration Service
Calibrate the sensor bars by adjusting the temperature compensation and the dynamic averaging value.
By Lukas Bromig, Institute of Biochemical Engineering, Technical University of Munich, 14.02.2020
"""
def GetO2CalLow(self, request, context):
"""Get O2 Calibration Low
Get the O2 calibration point value at 0% dissolved oxygen.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetO2CalLow(self, request, context):
"""Set O2 Calibration Low
Set the O2 calibration point value at 0% dissolved oxygen. Default = 57.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetO2CalHigh(self, request, context):
"""Get O2 Calibration High
Get the O2 calibration point value at 100% dissolved oxygen. Default = 27.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetO2CalHigh(self, request, context):
"""Set O2 Calibration High
Set the O2 calibration point value at 100% dissolved oxygen. Default = 27.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetO2CalTemp(self, request, context):
"""Get O2 Calibration Temperature
Get the value of the oxygen sensor calibration temperature in degrees Celsius. Default = 20.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetO2CalTemp(self, request, context):
"""Set O2 Calibration Temperature
Set the value of the oxygen calibration temperature in degree Celsius. Default = 20.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPHImax(self, request, context):
"""Get phi max
Get the given value of the first calibration point (phi max) of the pH sensor. Default = 0.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetPHImax(self, request, context):
"""Set phi max
Set the value of the first calibration point (phi max) of the pH sensor. Default = 0.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPHImin(self, request, context):
"""Get phi min
Get the given value of the second calibration point (phi min) of the pH sensor. Default = -.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetPHImin(self, request, context):
"""Set phi min
Set the value of the second calibration point (phi min) of the pH sensor. Default = -.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPHpH0(self, request, context):
"""Get PH pH0
Get the given value of the third calibration point (pH0) of the pH sensor. Default = 0.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetPHpH0(self, request, context):
"""Set PH pH0
Set the value of the third calibration point (pH0) of the pH sensor. Default = 0.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPHdpH(self, request, context):
"""Get PH dpH
Get the given value of the fourth calibration point (dpH) of the pH sensor. Default = 0.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetPHdpH(self, request, context):
"""Set PH dpH
Set the value of the fourth calibration point (dpH) of the pH sensor. Default = 0.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPHCalTemp(self, request, context):
"""Get PH Calibration Temperature
Get the value of the pH sensor calibration temperature in degree Celsius. Default = 20.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetPHCalTemp(self, request, context):
"""Set PH Calibration Temperature
Set the value of the pH calibration temperature in degree Celsius. Default = 20.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_CalibrationServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetO2CalLow': grpc.unary_unary_rpc_method_handler(
servicer.GetO2CalLow,
request_deserializer=CalibrationService__pb2.GetO2CalLow_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetO2CalLow_Responses.SerializeToString,
),
'SetO2CalLow': grpc.unary_unary_rpc_method_handler(
servicer.SetO2CalLow,
request_deserializer=CalibrationService__pb2.SetO2CalLow_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetO2CalLow_Responses.SerializeToString,
),
'GetO2CalHigh': grpc.unary_unary_rpc_method_handler(
servicer.GetO2CalHigh,
request_deserializer=CalibrationService__pb2.GetO2CalHigh_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetO2CalHigh_Responses.SerializeToString,
),
'SetO2CalHigh': grpc.unary_unary_rpc_method_handler(
servicer.SetO2CalHigh,
request_deserializer=CalibrationService__pb2.SetO2CalHigh_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetO2CalHigh_Responses.SerializeToString,
),
'GetO2CalTemp': grpc.unary_unary_rpc_method_handler(
servicer.GetO2CalTemp,
request_deserializer=CalibrationService__pb2.GetO2CalTemp_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetO2CalTemp_Responses.SerializeToString,
),
'SetO2CalTemp': grpc.unary_unary_rpc_method_handler(
servicer.SetO2CalTemp,
request_deserializer=CalibrationService__pb2.SetO2CalTemp_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetO2CalTemp_Responses.SerializeToString,
),
'GetPHImax': grpc.unary_unary_rpc_method_handler(
servicer.GetPHImax,
request_deserializer=CalibrationService__pb2.GetPHImax_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetPHImax_Responses.SerializeToString,
),
'SetPHImax': grpc.unary_unary_rpc_method_handler(
servicer.SetPHImax,
request_deserializer=CalibrationService__pb2.SetPHImax_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetPHImax_Responses.SerializeToString,
),
'GetPHImin': grpc.unary_unary_rpc_method_handler(
servicer.GetPHImin,
request_deserializer=CalibrationService__pb2.GetPHImin_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetPHImin_Responses.SerializeToString,
),
'SetPHImin': grpc.unary_unary_rpc_method_handler(
servicer.SetPHImin,
request_deserializer=CalibrationService__pb2.SetPHImin_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetPHImin_Responses.SerializeToString,
),
'GetPHpH0': grpc.unary_unary_rpc_method_handler(
servicer.GetPHpH0,
request_deserializer=CalibrationService__pb2.GetPHpH0_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetPHpH0_Responses.SerializeToString,
),
'SetPHpH0': grpc.unary_unary_rpc_method_handler(
servicer.SetPHpH0,
request_deserializer=CalibrationService__pb2.SetPHpH0_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetPHpH0_Responses.SerializeToString,
),
'GetPHdpH': grpc.unary_unary_rpc_method_handler(
servicer.GetPHdpH,
request_deserializer=CalibrationService__pb2.GetPHdpH_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetPHdpH_Responses.SerializeToString,
),
'SetPHdpH': grpc.unary_unary_rpc_method_handler(
servicer.SetPHdpH,
request_deserializer=CalibrationService__pb2.SetPHdpH_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetPHdpH_Responses.SerializeToString,
),
'GetPHCalTemp': grpc.unary_unary_rpc_method_handler(
servicer.GetPHCalTemp,
request_deserializer=CalibrationService__pb2.GetPHCalTemp_Parameters.FromString,
response_serializer=CalibrationService__pb2.GetPHCalTemp_Responses.SerializeToString,
),
'SetPHCalTemp': grpc.unary_unary_rpc_method_handler(
servicer.SetPHCalTemp,
request_deserializer=CalibrationService__pb2.SetPHCalTemp_Parameters.FromString,
response_serializer=CalibrationService__pb2.SetPHCalTemp_Responses.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class CalibrationService(object):
"""Feature: Calibration Service
Calibrate the sensor bars by adjusting the temperature compensation and the dynamic averaging value.
By Lukas Bromig, Institute of Biochemical Engineering, Technical University of Munich, 14.02.2020
"""
@staticmethod
def GetO2CalLow(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetO2CalLow',
CalibrationService__pb2.GetO2CalLow_Parameters.SerializeToString,
CalibrationService__pb2.GetO2CalLow_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetO2CalLow(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetO2CalLow',
CalibrationService__pb2.SetO2CalLow_Parameters.SerializeToString,
CalibrationService__pb2.SetO2CalLow_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetO2CalHigh(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetO2CalHigh',
CalibrationService__pb2.GetO2CalHigh_Parameters.SerializeToString,
CalibrationService__pb2.GetO2CalHigh_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetO2CalHigh(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetO2CalHigh',
CalibrationService__pb2.SetO2CalHigh_Parameters.SerializeToString,
CalibrationService__pb2.SetO2CalHigh_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetO2CalTemp(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetO2CalTemp',
CalibrationService__pb2.GetO2CalTemp_Parameters.SerializeToString,
CalibrationService__pb2.GetO2CalTemp_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetO2CalTemp(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetO2CalTemp',
CalibrationService__pb2.SetO2CalTemp_Parameters.SerializeToString,
CalibrationService__pb2.SetO2CalTemp_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPHImax(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHImax',
CalibrationService__pb2.GetPHImax_Parameters.SerializeToString,
CalibrationService__pb2.GetPHImax_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetPHImax(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHImax',
CalibrationService__pb2.SetPHImax_Parameters.SerializeToString,
CalibrationService__pb2.SetPHImax_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPHImin(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHImin',
CalibrationService__pb2.GetPHImin_Parameters.SerializeToString,
CalibrationService__pb2.GetPHImin_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetPHImin(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHImin',
CalibrationService__pb2.SetPHImin_Parameters.SerializeToString,
CalibrationService__pb2.SetPHImin_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPHpH0(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHpH0',
CalibrationService__pb2.GetPHpH0_Parameters.SerializeToString,
CalibrationService__pb2.GetPHpH0_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetPHpH0(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHpH0',
CalibrationService__pb2.SetPHpH0_Parameters.SerializeToString,
CalibrationService__pb2.SetPHpH0_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPHdpH(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHdpH',
CalibrationService__pb2.GetPHdpH_Parameters.SerializeToString,
CalibrationService__pb2.GetPHdpH_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetPHdpH(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHdpH',
CalibrationService__pb2.SetPHdpH_Parameters.SerializeToString,
CalibrationService__pb2.SetPHdpH_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPHCalTemp(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/GetPHCalTemp',
CalibrationService__pb2.GetPHCalTemp_Parameters.SerializeToString,
CalibrationService__pb2.GetPHCalTemp_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetPHCalTemp(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sila2.biovt.mw.tum.de.examples.calibrationservice.v1.CalibrationService/SetPHCalTemp',
CalibrationService__pb2.SetPHCalTemp_Parameters.SerializeToString,
CalibrationService__pb2.SetPHCalTemp_Responses.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
|
PypiClean
|
/kiwi-blockchain-1.0.3.tar.gz/kiwi-blockchain-1.0.3/CODE_OF_CONDUCT.md
|
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Insulting/derogatory comments, personal or political attacks, or excessive trolling
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at [email protected]. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [https://www.contributor-covenant.org/version/1/4/code-of-conduct.html](https://www.contributor-covenant.org/version/1/4/code-of-conduct.html)
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
[https://www.contributor-covenant.org/faq](https://www.contributor-covenant.org/faq)
|
PypiClean
|
/SMDAI-1.0.0.1.tar.gz/SMDAI-1.0.0.1/PSMNet/models/basic.py
|
from __future__ import print_function
import torch
import torch.nn as nn
import torch.utils.data
from torch.autograd import Variable
import torch.nn.functional as F
import math
from PSMNet.models.submodule import *
class PSMNet(nn.Module):
def __init__(self, maxdisp):
super(PSMNet, self).__init__()
self.maxdisp = maxdisp
self.feature_extraction = feature_extraction()
########
self.dres0 = nn.Sequential(convbn_3d(64, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True))
self.dres1 = nn.Sequential(convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1))
self.dres2 = nn.Sequential(convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1))
self.dres3 = nn.Sequential(convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1))
self.dres4 = nn.Sequential(convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1))
self.classify = nn.Sequential(convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
nn.Conv3d(32, 1, kernel_size=3, padding=1, stride=1,bias=False))
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.Conv3d):
n = m.kernel_size[0] * m.kernel_size[1]*m.kernel_size[2] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.bias.data.zero_()
def forward(self, left, right):
refimg_fea = self.feature_extraction(left)
targetimg_fea = self.feature_extraction(right)
#matching
cost = Variable(torch.FloatTensor(refimg_fea.size()[0], refimg_fea.size()[1]*2, self.maxdisp//4, refimg_fea.size()[2], refimg_fea.size()[3]).zero_(), volatile=not self.training).cuda()
for i in range(self.maxdisp//4):  # integer division: the extracted features are downsampled by 4
if i > 0 :
cost[:, :refimg_fea.size()[1], i, :,i:] = refimg_fea[:,:,:,i:]
cost[:, refimg_fea.size()[1]:, i, :,i:] = targetimg_fea[:,:,:,:-i]
else:
cost[:, :refimg_fea.size()[1], i, :,:] = refimg_fea
cost[:, refimg_fea.size()[1]:, i, :,:] = targetimg_fea
cost = cost.contiguous()
cost0 = self.dres0(cost)
cost0 = self.dres1(cost0) + cost0
cost0 = self.dres2(cost0) + cost0
cost0 = self.dres3(cost0) + cost0
cost0 = self.dres4(cost0) + cost0
cost = self.classify(cost0)
cost = F.upsample(cost, [self.maxdisp,left.size()[2],left.size()[3]], mode='trilinear')
cost = torch.squeeze(cost,1)
pred = F.softmax(cost, dim=1)  # softmax over the disparity dimension
pred = disparityregression(self.maxdisp)(pred)
return pred
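# --- Hypothetical usage sketch (not part of the original file) ---
# The cost volume is allocated with .cuda(), so a CUDA device is assumed, and
# maxdisp must be divisible by 4 because the extracted features are downsampled
# by a factor of 4:
#
#     model = PSMNet(maxdisp=192).cuda()
#     left = torch.rand(1, 3, 256, 512).cuda()
#     right = torch.rand(1, 3, 256, 512).cuda()
#     disparity = model(left, right)  # predicted disparity map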
|
PypiClean
|
/django-charisma-1.0.tar.gz/django-charisma-1.0/charisma_django/static/charisma_django/js/charisma.js
|
$(document).ready(function () {
//themes, change CSS with JS
//default theme (CSS) is lumen, change it if needed
var defaultTheme = 'lumen';
var currentTheme = $.cookie('currentTheme') == null ? defaultTheme : $.cookie('currentTheme');
var msie = navigator.userAgent.match(/msie/i);
$.browser = {};
$.browser.msie = {};
switchTheme(currentTheme);
$('.navbar-toggle').click(function (e) {
e.preventDefault();
$('.nav-sm').html($('.navbar-collapse').html());
$('.sidebar-nav').toggleClass('active');
$(this).toggleClass('active');
});
var $sidebarNav = $('.sidebar-nav');
// Hide responsive navbar on clicking outside
$(document).mouseup(function (e) {
if (!$sidebarNav.is(e.target) // if the target of the click isn't the container...
&& $sidebarNav.has(e.target).length === 0
&& !$('.navbar-toggle').is(e.target)
&& $('.navbar-toggle').has(e.target).length === 0
&& $sidebarNav.hasClass('active')
)// ... nor a descendant of the container
{
e.stopPropagation();
$('.navbar-toggle').click();
}
});
$('#themes a').click(function (e) {
e.preventDefault();
currentTheme = $(this).attr('data-value');
$.cookie('currentTheme', currentTheme, {expires: 365});
switchTheme(currentTheme);
});
function switchTheme(themeName) {
if (themeName == 'classic') {
$('#bs-css').attr('href', '/static/charisma_django/bower_components/bootstrap/dist/css/bootstrap.min.css');
} else {
$('#bs-css').attr('href', '/static/charisma_django/css/bootstrap-' + themeName + '.min.css');
}
$('#themes i').removeClass('glyphicon glyphicon-ok whitespace').addClass('whitespace');
$('#themes a[data-value=' + themeName + ']').find('i').removeClass('whitespace').addClass('glyphicon glyphicon-ok');
}
//ajax menu checkbox
$('#is-ajax').click(function (e) {
$.cookie('is-ajax', $(this).prop('checked'), {expires: 365});
});
$('#is-ajax').prop('checked', $.cookie('is-ajax') === 'true' ? true : false);
//disabling some functions for Internet Explorer
if (msie) {
$('#is-ajax').prop('checked', false);
$('#for-is-ajax').hide();
$('#toggle-fullscreen').hide();
$('.login-box').find('.input-large').removeClass('span10');
}
//highlight current / active link
$('ul.main-menu li a').each(function () {
if ($($(this))[0].href == String(window.location))
$(this).parent().addClass('active');
});
//establish history variables
var
History = window.History, // Note: We are using a capital H instead of a lower h
State = History.getState(),
$log = $('#log');
//bind to State Change
History.Adapter.bind(window, 'statechange', function () { // Note: We are using statechange instead of popstate
var State = History.getState(); // Note: We are using History.getState() instead of event.state
$.ajax({
url: State.url,
success: function (msg) {
$('#content').html($(msg).find('#content').html());
$('#loading').remove();
$('#content').fadeIn();
var newTitle = $(msg).filter('title').text();
$('title').text(newTitle);
docReady();
}
});
});
//ajaxify menus
$('a.ajax-link').click(function (e) {
if (msie) e.which = 1;
if (e.which != 1 || !$('#is-ajax').prop('checked') || $(this).parent().hasClass('active')) return;
e.preventDefault();
$('.sidebar-nav').removeClass('active');
$('.navbar-toggle').removeClass('active');
$('#loading').remove();
$('#content').fadeOut().parent().append('<div id="loading" class="center">Loading...<div class="center"></div></div>');
var $clink = $(this);
History.pushState(null, null, $clink.attr('href'));
$('ul.main-menu li.active').removeClass('active');
$clink.parent('li').addClass('active');
});
$('.accordion > a').click(function (e) {
e.preventDefault();
var $ul = $(this).siblings('ul');
var $li = $(this).parent();
if ($ul.is(':visible')) $li.removeClass('active');
else $li.addClass('active');
$ul.slideToggle();
});
$('.accordion li.active:first').parents('ul').slideDown();
//other things to do on document ready, separated for ajax calls
docReady();
});
function docReady() {
//prevent # links from moving to top
$('a[href="#"][data-top!=true]').click(function (e) {
e.preventDefault();
});
//notifications
$('.noty').click(function (e) {
e.preventDefault();
var options = $.parseJSON($(this).attr('data-noty-options'));
noty(options);
});
//chosen - improves select
$('[data-rel="chosen"],[rel="chosen"]').chosen();
//tabs
$('#myTab a:first').tab('show');
$('#myTab a').click(function (e) {
e.preventDefault();
$(this).tab('show');
});
//tooltip
$('[data-toggle="tooltip"]').tooltip();
//auto grow textarea
$('textarea.autogrow').autogrow();
//popover
$('[data-toggle="popover"]').popover();
//iOS / iPhone style toggle switch
$('.iphone-toggle').iphoneStyle();
//star rating
$('.raty').raty({
score: 4 //default stars
});
//uploadify - multiple uploads
$('#file_upload').uploadify({
'swf': 'misc/uploadify.swf',
'uploader': 'misc/uploadify.php'
// Put your options here
});
//gallery controls container animation
$('ul.gallery li').hover(function () {
$('img', this).fadeToggle(1000);
$(this).find('.gallery-controls').remove();
$(this).append('<div class="well gallery-controls">' +
'<p><a href="#" class="gallery-edit btn"><i class="glyphicon glyphicon-edit"></i></a> <a href="#" class="gallery-delete btn"><i class="glyphicon glyphicon-remove"></i></a></p>' +
'</div>');
$(this).find('.gallery-controls').stop().animate({'margin-top': '-1'}, 400);
}, function () {
$('img', this).fadeToggle(1000);
$(this).find('.gallery-controls').stop().animate({'margin-top': '-30'}, 200, function () {
$(this).remove();
});
});
//gallery image controls example
//gallery delete
$('.thumbnails').on('click', '.gallery-delete', function (e) {
e.preventDefault();
//get image id
//alert($(this).parents('.thumbnail').attr('id'));
$(this).parents('.thumbnail').fadeOut();
});
//gallery edit
$('.thumbnails').on('click', '.gallery-edit', function (e) {
e.preventDefault();
//get image id
//alert($(this).parents('.thumbnail').attr('id'));
});
//gallery colorbox
$('.thumbnail a').colorbox({
rel: 'thumbnail a',
transition: "elastic",
maxWidth: "95%",
maxHeight: "95%",
slideshow: true
});
//gallery fullscreen
$('#toggle-fullscreen').button().click(function () {
var button = $(this), root = document.documentElement;
if (!button.hasClass('active')) {
$('#thumbnails').addClass('modal-fullscreen');
if (root.webkitRequestFullScreen) {
root.webkitRequestFullScreen(
window.Element.ALLOW_KEYBOARD_INPUT
);
} else if (root.mozRequestFullScreen) {
root.mozRequestFullScreen();
}
} else {
$('#thumbnails').removeClass('modal-fullscreen');
(document.webkitCancelFullScreen ||
document.mozCancelFullScreen ||
$.noop).apply(document);
}
});
//tour
if ($('.tour').length && typeof(tour) == 'undefined') {
var tour = new Tour();
tour.addStep({
element: "#content", /* html element next to which the step popover should be shown */
placement: "top",
title: "Include boxes on pages", /* title of the popover */
content: "You can just include every box in pages" /* content of the popover */
});
tour.addStep({
element: "#weeklystats_tour",
placement: "top",
title: "Weekly stats",
content: "Display the boxes with just an include"
});
tour.addStep({
element: "#weeklystats_tour2",
placement: "top",
title: "Different width",
content: "You can set up the width"
});
tour.addStep({
element: "ul.main-menu a:first",
title: "Dashboard",
content: "This is your dashboard from here you will find highlights."
});
tour.addStep({
element: ".top-nav a:first",
placement: "bottom",
title: "Visit Site",
content: "Visit your front end from here."
});
tour.restart();
}
//datatable
$('.datatable').dataTable({
"sDom": "<'row'<'col-md-6'l><'col-md-6'f>r>t<'row'<'col-md-12'i><'col-md-12 center-block'p>>",
"sPaginationType": "bootstrap",
"oLanguage": {
"sLengthMenu": "_MENU_ records per page"
}
});
$('.btn-close').click(function (e) {
e.preventDefault();
$(this).parent().parent().parent().fadeOut();
});
$('.btn-minimize').click(function (e) {
e.preventDefault();
var $target = $(this).parent().parent().next('.box-content');
if ($target.is(':visible')) $('i', $(this)).removeClass('glyphicon-chevron-up').addClass('glyphicon-chevron-down');
else $('i', $(this)).removeClass('glyphicon-chevron-down').addClass('glyphicon-chevron-up');
$target.slideToggle();
});
$('.btn-setting').click(function (e) {
e.preventDefault();
var MyModal = "myModal" + $(this).attr('id');
console.log(MyModal);
$('#'+MyModal).modal('show');
});
$('#calendar').fullCalendar({
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek,agendaDay'
},
defaultDate: '2017-06-12',
events: [
{
title: 'Internet of Things conference',
start: '2017-06-01'
},
{
title: 'Customers at office',
start: '2017-06-07',
end: '2017-06-10'
},
{
id: 999,
title: 'Developer meeting',
start: '2017-06-09T16:00:00'
},
{
id: 999,
title: 'Developer meeting',
start: '2017-06-16T16:00:00'
},
{
title: 'Meeting',
start: '2017-06-12T10:30:00',
end: '2017-06-12T12:30:00'
},
{
title: 'Lunch',
start: '2017-06-12T12:00:00'
},
{
title: 'Birthday Party',
start: '2017-06-13T07:00:00'
},
{
title: 'Click for Google',
url: 'http://google.com/',
start: '2017-06-28'
}
]
});
}
//additional functions for data table
$.fn.dataTableExt.oApi.fnPagingInfo = function (oSettings) {
return {
"iStart": oSettings._iDisplayStart,
"iEnd": oSettings.fnDisplayEnd(),
"iLength": oSettings._iDisplayLength,
"iTotal": oSettings.fnRecordsTotal(),
"iFilteredTotal": oSettings.fnRecordsDisplay(),
"iPage": Math.ceil(oSettings._iDisplayStart / oSettings._iDisplayLength),
"iTotalPages": Math.ceil(oSettings.fnRecordsDisplay() / oSettings._iDisplayLength)
};
};
$.extend($.fn.dataTableExt.oPagination, {
"bootstrap": {
"fnInit": function (oSettings, nPaging, fnDraw) {
var oLang = oSettings.oLanguage.oPaginate;
var fnClickHandler = function (e) {
e.preventDefault();
if (oSettings.oApi._fnPageChange(oSettings, e.data.action)) {
fnDraw(oSettings);
}
};
$(nPaging).addClass('pagination').append(
'<ul class="pagination">' +
'<li class="prev disabled"><a href="#">← ' + oLang.sPrevious + '</a></li>' +
'<li class="next disabled"><a href="#">' + oLang.sNext + ' → </a></li>' +
'</ul>'
);
var els = $('a', nPaging);
$(els[0]).bind('click.DT', { action: "previous" }, fnClickHandler);
$(els[1]).bind('click.DT', { action: "next" }, fnClickHandler);
},
"fnUpdate": function (oSettings, fnDraw) {
var iListLength = 5;
var oPaging = oSettings.oInstance.fnPagingInfo();
var an = oSettings.aanFeatures.p;
var i, j, sClass, iStart, iEnd, iHalf = Math.floor(iListLength / 2);
if (oPaging.iTotalPages < iListLength) {
iStart = 1;
iEnd = oPaging.iTotalPages;
}
else if (oPaging.iPage <= iHalf) {
iStart = 1;
iEnd = iListLength;
} else if (oPaging.iPage >= (oPaging.iTotalPages - iHalf)) {
iStart = oPaging.iTotalPages - iListLength + 1;
iEnd = oPaging.iTotalPages;
} else {
iStart = oPaging.iPage - iHalf + 1;
iEnd = iStart + iListLength - 1;
}
for (i = 0, iLen = an.length; i < iLen; i++) {
// remove the middle elements
$('li:gt(0)', an[i]).filter(':not(:last)').remove();
// add the new list items and their event handlers
for (j = iStart; j <= iEnd; j++) {
sClass = (j == oPaging.iPage + 1) ? 'class="active"' : '';
$('<li ' + sClass + '><a href="#">' + j + '</a></li>')
.insertBefore($('li:last', an[i])[0])
.bind('click', function (e) {
e.preventDefault();
oSettings._iDisplayStart = (parseInt($('a', this).text(), 10) - 1) * oPaging.iLength;
fnDraw(oSettings);
});
}
// add / remove disabled classes from the static elements
if (oPaging.iPage === 0) {
$('li:first', an[i]).addClass('disabled');
} else {
$('li:first', an[i]).removeClass('disabled');
}
if (oPaging.iPage === oPaging.iTotalPages - 1 || oPaging.iTotalPages === 0) {
$('li:last', an[i]).addClass('disabled');
} else {
$('li:last', an[i]).removeClass('disabled');
}
}
}
}
});
|
PypiClean
|
/sktime-0.22.0-py3-none-any.whl/examples/rocket.ipynb
|
# Demo of ROCKET transform
## Overview
ROCKET [1] transforms time series using random convolutional kernels (random length, weights, bias, dilation, and padding). ROCKET computes two features from the resulting feature maps: the max, and the proportion of positive values (or ppv). The transformed features are used to train a linear classifier.
[1] Dempster A, Petitjean F, Webb GI (2019) ROCKET: Exceptionally fast and accurate time series classification using random convolutional kernels. [arXiv:1910.13051](https://arxiv.org/abs/1910.13051)
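To make the two summary features concrete, here is a minimal NumPy sketch (not part of sktime) of what ROCKET computes for a single series and a single kernel; `np.convolve` stands in for the dilated, padded convolution used by the real implementation, and the kernel length, weights, and bias are just illustrative random draws.
```
import numpy as np
rng = np.random.default_rng(0)
series = rng.normal(size=100)   # one univariate time series
weights = rng.normal(size=9)    # random kernel weights
bias = rng.uniform(-1, 1)       # random bias
feature_map = np.convolve(series, weights, mode="valid") + bias
kernel_features = [
    feature_map.max(),           # max of the feature map
    (feature_map > 0).mean(),    # ppv: proportion of positive values
]
```
With the default 10,000 kernels this yields 20,000 such features per series, which are then passed to the linear classifier.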
***
## Contents
1. Imports
2. Univariate Time Series
3. Multivariate Time Series
4. Pipeline Example
***
## 1 Imports
Import example data, ROCKET, and a classifier (`RidgeClassifierCV` from scikit-learn), as well as NumPy and `make_pipeline` from scikit-learn.
**Note**: ROCKET compiles (via Numba) on import, which may take a few seconds.
```
# !pip install --upgrade numba
import numpy as np
from sklearn.linear_model import RidgeClassifierCV
from sklearn.pipeline import make_pipeline
from sktime.datasets import load_arrow_head # univariate dataset
from sktime.datasets import load_basic_motions # multivariate dataset
from sktime.transformations.panel.rocket import Rocket
```
## 2 Univariate Time Series
We can transform the data using ROCKET and separately fit a classifier, or we can use ROCKET together with a classifier in a pipeline (section 4, below).
### 2.1 Load the Training Data
For more details on the data set, see the [univariate time series classification notebook](https://github.com/sktime/sktime/blob/main/examples/02_classification_univariate.ipynb).
```
X_train, y_train = load_arrow_head(split="train", return_X_y=True)
```
### 2.2 Initialise ROCKET and Transform the Training Data
```
rocket = Rocket() # by default, ROCKET uses 10,000 kernels
rocket.fit(X_train)
X_train_transform = rocket.transform(X_train)
```
### 2.3 Fit a Classifier
We recommend using `RidgeClassifierCV` from scikit-learn for smaller datasets (fewer than approx. 20K training examples), and logistic regression trained with stochastic gradient descent for larger datasets; a sketch of the latter follows the next cell.
```
classifier = RidgeClassifierCV(alphas=np.logspace(-3, 3, 10))
classifier.fit(X_train_transform, y_train)
```
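For larger datasets, the following is a hedged sketch of that alternative: logistic regression fitted with stochastic gradient descent via scikit-learn's `SGDClassifier`. The `StandardScaler` normalisation step is an assumption rather than something required by ROCKET, and on scikit-learn versions before 1.1 the loss is named `"log"` instead of `"log_loss"`.
```
from sklearn.linear_model import SGDClassifier
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
sgd_classifier = make_pipeline(
    StandardScaler(),
    SGDClassifier(loss="log_loss", max_iter=1000, tol=1e-3),
)
sgd_classifier.fit(X_train_transform, y_train)
```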
### 2.4 Load and Transform the Test Data
```
X_test, y_test = load_arrow_head(split="test", return_X_y=True)
X_test_transform = rocket.transform(X_test)
```
### 2.5 Classify the Test Data
```
classifier.score(X_test_transform, y_test)
```
***
## 3 Multivariate Time Series
We can use ROCKET in exactly the same way for multivariate time series.
### 3.1 Load the Training Data
```
X_train, y_train = load_basic_motions(split="train", return_X_y=True)
```
### 3.2 Initialise ROCKET and Transform the Training Data
```
rocket = Rocket()
rocket.fit(X_train)
X_train_transform = rocket.transform(X_train)
```
### 3.3 Fit a Classifier
```
classifier = RidgeClassifierCV(alphas=np.logspace(-3, 3, 10))
classifier.fit(X_train_transform, y_train)
```
### 3.4 Load and Transform the Test Data
```
X_test, y_test = load_basic_motions(split="test", return_X_y=True)
X_test_transform = rocket.transform(X_test)
```
### 3.5 Classify the Test Data
```
classifier.score(X_test_transform, y_test)
```
***
## 4 Pipeline Example
We can use ROCKET together with `RidgeClassifierCV` (or another classifier) in a pipeline. We can then use the pipeline like a self-contained classifier, with a single call to `fit`, and without having to separately transform the data, etc.
### 4.1 Initialise the Pipeline
```
rocket_pipeline = make_pipeline(
Rocket(), RidgeClassifierCV(alphas=np.logspace(-3, 3, 10))
)
```
### 4.2 Load and Fit the Training Data
```
X_train, y_train = load_arrow_head(split="train", return_X_y=True)
# it is necessary to pass y_train to the pipeline
# y_train is not used for the transform, but it is used by the classifier
rocket_pipeline.fit(X_train, y_train)
```
### 4.3 Load and Classify the Test Data
```
X_test, y_test = load_arrow_head(split="test", return_X_y=True)
rocket_pipeline.score(X_test, y_test)
```
|
PypiClean
|
/singa-easy-0.4.7.tar.gz/singa-easy-0.4.7/singa_easy/modules/mod_modelslicing/models/resnet_imagenet.py
|
import torch.nn as nn
import math
import torch.utils.model_zoo as model_zoo
__all__ = [
'ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152',
'imagenet_resnet'
]
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=1,
bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes,
planes,
kernel_size=3,
stride=stride,
padding=1,
bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers, num_classes=1000):
self.inplanes = 64
super(ResNet, self).__init__()
self.conv1 = nn.Conv2d(3,
64,
kernel_size=7,
stride=2,
padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.avgpool = nn.AvgPool2d(7, stride=1)
self.fc = nn.Linear(512 * block.expansion, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes,
planes * block.expansion,
kernel_size=1,
stride=stride,
bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))
return model
def resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))
return model
def resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))
return model
def resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet101']))
return model
def resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet152']))
return model
def imagenet_resnet(args):
if args.depth == 18:
return resnet18()
elif args.depth == 34:
return resnet34()
elif args.depth == 50:
return resnet50()
elif args.depth == 101:
return resnet101()
elif args.depth == 152:
return resnet152()
else:
raise Exception('not support depth: {} for resnet'.format(args.depth))
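# --- Hypothetical usage sketch (not part of the original file) ---
# Because of the fixed 7x7 average pooling, inputs are expected to be 224x224:
#
#     import torch
#     model = resnet50(pretrained=False)
#     logits = model(torch.randn(1, 3, 224, 224))  # shape (1, 1000)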
|
PypiClean
|
/sanic_sse-0.3.1.tar.gz/sanic_sse-0.3.1/sanic_sse/sse.py
|
import re
import io
import asyncio
import contextlib
import inspect
import warnings
from http import HTTPStatus
from sanic import Sanic
from sanic.response import stream
from sanic.exceptions import abort
from .pub_sub import PubSub
# pylint: disable=bad-continuation
class Sse:
"""
A :class: that knows how to publish, subscribe to, and stream server-sent events.
"""
_DEFAULT_PING_INTERVAL = 15
_DEFAULT_SEPARATOR = "\r\n"
_LINE_SEP_EXPR = re.compile(r"\r\n|\r|\n")
_DEFAULT_URL = "/sse"
_HEADERS = {"Cache-Control": "no-cache"}
def __init__( # type: ignore
self,
app: Sanic = None,
url: str = _DEFAULT_URL,
ping_interval: int = _DEFAULT_PING_INTERVAL,
before_request_func=None,
):
"""
Application initialization
:param `sanic.Sanic` app: Sanic application
:param str url: sse event url
:param int ping_interval: interval of ping message
"""
self._ping_task = None
self._before_request = None
if app is not None:
self.init_app(app, url, ping_interval, before_request_func)
async def _ping(self):
# periodically send a ping to the browser. Any message that
# starts with a ":" colon is ignored by the browser and can be
# used as a ping message.
while True:
await asyncio.sleep(self._ping_interval)
await self._pubsub.publish(
": ping{0}{0}".format(self._DEFAULT_SEPARATOR).encode("utf-8")
)
@staticmethod
def _prepare(data, event_id=None, event=None, retry=None):
buffer = io.StringIO()
if event_id is not None:
buffer.write(Sse._LINE_SEP_EXPR.sub("", "id: {}".format(event_id)))
buffer.write(Sse._DEFAULT_SEPARATOR)
if event is not None:
buffer.write(Sse._LINE_SEP_EXPR.sub("", "event: {}".format(event)))
buffer.write(Sse._DEFAULT_SEPARATOR)
for chunk in Sse._LINE_SEP_EXPR.split(data):
buffer.write("data: {}".format(chunk))
buffer.write(Sse._DEFAULT_SEPARATOR)
if retry is not None:
if not isinstance(retry, int):
raise TypeError("retry argument must be int")
buffer.write("retry: {}".format(retry))
buffer.write(Sse._DEFAULT_SEPARATOR)
buffer.write(Sse._DEFAULT_SEPARATOR)
return buffer.getvalue().encode("utf-8")
def send( # pylint: disable=too-many-arguments
self,
data: str,
channel_id: str = None,
event_id: str = None,
event: str = None,
retry: int = None,
):
"""Send data using EventSource protocol. This call is blocking
:param str data: The data field for the message.
:param str event_id: The event ID to set the EventSource object's last
event ID value to.
:param str event: The event's type. If this is specified, an event will
be dispatched on the browser to the listener for the specified
event name; the web site would use addEventListener() to listen
for named events. The default event type is "message".
:param int retry: The reconnection time to use when attempting to send
the event. [What code handles this?] This must be an integer,
specifying the reconnection time in milliseconds. If a non-integer
value is specified, the field is ignored.
"""
data = self._prepare(data, event_id, event, retry)
return self._pubsub.publish(data, channel_id)
def send_nowait( # pylint: disable=too-many-arguments
self,
data: str,
channel_id: str = None,
event_id: str = None,
event: str = None,
retry: int = None,
):
"""Send data using EventSource protocol. This call is not blocking.
:param str data: The data field for the message.
:param str event_id: The event ID to set the EventSource object's last
event ID value to.
:param str event: The event's type. If this is specified, an event will
be dispatched on the browser to the listener for the specified
event name; the web site would use addEventListener() to listen
for named events. The default event type is "message".
:param int retry: The reconnection time to use when attempting to send
the event. [What code handles this?] This must be an integer,
specifying the reconnection time in milliseconds. If a non-integer
value is specified, the field is ignored.
"""
warnings.warn("deprecated. use send", DeprecationWarning)
self.send(data, channel_id, event_id, event, retry)
def set_before_request_callback(self, func):
"""
Set function for callback before sse request. It can be used for authorization purposes
:param callable func: coroutine function with one parameter - request
"""
if not callable(func):
raise TypeError("{} should be callable".format(func))
if not inspect.iscoroutinefunction(func):
raise TypeError("{} should be coroutine function".format(func))
if len(inspect.signature(func).parameters) != 1:
raise ValueError("{} should get only one parameter - request".format(func))
self._before_request = func
def init_app(
self,
app: Sanic,
url: str = _DEFAULT_URL,
ping_interval: int = _DEFAULT_PING_INTERVAL,
before_request_func=None,
):
"""
Application initialization
:param `sanic.Sanic` app: Sanic application
:param str url: sse event url
:param int ping_interval: interval of ping message
"""
self._url = url
self._ping_interval = ping_interval
if before_request_func is not None:
self.set_before_request_callback(before_request_func)
self._pubsub = PubSub()
@app.listener("after_server_start")
def _on_start(_, loop):
self._ping_task = loop.create_task(self._ping())
@app.listener("before_server_stop")
async def _on_stop(_, __):
self._ping_task.cancel()
with contextlib.suppress(asyncio.CancelledError):
await self._ping_task
await self._pubsub.close()
app.sse_send = self.send
app.sse_send_nowait = self.send_nowait
@app.route(self._url, methods=["GET"])
async def _(request):
if self._before_request is not None:
await self._before_request(request)
channel_id = request.args.get("channel_id", None)
client_id = self._pubsub.register(channel_id)
async def streaming_fn(response):
try:
while True:
try:
data = await self._pubsub.get(client_id, channel_id)
except ValueError:
break
await response.write(data)
self._pubsub.task_done(client_id, channel_id)
finally:
self._pubsub.delete(client_id, channel_id)
return stream(
streaming_fn, headers=self._HEADERS, content_type="text/event-stream"
)
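# --- Hypothetical usage sketch (not part of the original module) ---
# Assuming the package exposes Sse at the top level, a minimal wiring looks like:
#
#     from sanic import Sanic, response
#     from sanic_sse import Sse
#
#     app = Sanic("example")
#     Sse(app)  # registers GET /sse and attaches app.sse_send
#
#     @app.route("/publish")
#     async def publish(request):
#         await request.app.sse_send("hello", event="greeting")
#         return response.text("ok")
#
# Browsers subscribed to /sse via EventSource then receive the "greeting" event.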
|
PypiClean
|
/Sleepy-0.1a1.tar.gz/Sleepy-0.1a1/sleepy/static/js/jquery.form.js
|
;(function($) {
/*
Usage Note:
-----------
Do not use both ajaxSubmit and ajaxForm on the same form. These
functions are intended to be exclusive. Use ajaxSubmit if you want
to bind your own submit handler to the form. For example,
$(document).ready(function() {
$('#myForm').bind('submit', function(e) {
e.preventDefault(); // <-- important
$(this).ajaxSubmit({
target: '#output'
});
});
});
Use ajaxForm when you want the plugin to manage all the event binding
for you. For example,
$(document).ready(function() {
$('#myForm').ajaxForm({
target: '#output'
});
});
When using ajaxForm, the ajaxSubmit function will be invoked for you
at the appropriate time.
*/
/**
* ajaxSubmit() provides a mechanism for immediately submitting
* an HTML form using AJAX.
*/
$.fn.ajaxSubmit = function(options) {
// fast fail if nothing selected (http://dev.jquery.com/ticket/2752)
if (!this.length) {
log('ajaxSubmit: skipping submit process - no element selected');
return this;
}
if (typeof options == 'function') {
options = { success: options };
}
var action = this.attr('action');
var url = (typeof action === 'string') ? $.trim(action) : '';
if (url) {
// clean url (don't include hash value)
url = (url.match(/^([^#]+)/)||[])[1];
}
url = url || window.location.href || '';
options = $.extend(true, {
url: url,
success: $.ajaxSettings.success,
type: this[0].getAttribute('method') || 'GET', // IE7 massage (see issue 57)
iframeSrc: /^https/i.test(window.location.href || '') ? 'javascript:false' : 'about:blank'
}, options);
// hook for manipulating the form data before it is extracted;
// convenient for use with rich editors like tinyMCE or FCKEditor
var veto = {};
this.trigger('form-pre-serialize', [this, options, veto]);
if (veto.veto) {
log('ajaxSubmit: submit vetoed via form-pre-serialize trigger');
return this;
}
// provide opportunity to alter form data before it is serialized
if (options.beforeSerialize && options.beforeSerialize(this, options) === false) {
log('ajaxSubmit: submit aborted via beforeSerialize callback');
return this;
}
var n,v,a = this.formToArray(options.semantic);
if (options.data) {
options.extraData = options.data;
for (n in options.data) {
if(options.data[n] instanceof Array) {
for (var k in options.data[n]) {
a.push( { name: n, value: options.data[n][k] } );
}
}
else {
v = options.data[n];
v = $.isFunction(v) ? v() : v; // if value is fn, invoke it
a.push( { name: n, value: v } );
}
}
}
// give pre-submit callback an opportunity to abort the submit
if (options.beforeSubmit && options.beforeSubmit(a, this, options) === false) {
log('ajaxSubmit: submit aborted via beforeSubmit callback');
return this;
}
// fire vetoable 'validate' event
this.trigger('form-submit-validate', [a, this, options, veto]);
if (veto.veto) {
log('ajaxSubmit: submit vetoed via form-submit-validate trigger');
return this;
}
var q = $.param(a);
if (options.type.toUpperCase() == 'GET') {
options.url += (options.url.indexOf('?') >= 0 ? '&' : '?') + q;
options.data = null; // data is null for 'get'
}
else {
options.data = q; // data is the query string for 'post'
}
var $form = this, callbacks = [];
if (options.resetForm) {
callbacks.push(function() { $form.resetForm(); });
}
if (options.clearForm) {
callbacks.push(function() { $form.clearForm(); });
}
// perform a load on the target only if dataType is not provided
if (!options.dataType && options.target) {
var oldSuccess = options.success || function(){};
callbacks.push(function(data) {
var fn = options.replaceTarget ? 'replaceWith' : 'html';
$(options.target)[fn](data).each(oldSuccess, arguments);
});
}
else if (options.success) {
callbacks.push(options.success);
}
options.success = function(data, status, xhr) { // jQuery 1.4+ passes xhr as 3rd arg
var context = options.context || options; // jQuery 1.4+ supports scope context
for (var i=0, max=callbacks.length; i < max; i++) {
callbacks[i].apply(context, [data, status, xhr || $form, $form]);
}
};
// are there files to upload?
var fileInputs = $('input:file', this).length > 0;
var mp = 'multipart/form-data';
var multipart = ($form.attr('enctype') == mp || $form.attr('encoding') == mp);
// options.iframe allows user to force iframe mode
// 06-NOV-09: now defaulting to iframe mode if file input is detected
if (options.iframe !== false && (fileInputs || options.iframe || multipart)) {
// hack to fix Safari hang (thanks to Tim Molendijk for this)
// see: http://groups.google.com/group/jquery-dev/browse_thread/thread/36395b7ab510dd5d
if (options.closeKeepAlive) {
$.get(options.closeKeepAlive, fileUpload);
}
else {
fileUpload();
}
}
else {
$.ajax(options);
}
// fire 'notify' event
this.trigger('form-submit-notify', [this, options]);
return this;
// private function for handling file uploads (hat tip to YAHOO!)
function fileUpload() {
var form = $form[0];
if ($(':input[name=submit],:input[id=submit]', form).length) {
// if there is an input with a name or id of 'submit' then we won't be
// able to invoke the submit fn on the form (at least not x-browser)
alert('Error: Form elements must not have name or id of "submit".');
return;
}
var s = $.extend(true, {}, $.ajaxSettings, options);
s.context = s.context || s;
var id = 'jqFormIO' + (new Date().getTime()), fn = '_'+id;
var $io = $('<iframe id="' + id + '" name="' + id + '" src="'+ s.iframeSrc +'" />');
var io = $io[0];
$io.css({ position: 'absolute', top: '-1000px', left: '-1000px' });
var xhr = { // mock object
aborted: 0,
responseText: null,
responseXML: null,
status: 0,
statusText: 'n/a',
getAllResponseHeaders: function() {},
getResponseHeader: function() {},
setRequestHeader: function() {},
abort: function(status) {
var e = (status === 'timeout' ? 'timeout' : 'aborted');
log('aborting upload... ' + e);
this.aborted = 1;
$io.attr('src', s.iframeSrc); // abort op in progress
xhr.error = e;
s.error && s.error.call(s.context, xhr, e, e);
g && $.event.trigger("ajaxError", [xhr, s, e]);
s.complete && s.complete.call(s.context, xhr, e);
}
};
var g = s.global;
// trigger ajax global events so that activity/block indicators work like normal
if (g && ! $.active++) {
$.event.trigger("ajaxStart");
}
if (g) {
$.event.trigger("ajaxSend", [xhr, s]);
}
if (s.beforeSend && s.beforeSend.call(s.context, xhr, s) === false) {
if (s.global) {
$.active--;
}
return;
}
if (xhr.aborted) {
return;
}
var timedOut = 0, timeoutHandle;
// add submitting element to data if we know it
var sub = form.clk;
if (sub) {
var n = sub.name;
if (n && !sub.disabled) {
s.extraData = s.extraData || {};
s.extraData[n] = sub.value;
if (sub.type == "image") {
s.extraData[n+'.x'] = form.clk_x;
s.extraData[n+'.y'] = form.clk_y;
}
}
}
// take a breath so that pending repaints get some cpu time before the upload starts
function doSubmit() {
// make sure form attrs are set
var t = $form.attr('target'), a = $form.attr('action');
// update form attrs in IE friendly way
form.setAttribute('target',id);
if (form.getAttribute('method') != 'POST') {
form.setAttribute('method', 'POST');
}
if (form.getAttribute('action') != s.url) {
form.setAttribute('action', s.url);
}
// ie borks in some cases when setting encoding
if (! s.skipEncodingOverride) {
$form.attr({
encoding: 'multipart/form-data',
enctype: 'multipart/form-data'
});
}
// support timeout
if (s.timeout) {
timeoutHandle = setTimeout(function() { timedOut = true; cb(true); }, s.timeout);
}
// add "extra" data to form if provided in options
var extraInputs = [];
try {
if (s.extraData) {
for (var n in s.extraData) {
extraInputs.push(
$('<input type="hidden" name="'+n+'" value="'+s.extraData[n]+'" />')
.appendTo(form)[0]);
}
}
// add iframe to doc and submit the form
$io.appendTo('body');
io.attachEvent ? io.attachEvent('onload', cb) : io.addEventListener('load', cb, false);
form.submit();
}
finally {
// reset attrs and remove "extra" input elements
form.setAttribute('action',a);
if(t) {
form.setAttribute('target', t);
} else {
$form.removeAttr('target');
}
$(extraInputs).remove();
}
}
if (s.forceSync) {
doSubmit();
}
else {
setTimeout(doSubmit, 10); // this lets dom updates render
}
var data, doc, domCheckCount = 50, callbackProcessed;
function cb(e) {
if (xhr.aborted || callbackProcessed) {
return;
}
if (e === true && xhr) {
xhr.abort('timeout');
return;
}
var doc = io.contentWindow ? io.contentWindow.document : io.contentDocument ? io.contentDocument : io.document;
if (!doc || doc.location.href == s.iframeSrc) {
// response not received yet
if (!timedOut)
return;
}
io.detachEvent ? io.detachEvent('onload', cb) : io.removeEventListener('load', cb, false);
var ok = true;
try {
if (timedOut) {
throw 'timeout';
}
var isXml = s.dataType == 'xml' || doc.XMLDocument || $.isXMLDoc(doc);
log('isXml='+isXml);
if (!isXml && window.opera && (doc.body == null || doc.body.innerHTML == '')) {
if (--domCheckCount) {
// in some browsers (Opera) the iframe DOM is not always traversable when
// the onload callback fires, so we loop a bit to accommodate
log('requeing onLoad callback, DOM not available');
setTimeout(cb, 250);
return;
}
// let this fall through because server response could be an empty document
//log('Could not access iframe DOM after multiple tries.');
//throw 'DOMException: not available';
}
//log('response detected');
xhr.responseText = doc.body ? doc.body.innerHTML : doc.documentElement ? doc.documentElement.innerHTML : null;
xhr.responseXML = doc.XMLDocument ? doc.XMLDocument : doc;
if (!xhr.responseText && xhr.responseXML && !s.dataType)
s.dataType = 'xml';
xhr.getResponseHeader = function(header){
var headers = {'content-type': s.dataType};
return headers[header];
};
var scr = /(json|script|text)/.test(s.dataType);
if (scr || s.textarea) {
// see if user embedded response in textarea
var ta = doc.getElementsByTagName('textarea')[0];
if (ta) {
xhr.responseText = ta.value;
}
else if (scr) {
// account for browsers injecting pre around json response
var pre = doc.getElementsByTagName('pre')[0];
var b = doc.getElementsByTagName('body')[0];
if (pre) {
xhr.responseText = pre.textContent;
}
else if (b) {
xhr.responseText = b.innerHTML;
}
}
}
else if (s.dataType == 'xml' && !xhr.responseXML && xhr.responseText != null) {
xhr.responseXML = toXml(xhr.responseText);
}
data = httpData(xhr, s.dataType, s);
}
catch(e){
log('error caught:',e);
ok = false;
xhr.error = e;
s.error && s.error.call(s.context, xhr, 'error', e);
g && $.event.trigger("ajaxError", [xhr, s, e]);
}
if (xhr.aborted) {
log('upload aborted');
ok = false;
}
// ordering of these callbacks/triggers is odd, but that's how $.ajax does it
if (ok) {
s.success && s.success.call(s.context, data, 'success', xhr);
g && $.event.trigger("ajaxSuccess", [xhr, s]);
}
g && $.event.trigger("ajaxComplete", [xhr, s]);
if (g && ! --$.active) {
$.event.trigger("ajaxStop");
}
s.complete && s.complete.call(s.context, xhr, ok ? 'success' : 'error');
callbackProcessed = true;
if (s.timeout)
clearTimeout(timeoutHandle);
// clean up
setTimeout(function() {
$io.removeData('form-plugin-onload');
$io.remove();
xhr.responseXML = null;
}, 100);
}
var toXml = $.parseXML || function(s, doc) { // use parseXML if available (jQuery 1.5+)
if (window.ActiveXObject) {
doc = new ActiveXObject('Microsoft.XMLDOM');
doc.async = 'false';
doc.loadXML(s);
}
else {
doc = (new DOMParser()).parseFromString(s, 'text/xml');
}
return (doc && doc.documentElement && doc.documentElement.nodeName != 'parsererror') ? doc : null;
};
var parseJSON = $.parseJSON || function(s) {
return window['eval']('(' + s + ')');
};
var httpData = function( xhr, type, s ) { // mostly lifted from jq1.4.4
var ct = xhr.getResponseHeader('content-type') || '',
xml = type === 'xml' || !type && ct.indexOf('xml') >= 0,
data = xml ? xhr.responseXML : xhr.responseText;
if (xml && data.documentElement.nodeName === 'parsererror') {
$.error && $.error('parsererror');
}
if (s && s.dataFilter) {
data = s.dataFilter(data, type);
}
if (typeof data === 'string') {
if (type === 'json' || !type && ct.indexOf('json') >= 0) {
data = parseJSON(data);
} else if (type === "script" || !type && ct.indexOf("javascript") >= 0) {
$.globalEval(data);
}
}
return data;
};
}
};
/**
* ajaxForm() provides a mechanism for fully automating form submission.
*
* The advantages of using this method instead of ajaxSubmit() are:
*
* 1: This method will include coordinates for <input type="image" /> elements (if the element
* is used to submit the form).
* 2. This method will include the submit element's name/value data (for the element that was
* used to submit the form).
* 3. This method binds the submit() method to the form for you.
*
* The options argument for ajaxForm works exactly as it does for ajaxSubmit. ajaxForm merely
* passes the options argument along after properly binding events for submit elements and
* the form itself.
*/
$.fn.ajaxForm = function(options) {
// in jQuery 1.3+ we can fix mistakes with the ready state
if (this.length === 0) {
var o = { s: this.selector, c: this.context };
if (!$.isReady && o.s) {
log('DOM not ready, queuing ajaxForm');
$(function() {
$(o.s,o.c).ajaxForm(options);
});
return this;
}
// is your DOM ready? http://docs.jquery.com/Tutorials:Introducing_$(document).ready()
log('terminating; zero elements found by selector' + ($.isReady ? '' : ' (DOM not ready)'));
return this;
}
return this.ajaxFormUnbind().bind('submit.form-plugin', function(e) {
if (!e.isDefaultPrevented()) { // if event has been canceled, don't proceed
e.preventDefault();
$(this).ajaxSubmit(options);
}
}).bind('click.form-plugin', function(e) {
var target = e.target;
var $el = $(target);
if (!($el.is(":submit,input:image"))) {
// is this a child element of the submit el? (ex: a span within a button)
var t = $el.closest(':submit');
if (t.length == 0) {
return;
}
target = t[0];
}
var form = this;
form.clk = target;
if (target.type == 'image') {
if (e.offsetX != undefined) {
form.clk_x = e.offsetX;
form.clk_y = e.offsetY;
} else if (typeof $.fn.offset == 'function') { // try to use dimensions plugin
var offset = $el.offset();
form.clk_x = e.pageX - offset.left;
form.clk_y = e.pageY - offset.top;
} else {
form.clk_x = e.pageX - target.offsetLeft;
form.clk_y = e.pageY - target.offsetTop;
}
}
// clear form vars
setTimeout(function() { form.clk = form.clk_x = form.clk_y = null; }, 100);
});
};
// ajaxFormUnbind unbinds the event handlers that were bound by ajaxForm
$.fn.ajaxFormUnbind = function() {
return this.unbind('submit.form-plugin click.form-plugin');
};
/**
* formToArray() gathers form element data into an array of objects that can
* be passed to any of the following ajax functions: $.get, $.post, or load.
* Each object in the array has both a 'name' and 'value' property. An example of
* an array for a simple login form might be:
*
* [ { name: 'username', value: 'jresig' }, { name: 'password', value: 'secret' } ]
*
* It is this array that is passed to pre-submit callback functions provided to the
* ajaxSubmit() and ajaxForm() methods.
*/
$.fn.formToArray = function(semantic) {
var a = [];
if (this.length === 0) {
return a;
}
var form = this[0];
var els = semantic ? form.getElementsByTagName('*') : form.elements;
if (!els) {
return a;
}
var i,j,n,v,el,max,jmax;
for(i=0, max=els.length; i < max; i++) {
el = els[i];
n = el.name;
if (!n) {
continue;
}
if (semantic && form.clk && el.type == "image") {
// handle image inputs on the fly when semantic == true
if(!el.disabled && form.clk == el) {
a.push({name: n, value: $(el).val()});
a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
}
continue;
}
v = $.fieldValue(el, true);
if (v && v.constructor == Array) {
for(j=0, jmax=v.length; j < jmax; j++) {
a.push({name: n, value: v[j]});
}
}
else if (v !== null && typeof v != 'undefined') {
a.push({name: n, value: v});
}
}
if (!semantic && form.clk) {
// input type=='image' are not found in elements array! handle it here
var $input = $(form.clk), input = $input[0];
n = input.name;
if (n && !input.disabled && input.type == 'image') {
a.push({name: n, value: $input.val()});
a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
}
}
return a;
};
/**
* Serializes form data into a 'submittable' string. This method will return a string
* in the format: name1=value1&name2=value2
*/
$.fn.formSerialize = function(semantic) {
//hand off to jQuery.param for proper encoding
return $.param(this.formToArray(semantic));
};
/**
* Serializes all field elements in the jQuery object into a query string.
* This method will return a string in the format: name1=value1&name2=value2
*/
$.fn.fieldSerialize = function(successful) {
var a = [];
this.each(function() {
var n = this.name;
if (!n) {
return;
}
var v = $.fieldValue(this, successful);
if (v && v.constructor == Array) {
for (var i=0,max=v.length; i < max; i++) {
a.push({name: n, value: v[i]});
}
}
else if (v !== null && typeof v != 'undefined') {
a.push({name: this.name, value: v});
}
});
//hand off to jQuery.param for proper encoding
return $.param(a);
};
/**
* Returns the value(s) of the element in the matched set. For example, consider the following form:
*
* <form><fieldset>
* <input name="A" type="text" />
* <input name="A" type="text" />
* <input name="B" type="checkbox" value="B1" />
* <input name="B" type="checkbox" value="B2"/>
* <input name="C" type="radio" value="C1" />
* <input name="C" type="radio" value="C2" />
* </fieldset></form>
*
* var v = $(':text').fieldValue();
* // if no values are entered into the text inputs
* v == ['','']
* // if values entered into the text inputs are 'foo' and 'bar'
* v == ['foo','bar']
*
* var v = $(':checkbox').fieldValue();
* // if neither checkbox is checked
* v === undefined
* // if both checkboxes are checked
* v == ['B1', 'B2']
*
* var v = $(':radio').fieldValue();
* // if neither radio is checked
* v === undefined
* // if first radio is checked
* v == ['C1']
*
* The successful argument controls whether or not the field element must be 'successful'
* (per http://www.w3.org/TR/html4/interact/forms.html#successful-controls).
* The default value of the successful argument is true. If this value is false the value(s)
* for each element is returned.
*
* Note: This method *always* returns an array. If no valid value can be determined the
* array will be empty, otherwise it will contain one or more values.
*/
$.fn.fieldValue = function(successful) {
for (var val=[], i=0, max=this.length; i < max; i++) {
var el = this[i];
var v = $.fieldValue(el, successful);
if (v === null || typeof v == 'undefined' || (v.constructor == Array && !v.length)) {
continue;
}
v.constructor == Array ? $.merge(val, v) : val.push(v);
}
return val;
};
/**
* Returns the value of the field element.
*/
$.fieldValue = function(el, successful) {
var n = el.name, t = el.type, tag = el.tagName.toLowerCase();
if (successful === undefined) {
successful = true;
}
if (successful && (!n || el.disabled || t == 'reset' || t == 'button' ||
(t == 'checkbox' || t == 'radio') && !el.checked ||
(t == 'submit' || t == 'image') && el.form && el.form.clk != el ||
tag == 'select' && el.selectedIndex == -1)) {
return null;
}
if (tag == 'select') {
var index = el.selectedIndex;
if (index < 0) {
return null;
}
var a = [], ops = el.options;
var one = (t == 'select-one');
var max = (one ? index+1 : ops.length);
for(var i=(one ? index : 0); i < max; i++) {
var op = ops[i];
if (op.selected) {
var v = op.value;
if (!v) { // extra pain for IE...
v = (op.attributes && op.attributes['value'] && !(op.attributes['value'].specified)) ? op.text : op.value;
}
if (one) {
return v;
}
a.push(v);
}
}
return a;
}
return $(el).val();
};
/**
* Clears the form data. Takes the following actions on the form's input fields:
* - input text fields will have their 'value' property set to the empty string
* - select elements will have their 'selectedIndex' property set to -1
* - checkbox and radio inputs will have their 'checked' property set to false
* - inputs of type submit, button, reset, and hidden will *not* be affected
* - button elements will *not* be affected
*/
$.fn.clearForm = function() {
return this.each(function() {
$('input,select,textarea', this).clearFields();
});
};
/**
* Clears the selected form elements.
*/
$.fn.clearFields = $.fn.clearInputs = function() {
return this.each(function() {
var t = this.type, tag = this.tagName.toLowerCase();
if (t == 'text' || t == 'password' || tag == 'textarea') {
this.value = '';
}
else if (t == 'checkbox' || t == 'radio') {
this.checked = false;
}
else if (tag == 'select') {
this.selectedIndex = -1;
}
});
};
/**
* Resets the form data. Causes all form elements to be reset to their original value.
*/
$.fn.resetForm = function() {
return this.each(function() {
// guard against an input with the name of 'reset'
// note that IE reports the reset function as an 'object'
if (typeof this.reset == 'function' || (typeof this.reset == 'object' && !this.reset.nodeType)) {
this.reset();
}
});
};
/**
* Enables or disables any matching elements.
*/
$.fn.enable = function(b) {
if (b === undefined) {
b = true;
}
return this.each(function() {
this.disabled = !b;
});
};
/**
* Checks/unchecks any matching checkboxes or radio buttons and
* selects/deselects and matching option elements.
*/
$.fn.selected = function(select) {
if (select === undefined) {
select = true;
}
return this.each(function() {
var t = this.type;
if (t == 'checkbox' || t == 'radio') {
this.checked = select;
}
else if (this.tagName.toLowerCase() == 'option') {
var $sel = $(this).parent('select');
if (select && $sel[0] && $sel[0].type == 'select-one') {
// deselect all other options
$sel.find('option').selected(false);
}
this.selected = select;
}
});
};
// helper fn for console logging
// set $.fn.ajaxSubmit.debug to true to enable debug logging
function log() {
if ($.fn.ajaxSubmit.debug) {
var msg = '[jquery.form] ' + Array.prototype.join.call(arguments,'');
if (window.console && window.console.log) {
window.console.log(msg);
}
else if (window.opera && window.opera.postError) {
window.opera.postError(msg);
}
}
};
})(jQuery);
|
PypiClean
|
/cernopendata-client-0.3.0.tar.gz/cernopendata-client-0.3.0/docs/cliapi.rst
|
.. _cliapi:
CLI API
=======
.. code-block:: console
$ cernopendata-client --help
Usage: cernopendata-client [OPTIONS] COMMAND [ARGS]...
Options:
--help Show this message and exit.
Commands:
download-files Download data files belonging to a record.
get-file-locations Get a list of data file locations of a record.
get-metadata Get metadata content of a record.
list-directory List contents of a EOSPUBLIC Open Data directory.
verify-files Verify downloaded data file integrity.
version Return cernopendata-client version.
.. click:: cernopendata_client.cli:cernopendata_client
:prog: cernopendata-client
:nested: full
|
PypiClean
|
/django-micro-1.8.0.tar.gz/django-micro-1.8.0/README.rst
|
============
Django Micro
============
.. image::
https://img.shields.io/pypi/v/django-micro.svg
:target: https://pypi.python.org/pypi/django-micro
.. image::
https://img.shields.io/badge/status-stable-brightgreen.svg
Django Micro is a lightweight wrapper around Django that turns it into a microframework for writing small applications in a single file.
**tl;dr:** See the example_ of full-featured application.
What works
==========
- `Configuration`_
- `Views and routes`_
- `Models and migrations`_
- `Management commands`_
- `Custom template tags`_
- `Testing`_
- `Admin interface`_
- Third party apps
Installation
============
.. code-block::
$ pip install django-micro
Quick start
===========
Create ``app.py`` file with following content.
.. code-block:: python
from django_micro import configure, route, run
from django.http import HttpResponse
DEBUG = True
configure(locals())
@route('', name='homepage')
def homepage(request):
name = request.GET.get('name', 'World')
return HttpResponse('Hello, {}!'.format(name))
application = run()
Run the application.
.. code-block::
$ python app.py runserver
**Note:** Parent directory of the ``app.py`` file must have a valid python module name. Under the hood, Micro adds that directory to ``INSTALLED_APPS`` and uses it as a regular Django application.
Compatibility
=============
The latest release of django-micro supports only the latest stable release of Django. This is the only way to keep the codebase of django-micro clean, without hacks for different versions of Django.
- **Django version:** >=2.0, <2.1
- **Python version:** >=3.4
Run and deployment
==================
On localhost the application runs with the built-in ``runserver`` command and deploys as a standard WSGI application.
.. code-block::
$ python app.py runserver
$ gunicorn example.app --bind localhost:8000
$ uwsgi --module example.app --http localhost:8000
This behaviour is provided by the single line ``application = run()``, which is actually just a shortcut for the following code.
.. code-block:: python
if __name__ == '__main__':
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
else:
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Configuration
=============
The call to the ``configure`` function must be placed at the top of your application, above the definition of views and models and the imports of other modules. It may violate PEP8, but this is the only way to make it work. You can’t define models or import models from another application until Django is configured.
I recommend defining all the configuration in the global namespace and calling ``configure`` with the ``locals()`` argument. Don’t worry, ``configure`` takes only *UPPERCASE* variables.
.. code-block:: python
from django_micro import configure
DEBUG = True
configure(locals())
Views and routes
================
Routing is wrapped in a single function ``route``. You can use it as a decorator.
.. code-block:: python
from django_micro import route
@route('blog/<int:year>/', name='year_archive')
def year_archive(request, year):
return HttpResponse('hello')
Or as a regular function.
.. code-block:: python
def year_archive(request):
return HttpResponse('hello')
route('blog/<int:year>/', year_archive, name='year_archive')
Also ``route`` may be used with class-based views.
.. code-block:: python
@route('blog/<int:year>/', name='year_archive')
class YearArchiveView(View):
def get(self, request, year):
return HttpResponse('hello')
# or directly
route('blog/<int:year>/', YearArchiveView.as_view(), name='year_archive')
Micro uses the new simplified routing syntax which was introduced in Django 2.0. But if you’d like to use the regex-based routing syntax, just add ``regex=True`` to the decorator.
.. code-block:: python
@route(r'^articles/(?P<year>[0-9]{4})/$', regex=True)
def year_archive(request, year):
return HttpResponse('hello')
You can always access ``urlpatterns`` directly to use the low-level API.
.. code-block:: python
from django.urls import path
import django_micro as micro
micro.urlpatterns += [
path('', homepage, name='homepage'),
]
**Note:** You can include third-party apps into Micro’s ``urlpatterns``, but you currently can’t use Micro itself as a third-party app. Micro is a singleton, and you can’t create more than one instance of it.
Models and migrations
=====================
Micro works well with models and migrations. Just define your models in the ``app.py`` file. If you need migrations, create a ``migrations`` directory next to ``app.py`` and call ``python app.py makemigrations``.
.. code-block::
blog
├── __init__.py
├── app.py
└── migrations
├── __init__.py
└── 0001_initial.py
.. code-block:: python
from django.db import models
class Post(models.Model):
title = models.CharField(max_length=255)
class Meta:
app_label = 'blog'
**Note:** You always need to set the ``app_label`` attribute in the ``Meta`` class of your models. For example, if the application is placed in ``blog/app.py``, ``app_label`` should be ``blog``.
To get the ``app_label`` you can use the ``get_app_label`` shortcut.
.. code-block:: python
from django_micro import get_app_label
class Meta:
app_label = get_app_label()
You can also place models separately in a ``models.py`` file. In this case ``app_label`` is not required, but this is not the micro-way ;) A minimal sketch of that layout is shown below.
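A hedged sketch of the split layout, assuming the same ``blog`` package as in the earlier example (the file names and the ``Post`` model are illustrative, not part of the library):
.. code-block:: python

    # blog/models.py
    from django.db import models

    class Post(models.Model):
        title = models.CharField(max_length=255)

    # blog/app.py
    from django_micro import configure, run

    DEBUG = True
    configure(locals())

    from .models import Post  # import models only after configure()

    application = run()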
Management commands
===================
Now you can create any management command without creating a file in ``yourapp/management/commands``. Just define a command class in your ``app.py`` and wrap it with the ``@command`` decorator.
.. code-block:: python
from django.core.management.base import BaseCommand
from django_micro import command
@command('print_hello')
class PrintHelloCommand(BaseCommand):
def handle(self, *args, **options):
self.stdout.write('Hello, Django!')
You can also create function-based commands.
.. code-block:: python
from django_micro import command
@command
def print_hello(cmd, **options):
cmd.stdout.write('Hello, Django!')
Unfortunately, the ``command`` decorator uses a few dirty hacks for command registration. But everything works fine if you don’t think about it ;)
Custom template tags
====================
Use ``template`` to register template tags. It works the same as the ``register`` object in a tag library file.
.. code-block:: python
from django_micro import template
@template.simple_tag
def print_hello(name):
return 'Hello, {}!'.format(name)
@template.filter
def remove_spaces(value):
return value.replace(' ', '')
You don’t need to use the ``load`` tag. All template tags are global.
Testing
=======
No magic. Use the built-in test cases.
.. code-block:: python
from django.test import TestCase
class TestIndexView(TestCase):
def test_success(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
To run the tests defined in ``app.py``, use the following command:
.. code-block::
$ python app.py test __main__
Admin interface
===============
Django admin requires a lot of dependencies in apps and middleware. We realized that adding a huge list of apps to your config just to use the admin interface is not simple, so we added a shortcut ``django_admin=True`` to the ``configure`` function that automatically includes all the needed dependencies.
.. code-block:: python
from django.contrib import admin
from django.db import models
from django_micro import configure, get_app_label, route
configure(locals(), django_admin=True)
class Post(models.Model):
title = models.CharField(max_length=255)
content = models.TextField(blank=True)
create_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = get_app_label()
ordering = ('-create_date',)
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
pass
route('admin/', admin.site.urls)
Who uses django-micro
=====================
- `storagl <https://github.com/zenwalker/storagl>`_ — simple storage for screenshots and other shared files with short direct links
Related projects
================
- importd_ — A popular implementation of the django-as-microframework idea, but too magical and over-engineered in my opinion.
- djmicro_ — A good and lightweight wrapper; I’ve taken a few ideas from it. But it’s experimental, undocumented, and no longer developed.
.. _example: https://github.com/zenwalker/django-micro/tree/master/example
.. _djmicro: https://github.com/apendleton/djmicro
.. _importd: https://github.com/amitu/importd
|
PypiClean
|
/benchmark_templates-0.1.2-py3-none-any.whl/benchtmpl/workflow/parameter/base.py
|
from benchtmpl.error import InvalidParameterError
import benchtmpl.workflow.parameter.declaration as pd
class ParameterBase(object):
"""Base class for template parameter and parameter argument values. The
base class maintains the unique parameter identifier and the information
about the data type.
"""
def __init__(self, identifier, data_type):
"""Initialize the unique identifier and data type. Raises value error
if the given data type identifier is not valid.
Parameters
----------
identifier: string
Unique parameter identifier
data_type: string
Identifier for parameter data type
Raises
------
benchtmpl.error.InvalidParameterError
"""
if data_type not in pd.DATA_TYPES:
raise InvalidParameterError('invalid data type \'{}\''.format(data_type))
self.identifier = identifier
self.data_type = data_type
def is_bool(self):
"""Test if data type for the parameter declaration is DT_BOOL.
Returns
-------
bool
"""
return self.data_type == pd.DT_BOOL
def is_file(self):
"""Test if data type for the parameter declaration is DT_FILE.
Returns
-------
bool
"""
return self.data_type == pd.DT_FILE
def is_float(self):
"""Test if data type for the parameter declaration is DT_DECIMAL.
Returns
-------
bool
"""
return self.data_type == pd.DT_DECIMAL
def is_int(self):
"""Test if data type for the parameter declaration is DT_INTEGER.
Returns
-------
bool
"""
return self.data_type == pd.DT_INTEGER
def is_list(self):
"""Test if data type for the parameter declaration is DT_LIST.
Returns
-------
bool
"""
return self.data_type == pd.DT_LIST
def is_record(self):
"""Test if data type for the parameter declaration is DT_RECORD.
Returns
-------
bool
"""
return self.data_type == pd.DT_RECORD
def is_string(self):
"""Test if data type for the parameter declaration is DT_STRING.
Returns
-------
bool
"""
return self.data_type == pd.DT_STRING
class TemplateParameter(ParameterBase):
"""The template parameter is a simple wrapper around a dictionary that
contains a parameter declaration. The wrapper provides easy access to the
different components of the parameter declaration.
"""
def __init__(self, obj, children=None):
"""Initialize the different attributes of a template parameter
declaration from a given dictionary.
Parameters
----------
obj: dict
Dictionary containing the template parameter declaration properties
children: list(benchtmpl.workflow.parameter.base.TemplateParameter), optional
Optional list of parameter children for parameter lists or records
"""
super(TemplateParameter, self).__init__(
identifier=obj[pd.LABEL_ID],
data_type=obj[pd.LABEL_DATATYPE]
)
self.obj = obj
self.name = obj[pd.LABEL_NAME]
self.description = obj[pd.LABEL_DESCRIPTION]
self.index = obj[pd.LABEL_INDEX]
self.default_value = obj[pd.LABEL_DEFAULT] if pd.LABEL_DEFAULT in obj else None
self.is_required = obj[pd.LABEL_REQUIRED]
self.values = obj[pd.LABEL_VALUES] if pd.LABEL_VALUES in obj else None
self.parent = obj[pd.LABEL_PARENT] if pd.LABEL_PARENT in obj else None
self.as_constant = obj[pd.LABEL_AS] if pd.LABEL_AS in obj else None
self.children = children
def add_child(self, para):
"""Short-cut to add an element to the list of children of the parameter.
Parameters
----------
para: benchtmpl.workflow.parameter.base.TemplateParameter
Template parameter instance for child parameter
"""
self.children.append(para)
self.children.sort(key=lambda p: (p.index, p.identifier))
def get_constant(self):
"""Get the value of the as_constant property.
Returns
-------
string
"""
return self.as_constant
def has_children(self):
"""Test if a parameter has children. Only returns True if the list of
children is not None and not empty.
Returns
-------
bool
"""
if self.children is not None:
return len(self.children) > 0
return False
def has_constant(self):
"""True if the as_constant property is not None.
Returns
-------
bool
"""
return self.as_constant is not None
def prompt(self):
"""Get default input prompt for the parameter declaration. The prompt
contains an indication of the data type, the parameter name and the
default value (if defined).
Returns
-------
string
"""
val = str(self.name)
# Add text that indicates the parameter type
if self.is_bool():
val += ' (bool)'
elif self.is_file():
val += ' (file)'
elif self.is_float():
val += ' (decimal)'
elif self.is_int():
val += ' (integer)'
elif self.is_string():
val += ' (string)'
if self.default_value is not None:
if self.is_bool() or self.is_float() or self.is_int():
val += ' [default ' + str(self.default_value) + ']'
else:
val += ' [default \'' + str(self.default_value) + '\']'
return val + ': '
def to_dict(self):
"""Get the dictionary serialization for the parameter declaration.
Returns
-------
dict
"""
return self.obj
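# --- Usage sketch (illustrative, not part of the original module) ---
# Builds a TemplateParameter from a plain declaration dictionary. The concrete
# keys come from the LABEL_* constants of the declaration module `pd`; the
# identifier, name, and default value below are made-up example data.
if __name__ == '__main__':
    decl = {
        pd.LABEL_ID: 'threshold',
        pd.LABEL_DATATYPE: pd.DT_DECIMAL,
        pd.LABEL_NAME: 'Threshold',
        pd.LABEL_DESCRIPTION: 'Cut-off value used by the benchmark workflow',
        pd.LABEL_INDEX: 0,
        pd.LABEL_REQUIRED: True,
        pd.LABEL_DEFAULT: 4.5,
    }
    para = TemplateParameter(decl)
    print(para.is_float())   # True
    print(para.prompt())     # e.g. "Threshold (decimal) [default 4.5]: "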
|
PypiClean
|
/pyAya-2.0.0-py3-none-any.whl/geezlibs/types/messages_and_media/document.py
|
from datetime import datetime
from typing import List
import geezlibs
from geezlibs import raw, utils
from geezlibs import types
from geezlibs.file_id import FileId, FileType, FileUniqueId, FileUniqueType
from ..object import Object
class Document(Object):
"""A generic file (as opposed to photos, voice messages, audio files, ...).
Parameters:
file_id (``str``):
Identifier for this file, which can be used to download or reuse the file.
file_unique_id (``str``):
Unique identifier for this file, which is supposed to be the same over time and for different accounts.
Can't be used to download or reuse the file.
file_name (``str``, *optional*):
Original filename as defined by sender.
mime_type (``str``, *optional*):
MIME type of the file as defined by sender.
file_size (``int``, *optional*):
File size.
date (:py:obj:`~datetime.datetime`, *optional*):
Date the document was sent.
thumbs (List of :obj:`~geezlibs.types.Thumbnail`, *optional*):
Document thumbnails as defined by sender.
"""
def __init__(
self,
*,
client: "geezlibs.Client" = None,
file_id: str,
file_unique_id: str,
file_name: str = None,
mime_type: str = None,
file_size: int = None,
date: datetime = None,
thumbs: List["types.Thumbnail"] = None
):
super().__init__(client)
self.file_id = file_id
self.file_unique_id = file_unique_id
self.file_name = file_name
self.mime_type = mime_type
self.file_size = file_size
self.date = date
self.thumbs = thumbs
@staticmethod
def _parse(client, document: "raw.types.Document", file_name: str) -> "Document":
return Document(
file_id=FileId(
file_type=FileType.DOCUMENT,
dc_id=document.dc_id,
media_id=document.id,
access_hash=document.access_hash,
file_reference=document.file_reference
).encode(),
file_unique_id=FileUniqueId(
file_unique_type=FileUniqueType.DOCUMENT,
media_id=document.id
).encode(),
file_name=file_name,
mime_type=document.mime_type,
file_size=document.size,
date=utils.timestamp_to_datetime(document.date),
thumbs=types.Thumbnail._parse(client, document),
client=client
)
|
PypiClean
|
/secrets_env-0.28.3-py3-none-any.whl/secrets_env/subprocess.py
|
import enum
import logging
import queue
import subprocess
import threading
import time
from typing import IO, Iterator, List, Optional, Sequence, Tuple
from secrets_env.utils import strip_ansi
logger = logging.getLogger(__name__)
class Channel(enum.IntEnum):
prefix: str
def __new__(cls, value: int, prefix: str) -> "Channel":
obj = int.__new__(cls, value)
obj._value_ = value
obj.prefix = prefix
return obj
Stdout = 1, "<"
Stderr = 2, "<[stderr]"
class Run:
"""Yet another :py:class:`subprocess.Popen` wrapper. Runs subprocess and
yields both stdout and stderr in real time."""
def __init__(self, cmd: Sequence[str]) -> None:
"""Starts a run."""
self._queue = queue.Queue()
self._stdouts: List[str] = []
self._stderrs: List[str] = []
# start process
logger.debug("$ %s", " ".join(cmd))
self._proc = subprocess.Popen(
args=cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=1,
encoding="utf-8",
)
# fire threads to read output
self._threads = (
threading.Thread(
target=polling_output,
args=(Channel.Stdout, self._proc.stdout, self._queue),
daemon=True,
),
threading.Thread(
target=polling_output,
args=(Channel.Stderr, self._proc.stderr, self._queue),
daemon=True,
),
)
for t in self._threads:
t.start()
def _iter_output(self) -> Iterator[Tuple[Channel, str]]:
POLL_INTERVAL = 0.05
def _flush_queue():
while not self._queue.empty():
ch, line = self._queue.get_nowait()
if ch == Channel.Stdout:
self._stdouts.append(line)
else:
self._stderrs.append(line)
yield ch, line
while self._proc.poll() is None:
yield from _flush_queue()
time.sleep(POLL_INTERVAL)
time.sleep(POLL_INTERVAL)
yield from _flush_queue()
def wait(self) -> int:
"""Wait until process terminated"""
for _ in self._iter_output():
...
return self._proc.wait()
def iter_any_output(self) -> Iterator[str]:
"""Reads any output in real time.
This method does not impact :py:attr:`stdout` or :py:attr:`stderr`."""
for _, line in self._iter_output():
yield line
@property
def return_code(self) -> Optional[int]:
"""The child process return code"""
return self._proc.returncode
@property
def stdout(self) -> str:
"""Wait until process terminated and returns stdout outputs"""
self.wait()
return "".join(self._stdouts)
@property
def stderr(self) -> str:
"""Wait until process terminated and returns stderr outputs"""
self.wait()
return "".join(self._stderrs)
def polling_output(ch: Channel, source: IO[str], q: queue.Queue):
for line in source:
q.put((ch, line))
logger.debug("%s %s", ch.prefix, strip_ansi(line.rstrip()))
|
PypiClean
|
/kaze_python-0.8.9-py3-none-any.whl/kaze/SmartContract/ContractParameterContext.py
|
import json
import binascii
from logzero import logger
from kaze.Core.TX.Transaction import ContractTransaction
from kaze.SmartContract.Contract import Contract, ContractType
from kaze.SmartContract.ContractParameterType import ContractParameterType, ToName
from kaze.VM.ScriptBuilder import ScriptBuilder
from kaze.IO.MemoryStream import MemoryStream
from kazecore.IO.BinaryReader import BinaryReader
from kazecore.IO.BinaryWriter import BinaryWriter
from kaze.VM import OpCode
from kaze.Core.Witness import Witness
class ContractParamater:
Type = None
Value = None
def __init__(self, type):
if isinstance(type, ContractParameterType):
self.Type = type
elif isinstance(type, int):
self.Type = ContractParameterType(type)
else:
raise Exception("Invalid Contract Parameter Type %s. Must be ContractParameterType or int" % type)
def ToJson(self):
jsn = {}
jsn['type'] = self.Type.name
return jsn
class ContextItem:
Script = None
ContractParameters = None
Signatures = None
def __init__(self, contract):
self.Script = contract.Script
self.ContractParameters = []
for b in bytearray(contract.ParameterList):
p = ContractParamater(b)
self.ContractParameters.append(p)
def ToJson(self):
jsn = {}
if self.Script is not None:
if type(self.Script) is str:
jsn['script'] = self.Script
else:
try:
jsn['script'] = self.Script.decode()
except UnicodeDecodeError:
jsn['script'] = binascii.hexlify(self.Script).decode()
jsn['parameters'] = [p.ToJson() for p in self.ContractParameters]
if self.Signatures is not None:
jsn['signatures'] = {}
for key, value in self.Signatures.items():
if value is not None:
if type(value) is str:
jsn['signatures'][key] = value
else:
jsn['signatures'][key] = value.decode()
else:
logger.info("Seems like {} has empty signature".format(key))
return jsn
class ContractParametersContext:
Verifiable = None
ScriptHashes = None
ContextItems = None
IsMultiSig = None
def __init__(self, verifiable, isMultiSig=False):
self.Verifiable = verifiable
self.ScriptHashes = verifiable.GetScriptHashesForVerifying()
self.ContextItems = {}
self.IsMultiSig = isMultiSig
@property
def Completed(self):
if len(self.ContextItems) < len(self.ScriptHashes):
return False
for item in self.ContextItems.values():
if item is None:
return False
for p in item.ContractParameters:
# for multi signature contracts, we need to make sure
# that this check runs
if self.IsMultiSig:
if p is None or p.Value is None:
return False
if p.Type is not None:
if p.Value == 0:
return False
# for non-multisig contracts ( specifically, sending from contract
# addresses that have more than one param ) we need to allow an empty
# value, or, if it is empty, to fill it with 0
else:
if p is None or p.Value is None:
if p.Type is not None:
p.Value = 0
return True
def Add(self, contract, index, parameter):
item = self.CreateItem(contract)
if item:
item.ContractParameters[index].Value = parameter
return True
return False
def CreateItem(self, contract):
if contract.ScriptHash.ToBytes() in self.ContextItems.keys():
return self.ContextItems[contract.ScriptHash.ToBytes()]
if contract.ScriptHash not in self.ScriptHashes:
return None
item = ContextItem(contract)
self.ContextItems[contract.ScriptHash.ToBytes()] = item
return item
def AddSignature(self, contract, pubkey, signature):
if contract.Type == ContractType.MultiSigContract:
item = self.CreateItem(contract)
if item is None:
return False
for p in item.ContractParameters:
if p.Value is not None:
return False
if item.Signatures is None:
item.Signatures = {}
elif pubkey.encode_point(True) in item.Signatures:
return False
points = []
temp = binascii.unhexlify(contract.Script)
ms = MemoryStream(binascii.unhexlify(contract.Script))
reader = BinaryReader(ms)
numr = reader.ReadUInt8()
while reader.ReadUInt8() == 33:
points.append(binascii.hexlify(reader.ReadBytes(33)))
ms.close()
if pubkey.encode_point(True) not in points:
return False
item.Signatures[pubkey.encode_point(True).decode()] = binascii.hexlify(signature)
if len(item.Signatures) == len(contract.ParameterList):
i = 0
points.sort(reverse=True)
for k in points:
if k.decode() in item.Signatures:
if self.Add(contract, i, item.Signatures[k.decode()]) is None:
raise Exception("Invalid operation")
i += 1
item.Signatures = None
return True
else:
index = -1
if contract.ParameterList == '00':
contract.ParameterList = b'\x00'
length = len(contract.ParameterList)
for i in range(0, length):
if ContractParameterType(contract.ParameterList[i]) == ContractParameterType.Signature:
if index >= 0:
raise Exception("Signature must be first")
else:
index = i
return self.Add(contract, index, signature)
def GetIndex(self, script_hash):
for index, hash in enumerate(self.ScriptHashes):
if hash == script_hash:
return index
return -1
def GetParameters(self, script_hash):
if script_hash.ToBytes() in self.ContextItems.keys():
return self.ContextItems[script_hash.ToBytes()].ContractParameters
def GetParameter(self, scriptHash, index):
params = self.GetParameters(scriptHash)
if params:
return params[index]
return None
def GetScripts(self):
if not self.Completed:
raise Exception("Signature Context not complete")
scripts = []
for i in range(0, len(self.ScriptHashes)):
item = self.ContextItems[self.ScriptHashes[i].ToBytes()]
sb = ScriptBuilder()
plist = list(item.ContractParameters)
plist.reverse()
for p in plist:
if type(p.Value) is list:
pa = p.Value
pa.reverse()
listlength = len(pa)
for listitem in pa:
sb.push(listitem)
sb.push(listlength)
sb.Emit(OpCode.PACK)
else:
sb.push(p.Value)
vscript = bytearray(0)
if item.Script is not None:
if type(item.Script) is str:
item.Script = item.Script.encode('utf-8')
vscript = item.Script
# logger.info("SCRIPT IS %s " % item.Script)
witness = Witness(
invocation_script=sb.ToArray(),
verification_script=vscript
)
scripts.append(witness)
return scripts
def ToJson(self):
jsn = {}
jsn['type'] = 'kaze.Core.ContractTransaction' # Verifiable.GetType().FullName
ms = MemoryStream()
w = BinaryWriter(ms)
self.Verifiable.SerializeUnsigned(w)
ms.flush()
jsn['hex'] = ms.ToArray().decode()
jsn['items'] = {}
for key, value in self.ContextItems.items():
if type(key) == str:
shkey = "0x{}".format(key)
else:
shkey = "0x{}".format(key.decode())
jsn['items'][shkey] = value.ToJson()
return jsn
def FromJson(jsn, isMultiSig=True):
try:
parsed = json.loads(jsn)
if parsed['type'] == 'kaze.Core.ContractTransaction':
verifiable = ContractTransaction()
ms = MemoryStream(binascii.unhexlify(parsed['hex']))
r = BinaryReader(ms)
verifiable.DeserializeUnsigned(r)
context = ContractParametersContext(verifiable, isMultiSig=isMultiSig)
for key, value in parsed['items'].items():
if "0x" in key:
key = key[2:]
key = key.encode()
parameterbytes = []
for pt in value['parameters']:
if pt['type'] == 'Signature':
parameterbytes.append(0)
contract = Contract.Create(value['script'], parameterbytes, key)
context.ContextItems[key] = ContextItem(contract)
if 'signatures' in value:
context.ContextItems[key].Signatures = value['signatures']
return context
else:
raise ("Unsupported transaction type in JSON")
except Exception as e:
logger.error("Failed to import ContractParametersContext from JSON: {}".format(e))
|
PypiClean
|
/BIA_OBS-1.0.3.tar.gz/BIA_OBS-1.0.3/BIA/static/dist/node_modules/postcss-load-config/README.md
|
[![npm][npm]][npm-url]
[![node][node]][node-url]
[![deps][deps]][deps-url]
[![test][test]][test-url]
[![coverage][cover]][cover-url]
[![code style][style]][style-url]
[![chat][chat]][chat-url]
<div align="center">
<img width="100" height="100" title="Load Options" src="http://michael-ciniawsky.github.io/postcss-load-options/logo.svg">
<a href="https://github.com/postcss/postcss">
<img width="110" height="110" title="PostCSS" src="http://postcss.github.io/postcss/logo.svg" hspace="10">
</a>
<img width="100" height="100" title="Load Plugins" src="http://michael-ciniawsky.github.io/postcss-load-plugins/logo.svg">
<h1>Load Config</h1>
</div>
<h2 align="center">Install</h2>
```bash
npm i -D postcss-load-config
```
<h2 align="center">Usage</h2>
```bash
npm i -S|-D postcss-plugin
```
Install all required PostCSS plugins and save them to your **package.json** `dependencies`/`devDependencies`
Then create a PostCSS config file by choosing one of the following formats
### `package.json`
Create a **`postcss`** section in your project's **`package.json`**
```
Project (Root)
|– client
|– public
|
|- package.json
```
```json
{
"postcss": {
"parser": "sugarss",
"map": false,
"plugins": {
"postcss-plugin": {}
}
}
}
```
### `.postcssrc`
Create a **`.postcssrc`** file in JSON or YAML format
> ℹ️ It's recommended to use an extension (e.g **`.postcssrc.json`** or **`.postcssrc.yml`**) instead of `.postcssrc`
```
Project (Root)
|– client
|– public
|
|- (.postcssrc|.postcssrc.json|.postcssrc.yml)
|- package.json
```
**`.postcssrc.json`**
```json
{
"parser": "sugarss",
"map": false,
"plugins": {
"postcss-plugin": {}
}
}
```
**`.postcssrc.yml`**
```yaml
parser: sugarss
map: false
plugins:
postcss-plugin: {}
```
### `.postcssrc.js` or `postcss.config.js`
You may need some logic within your config. In this case create JS file named **`.postcssrc.js`** or **`postcss.config.js`**
```
Project (Root)
|– client
|– public
|
|- (.postcssrc.js|postcss.config.js)
|- package.json
```
You can export the config as an `{Object}`
**.postcssrc.js**
```js
module.exports = {
parser: 'sugarss',
map: false,
plugins: {
'postcss-plugin': {}
}
}
```
Or export a `{Function}` that returns the config (more about the `ctx` param below)
**.postcssrc.js**
```js
module.exports = (ctx) => ({
parser: ctx.parser ? 'sugarss' : false,
map: ctx.env === 'development' ? ctx.map : false,
plugins: {
'postcss-plugin': ctx.options.plugin
}
})
```
Plugins can be loaded either using an `{Object}` or an `{Array}`
#### `{Object}`
**.postcssrc.js**
```js
module.exports = ({ env }) => ({
...options,
plugins: {
'postcss-plugin': env === 'production' ? {} : false
}
})
```
> ℹ️ When using an `{Object}`, the key can be a Node.js module name, a path to a JavaScript file that is relative to the directory of the PostCSS config file, or an absolute path to a JavaScript file.
#### `{Array}`
**.postcssrc.js**
```js
module.exports = ({ env }) => ({
...options,
plugins: [
env === 'production' ? require('postcss-plugin')() : false
]
})
```
> :warning: When using an `{Array}`, make sure to `require()` each plugin
<h2 align="center">Options</h2>
|Name|Type|Default|Description|
|:--:|:--:|:-----:|:----------|
|[**`to`**](#to)|`{String}`|`undefined`|Destination File Path|
|[**`map`**](#map)|`{String\|Object}`|`false`|Enable/Disable Source Maps|
|[**`from`**](#from)|`{String}`|`undefined`|Source File Path|
|[**`parser`**](#parser)|`{String\|Function}`|`false`|Custom PostCSS Parser|
|[**`syntax`**](#syntax)|`{String\|Function}`|`false`|Custom PostCSS Syntax|
|[**`stringifier`**](#stringifier)|`{String\|Function}`|`false`|Custom PostCSS Stringifier|
### `parser`
**.postcssrc.js**
```js
module.exports = {
parser: 'sugarss'
}
```
### `syntax`
**.postcssrc.js**
```js
module.exports = {
syntax: 'postcss-scss'
}
```
### `stringifier`
**.postcssrc.js**
```js
module.exports = {
stringifier: 'midas'
}
```
### [**`map`**](https://github.com/postcss/postcss/blob/master/docs/source-maps.md)
**.postcssrc.js**
```js
module.exports = {
map: 'inline'
}
```
> :warning: In most cases `options.from` && `options.to` are set by the third-party which integrates this package (CLI, gulp, webpack). It's unlikely one needs to set/use `options.from` && `options.to` within a config file. Unless you're a third-party plugin author using this module and its Node API directly, **don't set `options.from` && `options.to` yourself**
### `to`
```js
module.exports = {
to: 'path/to/dest.css'
}
```
### `from`
```js
module.exports = {
from: 'path/to/src.css'
}
```
<h2 align="center">Plugins</h2>
### `{} || null`
The plugin will be loaded with defaults
```js
'postcss-plugin': {} || null
```
**.postcssrc.js**
```js
module.exports = {
plugins: {
'postcss-plugin': {} || null
}
}
```
> :warning: `{}` must be an **empty** `{Object}` literal
### `{Object}`
The plugin will be loaded with given options
```js
'postcss-plugin': { option: '', option: '' }
```
**.postcssrc.js**
```js
module.exports = {
plugins: {
'postcss-plugin': { option: '', option: '' }
}
}
```
### `false`
The plugin will not be loaded
```js
'postcss-plugin': false
```
**.postcssrc.js**
```js
module.exports = {
plugins: {
'postcss-plugin': false
}
}
```
### `Ordering`
Plugin **execution order** is determined by declaration in the plugins section (**top-down**)
```js
{
plugins: {
'postcss-plugin': {}, // [0]
'postcss-plugin': {}, // [1]
'postcss-plugin': {} // [2]
}
}
```
<h2 align="center">Context</h2>
When using a `{Function}` (`postcss.config.js` or `.postcssrc.js`), it's possible to pass context to `postcss-load-config`, which will be evaluated while loading your config. By default `ctx.env (process.env.NODE_ENV)` and `ctx.cwd (process.cwd())` are available on the `ctx` `{Object}`
> ℹ️ Most third-party integrations add additional properties to the `ctx` (e.g `postcss-loader`). Check the specific module's README for more information about what is available on the respective `ctx`
<h2 align="center">Examples</h2>
**postcss.config.js**
```js
module.exports = (ctx) => ({
parser: ctx.parser ? 'sugarss' : false,
map: ctx.env === 'development' ? ctx.map : false,
plugins: {
'postcss-import': {},
'postcss-nested': {},
cssnano: ctx.env === 'production' ? {} : false
}
})
```
<div align="center">
<img width="80" height="80" src="https://worldvectorlogo.com/logos/nodejs-icon.svg">
</div>
```json
"scripts": {
"build": "NODE_ENV=production node postcss",
"start": "NODE_ENV=development node postcss"
}
```
### `Async`
```js
const { readFileSync } = require('fs')
const postcss = require('postcss')
const postcssrc = require('postcss-load-config')
const css = readFileSync('index.sss', 'utf8')
const ctx = { parser: true, map: 'inline' }
postcssrc(ctx).then(({ plugins, options }) => {
postcss(plugins)
.process(css, options)
.then((result) => console.log(result.css))
})
```
### `Sync`
```js
const { readFileSync } = require('fs')
const postcss = require('postcss')
const postcssrc = require('postcss-load-config')
const css = readFileSync('index.sss', 'utf8')
const ctx = { parser: true, map: 'inline' }
const { plugins, options } = postcssrc.sync(ctx)
```
<div align="center">
<img width="80" height="80" halign="10" src="https://worldvectorlogo.com/logos/gulp.svg">
</div>
```json
"scripts": {
"build": "NODE_ENV=production gulp",
"start": "NODE_ENV=development gulp"
}
```
```js
const { task, src, dest, series, watch } = require('gulp')
const postcss = require('gulp-postcssrc')
const css = () => {
return src('src/*.css')
.pipe(postcss())
.pipe(dest('dest'))
}
task('watch', () => {
watch(['src/*.css', 'postcss.config.js'], css)
})
task('default', series(css, 'watch'))
```
<div align="center">
<img width="80" height="80" src="https://cdn.rawgit.com/webpack/media/e7485eb2/logo/icon.svg">
</div>
```json
"scripts": {
"build": "NODE_ENV=production webpack",
"start": "NODE_ENV=development webpack-dev-server"
}
```
**webpack.config.js**
```js
module.exports = (env) => ({
module: {
rules: [
{
test: /\.css$/,
use: [
'style-loader',
'css-loader',
'postcss-loader'
]
}
]
}
})
```
<h2 align="center">Maintainers</h2>
<table>
<tbody>
<tr>
<td align="center">
<img width="150" height="150"
src="https://github.com/michael-ciniawsky.png?v=3&s=150">
<br />
<a href="https://github.com/michael-ciniawsky">Michael Ciniawsky</a>
</td>
<td align="center">
<img width="150" height="150"
src="https://github.com/ertrzyiks.png?v=3&s=150">
<br />
<a href="https://github.com/ertrzyiks">Mateusz Derks</a>
</td>
</tr>
<tbody>
</table>
<h2 align="center">Contributors</h2>
<table>
<tbody>
<tr>
<td align="center">
<img width="150" height="150"
src="https://github.com/sparty02.png?v=3&s=150">
<br />
<a href="https://github.com/sparty02">Ryan Dunckel</a>
</td>
<td align="center">
<img width="150" height="150"
src="https://github.com/pcgilday.png?v=3&s=150">
<br />
<a href="https://github.com/pcgilday">Patrick Gilday</a>
</td>
<td align="center">
<img width="150" height="150"
src="https://github.com/daltones.png?v=3&s=150">
<br />
<a href="https://github.com/daltones">Dalton Santos</a>
</td>
<td align="center">
<img width="150" height="150"
src="https://github.com/fwouts.png?v=3&s=150">
<br />
<a href="https://github.com/fwouts">François Wouts</a>
</td>
</tr>
<tbody>
</table>
[npm]: https://img.shields.io/npm/v/postcss-load-config.svg
[npm-url]: https://npmjs.com/package/postcss-load-config
[node]: https://img.shields.io/node/v/postcss-load-plugins.svg
[node-url]: https://nodejs.org/
[deps]: https://david-dm.org/michael-ciniawsky/postcss-load-config.svg
[deps-url]: https://david-dm.org/michael-ciniawsky/postcss-load-config
[test]: http://img.shields.io/travis/michael-ciniawsky/postcss-load-config.svg
[test-url]: https://travis-ci.org/michael-ciniawsky/postcss-load-config
[cover]: https://coveralls.io/repos/github/michael-ciniawsky/postcss-load-config/badge.svg
[cover-url]: https://coveralls.io/github/michael-ciniawsky/postcss-load-config
[style]: https://img.shields.io/badge/code%20style-standard-yellow.svg
[style-url]: http://standardjs.com/
[chat]: https://img.shields.io/gitter/room/postcss/postcss.svg
[chat-url]: https://gitter.im/postcss/postcss
## Security Contact
To report a security vulnerability, please use the [Tidelift security contact].
Tidelift will coordinate the fix and disclosure.
[Tidelift security contact]: https://tidelift.com/security
|
PypiClean
|
/django_start_tool-1.0.1-py3-none-any.whl/django_start_tool/handler.py
|
from pathlib import Path
import shutil
from jinja2 import Template
from django_start_tool.utils import download
from django_start_tool.utils import entity_matches_one_of_patterns
from django_start_tool.utils import generate_secret_key
from django_start_tool.utils import is_url
class TemplateHandler:
def __init__(
self,
name: str,
to_render: list[str],
to_exclude: list[str],
) -> None:
self.name = name
self.to_render = to_render
self.to_exclude = to_exclude
self.template_context = {
"project_name": name,
"secret_key": generate_secret_key()
}
def run(self, source: str, destination: str) -> None:
if source_is_url := is_url(source):
source = download(source)
source = Path(source)
destination = Path(destination)
self.handle_tree(source, destination)
if source_is_url:
shutil.rmtree(source)
def handle_tree(self, source: Path, destination: Path) -> None:
for entity in source.iterdir():
if entity.is_dir():
self.handle_directory(entity, destination / entity.name)
elif entity.is_file():
self.handle_file(entity, destination / entity.name)
def handle_directory(self, source: Path, destination: Path) -> None:
if entity_matches_one_of_patterns(source, self.to_exclude):
return None
destination = destination.with_name(
source.name.replace("project_name", self.name)
)
self.handle_tree(source, destination)
def handle_file(self, source: Path, destination: Path) -> None:
if entity_matches_one_of_patterns(source, self.to_render):
file_content = self.render_file(source)
else:
file_content = source.read_text()
destination = destination.with_name(
source.name.replace("-tpl", "")
)
destination.parent.mkdir(parents=True, exist_ok=True)
destination.touch(exist_ok=True)
destination.write_text(file_content)
def render_file(self, file: Path) -> str:
template = Template(file.read_text())
size = file.stat().st_size
eof = ['', '\n'][size > 0]
return template.render(**self.template_context) + eof
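# --- Usage sketch (illustrative, not part of the original module) ---
# Renders a local project template into ./myproject. The source path and the
# render/exclude patterns below are placeholders, not documented defaults.
if __name__ == "__main__":
    handler = TemplateHandler(
        name="myproject",
        to_render=["*-tpl"],
        to_exclude=[".git*"],
    )
    handler.run("path/to/project_template", "myproject")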
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayUserAccountOrderConsultRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayUserAccountOrderConsultModel import AlipayUserAccountOrderConsultModel
class AlipayUserAccountOrderConsultRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayUserAccountOrderConsultModel):
self._biz_content = value
else:
self._biz_content = AlipayUserAccountOrderConsultModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.user.account.order.consult'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
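# --- Usage sketch (illustrative, not part of the original module) ---
# Builds the request parameter dict from a plain dictionary; the business
# fields shown here are placeholders rather than the real API schema.
if __name__ == '__main__':
    request = AlipayUserAccountOrderConsultRequest()
    request.biz_content = {"example_field": "example_value"}  # converted via from_alipay_dict
    request.notify_url = "https://example.com/notify"
    print(request.get_params())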
|
PypiClean
|
/MarkDo-0.3.0.tar.gz/MarkDo-0.3.0/markdo/static/bower/codemirror/mode/apl/apl.js
|
CodeMirror.defineMode("apl", function() {
var builtInOps = {
".": "innerProduct",
"\\": "scan",
"/": "reduce",
"⌿": "reduce1Axis",
"⍀": "scan1Axis",
"¨": "each",
"⍣": "power"
};
var builtInFuncs = {
"+": ["conjugate", "add"],
"−": ["negate", "subtract"],
"×": ["signOf", "multiply"],
"÷": ["reciprocal", "divide"],
"⌈": ["ceiling", "greaterOf"],
"⌊": ["floor", "lesserOf"],
"∣": ["absolute", "residue"],
"⍳": ["indexGenerate", "indexOf"],
"?": ["roll", "deal"],
"⋆": ["exponentiate", "toThePowerOf"],
"⍟": ["naturalLog", "logToTheBase"],
"○": ["piTimes", "circularFuncs"],
"!": ["factorial", "binomial"],
"⌹": ["matrixInverse", "matrixDivide"],
"<": [null, "lessThan"],
"≤": [null, "lessThanOrEqual"],
"=": [null, "equals"],
">": [null, "greaterThan"],
"≥": [null, "greaterThanOrEqual"],
"≠": [null, "notEqual"],
"≡": ["depth", "match"],
"≢": [null, "notMatch"],
"∈": ["enlist", "membership"],
"⍷": [null, "find"],
"∪": ["unique", "union"],
"∩": [null, "intersection"],
"∼": ["not", "without"],
"∨": [null, "or"],
"∧": [null, "and"],
"⍱": [null, "nor"],
"⍲": [null, "nand"],
"⍴": ["shapeOf", "reshape"],
",": ["ravel", "catenate"],
"⍪": [null, "firstAxisCatenate"],
"⌽": ["reverse", "rotate"],
"⊖": ["axis1Reverse", "axis1Rotate"],
"⍉": ["transpose", null],
"↑": ["first", "take"],
"↓": [null, "drop"],
"⊂": ["enclose", "partitionWithAxis"],
"⊃": ["diclose", "pick"],
"⌷": [null, "index"],
"⍋": ["gradeUp", null],
"⍒": ["gradeDown", null],
"⊤": ["encode", null],
"⊥": ["decode", null],
"⍕": ["format", "formatByExample"],
"⍎": ["execute", null],
"⊣": ["stop", "left"],
"⊢": ["pass", "right"]
};
var isOperator = /[\.\/⌿⍀¨⍣]/;
var isNiladic = /⍬/;
var isFunction = /[\+−×÷⌈⌊∣⍳\?⋆⍟○!⌹<≤=>≥≠≡≢∈⍷∪∩∼∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢]/;
var isArrow = /←/;
var isComment = /[⍝#].*$/;
var stringEater = function(type) {
var prev;
prev = false;
return function(c) {
prev = c;
if (c === type) {
return prev === "\\";
}
return true;
};
};
return {
startState: function() {
return {
prev: false,
func: false,
op: false,
string: false,
escape: false
};
},
token: function(stream, state) {
var ch, funcName, word;
if (stream.eatSpace()) {
return null;
}
ch = stream.next();
if (ch === '"' || ch === "'") {
stream.eatWhile(stringEater(ch));
stream.next();
state.prev = true;
return "string";
}
if (/[\[{\(]/.test(ch)) {
state.prev = false;
return null;
}
if (/[\]}\)]/.test(ch)) {
state.prev = true;
return null;
}
if (isNiladic.test(ch)) {
state.prev = false;
return "niladic";
}
if (/[¯\d]/.test(ch)) {
if (state.func) {
state.func = false;
state.prev = false;
} else {
state.prev = true;
}
stream.eatWhile(/[\w\.]/);
return "number";
}
if (isOperator.test(ch)) {
return "operator apl-" + builtInOps[ch];
}
if (isArrow.test(ch)) {
return "apl-arrow";
}
if (isFunction.test(ch)) {
funcName = "apl-";
if (builtInFuncs[ch] != null) {
if (state.prev) {
funcName += builtInFuncs[ch][1];
} else {
funcName += builtInFuncs[ch][0];
}
}
state.func = true;
state.prev = false;
return "function " + funcName;
}
if (isComment.test(ch)) {
stream.skipToEnd();
return "comment";
}
if (ch === "∘" && stream.peek() === ".") {
stream.next();
return "function jot-dot";
}
stream.eatWhile(/[\w\$_]/);
word = stream.current();
state.prev = true;
return "keyword";
}
};
});
CodeMirror.defineMIME("text/apl", "apl");
|
PypiClean
|
/x-to-nwb-0.2.2.tar.gz/x-to-nwb-0.2.2/src/x_to_nwb/ABF1Converter.py
|
import pyabf
import numpy as np
import os
import glob
import json
from datetime import datetime
from dateutil.tz import tzlocal
from pynwb import NWBHDF5IO, NWBFile
from .conversion_utils import convertDataset, V_CLAMP_MODE, I_CLAMP_MODE, getStimulusSeriesClass, getAcquiredSeriesClass
class ABF1Converter:
"""
Converts Neuron2BrainLab's ABF1 files from a single cell (collected without amplifier settings from the
multi-clamp commander) to a collective NeurodataWithoutBorders v2 file.
Modeled after ABFConverter created by the Allen Institute.
Parameters
----------
inputPath: path to ABF file or a folder of ABF files to be converted
outputFilePath: path to the output NWB file
acquisitionChannelName: Allows to output only a specific acquisition channel, defaults to all
stimulusChannelName: Allows to output only a specific stimulus channel,
defaults to all. The name can also be an AD channel name for cases where
the stimulus is recorded as well.
responseGain: user-input float indicating scalar gain for response channel
stimulusGain: user-input float indicating scalar gain for stimulus channel
compression: Toggle compression for HDF5 datasets
clampMode: 0 or 1 integer indicating clamp mode (0 is VC, 1 is CC). If not None, overwrites clamp mode provided in ABF file
"""
def __init__(
self,
inputPath,
outputFilePath,
acquisitionChannelName=None,
stimulusChannelName=None,
responseGain=1,
stimulusGain=1,
responseOffset=0,
clampMode=None,
compression=True,
):
self.inputPath = inputPath
self.debug = False
if os.path.isfile(self.inputPath):
print(inputPath)
abf = pyabf.ABF(self.inputPath)
if abf.abfVersion["major"] != 1:
raise ValueError(f"The ABF version for the file {abf} is not supported.")
self.fileNames = [os.path.basename(self.inputPath)]
self.abfFiles = [abf]
elif os.path.isdir(self.inputPath):
abfFiles = []
for dirpath, dirnames, filenames in os.walk(self.inputPath):
# Find all .abf files in the directory
if len(dirnames) == 0 and len(glob.glob(dirpath + "/*.abf")) != 0:
abfFiles += glob.glob(dirpath + "/*.abf")
if len(abfFiles) == 0:
raise ValueError(f"{inputPath} contains no ABF Files.")
# Arrange the ABF files in ascending order
abfFiles.sort(key=lambda x: os.path.basename(x))
# Collect file names for description
self.fileNames = []
for file in abfFiles:
self.fileNames += [os.path.basename(file)]
self.abfFiles = []
for abfFile in abfFiles:
# Load each ABF file using pyabf
abf = pyabf.ABF(abfFile)
# Check for ABF version
if abf.abfVersion["major"] != 1:
raise ValueError(f"The ABF version for the file {abf} is not supported.")
self.abfFiles += [abf]
if clampMode is not None:
self.clampMode = clampMode # sometimes the abf-based clamp mode is wrong
else:
self.clampMode = self.abfFiles[0]._headerV1.nExperimentType
self.compression = compression
self.outputPath = outputFilePath
# Take metadata input, and return hard coded values for None
self.responseGain = responseGain
self.stimulusGain = stimulusGain
self.responseOffset = responseOffset
self.acquisitionChannelName = acquisitionChannelName
self.stimulusChannelName = stimulusChannelName
@staticmethod
def outputMetadata(inFile):
if not os.path.isfile(inFile):
raise ValueError(f"The file {inFile} does not exist.")
root, ext = os.path.splitext(inFile)
abf = pyabf.ABF(inFile)
pyabf.abfHeaderDisplay.abfInfoPage(abf).generateHTML(saveAs=root + ".html")
def _getComments(self, abf):
"""
Accesses the tag comments created in Clampfit
"""
return abf.tagComments
def _createNWBFile(self):
"""
Creates the NWB file for the cell, as defined by PyNWB
"""
self.start_time = datetime.combine(
self.abfFiles[0].abfDateTime.date(), self.abfFiles[0].abfDateTime.time(), tzinfo=tzlocal()
)
self.inputCellName = os.path.basename(self.inputPath)
creatorInfo = self.abfFiles[0]._headerV1.sCreatorInfo
creatorVersion = self.abfFiles[0]._headerV1.creatorVersionString
experiment_description = f"{creatorInfo} v{creatorVersion}"
self.NWBFile = NWBFile(
session_description="",
session_start_time=self.start_time,
experiment_description=experiment_description,
identifier=self.inputCellName,
experimenter=None,
notes="",
)
return self.NWBFile
def _createDevice(self):
creatorInfo = self.abfFiles[0]._headerV1.sCreatorInfo
creatorVersion = self.abfFiles[0]._headerV1.creatorVersionString
self.device = self.NWBFile.create_device(name=f"{creatorInfo} {creatorVersion}")
def _createElectrode(self):
self.electrode = self.NWBFile.create_icephys_electrode(
name="elec0", device=self.device, description="PLACEHOLDER"
)
def _unitConversion(self, unit):
# Returns a 2-list of base unit and conversion factor
if unit == "V":
return 1.0, "V"
elif unit == "mV":
return 1e-3, "V"
elif unit == "A":
return 1.0, "A"
elif unit == "pA":
return 1e-12, "A"
elif unit == "nA":
return 1e-9, "A"
else:
# raise ValueError(f"{unit} is not a valid unit.")
return 1.0, "V" # hard coded for units stored as '?'
def _addStimulus(self):
"""
Adds a stimulus class as defined by PyNWB to the NWB File.
Written for experiments conducted from a single channel.
For multiple channels, refer to https://github.com/AllenInstitute/ipfx/blob/master/ipfx/x_to_nwb/ABFConverter.py
"""
for idx, abfFile in enumerate(self.abfFiles):
isStimulus = True
if self.stimulusChannelName is None:
channelList = abfFile.adcNames
channelIndices = range(len(channelList))
else:
if self.stimulusChannelName in abfFile.dacNames:
channelList = abfFile.dacNames
channelIndices = [channelList.index(self.stimulusChannelName)]
elif self.stimulusChannelName in abfFile.adcNames:
isStimulus = False
channelList = abfFile.adcNames
channelIndices = [channelList.index(self.stimulusChannelName)]
else:
raise ValueError(f"Channel {self.stimulusChannelName} could not be found.")
for i in range(abfFile.sweepCount):
for channelIndex in channelIndices:
if self.debug:
print(
f"stimulus: abfFile={abfFile.abfFilePath}, sweep={i}, channelIndex={channelIndex}, channelName={channelList[channelIndex]}"
)
# Collect data from pyABF
abfFile.setSweep(i, channel=channelIndex)
seriesName = f"Index_{idx}_{i}_{channelIndex}"
if isStimulus:
data = abfFile.sweepC
scaledUnit = abfFile.sweepUnitsC
else:
data = abfFile.sweepY
scaledUnit = abfFile.sweepUnitsY
stimulusGain = float(self.stimulusGain)
data = data * stimulusGain
conversion, _ = self._unitConversion(scaledUnit)
electrode = self.electrode
resolution = np.nan
starting_time = 0.0
rate = float(abfFile.dataRate)
# Create a JSON file for the description field
description = json.dumps(
{
"file_name": os.path.basename(self.fileNames[idx]),
"file_version": abfFile.abfVersionString,
"sweep_number": i,
"protocol": abfFile.protocol,
"protocol_path": abfFile.protocolPath,
"comments": self._getComments(abfFile),
},
sort_keys=True,
indent=4,
)
stimulusClass = getStimulusSeriesClass(self.clampMode)
data = convertDataset(data, self.compression)
# Create a stimulus class
stimulus = stimulusClass(
name=seriesName,
data=data,
sweep_number=np.uint32(i),
electrode=electrode,
gain=stimulusGain,
resolution=resolution,
conversion=conversion,
starting_time=starting_time,
rate=rate,
description=description,
)
self.NWBFile.add_stimulus(stimulus)
def _addAcquisition(self):
"""
Adds an acquisition class as defined by PyNWB to the NWB File.
Written for experiments conducted from a single channel.
For multiple channels, refer to https://github.com/AllenInstitute/ipfx/blob/master/ipfx/x_to_nwb/ABFConverter.py
"""
for idx, abfFile in enumerate(self.abfFiles):
if self.acquisitionChannelName is None:
channelList = abfFile.adcNames
channelIndices = range(len(channelList))
else:
if self.acquisitionChannelName in abfFile.adcNames:
channelList = abfFile.adcNames
channelIndices = [channelList.index(self.acquisitionChannelName)]
else:
raise ValueError(f"Channel {self.acquisitionChannelName} could not be found.")
for i in range(abfFile.sweepCount):
for channelIndex in channelIndices:
if self.debug:
print(
f"acquisition: abfFile={abfFile.abfFilePath}, sweep={i}, channelIndex={channelIndex}, channelName={channelList[channelIndex]}"
)
# Collect data from pyABF
abfFile.setSweep(i, channel=channelIndex)
seriesName = f"Index_{idx}_{i}_{channelIndex}"
responseGain = float(self.responseGain)
responseOffset = self.responseOffset
data = abfFile.sweepY * responseGain + responseOffset
conversion, _ = self._unitConversion(abfFile.sweepUnitsY)
electrode = self.electrode
resolution = np.nan
starting_time = 0.0
rate = float(abfFile.dataRate)
# Create a JSON file for the description field
description = json.dumps(
{
"file_name": os.path.basename(self.fileNames[idx]),
"file_version": abfFile.abfVersionString,
"sweep_number": i,
"protocol": abfFile.protocol,
"protocol_path": abfFile.protocolPath,
"comments": self._getComments(abfFile),
},
sort_keys=True,
indent=4,
)
# Create an acquisition class
# Note: voltage input produces current output; current input produces voltage output
data = convertDataset(data, self.compression)
series = getAcquiredSeriesClass(self.clampMode)
if self.clampMode == I_CLAMP_MODE:
acquisition = series(
name=seriesName,
data=data,
sweep_number=np.uint32(i),
electrode=electrode,
gain=responseGain,
resolution=resolution,
conversion=conversion,
starting_time=starting_time,
rate=rate,
description=description,
bias_current=np.nan,
bridge_balance=np.nan,
capacitance_compensation=np.nan,
)
elif self.clampMode == V_CLAMP_MODE:
acquisition = series(
name=seriesName,
data=data,
sweep_number=np.uint32(i),
electrode=electrode,
gain=responseGain,
resolution=resolution,
conversion=conversion,
starting_time=starting_time,
rate=rate,
description=description,
capacitance_fast=np.nan,
capacitance_slow=np.nan,
resistance_comp_bandwidth=np.nan,
resistance_comp_correction=np.nan,
resistance_comp_prediction=np.nan,
whole_cell_capacitance_comp=np.nan,
whole_cell_series_resistance_comp=np.nan,
)
else:
raise ValueError(f"Unsupported clamp mode {self.clampMode}")
self.NWBFile.add_acquisition(acquisition)
def convert(self):
"""
Iterates through the functions in the specified order.
:return: True (for success)
"""
self._createNWBFile()
self._createDevice()
self._createElectrode()
self._addStimulus()
self._addAcquisition()
with NWBHDF5IO(self.outputPath, "w") as io:
io.write(self.NWBFile, cache_spec=True)
print(f"Successfully converted to {self.outputPath}.")
|
PypiClean
|
/robots_controller-1.0.12.tar.gz/robots_controller-1.0.12/docs/source/rst_guide.rst
|
RST Quick guide
===============
Online reStructuredText editor - http://rst.ninjs.org/
Main heading
============
Secondary heading
-----------------
Typography
----------
**Bold**
`Italic`
``Accent``
Blocks
------
Double colon to consider the following paragraphs preformatted::
This text is preformatted. Can be used for code samples.
.. code-block:: python
# code-block accepts language name to highlight code
# E.g.: python, html
import this
.. note::
This text will be rendered as a note block (usually green).
.. warning::
This text will be rendered as a warning block (usually red).
Lists
-----
1. Ordered item 1.
Indent a paragraph to make it belong to the above list item.
2. Ordered item 2.
+ Unordered item 1.
+ Unordered item 2.
Links
-----
:ref:`Documentation inner link label <some-marker>`
.. _some-marker:
`Outer link label <http://github.com/idlesign/makeapp/>`_
Inline URLs are converted to links automatically: http://github.com/idlesign/makeapp/
|
PypiClean
|
/pywapor_test-3.1.7-py3-none-any.whl/pywapor/general/lazifier.py
|
import types
import numpy as np
from pywapor.general.logger import log
def decorate_mod(module, decorator):
"""Apply a decorator to all the functions inside a module.
Parameters
----------
module : module
Module to decorate.
decorator : function
Function to decorate with.
"""
for name in dir(module):
if name not in [
"ra_soil",
"initial_friction_velocity_inst",
"monin_obukhov_length",
"stability_parameter",
"stability_factor",
"friction_velocity",
"ra_canopy",
"stability_parameter_obs",
"stability_correction_heat_obs"]:
obj = getattr(module, name)
if isinstance(obj, types.FunctionType) and not hasattr(obj, 'decorated'):
setattr(module, name, decorator(obj))
def decorate_submods(module, decorator):
"""Apply a decorator to all the functions inside all the submodules of a
module.
Parameters
----------
module : module
Module of which the functions inside its submodules to decorate.
decorator : function
Function to decorate with.
"""
for submod in dir(module):
submod = getattr(module, submod)
if isinstance(submod, types.ModuleType):
decorate_mod(submod, decorator)
def etlook_decorator(func):
"""Checks if the DataArrays contain data or are None.
Parameters
----------
func : function
Function for which to check the inputs.
"""
group = str(func.__module__)
def wrapper_func(*args, **kwargs):
check1 = np.all([arg.dtype != object for arg in args])
check2 = np.all([arg.dtype != object for _, arg in kwargs.items()])
if check1 and check2:
log.info(f"--> Calculating `{func.__name__}`.")
x = func(*args, **kwargs)
x.attrs["calculated_with"] = [arg.name for arg in args]
x.attrs["et_look_module"] = group
return x
else:
log.warning(f"--> Insufficient data found for `{func.__name__}`.")
wrapper_func.__module__ = func.__module__
wrapper_func.__name__ = func.__name__
setattr(wrapper_func, "decorated", True)
return wrapper_func
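# Illustrative usage sketch (the package, module, and variable names below are
# hypothetical, not taken from this file): wrap every function in every
# submodule of a package so that each call logs its progress and is skipped
# when its inputs carry no data.
#
#   import some_package.algorithms as algos              # hypothetical package
#   decorate_submods(algos, etlook_decorator)
#   result = algos.radiation.net_radiation(da_a, da_b)   # hypothetical call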
|
PypiClean
|
/rayures-0.4a23.tar.gz/rayures-0.4a23/versioneer.py
|
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_root():
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
err = ("Versioneer was unable to run the project root directory. "
"Versioneer requires setup.py to be executed from "
"its immediate directory (like 'python setup.py COMMAND'), "
"or in a way that lets it use sys.argv[0] to find the root "
"(like 'python path/to/setup.py COMMAND').")
raise VersioneerBadRootError(err)
try:
# Certain runtime workflows (setup.py install/develop in a setuptools
# tree) execute all dependencies in a single python process, so
# "versioneer" may be imported multiple times, and python's shared
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
me = os.path.realpath(os.path.abspath(__file__))
me_dir = os.path.normcase(os.path.splitext(me)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir:
print("Warning: build in %s is using versioneer.py from %s"
% (os.path.dirname(me), versioneer_py))
except NameError:
pass
return root
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
# This might raise EnvironmentError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
parser = configparser.SafeConfigParser()
with open(setup_cfg, "r") as f:
parser.readfp(f)
VCS = parser.get("versioneer", "VCS") # mandatory
def get(parser, name):
if parser.has_option("versioneer", name):
return parser.get("versioneer", name)
return None
cfg = VersioneerConfig()
cfg.VCS = VCS
cfg.style = get(parser, "style") or ""
cfg.versionfile_source = get(parser, "versionfile_source")
cfg.versionfile_build = get(parser, "versionfile_build")
cfg.tag_prefix = get(parser, "tag_prefix")
if cfg.tag_prefix in ("''", '""'):
cfg.tag_prefix = ""
cfg.parentdir_prefix = get(parser, "parentdir_prefix")
cfg.verbose = get(parser, "verbose")
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
"""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
for export-subst keyword substitution.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
files = [manifest_in, versionfile_source]
if ipy:
files.append(ipy)
try:
me = __file__
if me.endswith(".pyc") or me.endswith(".pyo"):
me = os.path.splitext(me)[0] + ".py"
versioneer_file = os.path.relpath(me)
except NameError:
versioneer_file = "versioneer.py"
files.append(versioneer_file)
present = False
try:
f = open(".gitattributes", "r")
for line in f.readlines():
if line.strip().startswith(versionfile_source):
if "export-subst" in line.strip().split()[1:]:
present = True
f.close()
except EnvironmentError:
pass
if not present:
f = open(".gitattributes", "a+")
f.write("%s export-subst\n" % versionfile_source)
f.close()
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
"""Try to determine the version from _version.py if present."""
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
if not mo:
mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
if not mo:
raise NotThisMethod("no version_json in _version.py")
return json.loads(mo.group(1))
def write_to_version_file(filename, versions):
"""Write the given version number to the given _version.py file."""
os.unlink(filename)
contents = json.dumps(versions, sort_keys=True,
indent=1, separators=(",", ": "))
with open(filename, "w") as f:
f.write(SHORT_VERSION_PY % contents)
print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
class VersioneerBadRootError(Exception):
"""The project root directory is unknown or missing key files."""
def get_versions(verbose=False):
"""Get the project version from whatever source is available.
Returns dict with two keys: 'version' and 'full'.
"""
if "versioneer" in sys.modules:
# see the discussion in cmdclass.py:get_cmdclass()
del sys.modules["versioneer"]
root = get_root()
cfg = get_config_from_root(root)
assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
handlers = HANDLERS.get(cfg.VCS)
assert handlers, "unrecognized VCS '%s'" % cfg.VCS
verbose = verbose or cfg.verbose
assert cfg.versionfile_source is not None, \
"please set versioneer.versionfile_source"
assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
versionfile_abs = os.path.join(root, cfg.versionfile_source)
# extract version from first of: _version.py, VCS command (e.g. 'git
# describe'), parentdir. This is meant to work for developers using a
# source checkout, for users of a tarball created by 'setup.py sdist',
# and for users of a tarball/zipball created by 'git archive' or github's
# download-from-tag feature or the equivalent in other VCSes.
get_keywords_f = handlers.get("get_keywords")
from_keywords_f = handlers.get("keywords")
if get_keywords_f and from_keywords_f:
try:
keywords = get_keywords_f(versionfile_abs)
ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
if verbose:
print("got version from expanded keyword %s" % ver)
return ver
except NotThisMethod:
pass
try:
ver = versions_from_file(versionfile_abs)
if verbose:
print("got version from file %s %s" % (versionfile_abs, ver))
return ver
except NotThisMethod:
pass
from_vcs_f = handlers.get("pieces_from_vcs")
if from_vcs_f:
try:
pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
ver = render(pieces, cfg.style)
if verbose:
print("got version from VCS %s" % ver)
return ver
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
if verbose:
print("got version from parentdir %s" % ver)
return ver
except NotThisMethod:
pass
if verbose:
print("unable to compute version")
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None, "error": "unable to compute version",
"date": None}
def get_version():
"""Get the short version string for this project."""
return get_versions()["version"]
def get_cmdclass():
"""Get the custom setuptools/distutils subclasses used by Versioneer."""
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
# 'easy_install .'), in which subdependencies of the main project are
# built (using setup.py bdist_egg) in the same python process. Assume
# a main project A and a dependency B, which use different versions
# of Versioneer. A's setup.py imports A's Versioneer, leaving it in
# sys.modules by the time B's setup.py is executed, causing B to run
# with the wrong versioneer. Setuptools wraps the sub-dep builds in a
# sandbox that restores sys.modules to its pre-build state, so the
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
# Also see https://github.com/warner/python-versioneer/issues/52
cmds = {}
# we add "version" to both distutils and setuptools
from distutils.core import Command
class cmd_version(Command):
description = "report generated version string"
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
vers = get_versions(verbose=True)
print("Version: %s" % vers["version"])
print(" full-revisionid: %s" % vers.get("full-revisionid"))
print(" dirty: %s" % vers.get("dirty"))
print(" date: %s" % vers.get("date"))
if vers["error"]:
print(" error: %s" % vers["error"])
cmds["version"] = cmd_version
# we override "build_py" in both distutils and setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
# distutils/install -> distutils/build ->..
# setuptools/bdist_wheel -> distutils/install ->..
# setuptools/bdist_egg -> distutils/install_lib -> build_py
# setuptools/install -> bdist_egg ->..
# setuptools/develop -> ?
# pip install:
# copies source tree to a tempdir before running egg_info/etc
# if .git isn't copied too, 'git describe' will fail
# then does setup.py bdist_wheel, or sometimes setup.py install
# setup.py egg_info -> ?
# we override different "build_py" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.build_py import build_py as _build_py
else:
from distutils.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib,
cfg.versionfile_build)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
# nczeczulin reports that py2exe won't like the pep440-style string
# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
# setup(console=[{
# "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
# "product_version": versioneer.get_version(),
# ...
class cmd_build_exe(_build_exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_build_exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
cmds["build_exe"] = cmd_build_exe
del cmds["build_py"]
if 'py2exe' in sys.modules: # py2exe enabled?
try:
from py2exe.distutils_buildexe import py2exe as _py2exe # py3
except ImportError:
from py2exe.build_exe import py2exe as _py2exe # py2
class cmd_py2exe(_py2exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_py2exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
cmds["py2exe"] = cmd_py2exe
# we override different "sdist" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.sdist import sdist as _sdist
else:
from distutils.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
def run(self):
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
# version
self.distribution.metadata.version = versions["version"]
return _sdist.run(self)
def make_release_tree(self, base_dir, files):
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
# now locate _version.py in the new base_dir directory
# (remembering that it may be a hardlink) and replace it with an
# updated value
target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile,
self._versioneer_generated_versions)
cmds["sdist"] = cmd_sdist
return cmds
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
"""Main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
except (EnvironmentError, configparser.NoSectionError,
configparser.NoOptionError) as e:
if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg",
file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
f.write(SAMPLE_CONFIG)
print(CONFIG_ERROR, file=sys.stderr)
return 1
print(" creating %s" % cfg.versionfile_source)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG % {"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
"__init__.py")
if os.path.exists(ipy):
try:
with open(ipy, "r") as f:
old = f.read()
except EnvironmentError:
old = ""
if INIT_PY_SNIPPET not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
f.write(INIT_PY_SNIPPET)
else:
print(" %s unmodified" % ipy)
else:
print(" %s doesn't exist, ok" % ipy)
ipy = None
# Make sure both the top-level "versioneer.py" and versionfile_source
# (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
# they'll be copied into source distributions. Pip won't be able to
# install the package without this.
manifest_in = os.path.join(root, "MANIFEST.in")
simple_includes = set()
try:
with open(manifest_in, "r") as f:
for line in f:
if line.startswith("include "):
for include in line.split()[1:]:
simple_includes.add(include)
except EnvironmentError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
# it might give some false negatives. Appending redundant 'include'
# lines is safe, though.
if "versioneer.py" not in simple_includes:
print(" appending 'versioneer.py' to MANIFEST.in")
with open(manifest_in, "a") as f:
f.write("include versioneer.py\n")
else:
print(" 'versioneer.py' already in MANIFEST.in")
if cfg.versionfile_source not in simple_includes:
print(" appending versionfile_source ('%s') to MANIFEST.in" %
cfg.versionfile_source)
with open(manifest_in, "a") as f:
f.write("include %s\n" % cfg.versionfile_source)
else:
print(" versionfile_source already in MANIFEST.in")
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-subst keyword
# substitution.
do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
return 0
def scan_setup_py():
"""Validate the contents of setup.py against Versioneer's expectations."""
found = set()
setters = False
errors = 0
with open("setup.py", "r") as f:
for line in f.readlines():
if "import versioneer" in line:
found.add("import")
if "versioneer.get_cmdclass()" in line:
found.add("cmdclass")
if "versioneer.get_version()" in line:
found.add("get_version")
if "versioneer.VCS" in line:
setters = True
if "versioneer.versionfile_source" in line:
setters = True
if len(found) != 3:
print("")
print("Your setup.py appears to be missing some important items")
print("(but I might be wrong). Please make sure it has something")
print("roughly like the following:")
print("")
print(" import versioneer")
print(" setup( version=versioneer.get_version(),")
print(" cmdclass=versioneer.get_cmdclass(), ...)")
print("")
errors += 1
if setters:
print("You should remove lines like 'versioneer.VCS = ' and")
print("'versioneer.versionfile_source = ' . This configuration")
print("now lives in setup.cfg, and should be removed from setup.py")
print("")
errors += 1
return errors
if __name__ == "__main__":
cmd = sys.argv[1]
if cmd == "setup":
errors = do_setup()
errors += scan_setup_py()
if errors:
sys.exit(1)
|
PypiClean
|
/pnu-pipinfo-0.9.4.tar.gz/pnu-pipinfo-0.9.4/README.md
|
# Installation
Once you have installed [Python](https://www.python.org/downloads/) and its package manager [pip](https://pip.pypa.io/en/stable/installation/),
depending on whether you want only this tool, the full set of PNU tools, or PNU plus a selection of additional third-party tools, use one of these commands:
```
pip install pnu-pipinfo
pip install PNU
pip install pytnix
```
# PIPINFO(1)
## NAME
pipinfo - Alternative tool for listing Python packages
## SYNOPSIS
**pipinfo**
\[-l|--check-latest\]
\[-v|--check-vulns\]
\[-c|--no-color\]
\[-p|--no-progress\]
\[-i|--info\]
\[-S|--system\]
\[-U|--user\]
\[-I|--issues\]
\[-O|--outdated\]
\[-L|--latest|--uptodate\]
\[-V|--vulnerable\]
\[-H|--healthy|--sane\]
\[-N|--not-required\]
\[-R|--required\]
\[--debug\]
\[--help|-?\]
\[--version\]
\[--\]
\[directory ...\]
## DESCRIPTION
The **pipinfo** utility provides an alternative to the "pip list", "pip list --outdated", "pip show" and "pip-audit" commands.
It shows all the available packages in the Python PATH (not just the latest version) or in the given directories (even for different Python versions), differentiates user and system-wide packages (user packages are written in bright style), shows duplicate packages (name in yellow foreground, including user versions shadowing system ones), shows each package summary (avoiding the use of "pip show" to discover what a package is about), and prints the count of installed packages.
With the *-l|--check-latest* option, it will also use a Python Package Index web service to check for the latest versions available, using a simple color (version in yellow foreground) or visual scheme to show outdated packages and count them.
With the *-v|--check-vulns* option, it will also use another Python Package Index web service to check for known vulnerabilities in your packages' versions, using a simple color (version in red background) or visual scheme to show vulnerable packages and count them.
The color or visual scheme should be enough to tell you which packages to upgrade; however, you can print additional details about the new versions available and the vulnerabilities with the *-i|--info* option.
You can disable the color output with the *-c|--no-color* option, and the progress meter with the *-p|--no-progress* option.
Finally, you can restrict the list to user or system packages, outdated or up-to-date packages, vulnerable or sane packages, and required or not-required packages with the upper-case options.
The most useful one is probably the *-I|--issues* option, which selects only the outdated or vulnerable packages.
### OPTIONS
Options | Use
------- | ---
-l\|--check-latest|Check latest versions
-v\|--check-vulns|Check vulnerabilities
-c\|--no-color|Toggle off color output
-p\|--no-progress|Toggle off progress meter
-i\|--info|Print detailed info on versions & vulnerabilities
-S\|--system|Select only system packages
-U\|--user|Select only user packages
-L\|--latest\|--uptodate|Select only latest packages (implies -l)
-O\|--outdated|Select only outdated packages (implies -l)
-H\|--healthy\|--sane|Select only healthy packages (implies -v)
-V\|--vulnerable|Select only vulnerable packages (implies -v)
-R\|--required|Select only required packages
-N\|--not-required|Select only not required packages
-I\|--issues|Select all packages with issues (-O & -V)
--debug|Enable debug mode
--help\|-?|Print usage and a short help message and exit
--version|Print version and exit
--|Options processing terminator
## ENVIRONMENT
The *PIPINFO_DEBUG* environment variable can be set to any value to enable debug mode.
It is mostly used to display and debug the package requirements read from the Python package metadata files.
The *LOCALAPPDATA* and *TMP* environment variables under Windows, and *HOME*, *TMPDIR* and *TMP* environment variables
under other operating systems can influence the caching directory used.
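For example, on a Unix-like shell, debug mode can be enabled for a single run like this (illustrative):
```
PIPINFO_DEBUG=1 pipinfo
```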
## FILES
The **pipinfo** utility will attempt to maintain a caching directory for the web services it uses, where individual files are re-used for up to 24 hours.
This directory will be located in one of the following places:
* Windows:
* %LOCALAPPDATA%\\cache\\pipinfo
* %TMP%\\cache\\pipinfo
* Unix:
* ${HOME}/.cache/pipinfo
* ${TMPDIR}/.cache/pipinfo
* ${TMP}/.cache/pipinfo
## EXIT STATUS
The **pipinfo** utility exits 0 on success, and >0 if an error occurs.
## EXAMPLES
Use the following command to print a package listing after checking for the existence of new versions or vulnerabilities:
```
pipinfo -lv
```
Use the following command to restrict the list to outdated and vulnerable packages with details but no progress meter:
```
pipinfo -Iip
```
## SEE ALSO
[pip](https://pypi.org/project/pip/),
[pip-audit](https://pypi.org/project/pip-audit/)
## STANDARDS
The **pipinfo** utility is not a standard UNIX command.
It tries to follow the [PEP 8](https://www.python.org/dev/peps/pep-0008/) style guide for [Python](https://www.python.org/) code.
## PORTABILITY
Tested OK under Windows.
## HISTORY
This implementation was made for the [PNU project](https://github.com/HubTou/PNU),
both for my personal convenience and also to investigate some pip issues with the *pip list --outdated* option.
## LICENSE
It is available under the [3-clause BSD license](https://opensource.org/licenses/BSD-3-Clause).
## AUTHORS
[Hubert Tournier](https://github.com/HubTou)
|
PypiClean
|
/blockstore-client-0.0.12.10.tar.gz/blockstore-client-0.0.12.10/blockstore_client/blockstore_cli.py
|
import argparse
import sys
import json
import traceback
import os
import re
import pybitcoin
import subprocess
from socket import error as socket_error
from time import sleep
from getpass import getpass
import requests
requests.packages.urllib3.disable_warnings()
import logging
logging.disable(logging.CRITICAL)
# Hack around absolute paths
current_dir = os.path.abspath(os.path.dirname(__file__))
parent_dir = os.path.abspath(current_dir + "/../")
sys.path.insert(0, parent_dir)
from blockstore_client import config, client, schemas, parsing, user
from blockstore_client import storage, drivers
from blockstore_client.utils import pretty_dump, print_result
from blockstore_client.config import WALLET_PATH, WALLET_PASSWORD_LENGTH
from blockstore_client.parser import add_subparsers, add_advanced_subparsers
from blockstore_client.parser import AliasedSubParsersAction
from registrar.wallet import HDWallet
from registrar.crypto.utils import aes_encrypt, aes_decrypt
from registrar.blockchain import get_balance, dontuseAddress
from registrar.network import get_bs_client
from registrar.rpc_daemon import background_process
from registrar.utils import satoshis_to_btc
from registrar.states import nameRegistered, ownerName, profileonBlockchain
from registrar.blockchain import recipientNotReady, get_tx_confirmations
from pybitcoin import is_b58check_address
from binascii import hexlify
import xmlrpclib
from registrar.config import REGISTRAR_IP, REGISTRAR_PORT
from registrar.config import BLOCKSTORED_IP, BLOCKSTORED_PORT
RPC_DAEMON = 'http://' + REGISTRAR_IP + ':' + str(REGISTRAR_PORT)
log = config.log
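# initialize_wallet(): prompts for a new password, derives an HD wallet,
# encrypts the master private key with the password (AES), writes the first
# payment and owner addresses plus the encrypted key to WALLET_PATH, and asks
# the user to confirm that the master private key has been backed up.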
def initialize_wallet():
result = {}
print "Initializing new wallet ..."
password = "temp"
try:
while len(password) < WALLET_PASSWORD_LENGTH:
password = getpass("Enter new password: ")
if len(password) < WALLET_PASSWORD_LENGTH:
msg = "Password is too short. Please make it at"
msg += " least %s characters long" % WALLET_PASSWORD_LENGTH
print msg
else:
confirm_password = getpass("Confirm new password: ")
if password != confirm_password:
exit_with_error("Passwords don't match.")
temp_wallet = HDWallet()
hex_privkey = temp_wallet.get_master_privkey()
hex_password = hexlify(password)
wallet = HDWallet(hex_privkey)
child = wallet.get_child_keypairs(count=2)
data = {}
encrypted_key = aes_encrypt(hex_privkey, hex_password)
data['encrypted_master_private_key'] = encrypted_key
data['payment_addresses'] = [child[0]]
data['owner_addresses'] = [child[1]]
file = open(WALLET_PATH, 'w')
file.write(json.dumps(data))
file.close()
print "Wallet created. Make sure to backup the following:"
result['wallet_password'] = password
result['master_private_key'] = hex_privkey
print_result(result)
input_prompt = "Have you backed up the above private key? (y/n): "
user_input = raw_input(input_prompt)
user_input = user_input.lower()
if user_input != 'y':
exit_with_error("Please backup your private key first.")
except KeyboardInterrupt:
exit_with_error("\nExited.")
return result
def unlock_wallet(display_enabled=False):
if walletUnlocked():
if display_enabled:
payment_address, owner_address = get_addresses_from_file()
display_wallet_info(payment_address, owner_address)
else:
try:
password = getpass("Enter wallet password: ")
hex_password = hexlify(password)
file = open(WALLET_PATH, 'r')
data = file.read()
data = json.loads(data)
file.close()
hex_privkey = None
try:
hex_privkey = aes_decrypt(data['encrypted_master_private_key'],
hex_password)
except:
exit_with_error("Incorrect password.")
else:
print "Unlocked wallet."
wallet = HDWallet(hex_privkey)
child = wallet.get_child_keypairs(count=2,
include_privkey=True)
payment_keypair = child[0]
owner_keypair = child[1]
save_keys_to_memory(payment_keypair, owner_keypair)
if display_enabled:
display_wallet_info(payment_keypair[0], owner_keypair[0])
except KeyboardInterrupt:
print "\nExited."
def walletUnlocked():
local_proxy = get_local_proxy()
conf = config.get_config()
if local_proxy is not False:
wallet_data = local_proxy.get_wallet(conf['rpc_token'])
wallet_data = json.loads(wallet_data)
if 'error' in wallet_data:
return False
elif wallet_data['payment_address'] is None:
return False
else:
return True
else:
return False
def display_wallet_info(payment_address, owner_address):
print '-' * 60
print "Payment address:\t%s" % payment_address
print "Owner address:\t\t%s" % owner_address
print '-' * 60
print "Balance:"
print "%s: %s" % (payment_address, get_balance(payment_address))
print '-' * 60
print "Names Owned:"
names_owned = get_names_owned(owner_address)
print "%s: %s" % (owner_address, names_owned)
print '-' * 60
def get_names_owned(address):
# hack to ensure local, until we update client
from blockstore_client import client as bs_client
# start session using blockstore_client
bs_client.session(server_host=BLOCKSTORED_IP, server_port=BLOCKSTORED_PORT,
set_global=True)
try:
names_owned = bs_client.get_names_owned_by_address(address)
except socket_error:
names_owned = "Error connecting to server"
return names_owned
def get_local_proxy():
proxy = xmlrpclib.ServerProxy(RPC_DAEMON)
try:
data = proxy.ping()
except:
log.debug('RPC daemon is not online')
return False
return proxy
def start_background_daemons():
""" Start the rpc_daemon and monitor processes
if they're not already running
"""
proxy = xmlrpclib.ServerProxy(RPC_DAEMON)
try:
data = proxy.ping()
except:
background_process('start_daemon')
sleep(2)
output = findProcess('start_monitor')
if 'registrar.rpc_daemon' not in output:
background_process('start_monitor')
sleep(2)
def save_keys_to_memory(payment_keypair, owner_keypair):
proxy = get_local_proxy()
if proxy is False:
start_background_daemons()
try:
data = proxy.set_wallet(payment_keypair, owner_keypair)
except:
exit_with_error('Error talking to local proxy')
def get_addresses_from_file():
file = open(WALLET_PATH, 'r')
data = file.read()
data = json.loads(data)
file.close()
payment_address = data['payment_addresses'][0]
owner_address = data['owner_addresses'][0]
return payment_address, owner_address
def get_payment_addresses():
payment_addresses = []
# currently only using one
payment_address, owner_address = get_addresses_from_file()
payment_addresses.append({'address': payment_address,
'balance': get_balance(payment_address)})
return payment_addresses
def get_owner_addresses():
owner_addresses = []
# currently only using one
payment_address, owner_address = get_addresses_from_file()
owner_addresses.append({'address': owner_address,
'names_owned': get_names_owned(owner_address)})
return owner_addresses
def get_all_names_owned():
owner_addresses = get_owner_addresses()
names_owned = []
for entry in owner_addresses:
additional_names = get_names_owned(entry['address'])
for name in additional_names:
names_owned.append(name)
return names_owned
def get_total_balance():
payment_addresses = get_payment_addresses()
total_balance = 0.0
for entry in payment_addresses:
total_balance += float(entry['balance'])
return total_balance, payment_addresses
def approx_tx_fees(num_tx):
""" Just a rough approximation on tx fees
It slightly over estimates
Should be replaced by checking for fee estimation from bitcoind
"""
APPROX_FEE_PER_TX = 8000 # in satoshis
return num_tx * APPROX_FEE_PER_TX
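# Illustrative sketch (not part of the original module): with the constant above,
# two transactions are budgeted at 16,000 satoshis. Assuming the satoshis_to_btc
# helper used elsewhere in this file divides by 1e8, that is roughly 0.00016 BTC.
#
#   >>> approx_tx_fees(num_tx=2)
#   16000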
def hasEnoughBalance(payment_address, cost):
total_balance = get_balance(payment_address)
if total_balance > cost:
return True
else:
return False
def get_total_fees(data):
reply = {}
registration_fee_satoshi = data['satoshis']
tx_fee_satoshi = approx_tx_fees(num_tx=2)
registration_fee = satoshis_to_btc(registration_fee_satoshi)
tx_fee = satoshis_to_btc(tx_fee_satoshi)
reply['name_price'] = registration_fee
reply['transaction_fee'] = tx_fee
reply['total_estimated_cost'] = registration_fee + tx_fee
return reply
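# Worked example (illustrative only, assuming satoshis_to_btc divides by 1e8):
# for a server reply of {'satoshis': 25000}, get_total_fees returns
# name_price = 0.00025, transaction_fee = 0.00016 (two transactions at roughly
# 8,000 satoshis each via approx_tx_fees), and total_estimated_cost = 0.00041,
# all denominated in BTC.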
def exit_with_error(error_message, help_message=None):
result = {'error': error_message}
if help_message is not None:
result['help'] = help_message
print_result(result)
exit(0)
def check_valid_name(fqu):
try:
name, tld = fqu.rsplit('.')
except:
msg = 'The name specified is invalid.'
msg += ' Names must end with a period followed by a valid TLD.'
exit_with_error(msg)
if name == '':
msg = 'The name specified is invalid.'
msg += ' Names must be at least one character long, not including the TLD.'
exit_with_error(msg)
    regex = r'^[a-z0-9_]{1,60}$'
    if not re.search(regex, name):
msg = 'The name specified is invalid.'
        msg += ' Names may only contain lowercase letters, numbers,'
        msg += ' and underscores.'
exit_with_error(msg)
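# Examples of what check_valid_name accepts and rejects (illustrative):
#   'alice.id'    -> accepted (lowercase letters plus a TLD)
#   'alice_2.id'  -> accepted (digits and underscores are allowed)
#   'Alice.id'    -> rejected (uppercase characters fail the regex)
#   'alice'       -> rejected (no TLD)
#   '.id'         -> rejected (empty name part)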
def tests_for_update_and_transfer(fqu, transfer_address=None):
""" Any update or transfer operation
should pass these tests
"""
if not nameRegistered(fqu):
exit_with_error("%s is not registered yet." % fqu)
payment_address, owner_address = get_addresses_from_file()
if not ownerName(fqu, owner_address):
exit_with_error("%s is not in your possession." % fqu)
tx_fee_satoshi = approx_tx_fees(num_tx=1)
tx_fee = satoshis_to_btc(tx_fee_satoshi)
if not hasEnoughBalance(payment_address, tx_fee):
msg = "Address %s doesn't have enough balance." % payment_address
exit_with_error(msg)
if dontuseAddress(payment_address):
msg = "Address %s has pending transactions." % payment_address
msg += " Wait and try later."
exit_with_error(msg)
if transfer_address is not None:
try:
resp = is_b58check_address(str(transfer_address))
except:
msg = "Address %s is not a valid Bitcoin address." % transfer_address
exit_with_error(msg)
if recipientNotReady(transfer_address):
msg = "Address %s owns too many names already." % transfer_address
exit_with_error(msg)
def findProcess(processName):
ps = subprocess.Popen("ps -ef | grep "+processName, shell=True,
stdout=subprocess.PIPE)
output = ps.stdout.read()
ps.stdout.close()
ps.wait()
return output
def get_sorted_commands(display_commands=False):
""" when adding new commands to the parser, use this function to
check the correct sorted order
"""
command_list = ['status', 'ping', 'preorder', 'register', 'update',
'transfer', 'renew', 'name_import', 'namespace_preorder',
'namespace_ready', 'namespace_reveal', 'put_mutable',
'put_immutable', 'get_mutable', 'get_immutable',
'cost', 'get_namespace_cost', 'get_nameops_at',
'get_name_blockchain_record',
'get_namespace_blockchain_record',
'get_name_record', 'lookup',
'get_all_names', 'get_names_in_namespace', 'consensus',
'lookup_snv', 'get_names_owned_by_address',
'preorder_tx', 'preorder_subsidized',
'register_tx', 'register_subsidized',
'update_tx', 'update_subsidized',
'transfer_tx', 'transfer_subsidized',
'revoke_tx', 'revoke_subsidized',
'renew_tx', 'renew_subsidized']
if display_commands:
for cmd in sorted(command_list):
log.debug(cmd)
return command_list
def run_cli():
""" run cli
"""
conf = config.get_config()
if conf is None:
log.error("Failed to load config")
sys.exit(1)
advanced_mode = conf['advanced_mode']
parser = argparse.ArgumentParser(
description='Blockstack cli version {}'.format(config.VERSION))
parser.register('action', 'parsers', AliasedSubParsersAction)
subparsers = parser.add_subparsers(
dest='action')
add_subparsers(subparsers)
if advanced_mode == "on":
add_advanced_subparsers(subparsers)
# Print default help message, if no argument is given
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args, unknown_args = parser.parse_known_args()
result = {}
conf = config.get_config()
blockstore_server = conf['server']
blockstore_port = conf['port']
proxy = client.session(conf=conf, server_host=blockstore_server,
server_port=blockstore_port, set_global=True)
# start the two background processes (rpc daemon and monitor queue)
start_background_daemons()
if args.action == 'balance':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
result['total_balance'], result['addresses'] = get_total_balance()
elif args.action == 'price':
fqu = str(args.name)
check_valid_name(fqu)
try:
resp = client.get_name_cost(fqu)
except socket_error:
exit_with_error("Error connecting to server")
if 'error' in resp:
exit_with_error(resp['error'])
data = get_total_fees(resp)
result = data
elif args.action == 'config':
data = {}
settings_updated = False
data["message"] = "Updated settings for"
if args.host is not None:
config.update_config('blockstack-client', 'server', args.host)
data["message"] += " host"
settings_updated = True
if args.port is not None:
config.update_config('blockstack-client', 'port', args.port)
data["message"] += " port"
settings_updated = True
if args.advanced is not None:
if args.advanced != "on" and args.advanced != "off":
exit_with_error("Use --advanced=on or --advanced=off")
else:
config.update_config('blockstack-client', 'advanced_mode', args.advanced)
data["message"] += " advanced"
settings_updated = True
# reload conf
conf = config.get_config()
if settings_updated:
result['message'] = data['message']
else:
result['message'] = "No config settings were updated."
elif args.action == 'deposit':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
result['message'] = 'Send bitcoins to the address specified.'
result['address'], owner_address = get_addresses_from_file()
elif args.action == 'import':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
result['message'] = 'Send the name you want to receive to the'
result['message'] += ' address specified.'
payment_address, result['address'] = get_addresses_from_file()
elif args.action == 'names':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
result['names_owned'] = get_all_names_owned()
result['addresses'] = get_owner_addresses()
elif args.action in ('info', 'status', 'ping', 'details'):
resp = client.getinfo()
result = {}
result['server_host'] = conf['server']
result['server_port'] = str(conf['port'])
result['cli_version'] = config.VERSION
result['advanced_mode'] = conf['advanced_mode']
if 'error' in resp:
result['server_alive'] = False
result['server_error'] = resp['error']
else:
result['server_alive'] = True
result['server_version'] = resp['blockstore_version']
try:
result['last_block_processed'] = resp['last_block']
except:
result['last_block_processed'] = resp['blocks']
result['last_block_seen'] = resp['bitcoind_blocks']
result['consensus_hash'] = resp['consensus']
if advanced_mode == 'on':
result['testset'] = resp['testset']
proxy = get_local_proxy()
if proxy is not False:
current_state = json.loads(proxy.state())
queue = {}
pending_queue = []
preorder_queue = []
register_queue = []
update_queue = []
transfer_queue = []
def format_new_entry(entry):
new_entry = {}
new_entry['name'] = entry['fqu']
confirmations = get_tx_confirmations(entry['tx_hash'])
if confirmations is None:
confirmations = 0
new_entry['confirmations'] = confirmations
return new_entry
def format_queue_display(preorder_queue,
register_queue):
for entry in register_queue:
name = entry['name']
for check_entry in preorder_queue:
if check_entry['name'] == name:
preorder_queue.remove(check_entry)
for entry in current_state:
if 'type' in entry:
if entry['type'] == 'preorder':
preorder_queue.append(format_new_entry(entry))
elif entry['type'] == 'register':
register_queue.append(format_new_entry(entry))
elif entry['type'] == 'update':
update_queue.append(format_new_entry(entry))
elif entry['type'] == 'transfer':
transfer_queue.append(format_new_entry(entry))
format_queue_display(preorder_queue,
register_queue)
if len(preorder_queue) != 0:
queue['preorder'] = preorder_queue
if len(register_queue) != 0:
queue['register'] = register_queue
if len(update_queue) != 0:
queue['update'] = update_queue
if len(transfer_queue) != 0:
queue['transfer'] = transfer_queue
if queue != {}:
result['queue'] = queue
elif args.action == 'lookup':
data = {}
blockchain_record = None
fqu = str(args.name)
check_valid_name(fqu)
try:
blockchain_record = client.get_name_blockchain_record(fqu)
except socket_error:
exit_with_error("Error connecting to server.")
if 'value_hash' not in blockchain_record:
exit_with_error("%s is not registered" % fqu)
try:
data_id = blockchain_record['value_hash']
data['data_record'] = json.loads(
client.get_immutable(str(args.name), data_id)['data'])
except:
data['data_record'] = None
result = data
elif args.action == 'whois':
data = {}
record = None
fqu = str(args.name)
check_valid_name(fqu)
try:
record = client.get_name_blockchain_record(fqu)
except socket_error:
exit_with_error("Error connecting to server.")
if 'value_hash' not in record:
result['registered'] = False
else:
result['registered'] = True
result['block_preordered_at'] = record['preorder_block_number']
result['block_renewed_at'] = record['last_renewed']
result['owner_address'] = record['address']
result['owner_public_key'] = record['sender_pubkey']
result['owner_script'] = record['sender']
result['preorder_transaction_id'] = record['txid']
elif args.action == 'register':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
result = {}
fqu = str(args.name)
check_valid_name(fqu)
cost = client.get_name_cost(fqu)
if 'error' in cost:
exit_with_error(cost['error'])
if nameRegistered(fqu):
exit_with_error("%s is already registered." % fqu)
if not walletUnlocked():
unlock_wallet()
fees = get_total_fees(cost)
try:
cost = fees['total_estimated_cost']
input_prompt = "Registering %s will cost %s BTC." % (fqu, cost)
input_prompt += " Continue? (y/n): "
user_input = raw_input(input_prompt)
user_input = user_input.lower()
if user_input != 'y':
print "Not registering."
exit(0)
except KeyboardInterrupt:
print "\nExiting."
exit(0)
payment_address, owner_address = get_addresses_from_file()
if not hasEnoughBalance(payment_address, fees['total_estimated_cost']):
msg = "Address %s doesn't have enough balance." % payment_address
exit_with_error(msg)
if recipientNotReady(owner_address):
msg = "Address %s owns too many names already." % owner_address
exit_with_error(msg)
if dontuseAddress(payment_address):
msg = "Address %s has pending transactions." % payment_address
msg += " Wait and try later."
exit_with_error(msg)
proxy = get_local_proxy()
try:
resp = proxy.preorder(fqu)
except:
exit_with_error("Error talking to server, try again.")
if 'success' in resp and resp['success']:
result = resp
else:
if 'error' in resp:
exit_with_error(resp['error'])
if 'message' in resp:
exit_with_error(resp['message'])
elif args.action == 'update':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
fqu = str(args.name)
check_valid_name(fqu)
user_data = str(args.data)
try:
user_data = json.loads(user_data)
except:
exit_with_error("Data is not in JSON format.")
tests_for_update_and_transfer(fqu)
if profileonBlockchain(fqu, user_data):
msg = "Data is same as current data record, update not needed."
exit_with_error(msg)
if not walletUnlocked():
unlock_wallet()
proxy = get_local_proxy()
try:
resp = proxy.update(fqu, user_data)
except:
exit_with_error("Error talking to server, try again.")
if 'success' in resp and resp['success']:
result = resp
else:
if 'error' in resp:
exit_with_error(resp['error'])
if 'message' in resp:
exit_with_error(resp['message'])
elif args.action == 'transfer':
if not os.path.exists(WALLET_PATH):
initialize_wallet()
fqu = str(args.name)
check_valid_name(fqu)
transfer_address = str(args.address)
tests_for_update_and_transfer(fqu, transfer_address=transfer_address)
if not walletUnlocked():
unlock_wallet()
proxy = get_local_proxy()
try:
resp = proxy.transfer(fqu, transfer_address)
except:
exit_with_error("Error talking to server, try again.")
if 'success' in resp and resp['success']:
result = resp
else:
if 'error' in resp:
exit_with_error(resp['error'])
if 'message' in resp:
exit_with_error(resp['message'])
# ---------------------- Advanced options ---------------------------------
elif args.action == 'wallet':
if not os.path.exists(WALLET_PATH):
result = initialize_wallet()
else:
unlock_wallet(display_enabled=True)
elif args.action == 'consensus':
if args.block_height is None:
# by default get last indexed block
resp = client.getinfo()
if 'error' in resp:
exit_with_error("Error connecting to server.")
            elif 'last_block' in resp or 'blocks' in resp:
                if 'last_block' in resp:
                    args.block_height = resp['last_block']
                elif 'blocks' in resp:
                    args.block_height = resp['blocks']
            else:
                result['error'] = "Server is indexing. Try again later."
                exit(0)
resp = client.get_consensus_at(int(args.block_height))
data = {}
data['consensus'] = resp
data['block_height'] = args.block_height
result = data
elif args.action == 'register_tx':
result = client.register(str(args.name), str(args.privatekey),
str(args.addr), tx_only=True)
elif args.action == 'register_subsidized':
result = client.register_subsidized(str(args.name), str(args.privatekey),
str(args.addr), str(args.subsidy_key))
elif args.action == 'update_tx':
txid = None
if args.txid is not None:
txid = str(args.txid)
result = client.update(str(args.name),
str(args.record_json),
str(args.privatekey),
txid=txid, tx_only=True)
elif args.action == 'update_subsidized':
txid = None
if args.txid is not None:
txid = str(args.txid)
result = client.update_subsidized(str(args.name),
str(args.record_json),
str(args.public_key),
str(args.subsidy_key),
txid=txid)
elif args.action == 'transfer_tx':
keepdata = False
        if args.keepdata.lower() not in ["true", "false"]:
            print >> sys.stderr, "Pass 'true' or 'false' for keepdata"
            sys.exit(1)
        if args.keepdata.lower() == "true":
            keepdata = True
result = client.transfer(str(args.name),
str(args.address),
keepdata,
str(args.privatekey),
tx_only=True)
elif args.action == 'preorder':
register_addr = None
if args.address is not None:
register_addr = str(args.address)
result = client.preorder(str(args.name), str(args.privatekey),
register_addr=register_addr)
elif args.action == 'preorder_tx':
register_addr = None
if args.address is not None:
register_addr = str(args.address)
result = client.preorder(str(args.name), str(args.privatekey),
register_addr=register_addr, tx_only=True)
elif args.action == 'preorder_subsidized':
result = client.preorder_subsidized(str(args.name),
str(args.public_key),
str(args.address),
str(args.subsidy_key))
elif args.action == 'transfer_subsidized':
keepdata = False
        if args.keepdata.lower() not in ["true", "false"]:
            print >> sys.stderr, "Pass 'true' or 'false' for keepdata"
            sys.exit(1)
        if args.keepdata.lower() == "true":
            keepdata = True
result = client.transfer_subsidized(str(args.name),
str(args.address),
keepdata,
str(args.public_key),
str(args.subsidy_key))
elif args.action == 'renew':
result = client.renew(str(args.name), str(args.privatekey))
elif args.action == 'renew_tx':
result = client.renew(str(args.name), str(args.privatekey),
tx_only=True)
elif args.action == 'renew_subsidized':
result = client.renew_subsidized(str(args.name), str(args.public_key),
str(args.subsidy_key))
elif args.action == 'revoke':
result = client.revoke(str(args.name), str(args.privatekey))
elif args.action == 'revoke_tx':
result = client.revoke(str(args.name), str(args.privatekey),
tx_only=True)
elif args.action == 'revoke_subsidized':
result = client.revoke_subsidized(str(args.name), str(args.public_key),
str(args.subsidy_key))
elif args.action == 'name_import':
result = client.name_import(str(args.name), str(args.address),
str(args.hash), str(args.privatekey))
elif args.action == 'namespace_preorder':
reveal_addr = None
if args.address is not None:
reveal_addr = str(args.address)
result = client.namespace_preorder(str(args.namespace_id),
str(args.privatekey),
reveal_addr=reveal_addr)
elif args.action == 'namespace_reveal':
bucket_exponents = args.bucket_exponents.split(',')
if len(bucket_exponents) != 16:
raise Exception("bucket_exponents must be a 16-value CSV \
of integers")
for i in xrange(0, len(bucket_exponents)):
try:
bucket_exponents[i] = int(bucket_exponents[i])
except:
raise Exception("bucket_exponents must contain integers in \
range [0, 16)")
lifetime = int(args.lifetime)
if lifetime < 0:
lifetime = 0xffffffff # means "infinite" to blockstack-server
result = client.namespace_reveal(str(args.namespace_id),
str(args.addr),
lifetime,
int(args.coeff),
int(args.base),
bucket_exponents,
int(args.nonalpha_discount),
int(args.no_vowel_discount),
str(args.privatekey))
elif args.action == 'namespace_ready':
result = client.namespace_ready(str(args.namespace_id),
str(args.privatekey))
elif args.action == 'put_mutable':
result = client.put_mutable(str(args.name),
str(args.data_id),
str(args.data),
str(args.privatekey))
elif args.action == 'put_immutable':
result = client.put_immutable(str(args.name),
str(args.data),
str(args.privatekey),
conf=conf)
elif args.action == 'get_mutable':
result = client.get_mutable(str(args.name), str(args.data_id),
conf=conf)
elif args.action == 'get_immutable':
result = client.get_immutable(str(args.name), str(args.hash))
elif args.action == 'delete_immutable':
result = client.delete_immutable(str(args.name), str(args.hash),
str(args.privatekey))
elif args.action == 'delete_mutable':
result = client.delete_mutable(str(args.name), str(args.data_id),
str(args.privatekey))
elif args.action == 'get_name_blockchain_record':
result = client.get_name_blockchain_record(str(args.name))
elif args.action == 'get_namespace_blockchain_record':
result = client.get_namespace_blockchain_record(str(args.namespace_id))
elif args.action == 'lookup_snv':
result = client.lookup_snv(str(args.name), int(args.block_id),
str(args.consensus_hash))
elif args.action == 'get_name_record':
result = client.get_name_record(str(args.name))
elif args.action == 'get_names_owned_by_address':
result = client.get_names_owned_by_address(str(args.address))
elif args.action == 'get_namespace_cost':
result = client.get_namespace_cost(str(args.namespace_id))
elif args.action == 'get_all_names':
offset = None
count = None
if args.offset is not None:
offset = int(args.offset)
if args.count is not None:
count = int(args.count)
result = client.get_all_names(offset, count)
elif args.action == 'get_names_in_namespace':
offset = None
count = None
if args.offset is not None:
offset = int(args.offset)
if args.count is not None:
count = int(args.count)
result = client.get_names_in_namespace(str(args.namespace_id), offset,
count)
elif args.action == 'get_nameops_at':
result = client.get_nameops_at(int(args.block_id))
print_result(result)
if __name__ == '__main__':
try:
run_cli()
except:
exit_with_error("Unexpected error. Try getting latest version of CLI" +
"'sudo pip install blockstack --upgrade'")
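# Illustrative invocations (hedged: the exact entry-point name depends on how
# this module is installed; the subcommands below are the ones wired up in
# run_cli above):
#
#   blockstack balance                      # show payment addresses and balance
#   blockstack price alice.id               # estimate total registration cost
#   blockstack register alice.id            # preorder/register a name
#   blockstack update alice.id '{"a": 1}'   # write a new JSON data record
#   blockstack transfer alice.id <address>  # transfer a name to another owner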
/PyAlgoTrade-0.20.tar.gz/PyAlgoTrade-0.20/pyalgotrade/broker/slippage.py
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class SlippageModel(object):
"""Base class for slippage models.
.. note::
This is a base class and should not be used directly.
"""
@abc.abstractmethod
def calculatePrice(self, order, price, quantity, bar, volumeUsed):
"""
Returns the slipped price per share for an order.
:param order: The order being filled.
:type order: :class:`pyalgotrade.broker.Order`.
:param price: The price for each share before slippage.
:type price: float.
:param quantity: The amount of shares that will get filled at this time for this order.
:type quantity: float.
:param bar: The current bar.
:type bar: :class:`pyalgotrade.bar.Bar`.
:param volumeUsed: The volume size that was taken so far from the current bar.
:type volumeUsed: float.
:rtype: float.
"""
raise NotImplementedError()
class NoSlippage(SlippageModel):
"""A no slippage model."""
def calculatePrice(self, order, price, quantity, bar, volumeUsed):
return price
class VolumeShareSlippage(SlippageModel):
"""
    A volume-share slippage model, following Zipline's VolumeShareSlippage.
    The slipped price is obtained by multiplying the price impact constant by the square of the
    ratio of the volume filled so far in the bar (including this fill) to the bar's total volume.
Check https://www.quantopian.com/help#ide-slippage for more details.
:param priceImpact: Defines how large of an impact your order will have on the backtester's price calculation.
:type priceImpact: float.
"""
def __init__(self, priceImpact=0.1):
super(VolumeShareSlippage, self).__init__()
self.__priceImpact = priceImpact
def calculatePrice(self, order, price, quantity, bar, volumeUsed):
assert bar.getVolume(), "Can't use 0 volume bars with VolumeShareSlippage"
totalVolume = volumeUsed + quantity
volumeShare = totalVolume / float(bar.getVolume())
impactPct = volumeShare ** 2 * self.__priceImpact
if order.isBuy():
ret = price * (1 + impactPct)
else:
ret = price * (1 - impactPct)
return ret
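# A minimal usage sketch (not part of the original module). The _FakeBar and
# _FakeOrder stand-ins below are hypothetical: they only provide the two
# accessors that calculatePrice actually touches.
if __name__ == '__main__':
    class _FakeBar(object):
        def getVolume(self):
            return 1000.0

    class _FakeOrder(object):
        def isBuy(self):
            return True

    model = VolumeShareSlippage(priceImpact=0.1)
    # Filling 100 shares out of a 1000-share bar: volumeShare = 0.1,
    # impactPct = 0.1 ** 2 * 0.1 = 0.001, so a buy priced at 50.0 slips to 50.05.
    print(model.calculatePrice(_FakeOrder(), 50.0, 100, _FakeBar(), volumeUsed=0))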
/FairDynamicRec-0.0.123-py3-none-any.whl/fair_dynamic_rec/core/rankers/linear_neighbor_bandit3.py
import numpy as np
from .abstract_ranker import AbstractRanker
import pandas as pd
from fair_dynamic_rec.core.util.outputs import make_output_dir
from pathlib import Path
from sklearn.metrics.pairwise import cosine_similarity
import os.path
from datetime import datetime
import sys
from scipy.sparse import csr_matrix
class NeighborUCB(AbstractRanker):
def __init__(self, config, dataObj, parameters=None):
super(NeighborUCB, self).__init__(config, dataObj)
print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": Start setting up ranker.")
sys.stdout.flush()
self.n_samples = np.zeros(dataObj.n_users)
self.n_clicks = np.zeros(dataObj.n_users)
self.prng = np.random.RandomState(seed=config.seed)
self.sigma = float(parameters["sigma"]["value"]) if "sigma" in parameters else 1.0
self.l = int(parameters["latent_dim"]["value"]) if "latent_dim" in parameters else 0
self.lambda_1 = float(parameters["lambda"]["value"]) if "lambda" in parameters else 1.0
self.alpha = float(parameters["alpha"]["value"]) if "alpha" in parameters else 1.0
self.noise = float(parameters["noise"]["value"]) if "noise" in parameters else 0.00001
# self.X = dataObj.train_data
# self.X = (self.X > 0).astype(float)
# self.X = self.X + self.noise
dataObj.train_data = (dataObj.train_data > 0).astype(float)
# dataObj.train_data = dataObj.train_data + self.noise
self.X = csr_matrix(dataObj.train_data)
# print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": Defining XTX.")
# sys.stdout.flush()
# self.XTX = np.dot(dataObj.train_data.T, dataObj.train_data)
# print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": XTX defined.")
# sys.stdout.flush()
print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": Defining U and V.")
sys.stdout.flush()
self.U = np.zeros((self.dataObj.n_items, self.l)) + self.noise
self.V = np.zeros((self.dataObj.n_items, self.l)) + self.noise
print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": Defining A and AInv.")
sys.stdout.flush()
# A = U_T * (X_T.X + lambda) * U -> l * l
self.A = np.eye(self.l)
self.AInv = np.linalg.inv(self.A)
# U_T * (X_T . X_{u,i} - dMat(\etha)) -> l * m
self.b = np.zeros((self.l, dataObj.n_items))
print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": Defining C.")
sys.stdout.flush()
# X_T . X + \lambda -> m * m
self.C = self.X.T * self.X
self.C += self.lambda_1 * np.eye(dataObj.n_items)
self.C = csr_matrix(self.C)
print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": C defined.")
sys.stdout.flush()
self.CInv = np.linalg.inv(self.C)
print(str(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ": CInv defined.")
sys.stdout.flush()
# (X_T . X + dMat(\etha)) * V -> m * l
self.d = np.zeros((dataObj.n_items, self.l))
self.E = np.eye(self.l)
self.EInv = np.linalg.inv(self.E)
# self.writeSimMat()
# self.ill_matrix_counter = 0
# # for ill inverse
# self.AInv_tmp = np.zeros((self.dataObj.n_users, (self.k + self.l) * self.dataObj.n_users, (self.k + self.l) * self.dataObj.n_users))
# self.b_tmp = np.zeros((self.dataObj.n_users, (self.k + self.l) * self.dataObj.n_users))
# self.CInv_tmp = np.zeros((self.dataObj.n_items, self.l, self.l))
# self.d_tmp = np.zeros((self.dataObj.n_items, self.l))
def get_ranking(self, batch_users, sampled_items=None, round=None):
"""
        :param batch_users: user ids to produce rankings for.
        :param sampled_items: optional candidate item ids (unused here).
        :param round: current interaction round (only used for optional logging).
        :return: rankings: the ranked item ids, one row per user.
"""
# assert x.shape[0] >= k
rankings = np.zeros((len(batch_users), self.config.list_size), dtype=int)
# self.batch_features = np.zeros((len(batch_users), self.config.list_size, self.dim))
tie_breaker = self.prng.rand(self.dataObj.n_items)
for i in range(len(batch_users)):
user = batch_users[i]
            user_vector = np.reshape(self.X[user], (1, self.X.shape[1]))
S = np.dot(self.U, self.V.T)
np.fill_diagonal(S, 0.0)
score = np.dot(self.X[user], S)
score[np.isnan(score)] = 0
XU = np.multiply(user_vector.T,self.U) # m * k
cbV = np.sqrt(np.sum(np.multiply(np.dot(XU, self.AInv), XU), axis=1))
cbV[np.isnan(cbV)] = 0
xCx = np.multiply(np.dot(self.X[user], self.CInv), self.X[user]) # 1 * m
vEv = np.dot(np.dot(self.V, self.EInv), self.V.T) # m * m
cbU = np.sqrt(np.multiply(np.dot(xCx, vEv), xCx))
cbU[np.isnan(cbU)] = 0
ucb = score + self.alpha * (cbV + cbU)
rankings[i] = np.lexsort((tie_breaker, -ucb))[:self.config.list_size]
# self.writeCB(round+i, cbV, cbU)
return rankings
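    # How the ranking above is scored (interpretation, not part of the original
    # code): S = U.V^T is a learned item-item similarity with a zeroed diagonal,
    # so score = x_u.S is an item-based collaborative-filtering estimate for
    # user u. cbV and cbU are optimism bonuses built from the inverses of the
    # design matrices A, C and E, and the final UCB is score + alpha * (cbV + cbU),
    # with ties broken by the pre-drawn random tie_breaker.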
def update(self, batch_users, rankings, clicks, round=None, user_round=None):
for i in range(len(batch_users)):
user = batch_users[i]
_clicks, _ranking = self.__collect_feedback(clicks[i], rankings[i])
clicked_items = _ranking[np.where(_clicks)[0]]
X_clicked_items = np.zeros((self.dataObj.n_items, self.dataObj.n_users))
if sum(_clicks) > 0:
                X_clicked_items[clicked_items, user] = np.ones(len(clicked_items))
# Update XTX
# self.XTX = self.XTX + np.dot(X_clicked_items, self.dataObj.train_data)
# Update C and CInv
self.C += np.dot(X_clicked_items, self.dataObj.train_data)
self.CInv = np.linalg.inv(self.C)
# Update X
self.dataObj.train_data[user, clicked_items] += np.ones(len(clicked_items), dtype=float)
# Update d
self.d += np.dot(np.dot(self.dataObj.train_data[user].T.reshape(self.dataObj.n_items,1), _clicks.reshape(1,len(_clicks))), self.V[_ranking])
# Update U
if self.d.sum() != 0:
self.U = np.dot(np.dot(self.CInv, self.d), self.EInv)
self.U = self.U / np.sqrt(np.sum(self.U ** 2))
# Update A
user_vector = np.reshape(self.dataObj.train_data[user], (1, self.dataObj.train_data.shape[1]))
XU = np.multiply(user_vector.T, self.U) # m * k
self.A += np.dot(XU.T, XU) + self.lambda_1 * np.dot(self.U.T, self.U)
try:
self.AInv = np.linalg.inv(self.A)
except np.linalg.LinAlgError:
# for the ill matrix. if the matrix is not invertible, we ignore this update
print('ill matrix A.')
self.AInv = np.linalg.pinv(self.A)
# Update b
self.b[:, _ranking] += np.dot(self.U.T, np.dot(self.dataObj.train_data[user].T.reshape(self.dataObj.n_items,1), _clicks.reshape(1,len(_clicks))))
# Update V.T
self.V[_ranking] = np.dot(self.AInv, self.b[:, _ranking]).T
# self.V = self.V / np.sqrt(np.sum(self.V ** 2))
# Update E
self.update_E(self.V[_ranking])
# self.E = self.E + np.dot(self.V[_ranking].T, self.V[_ranking])
# self.EInv = np.linalg.inv(self.E)
# Update U again
if self.d.sum() != 0:
self.U = np.dot(np.dot(self.CInv, self.d), self.EInv)
self.U = self.U / np.sqrt(np.sum(self.U ** 2))
self.n_samples[user] += len(_clicks)
self.n_clicks[user] += sum(_clicks)
# self.writeParams(round)
def __collect_feedback(self, click, ranking):
"""
        :param click: binary click vector for the displayed ranking.
        :param ranking: the displayed item ids.
        :return: the clicks and ranking truncated at the last observed position,
                 according to the configured feedback model.
"""
# With Cascade assumption, only the first click counts.
if self.config.feedback_model == 'cascade':
if np.sum(click) == 0:
return click, ranking
first_click = np.where(click)[0][0]
return click[:first_click + 1], ranking[:first_click +1]
elif self.config.feedback_model == 'dcm':
if np.sum(click) == 0:
return click, ranking
last_click = np.where(click)[0][-1]
return click[:last_click + 1], ranking[:last_click + 1]
# all items are observed
else:
return click, ranking
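    # Worked example (illustrative): with click = [0, 1, 0, 1, 0] and
    # ranking = [a, b, c, d, e],
    #   'cascade' keeps everything up to the FIRST click -> [0, 1], [a, b]
    #   'dcm'     keeps everything up to the LAST click  -> [0, 1, 0, 1], [a, b, c, d]
    #   any other feedback model treats all positions as observed.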
def writeCB(self, round, cb1, cb2):
if not os.path.exists('cb1.txt'):
f=open('cb1.txt','a')
f.write(str(round)+'\t'+",".join(map(str, cb1))+'\n')
f.close()
if not os.path.exists('cb2.txt'):
f = open('cb2.txt', 'a')
f.write(str(round) + '\t' + ",".join(map(str, cb2))+'\n')
f.close()
def writeParams(self, round):
if round == 0:
return
pre_path = self.config._target / Path('results')
U_path = pre_path / Path('U'+str(round)+'.txt')
V_path = pre_path / Path('V'+str(round)+'.txt')
df = pd.DataFrame(data=self.U.astype(float))
df.to_csv(U_path, sep=' ', header=False, float_format='%.10f', index=False)
df = pd.DataFrame(data=self.V.astype(float))
df.to_csv(V_path, sep=' ', header=False, float_format='%.10f', index=False)
def writeSimMat(self):
pre_path = self.config._target
test_sim_path = pre_path / Path('test_item_sim.txt')
train_sim_path = pre_path / Path('train_item_sim.txt')
data_sim_path = pre_path / Path('data_item_sim.txt')
if not os.path.isfile(test_sim_path):
item_sim_matrix = cosine_similarity(self.dataObj.test_data.T)
np.fill_diagonal(item_sim_matrix, 0)
df = pd.DataFrame(data=item_sim_matrix.astype(float))
df.to_csv(test_sim_path, sep=' ', header=False, float_format='%.10f', index=False)
if not os.path.isfile(train_sim_path):
item_sim_matrix = cosine_similarity(self.dataObj.train_data.T)
np.fill_diagonal(item_sim_matrix, 0)
df = pd.DataFrame(data=item_sim_matrix.astype(float))
df.to_csv(train_sim_path, sep=' ', header=False, float_format='%.10f', index=False)
if not os.path.isfile(data_sim_path):
item_sim_matrix = cosine_similarity((self.dataObj.test_data + self.dataObj.train_data).T)
np.fill_diagonal(item_sim_matrix, 0)
df = pd.DataFrame(data=item_sim_matrix.astype(float))
df.to_csv(data_sim_path, sep=' ', header=False, float_format='%.10f', index=False)
def update_E(self, x):
self.E = self.E + np.dot(x.T, x)
# x * m^-1 * x^T
xmx = np.dot(x, np.dot(self.EInv, x.T))
# (1/sigma I + xmx)^-1
try:
tmp_inv = np.linalg.inv(1 / self.sigma * np.eye(len(x)) + xmx)
except np.linalg.LinAlgError:
# for the ill matrix. if the matrix is not invertible, we ignore this update
self.EInv = np.linalg.inv(self.E)
return
# m^-1*x^T
MInv_xT = self.EInv.dot(x.T)
# MInv_xT*tmp_inv*MInv_xT^T
self.EInv -= np.dot(np.dot(MInv_xT, tmp_inv), MInv_xT.T)
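    # Note on update_E (interpretation, not part of the original code): the
    # in-place EInv update is the Woodbury identity
    #     (E + x^T x)^{-1} = E^{-1} - E^{-1} x^T (I + x E^{-1} x^T)^{-1} x E^{-1},
    # which avoids re-inverting the full l x l matrix after each batch of clicked
    # rows x. The code places (1/sigma) I inside the small inverse, so the shortcut
    # matches the E += x^T x update exactly when sigma == 1 (the default); if that
    # small inverse is ill-conditioned, it falls back to a full np.linalg.inv(E).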
/aifs_nni-1.9.5-py3-none-manylinux1_x86_64.whl/aifs_nni-1.9.5.data/data/nni/node_modules/tweetnacl/nacl-fast.js
(function(nacl) {
'use strict';
// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri.
// Public domain.
//
// Implementation derived from TweetNaCl version 20140427.
// See for details: http://tweetnacl.cr.yp.to/
var gf = function(init) {
var i, r = new Float64Array(16);
if (init) for (i = 0; i < init.length; i++) r[i] = init[i];
return r;
};
// Pluggable, initialized in high-level API below.
var randombytes = function(/* x, n */) { throw new Error('no PRNG'); };
var _0 = new Uint8Array(16);
var _9 = new Uint8Array(32); _9[0] = 9;
var gf0 = gf(),
gf1 = gf([1]),
_121665 = gf([0xdb41, 1]),
D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]),
D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]),
X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]),
Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]),
I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]);
function ts64(x, i, h, l) {
x[i] = (h >> 24) & 0xff;
x[i+1] = (h >> 16) & 0xff;
x[i+2] = (h >> 8) & 0xff;
x[i+3] = h & 0xff;
x[i+4] = (l >> 24) & 0xff;
x[i+5] = (l >> 16) & 0xff;
x[i+6] = (l >> 8) & 0xff;
x[i+7] = l & 0xff;
}
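// vn compares n bytes of x (starting at xi) with n bytes of y (starting at yi)
// in constant time: it returns 0 when they are equal and -1 otherwise, which is
// what crypto_verify_16 / crypto_verify_32 below expose for MAC checks.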
function vn(x, xi, y, yi, n) {
var i,d = 0;
for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i];
return (1 & ((d - 1) >>> 8)) - 1;
}
function crypto_verify_16(x, xi, y, yi) {
return vn(x,xi,y,yi,16);
}
function crypto_verify_32(x, xi, y, yi) {
return vn(x,xi,y,yi,32);
}
function core_salsa20(o, p, k, c) {
var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24,
j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24,
j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24,
j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24,
j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24,
j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24,
j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24,
j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24,
j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24,
j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24,
j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24,
j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24,
j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24,
j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24,
j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24,
j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24;
var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7,
x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14,
x15 = j15, u;
for (var i = 0; i < 20; i += 2) {
u = x0 + x12 | 0;
x4 ^= u<<7 | u>>>(32-7);
u = x4 + x0 | 0;
x8 ^= u<<9 | u>>>(32-9);
u = x8 + x4 | 0;
x12 ^= u<<13 | u>>>(32-13);
u = x12 + x8 | 0;
x0 ^= u<<18 | u>>>(32-18);
u = x5 + x1 | 0;
x9 ^= u<<7 | u>>>(32-7);
u = x9 + x5 | 0;
x13 ^= u<<9 | u>>>(32-9);
u = x13 + x9 | 0;
x1 ^= u<<13 | u>>>(32-13);
u = x1 + x13 | 0;
x5 ^= u<<18 | u>>>(32-18);
u = x10 + x6 | 0;
x14 ^= u<<7 | u>>>(32-7);
u = x14 + x10 | 0;
x2 ^= u<<9 | u>>>(32-9);
u = x2 + x14 | 0;
x6 ^= u<<13 | u>>>(32-13);
u = x6 + x2 | 0;
x10 ^= u<<18 | u>>>(32-18);
u = x15 + x11 | 0;
x3 ^= u<<7 | u>>>(32-7);
u = x3 + x15 | 0;
x7 ^= u<<9 | u>>>(32-9);
u = x7 + x3 | 0;
x11 ^= u<<13 | u>>>(32-13);
u = x11 + x7 | 0;
x15 ^= u<<18 | u>>>(32-18);
u = x0 + x3 | 0;
x1 ^= u<<7 | u>>>(32-7);
u = x1 + x0 | 0;
x2 ^= u<<9 | u>>>(32-9);
u = x2 + x1 | 0;
x3 ^= u<<13 | u>>>(32-13);
u = x3 + x2 | 0;
x0 ^= u<<18 | u>>>(32-18);
u = x5 + x4 | 0;
x6 ^= u<<7 | u>>>(32-7);
u = x6 + x5 | 0;
x7 ^= u<<9 | u>>>(32-9);
u = x7 + x6 | 0;
x4 ^= u<<13 | u>>>(32-13);
u = x4 + x7 | 0;
x5 ^= u<<18 | u>>>(32-18);
u = x10 + x9 | 0;
x11 ^= u<<7 | u>>>(32-7);
u = x11 + x10 | 0;
x8 ^= u<<9 | u>>>(32-9);
u = x8 + x11 | 0;
x9 ^= u<<13 | u>>>(32-13);
u = x9 + x8 | 0;
x10 ^= u<<18 | u>>>(32-18);
u = x15 + x14 | 0;
x12 ^= u<<7 | u>>>(32-7);
u = x12 + x15 | 0;
x13 ^= u<<9 | u>>>(32-9);
u = x13 + x12 | 0;
x14 ^= u<<13 | u>>>(32-13);
u = x14 + x13 | 0;
x15 ^= u<<18 | u>>>(32-18);
}
x0 = x0 + j0 | 0;
x1 = x1 + j1 | 0;
x2 = x2 + j2 | 0;
x3 = x3 + j3 | 0;
x4 = x4 + j4 | 0;
x5 = x5 + j5 | 0;
x6 = x6 + j6 | 0;
x7 = x7 + j7 | 0;
x8 = x8 + j8 | 0;
x9 = x9 + j9 | 0;
x10 = x10 + j10 | 0;
x11 = x11 + j11 | 0;
x12 = x12 + j12 | 0;
x13 = x13 + j13 | 0;
x14 = x14 + j14 | 0;
x15 = x15 + j15 | 0;
o[ 0] = x0 >>> 0 & 0xff;
o[ 1] = x0 >>> 8 & 0xff;
o[ 2] = x0 >>> 16 & 0xff;
o[ 3] = x0 >>> 24 & 0xff;
o[ 4] = x1 >>> 0 & 0xff;
o[ 5] = x1 >>> 8 & 0xff;
o[ 6] = x1 >>> 16 & 0xff;
o[ 7] = x1 >>> 24 & 0xff;
o[ 8] = x2 >>> 0 & 0xff;
o[ 9] = x2 >>> 8 & 0xff;
o[10] = x2 >>> 16 & 0xff;
o[11] = x2 >>> 24 & 0xff;
o[12] = x3 >>> 0 & 0xff;
o[13] = x3 >>> 8 & 0xff;
o[14] = x3 >>> 16 & 0xff;
o[15] = x3 >>> 24 & 0xff;
o[16] = x4 >>> 0 & 0xff;
o[17] = x4 >>> 8 & 0xff;
o[18] = x4 >>> 16 & 0xff;
o[19] = x4 >>> 24 & 0xff;
o[20] = x5 >>> 0 & 0xff;
o[21] = x5 >>> 8 & 0xff;
o[22] = x5 >>> 16 & 0xff;
o[23] = x5 >>> 24 & 0xff;
o[24] = x6 >>> 0 & 0xff;
o[25] = x6 >>> 8 & 0xff;
o[26] = x6 >>> 16 & 0xff;
o[27] = x6 >>> 24 & 0xff;
o[28] = x7 >>> 0 & 0xff;
o[29] = x7 >>> 8 & 0xff;
o[30] = x7 >>> 16 & 0xff;
o[31] = x7 >>> 24 & 0xff;
o[32] = x8 >>> 0 & 0xff;
o[33] = x8 >>> 8 & 0xff;
o[34] = x8 >>> 16 & 0xff;
o[35] = x8 >>> 24 & 0xff;
o[36] = x9 >>> 0 & 0xff;
o[37] = x9 >>> 8 & 0xff;
o[38] = x9 >>> 16 & 0xff;
o[39] = x9 >>> 24 & 0xff;
o[40] = x10 >>> 0 & 0xff;
o[41] = x10 >>> 8 & 0xff;
o[42] = x10 >>> 16 & 0xff;
o[43] = x10 >>> 24 & 0xff;
o[44] = x11 >>> 0 & 0xff;
o[45] = x11 >>> 8 & 0xff;
o[46] = x11 >>> 16 & 0xff;
o[47] = x11 >>> 24 & 0xff;
o[48] = x12 >>> 0 & 0xff;
o[49] = x12 >>> 8 & 0xff;
o[50] = x12 >>> 16 & 0xff;
o[51] = x12 >>> 24 & 0xff;
o[52] = x13 >>> 0 & 0xff;
o[53] = x13 >>> 8 & 0xff;
o[54] = x13 >>> 16 & 0xff;
o[55] = x13 >>> 24 & 0xff;
o[56] = x14 >>> 0 & 0xff;
o[57] = x14 >>> 8 & 0xff;
o[58] = x14 >>> 16 & 0xff;
o[59] = x14 >>> 24 & 0xff;
o[60] = x15 >>> 0 & 0xff;
o[61] = x15 >>> 8 & 0xff;
o[62] = x15 >>> 16 & 0xff;
o[63] = x15 >>> 24 & 0xff;
}
function core_hsalsa20(o,p,k,c) {
var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24,
j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24,
j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24,
j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24,
j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24,
j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24,
j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24,
j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24,
j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24,
j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24,
j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24,
j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24,
j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24,
j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24,
j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24,
j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24;
var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7,
x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14,
x15 = j15, u;
for (var i = 0; i < 20; i += 2) {
u = x0 + x12 | 0;
x4 ^= u<<7 | u>>>(32-7);
u = x4 + x0 | 0;
x8 ^= u<<9 | u>>>(32-9);
u = x8 + x4 | 0;
x12 ^= u<<13 | u>>>(32-13);
u = x12 + x8 | 0;
x0 ^= u<<18 | u>>>(32-18);
u = x5 + x1 | 0;
x9 ^= u<<7 | u>>>(32-7);
u = x9 + x5 | 0;
x13 ^= u<<9 | u>>>(32-9);
u = x13 + x9 | 0;
x1 ^= u<<13 | u>>>(32-13);
u = x1 + x13 | 0;
x5 ^= u<<18 | u>>>(32-18);
u = x10 + x6 | 0;
x14 ^= u<<7 | u>>>(32-7);
u = x14 + x10 | 0;
x2 ^= u<<9 | u>>>(32-9);
u = x2 + x14 | 0;
x6 ^= u<<13 | u>>>(32-13);
u = x6 + x2 | 0;
x10 ^= u<<18 | u>>>(32-18);
u = x15 + x11 | 0;
x3 ^= u<<7 | u>>>(32-7);
u = x3 + x15 | 0;
x7 ^= u<<9 | u>>>(32-9);
u = x7 + x3 | 0;
x11 ^= u<<13 | u>>>(32-13);
u = x11 + x7 | 0;
x15 ^= u<<18 | u>>>(32-18);
u = x0 + x3 | 0;
x1 ^= u<<7 | u>>>(32-7);
u = x1 + x0 | 0;
x2 ^= u<<9 | u>>>(32-9);
u = x2 + x1 | 0;
x3 ^= u<<13 | u>>>(32-13);
u = x3 + x2 | 0;
x0 ^= u<<18 | u>>>(32-18);
u = x5 + x4 | 0;
x6 ^= u<<7 | u>>>(32-7);
u = x6 + x5 | 0;
x7 ^= u<<9 | u>>>(32-9);
u = x7 + x6 | 0;
x4 ^= u<<13 | u>>>(32-13);
u = x4 + x7 | 0;
x5 ^= u<<18 | u>>>(32-18);
u = x10 + x9 | 0;
x11 ^= u<<7 | u>>>(32-7);
u = x11 + x10 | 0;
x8 ^= u<<9 | u>>>(32-9);
u = x8 + x11 | 0;
x9 ^= u<<13 | u>>>(32-13);
u = x9 + x8 | 0;
x10 ^= u<<18 | u>>>(32-18);
u = x15 + x14 | 0;
x12 ^= u<<7 | u>>>(32-7);
u = x12 + x15 | 0;
x13 ^= u<<9 | u>>>(32-9);
u = x13 + x12 | 0;
x14 ^= u<<13 | u>>>(32-13);
u = x14 + x13 | 0;
x15 ^= u<<18 | u>>>(32-18);
}
o[ 0] = x0 >>> 0 & 0xff;
o[ 1] = x0 >>> 8 & 0xff;
o[ 2] = x0 >>> 16 & 0xff;
o[ 3] = x0 >>> 24 & 0xff;
o[ 4] = x5 >>> 0 & 0xff;
o[ 5] = x5 >>> 8 & 0xff;
o[ 6] = x5 >>> 16 & 0xff;
o[ 7] = x5 >>> 24 & 0xff;
o[ 8] = x10 >>> 0 & 0xff;
o[ 9] = x10 >>> 8 & 0xff;
o[10] = x10 >>> 16 & 0xff;
o[11] = x10 >>> 24 & 0xff;
o[12] = x15 >>> 0 & 0xff;
o[13] = x15 >>> 8 & 0xff;
o[14] = x15 >>> 16 & 0xff;
o[15] = x15 >>> 24 & 0xff;
o[16] = x6 >>> 0 & 0xff;
o[17] = x6 >>> 8 & 0xff;
o[18] = x6 >>> 16 & 0xff;
o[19] = x6 >>> 24 & 0xff;
o[20] = x7 >>> 0 & 0xff;
o[21] = x7 >>> 8 & 0xff;
o[22] = x7 >>> 16 & 0xff;
o[23] = x7 >>> 24 & 0xff;
o[24] = x8 >>> 0 & 0xff;
o[25] = x8 >>> 8 & 0xff;
o[26] = x8 >>> 16 & 0xff;
o[27] = x8 >>> 24 & 0xff;
o[28] = x9 >>> 0 & 0xff;
o[29] = x9 >>> 8 & 0xff;
o[30] = x9 >>> 16 & 0xff;
o[31] = x9 >>> 24 & 0xff;
}
function crypto_core_salsa20(out,inp,k,c) {
core_salsa20(out,inp,k,c);
}
function crypto_core_hsalsa20(out,inp,k,c) {
core_hsalsa20(out,inp,k,c);
}
var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]);
// "expand 32-byte k"
function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) {
var z = new Uint8Array(16), x = new Uint8Array(64);
var u, i;
for (i = 0; i < 16; i++) z[i] = 0;
for (i = 0; i < 8; i++) z[i] = n[i];
while (b >= 64) {
crypto_core_salsa20(x,z,k,sigma);
for (i = 0; i < 64; i++) c[cpos+i] = m[mpos+i] ^ x[i];
u = 1;
for (i = 8; i < 16; i++) {
u = u + (z[i] & 0xff) | 0;
z[i] = u & 0xff;
u >>>= 8;
}
b -= 64;
cpos += 64;
mpos += 64;
}
if (b > 0) {
crypto_core_salsa20(x,z,k,sigma);
for (i = 0; i < b; i++) c[cpos+i] = m[mpos+i] ^ x[i];
}
return 0;
}
function crypto_stream_salsa20(c,cpos,b,n,k) {
var z = new Uint8Array(16), x = new Uint8Array(64);
var u, i;
for (i = 0; i < 16; i++) z[i] = 0;
for (i = 0; i < 8; i++) z[i] = n[i];
while (b >= 64) {
crypto_core_salsa20(x,z,k,sigma);
for (i = 0; i < 64; i++) c[cpos+i] = x[i];
u = 1;
for (i = 8; i < 16; i++) {
u = u + (z[i] & 0xff) | 0;
z[i] = u & 0xff;
u >>>= 8;
}
b -= 64;
cpos += 64;
}
if (b > 0) {
crypto_core_salsa20(x,z,k,sigma);
for (i = 0; i < b; i++) c[cpos+i] = x[i];
}
return 0;
}
function crypto_stream(c,cpos,d,n,k) {
var s = new Uint8Array(32);
crypto_core_hsalsa20(s,n,k,sigma);
var sn = new Uint8Array(8);
for (var i = 0; i < 8; i++) sn[i] = n[i+16];
return crypto_stream_salsa20(c,cpos,d,sn,s);
}
function crypto_stream_xor(c,cpos,m,mpos,d,n,k) {
var s = new Uint8Array(32);
crypto_core_hsalsa20(s,n,k,sigma);
var sn = new Uint8Array(8);
for (var i = 0; i < 8; i++) sn[i] = n[i+16];
return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,sn,s);
}
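// crypto_stream / crypto_stream_xor implement XSalsa20: core_hsalsa20 first
// derives a 32-byte subkey from the key and the first 16 bytes of the 24-byte
// nonce, and the remaining 8 nonce bytes then drive the plain Salsa20 stream
// (crypto_stream_salsa20 / crypto_stream_salsa20_xor) that produces the actual
// keystream.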
/*
* Port of Andrew Moon's Poly1305-donna-16. Public domain.
* https://github.com/floodyberry/poly1305-donna
*/
var poly1305 = function(key) {
this.buffer = new Uint8Array(16);
this.r = new Uint16Array(10);
this.h = new Uint16Array(10);
this.pad = new Uint16Array(8);
this.leftover = 0;
this.fin = 0;
var t0, t1, t2, t3, t4, t5, t6, t7;
t0 = key[ 0] & 0xff | (key[ 1] & 0xff) << 8; this.r[0] = ( t0 ) & 0x1fff;
t1 = key[ 2] & 0xff | (key[ 3] & 0xff) << 8; this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff;
t2 = key[ 4] & 0xff | (key[ 5] & 0xff) << 8; this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03;
t3 = key[ 6] & 0xff | (key[ 7] & 0xff) << 8; this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff;
t4 = key[ 8] & 0xff | (key[ 9] & 0xff) << 8; this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff;
this.r[5] = ((t4 >>> 1)) & 0x1ffe;
t5 = key[10] & 0xff | (key[11] & 0xff) << 8; this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff;
t6 = key[12] & 0xff | (key[13] & 0xff) << 8; this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81;
t7 = key[14] & 0xff | (key[15] & 0xff) << 8; this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff;
this.r[9] = ((t7 >>> 5)) & 0x007f;
this.pad[0] = key[16] & 0xff | (key[17] & 0xff) << 8;
this.pad[1] = key[18] & 0xff | (key[19] & 0xff) << 8;
this.pad[2] = key[20] & 0xff | (key[21] & 0xff) << 8;
this.pad[3] = key[22] & 0xff | (key[23] & 0xff) << 8;
this.pad[4] = key[24] & 0xff | (key[25] & 0xff) << 8;
this.pad[5] = key[26] & 0xff | (key[27] & 0xff) << 8;
this.pad[6] = key[28] & 0xff | (key[29] & 0xff) << 8;
this.pad[7] = key[30] & 0xff | (key[31] & 0xff) << 8;
};
poly1305.prototype.blocks = function(m, mpos, bytes) {
var hibit = this.fin ? 0 : (1 << 11);
var t0, t1, t2, t3, t4, t5, t6, t7, c;
var d0, d1, d2, d3, d4, d5, d6, d7, d8, d9;
var h0 = this.h[0],
h1 = this.h[1],
h2 = this.h[2],
h3 = this.h[3],
h4 = this.h[4],
h5 = this.h[5],
h6 = this.h[6],
h7 = this.h[7],
h8 = this.h[8],
h9 = this.h[9];
var r0 = this.r[0],
r1 = this.r[1],
r2 = this.r[2],
r3 = this.r[3],
r4 = this.r[4],
r5 = this.r[5],
r6 = this.r[6],
r7 = this.r[7],
r8 = this.r[8],
r9 = this.r[9];
while (bytes >= 16) {
t0 = m[mpos+ 0] & 0xff | (m[mpos+ 1] & 0xff) << 8; h0 += ( t0 ) & 0x1fff;
t1 = m[mpos+ 2] & 0xff | (m[mpos+ 3] & 0xff) << 8; h1 += ((t0 >>> 13) | (t1 << 3)) & 0x1fff;
t2 = m[mpos+ 4] & 0xff | (m[mpos+ 5] & 0xff) << 8; h2 += ((t1 >>> 10) | (t2 << 6)) & 0x1fff;
t3 = m[mpos+ 6] & 0xff | (m[mpos+ 7] & 0xff) << 8; h3 += ((t2 >>> 7) | (t3 << 9)) & 0x1fff;
t4 = m[mpos+ 8] & 0xff | (m[mpos+ 9] & 0xff) << 8; h4 += ((t3 >>> 4) | (t4 << 12)) & 0x1fff;
h5 += ((t4 >>> 1)) & 0x1fff;
t5 = m[mpos+10] & 0xff | (m[mpos+11] & 0xff) << 8; h6 += ((t4 >>> 14) | (t5 << 2)) & 0x1fff;
t6 = m[mpos+12] & 0xff | (m[mpos+13] & 0xff) << 8; h7 += ((t5 >>> 11) | (t6 << 5)) & 0x1fff;
t7 = m[mpos+14] & 0xff | (m[mpos+15] & 0xff) << 8; h8 += ((t6 >>> 8) | (t7 << 8)) & 0x1fff;
h9 += ((t7 >>> 5)) | hibit;
c = 0;
d0 = c;
d0 += h0 * r0;
d0 += h1 * (5 * r9);
d0 += h2 * (5 * r8);
d0 += h3 * (5 * r7);
d0 += h4 * (5 * r6);
c = (d0 >>> 13); d0 &= 0x1fff;
d0 += h5 * (5 * r5);
d0 += h6 * (5 * r4);
d0 += h7 * (5 * r3);
d0 += h8 * (5 * r2);
d0 += h9 * (5 * r1);
c += (d0 >>> 13); d0 &= 0x1fff;
d1 = c;
d1 += h0 * r1;
d1 += h1 * r0;
d1 += h2 * (5 * r9);
d1 += h3 * (5 * r8);
d1 += h4 * (5 * r7);
c = (d1 >>> 13); d1 &= 0x1fff;
d1 += h5 * (5 * r6);
d1 += h6 * (5 * r5);
d1 += h7 * (5 * r4);
d1 += h8 * (5 * r3);
d1 += h9 * (5 * r2);
c += (d1 >>> 13); d1 &= 0x1fff;
d2 = c;
d2 += h0 * r2;
d2 += h1 * r1;
d2 += h2 * r0;
d2 += h3 * (5 * r9);
d2 += h4 * (5 * r8);
c = (d2 >>> 13); d2 &= 0x1fff;
d2 += h5 * (5 * r7);
d2 += h6 * (5 * r6);
d2 += h7 * (5 * r5);
d2 += h8 * (5 * r4);
d2 += h9 * (5 * r3);
c += (d2 >>> 13); d2 &= 0x1fff;
d3 = c;
d3 += h0 * r3;
d3 += h1 * r2;
d3 += h2 * r1;
d3 += h3 * r0;
d3 += h4 * (5 * r9);
c = (d3 >>> 13); d3 &= 0x1fff;
d3 += h5 * (5 * r8);
d3 += h6 * (5 * r7);
d3 += h7 * (5 * r6);
d3 += h8 * (5 * r5);
d3 += h9 * (5 * r4);
c += (d3 >>> 13); d3 &= 0x1fff;
d4 = c;
d4 += h0 * r4;
d4 += h1 * r3;
d4 += h2 * r2;
d4 += h3 * r1;
d4 += h4 * r0;
c = (d4 >>> 13); d4 &= 0x1fff;
d4 += h5 * (5 * r9);
d4 += h6 * (5 * r8);
d4 += h7 * (5 * r7);
d4 += h8 * (5 * r6);
d4 += h9 * (5 * r5);
c += (d4 >>> 13); d4 &= 0x1fff;
d5 = c;
d5 += h0 * r5;
d5 += h1 * r4;
d5 += h2 * r3;
d5 += h3 * r2;
d5 += h4 * r1;
c = (d5 >>> 13); d5 &= 0x1fff;
d5 += h5 * r0;
d5 += h6 * (5 * r9);
d5 += h7 * (5 * r8);
d5 += h8 * (5 * r7);
d5 += h9 * (5 * r6);
c += (d5 >>> 13); d5 &= 0x1fff;
d6 = c;
d6 += h0 * r6;
d6 += h1 * r5;
d6 += h2 * r4;
d6 += h3 * r3;
d6 += h4 * r2;
c = (d6 >>> 13); d6 &= 0x1fff;
d6 += h5 * r1;
d6 += h6 * r0;
d6 += h7 * (5 * r9);
d6 += h8 * (5 * r8);
d6 += h9 * (5 * r7);
c += (d6 >>> 13); d6 &= 0x1fff;
d7 = c;
d7 += h0 * r7;
d7 += h1 * r6;
d7 += h2 * r5;
d7 += h3 * r4;
d7 += h4 * r3;
c = (d7 >>> 13); d7 &= 0x1fff;
d7 += h5 * r2;
d7 += h6 * r1;
d7 += h7 * r0;
d7 += h8 * (5 * r9);
d7 += h9 * (5 * r8);
c += (d7 >>> 13); d7 &= 0x1fff;
d8 = c;
d8 += h0 * r8;
d8 += h1 * r7;
d8 += h2 * r6;
d8 += h3 * r5;
d8 += h4 * r4;
c = (d8 >>> 13); d8 &= 0x1fff;
d8 += h5 * r3;
d8 += h6 * r2;
d8 += h7 * r1;
d8 += h8 * r0;
d8 += h9 * (5 * r9);
c += (d8 >>> 13); d8 &= 0x1fff;
d9 = c;
d9 += h0 * r9;
d9 += h1 * r8;
d9 += h2 * r7;
d9 += h3 * r6;
d9 += h4 * r5;
c = (d9 >>> 13); d9 &= 0x1fff;
d9 += h5 * r4;
d9 += h6 * r3;
d9 += h7 * r2;
d9 += h8 * r1;
d9 += h9 * r0;
c += (d9 >>> 13); d9 &= 0x1fff;
c = (((c << 2) + c)) | 0;
c = (c + d0) | 0;
d0 = c & 0x1fff;
c = (c >>> 13);
d1 += c;
h0 = d0;
h1 = d1;
h2 = d2;
h3 = d3;
h4 = d4;
h5 = d5;
h6 = d6;
h7 = d7;
h8 = d8;
h9 = d9;
mpos += 16;
bytes -= 16;
}
this.h[0] = h0;
this.h[1] = h1;
this.h[2] = h2;
this.h[3] = h3;
this.h[4] = h4;
this.h[5] = h5;
this.h[6] = h6;
this.h[7] = h7;
this.h[8] = h8;
this.h[9] = h9;
};
poly1305.prototype.finish = function(mac, macpos) {
var g = new Uint16Array(10);
var c, mask, f, i;
if (this.leftover) {
i = this.leftover;
this.buffer[i++] = 1;
for (; i < 16; i++) this.buffer[i] = 0;
this.fin = 1;
this.blocks(this.buffer, 0, 16);
}
c = this.h[1] >>> 13;
this.h[1] &= 0x1fff;
for (i = 2; i < 10; i++) {
this.h[i] += c;
c = this.h[i] >>> 13;
this.h[i] &= 0x1fff;
}
this.h[0] += (c * 5);
c = this.h[0] >>> 13;
this.h[0] &= 0x1fff;
this.h[1] += c;
c = this.h[1] >>> 13;
this.h[1] &= 0x1fff;
this.h[2] += c;
g[0] = this.h[0] + 5;
c = g[0] >>> 13;
g[0] &= 0x1fff;
for (i = 1; i < 10; i++) {
g[i] = this.h[i] + c;
c = g[i] >>> 13;
g[i] &= 0x1fff;
}
g[9] -= (1 << 13);
mask = (c ^ 1) - 1;
for (i = 0; i < 10; i++) g[i] &= mask;
mask = ~mask;
for (i = 0; i < 10; i++) this.h[i] = (this.h[i] & mask) | g[i];
this.h[0] = ((this.h[0] ) | (this.h[1] << 13) ) & 0xffff;
this.h[1] = ((this.h[1] >>> 3) | (this.h[2] << 10) ) & 0xffff;
this.h[2] = ((this.h[2] >>> 6) | (this.h[3] << 7) ) & 0xffff;
this.h[3] = ((this.h[3] >>> 9) | (this.h[4] << 4) ) & 0xffff;
this.h[4] = ((this.h[4] >>> 12) | (this.h[5] << 1) | (this.h[6] << 14)) & 0xffff;
this.h[5] = ((this.h[6] >>> 2) | (this.h[7] << 11) ) & 0xffff;
this.h[6] = ((this.h[7] >>> 5) | (this.h[8] << 8) ) & 0xffff;
this.h[7] = ((this.h[8] >>> 8) | (this.h[9] << 5) ) & 0xffff;
f = this.h[0] + this.pad[0];
this.h[0] = f & 0xffff;
for (i = 1; i < 8; i++) {
f = (((this.h[i] + this.pad[i]) | 0) + (f >>> 16)) | 0;
this.h[i] = f & 0xffff;
}
mac[macpos+ 0] = (this.h[0] >>> 0) & 0xff;
mac[macpos+ 1] = (this.h[0] >>> 8) & 0xff;
mac[macpos+ 2] = (this.h[1] >>> 0) & 0xff;
mac[macpos+ 3] = (this.h[1] >>> 8) & 0xff;
mac[macpos+ 4] = (this.h[2] >>> 0) & 0xff;
mac[macpos+ 5] = (this.h[2] >>> 8) & 0xff;
mac[macpos+ 6] = (this.h[3] >>> 0) & 0xff;
mac[macpos+ 7] = (this.h[3] >>> 8) & 0xff;
mac[macpos+ 8] = (this.h[4] >>> 0) & 0xff;
mac[macpos+ 9] = (this.h[4] >>> 8) & 0xff;
mac[macpos+10] = (this.h[5] >>> 0) & 0xff;
mac[macpos+11] = (this.h[5] >>> 8) & 0xff;
mac[macpos+12] = (this.h[6] >>> 0) & 0xff;
mac[macpos+13] = (this.h[6] >>> 8) & 0xff;
mac[macpos+14] = (this.h[7] >>> 0) & 0xff;
mac[macpos+15] = (this.h[7] >>> 8) & 0xff;
};
poly1305.prototype.update = function(m, mpos, bytes) {
var i, want;
if (this.leftover) {
want = (16 - this.leftover);
if (want > bytes)
want = bytes;
for (i = 0; i < want; i++)
this.buffer[this.leftover + i] = m[mpos+i];
bytes -= want;
mpos += want;
this.leftover += want;
if (this.leftover < 16)
return;
this.blocks(this.buffer, 0, 16);
this.leftover = 0;
}
if (bytes >= 16) {
want = bytes - (bytes % 16);
this.blocks(m, mpos, want);
mpos += want;
bytes -= want;
}
if (bytes) {
for (i = 0; i < bytes; i++)
this.buffer[this.leftover + i] = m[mpos+i];
this.leftover += bytes;
}
};
function crypto_onetimeauth(out, outpos, m, mpos, n, k) {
var s = new poly1305(k);
s.update(m, mpos, n);
s.finish(out, outpos);
return 0;
}
function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) {
var x = new Uint8Array(16);
crypto_onetimeauth(x,0,m,mpos,n,k);
return crypto_verify_16(h,hpos,x,0);
}
function crypto_secretbox(c,m,d,n,k) {
var i;
if (d < 32) return -1;
crypto_stream_xor(c,0,m,0,d,n,k);
crypto_onetimeauth(c, 16, c, 32, d - 32, c);
for (i = 0; i < 16; i++) c[i] = 0;
return 0;
}
function crypto_secretbox_open(m,c,d,n,k) {
var i;
var x = new Uint8Array(32);
if (d < 32) return -1;
crypto_stream(x,0,32,n,k);
if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1;
crypto_stream_xor(m,0,c,0,d,n,k);
for (i = 0; i < 32; i++) m[i] = 0;
return 0;
}
function set25519(r, a) {
var i;
for (i = 0; i < 16; i++) r[i] = a[i]|0;
}
function car25519(o) {
var i, v, c = 1;
for (i = 0; i < 16; i++) {
v = o[i] + c + 65535;
c = Math.floor(v / 65536);
o[i] = v - c * 65536;
}
o[0] += c-1 + 37 * (c-1);
}
function sel25519(p, q, b) {
var t, c = ~(b-1);
for (var i = 0; i < 16; i++) {
t = c & (p[i] ^ q[i]);
p[i] ^= t;
q[i] ^= t;
}
}
function pack25519(o, n) {
var i, j, b;
var m = gf(), t = gf();
for (i = 0; i < 16; i++) t[i] = n[i];
car25519(t);
car25519(t);
car25519(t);
for (j = 0; j < 2; j++) {
m[0] = t[0] - 0xffed;
for (i = 1; i < 15; i++) {
m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1);
m[i-1] &= 0xffff;
}
m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1);
b = (m[15]>>16) & 1;
m[14] &= 0xffff;
sel25519(t, m, 1-b);
}
for (i = 0; i < 16; i++) {
o[2*i] = t[i] & 0xff;
o[2*i+1] = t[i]>>8;
}
}
function neq25519(a, b) {
var c = new Uint8Array(32), d = new Uint8Array(32);
pack25519(c, a);
pack25519(d, b);
return crypto_verify_32(c, 0, d, 0);
}
function par25519(a) {
var d = new Uint8Array(32);
pack25519(d, a);
return d[0] & 1;
}
function unpack25519(o, n) {
var i;
for (i = 0; i < 16; i++) o[i] = n[2*i] + (n[2*i+1] << 8);
o[15] &= 0x7fff;
}
function A(o, a, b) {
for (var i = 0; i < 16; i++) o[i] = a[i] + b[i];
}
function Z(o, a, b) {
for (var i = 0; i < 16; i++) o[i] = a[i] - b[i];
}
function M(o, a, b) {
var v, c,
t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0,
t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0,
t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0,
t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0,
b0 = b[0],
b1 = b[1],
b2 = b[2],
b3 = b[3],
b4 = b[4],
b5 = b[5],
b6 = b[6],
b7 = b[7],
b8 = b[8],
b9 = b[9],
b10 = b[10],
b11 = b[11],
b12 = b[12],
b13 = b[13],
b14 = b[14],
b15 = b[15];
v = a[0];
t0 += v * b0;
t1 += v * b1;
t2 += v * b2;
t3 += v * b3;
t4 += v * b4;
t5 += v * b5;
t6 += v * b6;
t7 += v * b7;
t8 += v * b8;
t9 += v * b9;
t10 += v * b10;
t11 += v * b11;
t12 += v * b12;
t13 += v * b13;
t14 += v * b14;
t15 += v * b15;
v = a[1];
t1 += v * b0;
t2 += v * b1;
t3 += v * b2;
t4 += v * b3;
t5 += v * b4;
t6 += v * b5;
t7 += v * b6;
t8 += v * b7;
t9 += v * b8;
t10 += v * b9;
t11 += v * b10;
t12 += v * b11;
t13 += v * b12;
t14 += v * b13;
t15 += v * b14;
t16 += v * b15;
v = a[2];
t2 += v * b0;
t3 += v * b1;
t4 += v * b2;
t5 += v * b3;
t6 += v * b4;
t7 += v * b5;
t8 += v * b6;
t9 += v * b7;
t10 += v * b8;
t11 += v * b9;
t12 += v * b10;
t13 += v * b11;
t14 += v * b12;
t15 += v * b13;
t16 += v * b14;
t17 += v * b15;
v = a[3];
t3 += v * b0;
t4 += v * b1;
t5 += v * b2;
t6 += v * b3;
t7 += v * b4;
t8 += v * b5;
t9 += v * b6;
t10 += v * b7;
t11 += v * b8;
t12 += v * b9;
t13 += v * b10;
t14 += v * b11;
t15 += v * b12;
t16 += v * b13;
t17 += v * b14;
t18 += v * b15;
v = a[4];
t4 += v * b0;
t5 += v * b1;
t6 += v * b2;
t7 += v * b3;
t8 += v * b4;
t9 += v * b5;
t10 += v * b6;
t11 += v * b7;
t12 += v * b8;
t13 += v * b9;
t14 += v * b10;
t15 += v * b11;
t16 += v * b12;
t17 += v * b13;
t18 += v * b14;
t19 += v * b15;
v = a[5];
t5 += v * b0;
t6 += v * b1;
t7 += v * b2;
t8 += v * b3;
t9 += v * b4;
t10 += v * b5;
t11 += v * b6;
t12 += v * b7;
t13 += v * b8;
t14 += v * b9;
t15 += v * b10;
t16 += v * b11;
t17 += v * b12;
t18 += v * b13;
t19 += v * b14;
t20 += v * b15;
v = a[6];
t6 += v * b0;
t7 += v * b1;
t8 += v * b2;
t9 += v * b3;
t10 += v * b4;
t11 += v * b5;
t12 += v * b6;
t13 += v * b7;
t14 += v * b8;
t15 += v * b9;
t16 += v * b10;
t17 += v * b11;
t18 += v * b12;
t19 += v * b13;
t20 += v * b14;
t21 += v * b15;
v = a[7];
t7 += v * b0;
t8 += v * b1;
t9 += v * b2;
t10 += v * b3;
t11 += v * b4;
t12 += v * b5;
t13 += v * b6;
t14 += v * b7;
t15 += v * b8;
t16 += v * b9;
t17 += v * b10;
t18 += v * b11;
t19 += v * b12;
t20 += v * b13;
t21 += v * b14;
t22 += v * b15;
v = a[8];
t8 += v * b0;
t9 += v * b1;
t10 += v * b2;
t11 += v * b3;
t12 += v * b4;
t13 += v * b5;
t14 += v * b6;
t15 += v * b7;
t16 += v * b8;
t17 += v * b9;
t18 += v * b10;
t19 += v * b11;
t20 += v * b12;
t21 += v * b13;
t22 += v * b14;
t23 += v * b15;
v = a[9];
t9 += v * b0;
t10 += v * b1;
t11 += v * b2;
t12 += v * b3;
t13 += v * b4;
t14 += v * b5;
t15 += v * b6;
t16 += v * b7;
t17 += v * b8;
t18 += v * b9;
t19 += v * b10;
t20 += v * b11;
t21 += v * b12;
t22 += v * b13;
t23 += v * b14;
t24 += v * b15;
v = a[10];
t10 += v * b0;
t11 += v * b1;
t12 += v * b2;
t13 += v * b3;
t14 += v * b4;
t15 += v * b5;
t16 += v * b6;
t17 += v * b7;
t18 += v * b8;
t19 += v * b9;
t20 += v * b10;
t21 += v * b11;
t22 += v * b12;
t23 += v * b13;
t24 += v * b14;
t25 += v * b15;
v = a[11];
t11 += v * b0;
t12 += v * b1;
t13 += v * b2;
t14 += v * b3;
t15 += v * b4;
t16 += v * b5;
t17 += v * b6;
t18 += v * b7;
t19 += v * b8;
t20 += v * b9;
t21 += v * b10;
t22 += v * b11;
t23 += v * b12;
t24 += v * b13;
t25 += v * b14;
t26 += v * b15;
v = a[12];
t12 += v * b0;
t13 += v * b1;
t14 += v * b2;
t15 += v * b3;
t16 += v * b4;
t17 += v * b5;
t18 += v * b6;
t19 += v * b7;
t20 += v * b8;
t21 += v * b9;
t22 += v * b10;
t23 += v * b11;
t24 += v * b12;
t25 += v * b13;
t26 += v * b14;
t27 += v * b15;
v = a[13];
t13 += v * b0;
t14 += v * b1;
t15 += v * b2;
t16 += v * b3;
t17 += v * b4;
t18 += v * b5;
t19 += v * b6;
t20 += v * b7;
t21 += v * b8;
t22 += v * b9;
t23 += v * b10;
t24 += v * b11;
t25 += v * b12;
t26 += v * b13;
t27 += v * b14;
t28 += v * b15;
v = a[14];
t14 += v * b0;
t15 += v * b1;
t16 += v * b2;
t17 += v * b3;
t18 += v * b4;
t19 += v * b5;
t20 += v * b6;
t21 += v * b7;
t22 += v * b8;
t23 += v * b9;
t24 += v * b10;
t25 += v * b11;
t26 += v * b12;
t27 += v * b13;
t28 += v * b14;
t29 += v * b15;
v = a[15];
t15 += v * b0;
t16 += v * b1;
t17 += v * b2;
t18 += v * b3;
t19 += v * b4;
t20 += v * b5;
t21 += v * b6;
t22 += v * b7;
t23 += v * b8;
t24 += v * b9;
t25 += v * b10;
t26 += v * b11;
t27 += v * b12;
t28 += v * b13;
t29 += v * b14;
t30 += v * b15;
t0 += 38 * t16;
t1 += 38 * t17;
t2 += 38 * t18;
t3 += 38 * t19;
t4 += 38 * t20;
t5 += 38 * t21;
t6 += 38 * t22;
t7 += 38 * t23;
t8 += 38 * t24;
t9 += 38 * t25;
t10 += 38 * t26;
t11 += 38 * t27;
t12 += 38 * t28;
t13 += 38 * t29;
t14 += 38 * t30;
// t15 left as is
// first car
c = 1;
v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536;
v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536;
v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536;
v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536;
v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536;
v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536;
v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536;
v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536;
v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536;
v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536;
v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536;
v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536;
v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536;
v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536;
v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536;
v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536;
t0 += c-1 + 37 * (c-1);
// second car
c = 1;
v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536;
v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536;
v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536;
v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536;
v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536;
v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536;
v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536;
v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536;
v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536;
v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536;
v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536;
v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536;
v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536;
v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536;
v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536;
v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536;
t0 += c-1 + 37 * (c-1);
o[ 0] = t0;
o[ 1] = t1;
o[ 2] = t2;
o[ 3] = t3;
o[ 4] = t4;
o[ 5] = t5;
o[ 6] = t6;
o[ 7] = t7;
o[ 8] = t8;
o[ 9] = t9;
o[10] = t10;
o[11] = t11;
o[12] = t12;
o[13] = t13;
o[14] = t14;
o[15] = t15;
}
function S(o, a) {
M(o, a, a);
}
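// inv25519: multiplicative inverse by Fermat's little theorem, raising to
// p - 2 = 2^255 - 21; the exponent's only zero bits are at positions 2 and 4,
// hence the two skipped multiplications.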
function inv25519(o, i) {
var c = gf();
var a;
for (a = 0; a < 16; a++) c[a] = i[a];
for (a = 253; a >= 0; a--) {
S(c, c);
if(a !== 2 && a !== 4) M(c, c, i);
}
for (a = 0; a < 16; a++) o[a] = c[a];
}
function pow2523(o, i) {
var c = gf();
var a;
for (a = 0; a < 16; a++) c[a] = i[a];
for (a = 250; a >= 0; a--) {
S(c, c);
if(a !== 1) M(c, c, i);
}
for (a = 0; a < 16; a++) o[a] = c[a];
}
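// crypto_scalarmult: X25519 (Curve25519 Diffie-Hellman). The scalar is
// clamped, a constant-time Montgomery ladder over the x-coordinate runs for
// bits 254..0, and a final field inversion projects the result back to affine x.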
function crypto_scalarmult(q, n, p) {
var z = new Uint8Array(32);
var x = new Float64Array(80), r, i;
var a = gf(), b = gf(), c = gf(),
d = gf(), e = gf(), f = gf();
for (i = 0; i < 31; i++) z[i] = n[i];
z[31]=(n[31]&127)|64;
z[0]&=248;
unpack25519(x,p);
for (i = 0; i < 16; i++) {
b[i]=x[i];
d[i]=a[i]=c[i]=0;
}
a[0]=d[0]=1;
for (i=254; i>=0; --i) {
r=(z[i>>>3]>>>(i&7))&1;
sel25519(a,b,r);
sel25519(c,d,r);
A(e,a,c);
Z(a,a,c);
A(c,b,d);
Z(b,b,d);
S(d,e);
S(f,a);
M(a,c,a);
M(c,b,e);
A(e,a,c);
Z(a,a,c);
S(b,a);
Z(c,d,f);
M(a,c,_121665);
A(a,a,d);
M(c,c,a);
M(a,d,f);
M(d,b,x);
S(b,e);
sel25519(a,b,r);
sel25519(c,d,r);
}
for (i = 0; i < 16; i++) {
x[i+16]=a[i];
x[i+32]=c[i];
x[i+48]=b[i];
x[i+64]=d[i];
}
var x32 = x.subarray(32);
var x16 = x.subarray(16);
inv25519(x32,x32);
M(x16,x16,x32);
pack25519(q,x16);
return 0;
}
function crypto_scalarmult_base(q, n) {
return crypto_scalarmult(q, n, _9);
}
function crypto_box_keypair(y, x) {
randombytes(x, 32);
return crypto_scalarmult_base(y, x);
}
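// crypto_box_beforenm: derive the shared box key by computing the Curve25519
// shared secret and hashing it with HSalsa20.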
function crypto_box_beforenm(k, y, x) {
var s = new Uint8Array(32);
crypto_scalarmult(s, x, y);
return crypto_core_hsalsa20(k, _0, s, sigma);
}
var crypto_box_afternm = crypto_secretbox;
var crypto_box_open_afternm = crypto_secretbox_open;
function crypto_box(c, m, d, n, y, x) {
var k = new Uint8Array(32);
crypto_box_beforenm(k, y, x);
return crypto_box_afternm(c, m, d, n, k);
}
function crypto_box_open(m, c, d, n, y, x) {
var k = new Uint8Array(32);
crypto_box_beforenm(k, y, x);
return crypto_box_open_afternm(m, c, d, n, k);
}
var K = [
0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd,
0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc,
0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019,
0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118,
0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe,
0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2,
0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1,
0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694,
0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3,
0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65,
0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483,
0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5,
0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210,
0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4,
0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725,
0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70,
0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926,
0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df,
0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8,
0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b,
0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001,
0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30,
0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910,
0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8,
0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53,
0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8,
0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb,
0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3,
0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60,
0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec,
0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9,
0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b,
0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207,
0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178,
0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6,
0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b,
0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493,
0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c,
0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a,
0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817
];
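// SHA-512 compression function. JavaScript has no native 64-bit integers, so
// every 64-bit word is carried as a high/low pair of 32-bit halves (hh/hl,
// ah*/al*, wh/wl), and additions are performed 16 bits at a time via a, b, c, d.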
function crypto_hashblocks_hl(hh, hl, m, n) {
var wh = new Int32Array(16), wl = new Int32Array(16),
bh0, bh1, bh2, bh3, bh4, bh5, bh6, bh7,
bl0, bl1, bl2, bl3, bl4, bl5, bl6, bl7,
th, tl, i, j, h, l, a, b, c, d;
var ah0 = hh[0],
ah1 = hh[1],
ah2 = hh[2],
ah3 = hh[3],
ah4 = hh[4],
ah5 = hh[5],
ah6 = hh[6],
ah7 = hh[7],
al0 = hl[0],
al1 = hl[1],
al2 = hl[2],
al3 = hl[3],
al4 = hl[4],
al5 = hl[5],
al6 = hl[6],
al7 = hl[7];
var pos = 0;
while (n >= 128) {
for (i = 0; i < 16; i++) {
j = 8 * i + pos;
wh[i] = (m[j+0] << 24) | (m[j+1] << 16) | (m[j+2] << 8) | m[j+3];
wl[i] = (m[j+4] << 24) | (m[j+5] << 16) | (m[j+6] << 8) | m[j+7];
}
for (i = 0; i < 80; i++) {
bh0 = ah0;
bh1 = ah1;
bh2 = ah2;
bh3 = ah3;
bh4 = ah4;
bh5 = ah5;
bh6 = ah6;
bh7 = ah7;
bl0 = al0;
bl1 = al1;
bl2 = al2;
bl3 = al3;
bl4 = al4;
bl5 = al5;
bl6 = al6;
bl7 = al7;
// add
h = ah7;
l = al7;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
// Sigma1
h = ((ah4 >>> 14) | (al4 << (32-14))) ^ ((ah4 >>> 18) | (al4 << (32-18))) ^ ((al4 >>> (41-32)) | (ah4 << (32-(41-32))));
l = ((al4 >>> 14) | (ah4 << (32-14))) ^ ((al4 >>> 18) | (ah4 << (32-18))) ^ ((ah4 >>> (41-32)) | (al4 << (32-(41-32))));
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
// Ch
h = (ah4 & ah5) ^ (~ah4 & ah6);
l = (al4 & al5) ^ (~al4 & al6);
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
// K
h = K[i*2];
l = K[i*2+1];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
// w
h = wh[i%16];
l = wl[i%16];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
th = c & 0xffff | d << 16;
tl = a & 0xffff | b << 16;
// add
h = th;
l = tl;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
// Sigma0
h = ((ah0 >>> 28) | (al0 << (32-28))) ^ ((al0 >>> (34-32)) | (ah0 << (32-(34-32)))) ^ ((al0 >>> (39-32)) | (ah0 << (32-(39-32))));
l = ((al0 >>> 28) | (ah0 << (32-28))) ^ ((ah0 >>> (34-32)) | (al0 << (32-(34-32)))) ^ ((ah0 >>> (39-32)) | (al0 << (32-(39-32))));
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
// Maj
h = (ah0 & ah1) ^ (ah0 & ah2) ^ (ah1 & ah2);
l = (al0 & al1) ^ (al0 & al2) ^ (al1 & al2);
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
bh7 = (c & 0xffff) | (d << 16);
bl7 = (a & 0xffff) | (b << 16);
// add
h = bh3;
l = bl3;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = th;
l = tl;
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
bh3 = (c & 0xffff) | (d << 16);
bl3 = (a & 0xffff) | (b << 16);
ah1 = bh0;
ah2 = bh1;
ah3 = bh2;
ah4 = bh3;
ah5 = bh4;
ah6 = bh5;
ah7 = bh6;
ah0 = bh7;
al1 = bl0;
al2 = bl1;
al3 = bl2;
al4 = bl3;
al5 = bl4;
al6 = bl5;
al7 = bl6;
al0 = bl7;
if (i%16 === 15) {
for (j = 0; j < 16; j++) {
// add
h = wh[j];
l = wl[j];
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = wh[(j+9)%16];
l = wl[(j+9)%16];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
// sigma0
th = wh[(j+1)%16];
tl = wl[(j+1)%16];
h = ((th >>> 1) | (tl << (32-1))) ^ ((th >>> 8) | (tl << (32-8))) ^ (th >>> 7);
l = ((tl >>> 1) | (th << (32-1))) ^ ((tl >>> 8) | (th << (32-8))) ^ ((tl >>> 7) | (th << (32-7)));
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
// sigma1
th = wh[(j+14)%16];
tl = wl[(j+14)%16];
h = ((th >>> 19) | (tl << (32-19))) ^ ((tl >>> (61-32)) | (th << (32-(61-32)))) ^ (th >>> 6);
l = ((tl >>> 19) | (th << (32-19))) ^ ((th >>> (61-32)) | (tl << (32-(61-32)))) ^ ((tl >>> 6) | (th << (32-6)));
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
wh[j] = (c & 0xffff) | (d << 16);
wl[j] = (a & 0xffff) | (b << 16);
}
}
}
// add
h = ah0;
l = al0;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[0];
l = hl[0];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[0] = ah0 = (c & 0xffff) | (d << 16);
hl[0] = al0 = (a & 0xffff) | (b << 16);
h = ah1;
l = al1;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[1];
l = hl[1];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[1] = ah1 = (c & 0xffff) | (d << 16);
hl[1] = al1 = (a & 0xffff) | (b << 16);
h = ah2;
l = al2;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[2];
l = hl[2];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[2] = ah2 = (c & 0xffff) | (d << 16);
hl[2] = al2 = (a & 0xffff) | (b << 16);
h = ah3;
l = al3;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[3];
l = hl[3];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[3] = ah3 = (c & 0xffff) | (d << 16);
hl[3] = al3 = (a & 0xffff) | (b << 16);
h = ah4;
l = al4;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[4];
l = hl[4];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[4] = ah4 = (c & 0xffff) | (d << 16);
hl[4] = al4 = (a & 0xffff) | (b << 16);
h = ah5;
l = al5;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[5];
l = hl[5];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[5] = ah5 = (c & 0xffff) | (d << 16);
hl[5] = al5 = (a & 0xffff) | (b << 16);
h = ah6;
l = al6;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[6];
l = hl[6];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[6] = ah6 = (c & 0xffff) | (d << 16);
hl[6] = al6 = (a & 0xffff) | (b << 16);
h = ah7;
l = al7;
a = l & 0xffff; b = l >>> 16;
c = h & 0xffff; d = h >>> 16;
h = hh[7];
l = hl[7];
a += l & 0xffff; b += l >>> 16;
c += h & 0xffff; d += h >>> 16;
b += a >>> 16;
c += b >>> 16;
d += c >>> 16;
hh[7] = ah7 = (c & 0xffff) | (d << 16);
hl[7] = al7 = (a & 0xffff) | (b << 16);
pos += 128;
n -= 128;
}
return n;
}
function crypto_hash(out, m, n) {
var hh = new Int32Array(8),
hl = new Int32Array(8),
x = new Uint8Array(256),
i, b = n;
hh[0] = 0x6a09e667;
hh[1] = 0xbb67ae85;
hh[2] = 0x3c6ef372;
hh[3] = 0xa54ff53a;
hh[4] = 0x510e527f;
hh[5] = 0x9b05688c;
hh[6] = 0x1f83d9ab;
hh[7] = 0x5be0cd19;
hl[0] = 0xf3bcc908;
hl[1] = 0x84caa73b;
hl[2] = 0xfe94f82b;
hl[3] = 0x5f1d36f1;
hl[4] = 0xade682d1;
hl[5] = 0x2b3e6c1f;
hl[6] = 0xfb41bd6b;
hl[7] = 0x137e2179;
crypto_hashblocks_hl(hh, hl, m, n);
n %= 128;
for (i = 0; i < n; i++) x[i] = m[b-n+i];
x[n] = 128;
n = 256-128*(n<112?1:0);
x[n-9] = 0;
ts64(x, n-8, (b / 0x20000000) | 0, b << 3);
crypto_hashblocks_hl(hh, hl, x, n);
for (i = 0; i < 8; i++) ts64(out, 8*i, hh[i], hl[i]);
return 0;
}
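// add: point addition on the twisted Edwards curve (Ed25519) in extended
// homogeneous coordinates (X, Y, Z, T).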
function add(p, q) {
var a = gf(), b = gf(), c = gf(),
d = gf(), e = gf(), f = gf(),
g = gf(), h = gf(), t = gf();
Z(a, p[1], p[0]);
Z(t, q[1], q[0]);
M(a, a, t);
A(b, p[0], p[1]);
A(t, q[0], q[1]);
M(b, b, t);
M(c, p[3], q[3]);
M(c, c, D2);
M(d, p[2], q[2]);
A(d, d, d);
Z(e, b, a);
Z(f, d, c);
A(g, d, c);
A(h, b, a);
M(p[0], e, f);
M(p[1], h, g);
M(p[2], g, f);
M(p[3], e, h);
}
function cswap(p, q, b) {
var i;
for (i = 0; i < 4; i++) {
sel25519(p[i], q[i], b);
}
}
function pack(r, p) {
var tx = gf(), ty = gf(), zi = gf();
inv25519(zi, p[2]);
M(tx, p[0], zi);
M(ty, p[1], zi);
pack25519(r, ty);
r[31] ^= par25519(tx) << 7;
}
function scalarmult(p, q, s) {
var b, i;
set25519(p[0], gf0);
set25519(p[1], gf1);
set25519(p[2], gf1);
set25519(p[3], gf0);
for (i = 255; i >= 0; --i) {
b = (s[(i/8)|0] >> (i&7)) & 1;
cswap(p, q, b);
add(q, p);
add(p, p);
cswap(p, q, b);
}
}
function scalarbase(p, s) {
var q = [gf(), gf(), gf(), gf()];
set25519(q[0], X);
set25519(q[1], Y);
set25519(q[2], gf1);
M(q[3], X, Y);
scalarmult(p, q, s);
}
function crypto_sign_keypair(pk, sk, seeded) {
var d = new Uint8Array(64);
var p = [gf(), gf(), gf(), gf()];
var i;
if (!seeded) randombytes(sk, 32);
crypto_hash(d, sk, 32);
d[0] &= 248;
d[31] &= 127;
d[31] |= 64;
scalarbase(p, d);
pack(pk, p);
for (i = 0; i < 32; i++) sk[i+32] = pk[i];
return 0;
}
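// L is the order of the Ed25519 base-point subgroup,
// 2^252 + 27742317777372353535851937790883648493, stored in little-endian
// byte form; modL below reduces a 512-bit value modulo this order.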
var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]);
function modL(r, x) {
var carry, i, j, k;
for (i = 63; i >= 32; --i) {
carry = 0;
for (j = i - 32, k = i - 12; j < k; ++j) {
x[j] += carry - 16 * x[i] * L[j - (i - 32)];
carry = (x[j] + 128) >> 8;
x[j] -= carry * 256;
}
x[j] += carry;
x[i] = 0;
}
carry = 0;
for (j = 0; j < 32; j++) {
x[j] += carry - (x[31] >> 4) * L[j];
carry = x[j] >> 8;
x[j] &= 255;
}
for (j = 0; j < 32; j++) x[j] -= carry * L[j];
for (i = 0; i < 32; i++) {
x[i+1] += x[i] >> 8;
r[i] = x[i] & 255;
}
}
function reduce(r) {
var x = new Float64Array(64), i;
for (i = 0; i < 64; i++) x[i] = r[i];
for (i = 0; i < 64; i++) r[i] = 0;
modL(r, x);
}
// Note: difference from C - smlen returned, not passed as argument.
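// crypto_sign: Ed25519 signing. d = SHA-512(sk) yields the clamped scalar
// (d[0..31]) and the prefix (d[32..63]); r = SHA-512(prefix || M) mod L,
// R = r*B, s = (r + SHA-512(R || A || M) * scalar) mod L; output is R || s || M.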
function crypto_sign(sm, m, n, sk) {
var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64);
var i, j, x = new Float64Array(64);
var p = [gf(), gf(), gf(), gf()];
crypto_hash(d, sk, 32);
d[0] &= 248;
d[31] &= 127;
d[31] |= 64;
var smlen = n + 64;
for (i = 0; i < n; i++) sm[64 + i] = m[i];
for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i];
crypto_hash(r, sm.subarray(32), n+32);
reduce(r);
scalarbase(p, r);
pack(sm, p);
for (i = 32; i < 64; i++) sm[i] = sk[i];
crypto_hash(h, sm, n + 64);
reduce(h);
for (i = 0; i < 64; i++) x[i] = 0;
for (i = 0; i < 32; i++) x[i] = r[i];
for (i = 0; i < 32; i++) {
for (j = 0; j < 32; j++) {
x[i+j] += h[i] * d[j];
}
}
modL(sm.subarray(32), x);
return smlen;
}
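// unpackneg: decompress a 32-byte encoding into a full point, recovering x
// from y via x^2 = (y^2 - 1) / (d*y^2 + 1). The sign is flipped relative to
// the encoded sign bit (hence "neg"), which is what the verifier below needs.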
function unpackneg(r, p) {
var t = gf(), chk = gf(), num = gf(),
den = gf(), den2 = gf(), den4 = gf(),
den6 = gf();
set25519(r[2], gf1);
unpack25519(r[1], p);
S(num, r[1]);
M(den, num, D);
Z(num, num, r[2]);
A(den, r[2], den);
S(den2, den);
S(den4, den2);
M(den6, den4, den2);
M(t, den6, num);
M(t, t, den);
pow2523(t, t);
M(t, t, num);
M(t, t, den);
M(t, t, den);
M(r[0], t, den);
S(chk, r[0]);
M(chk, chk, den);
if (neq25519(chk, num)) M(r[0], r[0], I);
S(chk, r[0]);
M(chk, chk, den);
if (neq25519(chk, num)) return -1;
if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]);
M(r[3], r[0], r[1]);
return 0;
}
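// crypto_sign_open: Ed25519 verification. Recomputes h = SHA-512(R || A || M)
// mod L, checks that pack(s*B + h*(-A)) equals the signature's R component,
// and on success copies the message into m and returns its length.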
function crypto_sign_open(m, sm, n, pk) {
var i, mlen;
var t = new Uint8Array(32), h = new Uint8Array(64);
var p = [gf(), gf(), gf(), gf()],
q = [gf(), gf(), gf(), gf()];
mlen = -1;
if (n < 64) return -1;
if (unpackneg(q, pk)) return -1;
for (i = 0; i < n; i++) m[i] = sm[i];
for (i = 0; i < 32; i++) m[i+32] = pk[i];
crypto_hash(h, m, n);
reduce(h);
scalarmult(p, q, h);
scalarbase(q, sm.subarray(32));
add(p, q);
pack(t, p);
n -= 64;
if (crypto_verify_32(sm, 0, t, 0)) {
for (i = 0; i < n; i++) m[i] = 0;
return -1;
}
for (i = 0; i < n; i++) m[i] = sm[i + 64];
mlen = n;
return mlen;
}
var crypto_secretbox_KEYBYTES = 32,
crypto_secretbox_NONCEBYTES = 24,
crypto_secretbox_ZEROBYTES = 32,
crypto_secretbox_BOXZEROBYTES = 16,
crypto_scalarmult_BYTES = 32,
crypto_scalarmult_SCALARBYTES = 32,
crypto_box_PUBLICKEYBYTES = 32,
crypto_box_SECRETKEYBYTES = 32,
crypto_box_BEFORENMBYTES = 32,
crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES,
crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES,
crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES,
crypto_sign_BYTES = 64,
crypto_sign_PUBLICKEYBYTES = 32,
crypto_sign_SECRETKEYBYTES = 64,
crypto_sign_SEEDBYTES = 32,
crypto_hash_BYTES = 64;
nacl.lowlevel = {
crypto_core_hsalsa20: crypto_core_hsalsa20,
crypto_stream_xor: crypto_stream_xor,
crypto_stream: crypto_stream,
crypto_stream_salsa20_xor: crypto_stream_salsa20_xor,
crypto_stream_salsa20: crypto_stream_salsa20,
crypto_onetimeauth: crypto_onetimeauth,
crypto_onetimeauth_verify: crypto_onetimeauth_verify,
crypto_verify_16: crypto_verify_16,
crypto_verify_32: crypto_verify_32,
crypto_secretbox: crypto_secretbox,
crypto_secretbox_open: crypto_secretbox_open,
crypto_scalarmult: crypto_scalarmult,
crypto_scalarmult_base: crypto_scalarmult_base,
crypto_box_beforenm: crypto_box_beforenm,
crypto_box_afternm: crypto_box_afternm,
crypto_box: crypto_box,
crypto_box_open: crypto_box_open,
crypto_box_keypair: crypto_box_keypair,
crypto_hash: crypto_hash,
crypto_sign: crypto_sign,
crypto_sign_keypair: crypto_sign_keypair,
crypto_sign_open: crypto_sign_open,
crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES,
crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES,
crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES,
crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES,
crypto_scalarmult_BYTES: crypto_scalarmult_BYTES,
crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES,
crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES,
crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES,
crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES,
crypto_box_NONCEBYTES: crypto_box_NONCEBYTES,
crypto_box_ZEROBYTES: crypto_box_ZEROBYTES,
crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES,
crypto_sign_BYTES: crypto_sign_BYTES,
crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES,
crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES,
crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES,
crypto_hash_BYTES: crypto_hash_BYTES
};
/* High-level API */
function checkLengths(k, n) {
if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size');
if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size');
}
function checkBoxLengths(pk, sk) {
if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size');
if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size');
}
function checkArrayTypes() {
var t, i;
for (i = 0; i < arguments.length; i++) {
if ((t = Object.prototype.toString.call(arguments[i])) !== '[object Uint8Array]')
throw new TypeError('unexpected type ' + t + ', use Uint8Array');
}
}
function cleanup(arr) {
for (var i = 0; i < arr.length; i++) arr[i] = 0;
}
// TODO: Completely remove this in v0.15.
if (!nacl.util) {
nacl.util = {};
nacl.util.decodeUTF8 = nacl.util.encodeUTF8 = nacl.util.encodeBase64 = nacl.util.decodeBase64 = function() {
throw new Error('nacl.util moved into separate package: https://github.com/dchest/tweetnacl-util-js');
};
}
nacl.randomBytes = function(n) {
var b = new Uint8Array(n);
randombytes(b, n);
return b;
};
nacl.secretbox = function(msg, nonce, key) {
checkArrayTypes(msg, nonce, key);
checkLengths(key, nonce);
var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length);
var c = new Uint8Array(m.length);
for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i];
crypto_secretbox(c, m, m.length, nonce, key);
return c.subarray(crypto_secretbox_BOXZEROBYTES);
};
nacl.secretbox.open = function(box, nonce, key) {
checkArrayTypes(box, nonce, key);
checkLengths(key, nonce);
var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length);
var m = new Uint8Array(c.length);
for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i];
if (c.length < 32) return false;
if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return false;
return m.subarray(crypto_secretbox_ZEROBYTES);
};
nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES;
nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES;
nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES;
nacl.scalarMult = function(n, p) {
checkArrayTypes(n, p);
if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size');
if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size');
var q = new Uint8Array(crypto_scalarmult_BYTES);
crypto_scalarmult(q, n, p);
return q;
};
nacl.scalarMult.base = function(n) {
checkArrayTypes(n);
if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size');
var q = new Uint8Array(crypto_scalarmult_BYTES);
crypto_scalarmult_base(q, n);
return q;
};
nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES;
nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES;
nacl.box = function(msg, nonce, publicKey, secretKey) {
var k = nacl.box.before(publicKey, secretKey);
return nacl.secretbox(msg, nonce, k);
};
nacl.box.before = function(publicKey, secretKey) {
checkArrayTypes(publicKey, secretKey);
checkBoxLengths(publicKey, secretKey);
var k = new Uint8Array(crypto_box_BEFORENMBYTES);
crypto_box_beforenm(k, publicKey, secretKey);
return k;
};
nacl.box.after = nacl.secretbox;
nacl.box.open = function(msg, nonce, publicKey, secretKey) {
var k = nacl.box.before(publicKey, secretKey);
return nacl.secretbox.open(msg, nonce, k);
};
nacl.box.open.after = nacl.secretbox.open;
nacl.box.keyPair = function() {
var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES);
var sk = new Uint8Array(crypto_box_SECRETKEYBYTES);
crypto_box_keypair(pk, sk);
return {publicKey: pk, secretKey: sk};
};
nacl.box.keyPair.fromSecretKey = function(secretKey) {
checkArrayTypes(secretKey);
if (secretKey.length !== crypto_box_SECRETKEYBYTES)
throw new Error('bad secret key size');
var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES);
crypto_scalarmult_base(pk, secretKey);
return {publicKey: pk, secretKey: new Uint8Array(secretKey)};
};
nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES;
nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES;
nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES;
nacl.box.nonceLength = crypto_box_NONCEBYTES;
nacl.box.overheadLength = nacl.secretbox.overheadLength;
nacl.sign = function(msg, secretKey) {
checkArrayTypes(msg, secretKey);
if (secretKey.length !== crypto_sign_SECRETKEYBYTES)
throw new Error('bad secret key size');
var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length);
crypto_sign(signedMsg, msg, msg.length, secretKey);
return signedMsg;
};
nacl.sign.open = function(signedMsg, publicKey) {
if (arguments.length !== 2)
throw new Error('nacl.sign.open accepts 2 arguments; did you mean to use nacl.sign.detached.verify?');
checkArrayTypes(signedMsg, publicKey);
if (publicKey.length !== crypto_sign_PUBLICKEYBYTES)
throw new Error('bad public key size');
var tmp = new Uint8Array(signedMsg.length);
var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey);
if (mlen < 0) return null;
var m = new Uint8Array(mlen);
for (var i = 0; i < m.length; i++) m[i] = tmp[i];
return m;
};
nacl.sign.detached = function(msg, secretKey) {
var signedMsg = nacl.sign(msg, secretKey);
var sig = new Uint8Array(crypto_sign_BYTES);
for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i];
return sig;
};
nacl.sign.detached.verify = function(msg, sig, publicKey) {
checkArrayTypes(msg, sig, publicKey);
if (sig.length !== crypto_sign_BYTES)
throw new Error('bad signature size');
if (publicKey.length !== crypto_sign_PUBLICKEYBYTES)
throw new Error('bad public key size');
var sm = new Uint8Array(crypto_sign_BYTES + msg.length);
var m = new Uint8Array(crypto_sign_BYTES + msg.length);
var i;
for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i];
for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i];
return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0);
};
nacl.sign.keyPair = function() {
var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES);
var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES);
crypto_sign_keypair(pk, sk);
return {publicKey: pk, secretKey: sk};
};
nacl.sign.keyPair.fromSecretKey = function(secretKey) {
checkArrayTypes(secretKey);
if (secretKey.length !== crypto_sign_SECRETKEYBYTES)
throw new Error('bad secret key size');
var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES);
for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i];
return {publicKey: pk, secretKey: new Uint8Array(secretKey)};
};
nacl.sign.keyPair.fromSeed = function(seed) {
checkArrayTypes(seed);
if (seed.length !== crypto_sign_SEEDBYTES)
throw new Error('bad seed size');
var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES);
var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES);
for (var i = 0; i < 32; i++) sk[i] = seed[i];
crypto_sign_keypair(pk, sk, true);
return {publicKey: pk, secretKey: sk};
};
nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES;
nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES;
nacl.sign.seedLength = crypto_sign_SEEDBYTES;
nacl.sign.signatureLength = crypto_sign_BYTES;
nacl.hash = function(msg) {
checkArrayTypes(msg);
var h = new Uint8Array(crypto_hash_BYTES);
crypto_hash(h, msg, msg.length);
return h;
};
nacl.hash.hashLength = crypto_hash_BYTES;
nacl.verify = function(x, y) {
checkArrayTypes(x, y);
// Zero length arguments are considered not equal.
if (x.length === 0 || y.length === 0) return false;
if (x.length !== y.length) return false;
return (vn(x, 0, y, 0, x.length) === 0) ? true : false;
};
nacl.setPRNG = function(fn) {
randombytes = fn;
};
(function() {
// Initialize PRNG if environment provides CSPRNG.
// If not, methods calling randombytes will throw.
var crypto = typeof self !== 'undefined' ? (self.crypto || self.msCrypto) : null;
if (crypto && crypto.getRandomValues) {
// Browsers.
var QUOTA = 65536;
nacl.setPRNG(function(x, n) {
var i, v = new Uint8Array(n);
for (i = 0; i < n; i += QUOTA) {
crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA)));
}
for (i = 0; i < n; i++) x[i] = v[i];
cleanup(v);
});
} else if (typeof require !== 'undefined') {
// Node.js.
crypto = require('crypto');
if (crypto && crypto.randomBytes) {
nacl.setPRNG(function(x, n) {
var i, v = crypto.randomBytes(n);
for (i = 0; i < n; i++) x[i] = v[i];
cleanup(v);
});
}
}
})();
})(typeof module !== 'undefined' && module.exports ? module.exports : (self.nacl = self.nacl || {}));
|
PypiClean
|
/rethinkdb_iantocristian-2.4.8.post2.tar.gz/rethinkdb_iantocristian-2.4.8.post2/rethinkdb/__main__.py
|
# Copyright 2018 RethinkDB
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file incorporates work covered by the following copyright:
# Copyright 2010-2016 RethinkDB, all rights reserved.
"""Dispatcher for interactive functions such as repl and backup"""
import code
import sys
import traceback
from rethinkdb import errors, net, utils_common
def startInterpreter(argv=None, prog=None):
repl_variables = {"r": net.Connection._r, "rethinkdb": net.Connection._r}
banner = "The RethinkDB driver has been imported as `r`."
# -- get host/port setup
# - parse command line
parser = utils_common.CommonOptionsParser(
prog=prog,
description="An interactive Python shell (repl) with the RethinkDB driver imported",
)
options, args = parser.parse_args(
argv if argv is not None else sys.argv[1:], connect=False
)
if args:
parser.error(
"No positional arguments supported. Unrecognized option(s): %s" % args
)
# -- open connection
try:
repl_variables["conn"] = options.retryQuery.conn()
repl_variables["conn"].repl()
banner += """
A connection to %s:%d has been established as `conn`
and can be used by calling `run()` on a query without any arguments.""" % (
options.hostname,
options.driver_port,
)
except errors.ReqlDriverError as e:
banner += "\nWarning: %s" % str(e)
if options.debug:
banner += "\n" + traceback.format_exc()
# -- start interpreter
code.interact(banner=banner + "\n==========", local=repl_variables)
if __name__ == "__main__":
if __package__ is None:
__package__ = "rethinkdb"
# -- figure out which mode we are in
modes = ["dump", "export", "import", "index_rebuild", "repl", "restore"]
if len(sys.argv) < 2 or sys.argv[1] not in modes:
sys.exit(
"ERROR: Must be called with one of the following verbs: %s"
% ", ".join(modes)
)
verb = sys.argv[1]
prog = "python -m rethinkdb"
if sys.version_info < (2, 7) or (
sys.version_info >= (3, 0) and sys.version_info < (3, 4)
):
prog += ".__main__" # Python versions 2.6, 3.0, 3.1 and 3.3 do not support running packages
prog += " " + verb
argv = sys.argv[2:]
if verb == "dump":
from . import _dump
exit(_dump.main(argv, prog=prog))
elif verb == "export":
from . import _export
exit(_export.main(argv, prog=prog))
elif verb == "import":
from . import _import
exit(_import.main(argv, prog=prog))
elif verb == "index_rebuild":
from . import _index_rebuild
exit(_index_rebuild.main(argv, prog=prog))
elif verb == "repl":
startInterpreter(argv, prog=prog)
elif verb == "restore":
from . import _restore
exit(_restore.main(argv, prog=prog))
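# Illustrative invocations of this dispatcher, built only from the verbs the
# module accepts above (hosts, files and other flags are handled by the
# respective sub-commands and are omitted here):
#   python -m rethinkdb repl
#   python -m rethinkdb dump
#   python -m rethinkdb restore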
|
PypiClean
|
/open_answerx-2.0.3-py3-none-any.whl/open_answerx-2.0.3.data/scripts/open-answerx.py
|
import argparse
import json
import os
import sys
import urllib.error
import urllib.parse
import urllib.request
from urllib.parse import urljoin
import requests
import pdb
import pprint
from akamai.edgegrid import EdgeGridAuth, EdgeRc
parser = argparse.ArgumentParser(description='Interact with the AnswerX OPEN API to view or insert table data')
parser.add_argument('-t', '--table', help='Table to target; displays the table schema if --dump is not provided', required=True)
parser.add_argument('-d', '--dump', help='Dump/Display the contents of the table', action='store_true')
parser.add_argument('-r', '--remove', help='Remove a domain from a table', type=str)
parser.add_argument('-D', '--debug', help='Enable Pragma headers for troubleshooting', action='store_true')
parser.add_argument('-i', '--insert', help='Toggle insert mode; if set, you must specify the value to insert into the table', type=str)
parser.add_argument('-j', '--json', help='Insert custom JSON data from a file; you must specify the file whose contents will be inserted into the table', type=str)
parser.add_argument('-k', '--key', help='If using custom JSON data from a file, you must specify a key; this is usually the domain name or subscriber', type=str)
parser.add_argument('-x', '--expiry', help='If insert mode is used, set the expiry time (TTL) in seconds for the realtime table entry; default is 315360000 seconds', type=int)
parser.add_argument('-S', '--subscriber', help='Check whether a subscriber ID belongs to a table', type=str)
parser.add_argument('-C', '--cidr', help='Check whether a CIDR belongs to a table', type=str)
parser.add_argument('-s', '--static', help='Tells the API to query a static table and not a real-time table', action='store_true')
parser.add_argument('-e', '--environment', help='Set target environment; this is a number identifying the .edgerc section that contains the credentials to use. If not specified, the default section is used', type=int)
args = vars(parser.parse_args())
pp = pprint.PrettyPrinter(indent=4, width=80, compact=False)
edgerc_path = os.getenv("HOME")+'/'+'.edgerc'
edgerc = EdgeRc(edgerc_path)
if args['environment']:
service_instance_id = args['environment']
section = 'r_' + str(service_instance_id)
else:
service_instance_id = '3'
section = 'default'
baseurl = 'https://%s' % edgerc.get(section, 'host')
print("RUNNING ON service_instance_id = " + str(service_instance_id) + ' using ' + str(section) + ' section from ' + os.path.abspath(".edgerc"))
s = requests.Session()
s.auth = EdgeGridAuth.from_edgerc(edgerc, section)
s.headers = {'Accept': 'application/json'}
if args['debug']:
s.headers.update({'Pragma': 'akamai-x-get-cache-key, akamai-x-cache-on, akamai-x-cache-remote-on, akamai-x-get-true-cache-key, akamai-x-check-cacheable, akamai-x-get-request-id, akamai-x-serial-no, akamai-x-get-ssl-client-session-id, edgegrid-fingerprints-on'})
print(args)
def showTable(tablename):
# result = s.get(urljoin(baseurl, '/recursive-dns-db/v1/service-instances/9/tables/r_9_IPToSubscriberTable'))
if args['static']:
table_type = 'table_type=static'
else:
table_type = 'table_type=rtt'
if args['dump']:
data_format = "txt"
table_url = urljoin(baseurl, '/recursive-dns-db/v1/service-instances/' + str(
service_instance_id) + '/tables/' + tablename + '?action=dump&' + table_type)
else:
data_format = "json"
table_url = urljoin(baseurl, '/recursive-dns-db/v1/service-instances/'+str(service_instance_id)+'/tables/' + tablename + '?'+'action=schema'+'&'+table_type)
table_data = s.get(table_url, stream=True)
headers = dict(table_data.headers)
for key, value in headers.items():
pp.pprint("{}: {}".format(key, value))
# pp.pprint(headers.items())
print('GOT HTTP CODE: ' + str(table_data.status_code) + '\n' + table_url + '\n')
if data_format == "json":
table_rows = json.dumps(table_data.json(), indent=2)
else:
table_rows = table_data.text
return table_rows
def showSub(tablename):
key = args['subscriber']
table_url = urljoin(baseurl,
'/recursive-dns-db/v1/service-instances/' + str(service_instance_id) + '/tables/' + tablename)
subscriber_url = urljoin(table_url, '?key=%22' + key + '%22')
subscriber = s.get(subscriber_url)
print('GOT HTTP CODE: ' + str(subscriber.status_code) + '\n' + subscriber_url + '\n')
return subscriber.text
def encodeCIDR(cidr):
noslash_cidr = urllib.parse.quote(cidr, safe='')
encoded_cidr = str(noslash_cidr).replace(".", "%2E")
return encoded_cidr
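# Minimal sketch of what encodeCIDR produces; the CIDR below is an assumed
# illustrative value, not taken from the original script.
def _encode_cidr_example():
    # quote(..., safe='') percent-encodes the slash; replace() then escapes each dot.
    assert encodeCIDR('10.0.0.0/24') == '10%2E0%2E0%2E0%2F24'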
def showCIDR(tablename):
key = args['cidr']
key = encodeCIDR(key)
table_url = urljoin(baseurl,
'/recursive-dns-db/v1/service-instances/' + str(service_instance_id) + '/tables/' + tablename)
cidr_url = urljoin(table_url, '?key=' + key)
cidr = s.get(cidr_url)
print('GOT HTTP CODE: ' + str(cidr.status_code) + '\n' + cidr_url + '\n')
#pp.pprint(cidr.content)
return cidr.text
def insertDomain(tablename):
s.headers = {'Accept': 'application/json'}
key = args['insert']
if args['expiry']:
expiry = args['expiry']
else:
expiry = 315360000
data = {"Expiry": expiry, "Field": [{"Name": key, "Type": "STRING"}]}
jsondata = json.dumps(data, sort_keys=True, indent=2)
table_url = urljoin(baseurl, '/recursive-dns-db/v1/service-instances/' + str(service_instance_id) + '/tables/' + tablename)
post_url = urljoin(table_url, '?key=%22' + key + '%22')
print(jsondata)
table_post = s.post(post_url, data=jsondata)
print('GOT HTTP CODE: ' + str(table_post.status_code) + '\n' + post_url + '\n')
#print(table_post.text)
return table_post.text
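# For illustration, the JSON body posted above for key='example.com' with the
# default expiry would look like this ('example.com' is an assumed value):
#   {"Expiry": 315360000, "Field": [{"Name": "example.com", "Type": "STRING"}]}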
def removeDomain(tablename):
key = args['remove']
table_url = urljoin(baseurl, '/recursive-dns-db/v1/service-instances/' + str(service_instance_id) + '/tables/' + tablename)
delete_url = urljoin(table_url, '?key=%22' + key + '%22')
domain_delete = s.delete(delete_url)
print('GOT HTTP CODE: ' + str(domain_delete.status_code) + '\n' + delete_url + '\n')
#print(domain_delete.content)
return domain_delete.text
def insertjson(json_file, tablename, key):
key = args['key']
s.headers = {'Accept': 'application/json'}
# Insert an arbitrary piece of JSON data from a file into a table; useful for
# custom schemas not covered by the other methods
# key = args['json']
table_url = urljoin(baseurl, '/recursive-dns-db/v1/service-instances/' + str(service_instance_id) + '/tables/' + tablename)
with open(json_file) as file:
json_data = json.load(file)
print(json.dumps(json_data, indent=2))
# Get the key from the arguments, which is the first field
# key = (json_data["Columns"][0]["Value"])
key_url = urljoin(table_url, '?key=%22' + key + '%22')
json_data_post = s.post(key_url, json=json_data)
print('GOT HTTP CODE: ' + str(json_data_post.status_code) + '\n' + key_url + '\n')
return json_data_post.content
if __name__ == '__main__':
if args['insert']:
print("Inserting domain " + args['insert'] + " into " + args['table'])
print(insertDomain(args['table']))
if args['json']:
if not args['key']:
print("You must provide a key value to insert JSON data, this is usually the domain or subscriber ID")
sys.exit()
print("JSON Data file provided, inserting into table " + args['table'])
print(insertjson(args['json'], args['table'], args['key']))
if args['remove']:
print("Removing domain " + args['remove'] + " from " + args['table'])
print(removeDomain(args['table']))
if args['subscriber']:
print(showSub(args['table']))
if args['cidr']:
print(showCIDR(args['table']))
else:
print(showTable(args['table']))
|
PypiClean
|
/GraphLab_Create-2.1-cp27-none-macosx_10_5_x86_64.macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.macosx_10_11_intel.macosx_10_11_x86_64.whl/graphlab/distributed/toolkits/regression/random_forest_regression.py
|
from ... import _supervised_learning as _sl
import logging as _logging
import graphlab.connect as _mt
from graphlab.util import _make_internal_url
def submit_training_job(env, dataset, target,
features=None,
max_iterations=10,
validation_set=None,
random_seed = None,
metric = 'auto',
model_checkpoint_path='auto',
**kwargs):
"""
Submit a job to create a :class:`~graphlab.random_forest_regression.RandomForestRegression` to predict
a scalar target variable using one or more features. In addition to standard
numeric and categorical types, features can also be extracted automatically
from list- or dictionary-type SFrame columns.
Parameters
----------
env : graphlab.deploy.hadoop_cluster.HadoopCluster
Hadoop cluster to submit the training job
dataset : SFrame
A training dataset containing feature columns and a target column.
Only numerical typed (int, float) target column is allowed.
target : str
The name of the column in ``dataset`` that is the prediction target.
This column must have a numeric type.
features : list[str], optional
A list of columns names of features used for training the model.
Defaults to None, using all columns.
max_iterations : int, optional
The number of iterations to perform.
max_depth : float, optional
Maximum depth of a tree.
min_loss_reduction : float, optional (non-negative)
Minimum loss reduction required to make a further partition/split a
node during the tree learning phase. Larger (more positive) values
can help prevent overfitting by avoiding splits that do not
sufficiently reduce the loss function.
min_child_weight : float, optional (non-negative)
Controls the minimum weight of each leaf node. Larger values result in
more conservative tree learning and help prevent overfitting.
Formally, this is minimum sum of instance weights (hessians) in each
node. If the tree learning algorithm results in a leaf node with the
sum of instance weights less than `min_child_weight`, tree building
will terminate.
row_subsample : float, optional
Subsample the ratio of the training set in each iteration of tree
construction. This is called the bagging trick and usually can help
prevent overfitting. Setting it to 0.5 means that the model randomly
collects half of the examples (rows) to grow each tree.
column_subsample : float, optional
Subsample ratio of the columns in each iteration of tree
construction. Like row_subsample, this also usually can help
prevent overfitting. Setting it to 0.5 means that the model randomly
collects half of the columns to grow each tree.
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
For each row of the progress table, the chosen metrics are computed
for both the provided training dataset and the validation_set. The
format of this SFrame must be the same as the training set.
When this argument is set to 'auto', a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None (the default), then no additional metrics
are computed. This is computed once per full iteration. Large
differences in model accuracy between the training data and validation
data is indicative of overfitting.
random_seed : int, optional
Seeds random operations such as column and row subsampling, so that
results are reproducible.
metric : str or list[str], optional
Performance metric(s) that are tracked during training. When specified,
the progress table will display the tracked metric(s) on training and
validation set.
Supported metrics are: {'rmse', 'max_error'}
kwargs : dict, optional
Additional arguments for training the model.
- ``model_checkpoint_path`` : str, default 'auto'
If specified, checkpoint the model training to the given path every n iterations,
where n is specified by ``model_checkpoint_interval``.
For instance, if `model_checkpoint_interval` is 5, and `model_checkpoint_path` is
set to ``/tmp/model_tmp``, the checkpoints will be saved into
``/tmp/model_tmp/model_checkpoint_5``, ``/tmp/model_tmp/model_checkpoint_10``, ... etc.
Training can be resumed by setting ``resume_from_checkpoint`` to one of these checkpoints.
- ``model_checkpoint_interval`` : int, default 5
If model_check_point_path is specified,
save the model to the given path every n iterations.
- ``resume_from_checkpoint`` : str, default None
Continues training from a model checkpoint. The model must take
exact the same training data as the checkpointed model.
Returns
-------
out : :class:`~graphlab.distributed._dml_job_status.DMLJobStatus`
An object that tracks the execution of the distributed training job.
References
----------
- `Trevor Hastie's slides on Boosted Trees and Random Forest
<http://jessica2.msri.org/attachments/10778/10778-boost.pdf>`_
See Also
--------
RandomForestRegression, graphlab.linear_regression.LinearRegression, graphlab.regression.create
Examples
--------
Setup the data:
>>> url = 'https://static.turi.com/datasets/xgboost/mushroom.csv'
>>> data = graphlab.SFrame.read_csv(url)
>>> data['label'] = data['label'] == 'p'
>>> hdp_env = graphlab.deploy.hadoop_cluster.create('my-first-hadoop-cluster',
... 'hdfs://path-to-turi-distributed-installation')
Split the data into training and test data:
>>> train, test = data.random_split(0.8)
Create the model:
>>> distr_job = graphlab.random_forest_regression.submit_training_job(hdp_env, train, target='label')
>>> model = distr_job.get_results()
Make predictions and evaluate the model:
>>> predictions = model.predict(test)
>>> results = model.evaluate(test)
"""
logger = _logging.getLogger(__name__)
if random_seed is not None:
kwargs['random_seed'] = random_seed
if model_checkpoint_path != 'auto':
model_checkpoint_path = _make_internal_url(model_checkpoint_path)
if 'resume_from_checkpoint' in kwargs:
kwargs['resume_from_checkpoint'] = _make_internal_url(kwargs['resume_from_checkpoint'])
if 'num_trees' in kwargs:
logger.warning("The `num_trees` keyword argument is deprecated. Please "
"use the `max_iterations` argument instead. Any value provided "
"for `num_trees` will be used in place of `max_iterations`.")
max_iterations = kwargs['num_trees']
del kwargs['num_trees']
_mt._get_metric_tracker().track('distributed.toolkit.regression.random_forest_regression.submit_training_job')
dml_obj = _sl.create(dataset = dataset,
target = target,
features = features,
model_name = 'random_forest_regression',
env = env,
max_iterations = max_iterations,
validation_set = validation_set,
metric = metric,
model_checkpoint_path=model_checkpoint_path,
**kwargs)
return dml_obj
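# A minimal checkpointing sketch using the kwargs documented above; the HDFS
# path and checkpoint interval are assumptions chosen for illustration only:
#
#   distr_job = submit_training_job(hdp_env, train, target='label',
#                                   model_checkpoint_path='hdfs://tmp/rf_checkpoints',
#                                   model_checkpoint_interval=5)
#   model = distr_job.get_results()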
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/servicefabric/v20170701preview/_inputs.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
__all__ = [
'ApplicationMetricDescriptionArgs',
'ApplicationUpgradePolicyArgs',
'ArmApplicationHealthPolicyArgs',
'ArmRollingUpgradeMonitoringPolicyArgs',
'ArmServiceTypeHealthPolicyArgs',
'AzureActiveDirectoryArgs',
'CertificateDescriptionArgs',
'ClientCertificateCommonNameArgs',
'ClientCertificateThumbprintArgs',
'ClusterHealthPolicyArgs',
'ClusterUpgradeDeltaHealthPolicyArgs',
'ClusterUpgradePolicyArgs',
'ClusterVersionDetailsArgs',
'DiagnosticsStorageAccountConfigArgs',
'EndpointRangeDescriptionArgs',
'NamedPartitionSchemeDescriptionArgs',
'NodeTypeDescriptionArgs',
'ServiceCorrelationDescriptionArgs',
'ServiceLoadMetricDescriptionArgs',
'ServicePlacementPolicyDescriptionArgs',
'SettingsParameterDescriptionArgs',
'SettingsSectionDescriptionArgs',
'SingletonPartitionSchemeDescriptionArgs',
'UniformInt64RangePartitionSchemeDescriptionArgs',
]
@pulumi.input_type
class ApplicationMetricDescriptionArgs:
def __init__(__self__, *,
maximum_capacity: Optional[pulumi.Input[float]] = None,
name: Optional[pulumi.Input[str]] = None,
reservation_capacity: Optional[pulumi.Input[float]] = None,
total_application_capacity: Optional[pulumi.Input[float]] = None):
"""
Describes capacity information for a custom resource balancing metric. This can be used to limit the total consumption of this metric by the services of this application.
:param pulumi.Input[float] maximum_capacity: The maximum node capacity for Service Fabric application.
This is the maximum Load for an instance of this application on a single node. Even if the capacity of node is greater than this value, Service Fabric will limit the total load of services within the application on each node to this value.
If set to zero, capacity for this metric is unlimited on each node.
When creating a new application with application capacity defined, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
When updating existing application with application capacity, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
:param pulumi.Input[str] name: The name of the metric.
:param pulumi.Input[float] reservation_capacity: The node reservation capacity for Service Fabric application.
This is the amount of load which is reserved on nodes which have instances of this application.
If MinimumNodes is specified, then the product of these values will be the capacity reserved in the cluster for the application.
If set to zero, no capacity is reserved for this metric.
When setting application capacity or when updating application capacity; this value must be smaller than or equal to MaximumCapacity for each metric.
:param pulumi.Input[float] total_application_capacity: The total metric capacity for Service Fabric application.
This is the total metric capacity for this application in the cluster. Service Fabric will try to limit the sum of loads of services within the application to this value.
When creating a new application with application capacity defined, the product of MaximumNodes and MaximumCapacity must always be smaller than or equal to this value.
"""
if maximum_capacity is not None:
pulumi.set(__self__, "maximum_capacity", maximum_capacity)
if name is not None:
pulumi.set(__self__, "name", name)
if reservation_capacity is not None:
pulumi.set(__self__, "reservation_capacity", reservation_capacity)
if total_application_capacity is not None:
pulumi.set(__self__, "total_application_capacity", total_application_capacity)
@property
@pulumi.getter(name="maximumCapacity")
def maximum_capacity(self) -> Optional[pulumi.Input[float]]:
"""
The maximum node capacity for Service Fabric application.
This is the maximum Load for an instance of this application on a single node. Even if the capacity of node is greater than this value, Service Fabric will limit the total load of services within the application on each node to this value.
If set to zero, capacity for this metric is unlimited on each node.
When creating a new application with application capacity defined, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
When updating existing application with application capacity, the product of MaximumNodes and this value must always be smaller than or equal to TotalApplicationCapacity.
"""
return pulumi.get(self, "maximum_capacity")
@maximum_capacity.setter
def maximum_capacity(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "maximum_capacity", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the metric.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="reservationCapacity")
def reservation_capacity(self) -> Optional[pulumi.Input[float]]:
"""
The node reservation capacity for Service Fabric application.
This is the amount of load which is reserved on nodes which have instances of this application.
If MinimumNodes is specified, then the product of these values will be the capacity reserved in the cluster for the application.
If set to zero, no capacity is reserved for this metric.
When setting application capacity or when updating application capacity; this value must be smaller than or equal to MaximumCapacity for each metric.
"""
return pulumi.get(self, "reservation_capacity")
@reservation_capacity.setter
def reservation_capacity(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "reservation_capacity", value)
@property
@pulumi.getter(name="totalApplicationCapacity")
def total_application_capacity(self) -> Optional[pulumi.Input[float]]:
"""
The total metric capacity for Service Fabric application.
This is the total metric capacity for this application in the cluster. Service Fabric will try to limit the sum of loads of services within the application to this value.
When creating a new application with application capacity defined, the product of MaximumNodes and MaximumCapacity must always be smaller than or equal to this value.
"""
return pulumi.get(self, "total_application_capacity")
@total_application_capacity.setter
def total_application_capacity(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "total_application_capacity", value)
@pulumi.input_type
class ApplicationUpgradePolicyArgs:
def __init__(__self__, *,
application_health_policy: Optional[pulumi.Input['ArmApplicationHealthPolicyArgs']] = None,
force_restart: Optional[pulumi.Input[bool]] = None,
rolling_upgrade_monitoring_policy: Optional[pulumi.Input['ArmRollingUpgradeMonitoringPolicyArgs']] = None,
upgrade_replica_set_check_timeout: Optional[pulumi.Input[str]] = None):
"""
Describes the policy for a monitored application upgrade.
:param pulumi.Input['ArmApplicationHealthPolicyArgs'] application_health_policy: Defines a health policy used to evaluate the health of an application or one of its children entities.
:param pulumi.Input[bool] force_restart: If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
:param pulumi.Input['ArmRollingUpgradeMonitoringPolicyArgs'] rolling_upgrade_monitoring_policy: The policy used for monitoring the application upgrade
:param pulumi.Input[str] upgrade_replica_set_check_timeout: The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. Valid values are between 0 and 4294967295 inclusive (unsigned 32-bit integer).
"""
if application_health_policy is not None:
pulumi.set(__self__, "application_health_policy", application_health_policy)
if force_restart is not None:
pulumi.set(__self__, "force_restart", force_restart)
if rolling_upgrade_monitoring_policy is not None:
pulumi.set(__self__, "rolling_upgrade_monitoring_policy", rolling_upgrade_monitoring_policy)
if upgrade_replica_set_check_timeout is not None:
pulumi.set(__self__, "upgrade_replica_set_check_timeout", upgrade_replica_set_check_timeout)
@property
@pulumi.getter(name="applicationHealthPolicy")
def application_health_policy(self) -> Optional[pulumi.Input['ArmApplicationHealthPolicyArgs']]:
"""
Defines a health policy used to evaluate the health of an application or one of its children entities.
"""
return pulumi.get(self, "application_health_policy")
@application_health_policy.setter
def application_health_policy(self, value: Optional[pulumi.Input['ArmApplicationHealthPolicyArgs']]):
pulumi.set(self, "application_health_policy", value)
@property
@pulumi.getter(name="forceRestart")
def force_restart(self) -> Optional[pulumi.Input[bool]]:
"""
If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
"""
return pulumi.get(self, "force_restart")
@force_restart.setter
def force_restart(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_restart", value)
@property
@pulumi.getter(name="rollingUpgradeMonitoringPolicy")
def rolling_upgrade_monitoring_policy(self) -> Optional[pulumi.Input['ArmRollingUpgradeMonitoringPolicyArgs']]:
"""
The policy used for monitoring the application upgrade
"""
return pulumi.get(self, "rolling_upgrade_monitoring_policy")
@rolling_upgrade_monitoring_policy.setter
def rolling_upgrade_monitoring_policy(self, value: Optional[pulumi.Input['ArmRollingUpgradeMonitoringPolicyArgs']]):
pulumi.set(self, "rolling_upgrade_monitoring_policy", value)
@property
@pulumi.getter(name="upgradeReplicaSetCheckTimeout")
def upgrade_replica_set_check_timeout(self) -> Optional[pulumi.Input[str]]:
"""
        The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. Valid values are between 0 and 4294967295 inclusive (unsigned 32-bit integer).
"""
return pulumi.get(self, "upgrade_replica_set_check_timeout")
@upgrade_replica_set_check_timeout.setter
def upgrade_replica_set_check_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "upgrade_replica_set_check_timeout", value)
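# A minimal usage sketch (not part of the generated API surface): it shows how an
# ApplicationUpgradePolicyArgs could be assembled. The nested health and monitoring
# policies are covered by separate sketches after their class definitions below.
# The timeout value is illustrative (a number encoded as a string, per the parameter
# documentation above; the unit is assumed to be seconds), not a recommendation.
def _example_application_upgrade_policy() -> ApplicationUpgradePolicyArgs:
    return ApplicationUpgradePolicyArgs(
        force_restart=False,
        upgrade_replica_set_check_timeout='600',
    )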
@pulumi.input_type
class ArmApplicationHealthPolicyArgs:
def __init__(__self__, *,
consider_warning_as_error: Optional[pulumi.Input[bool]] = None,
default_service_type_health_policy: Optional[pulumi.Input['ArmServiceTypeHealthPolicyArgs']] = None,
max_percent_unhealthy_deployed_applications: Optional[pulumi.Input[int]] = None,
service_type_health_policy_map: Optional[pulumi.Input[Mapping[str, pulumi.Input['ArmServiceTypeHealthPolicyArgs']]]] = None):
"""
Defines a health policy used to evaluate the health of an application or one of its children entities.
:param pulumi.Input[bool] consider_warning_as_error: Indicates whether warnings are treated with the same severity as errors.
:param pulumi.Input['ArmServiceTypeHealthPolicyArgs'] default_service_type_health_policy: The health policy used by default to evaluate the health of a service type.
:param pulumi.Input[int] max_percent_unhealthy_deployed_applications: The maximum allowed percentage of unhealthy deployed applications. Allowed values are Byte values from zero to 100.
The percentage represents the maximum tolerated percentage of deployed applications that can be unhealthy before the application is considered in error.
        This is calculated by dividing the number of unhealthy deployed applications by the number of nodes in the cluster where the application is currently deployed.
The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero.
:param pulumi.Input[Mapping[str, pulumi.Input['ArmServiceTypeHealthPolicyArgs']]] service_type_health_policy_map: The map with service type health policy per service type name. The map is empty by default.
"""
if consider_warning_as_error is None:
consider_warning_as_error = False
if consider_warning_as_error is not None:
pulumi.set(__self__, "consider_warning_as_error", consider_warning_as_error)
if default_service_type_health_policy is not None:
pulumi.set(__self__, "default_service_type_health_policy", default_service_type_health_policy)
if max_percent_unhealthy_deployed_applications is None:
max_percent_unhealthy_deployed_applications = 0
if max_percent_unhealthy_deployed_applications is not None:
pulumi.set(__self__, "max_percent_unhealthy_deployed_applications", max_percent_unhealthy_deployed_applications)
if service_type_health_policy_map is not None:
pulumi.set(__self__, "service_type_health_policy_map", service_type_health_policy_map)
@property
@pulumi.getter(name="considerWarningAsError")
def consider_warning_as_error(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether warnings are treated with the same severity as errors.
"""
return pulumi.get(self, "consider_warning_as_error")
@consider_warning_as_error.setter
def consider_warning_as_error(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "consider_warning_as_error", value)
@property
@pulumi.getter(name="defaultServiceTypeHealthPolicy")
def default_service_type_health_policy(self) -> Optional[pulumi.Input['ArmServiceTypeHealthPolicyArgs']]:
"""
The health policy used by default to evaluate the health of a service type.
"""
return pulumi.get(self, "default_service_type_health_policy")
@default_service_type_health_policy.setter
def default_service_type_health_policy(self, value: Optional[pulumi.Input['ArmServiceTypeHealthPolicyArgs']]):
pulumi.set(self, "default_service_type_health_policy", value)
@property
@pulumi.getter(name="maxPercentUnhealthyDeployedApplications")
def max_percent_unhealthy_deployed_applications(self) -> Optional[pulumi.Input[int]]:
"""
The maximum allowed percentage of unhealthy deployed applications. Allowed values are Byte values from zero to 100.
The percentage represents the maximum tolerated percentage of deployed applications that can be unhealthy before the application is considered in error.
        This is calculated by dividing the number of unhealthy deployed applications by the number of nodes in the cluster where the application is currently deployed.
The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero.
"""
return pulumi.get(self, "max_percent_unhealthy_deployed_applications")
@max_percent_unhealthy_deployed_applications.setter
def max_percent_unhealthy_deployed_applications(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_percent_unhealthy_deployed_applications", value)
@property
@pulumi.getter(name="serviceTypeHealthPolicyMap")
def service_type_health_policy_map(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['ArmServiceTypeHealthPolicyArgs']]]]:
"""
The map with service type health policy per service type name. The map is empty by default.
"""
return pulumi.get(self, "service_type_health_policy_map")
@service_type_health_policy_map.setter
def service_type_health_policy_map(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['ArmServiceTypeHealthPolicyArgs']]]]):
pulumi.set(self, "service_type_health_policy_map", value)
@pulumi.input_type
class ArmRollingUpgradeMonitoringPolicyArgs:
def __init__(__self__, *,
failure_action: Optional[pulumi.Input[Union[str, 'ArmUpgradeFailureAction']]] = None,
health_check_retry_timeout: Optional[pulumi.Input[str]] = None,
health_check_stable_duration: Optional[pulumi.Input[str]] = None,
health_check_wait_duration: Optional[pulumi.Input[str]] = None,
upgrade_domain_timeout: Optional[pulumi.Input[str]] = None,
upgrade_timeout: Optional[pulumi.Input[str]] = None):
"""
The policy used for monitoring the application upgrade
        :param pulumi.Input[Union[str, 'ArmUpgradeFailureAction']] failure_action: The compensating action to perform when a Monitored upgrade encounters monitoring policy or health policy violations.
:param pulumi.Input[str] health_check_retry_timeout: The amount of time to retry health evaluation when the application or cluster is unhealthy before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param pulumi.Input[str] health_check_stable_duration: The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param pulumi.Input[str] health_check_wait_duration: The amount of time to wait after completing an upgrade domain before applying health policies. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param pulumi.Input[str] upgrade_domain_timeout: The amount of time each upgrade domain has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
:param pulumi.Input[str] upgrade_timeout: The amount of time the overall upgrade has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
if failure_action is not None:
pulumi.set(__self__, "failure_action", failure_action)
if health_check_retry_timeout is not None:
pulumi.set(__self__, "health_check_retry_timeout", health_check_retry_timeout)
if health_check_stable_duration is not None:
pulumi.set(__self__, "health_check_stable_duration", health_check_stable_duration)
if health_check_wait_duration is not None:
pulumi.set(__self__, "health_check_wait_duration", health_check_wait_duration)
if upgrade_domain_timeout is not None:
pulumi.set(__self__, "upgrade_domain_timeout", upgrade_domain_timeout)
if upgrade_timeout is not None:
pulumi.set(__self__, "upgrade_timeout", upgrade_timeout)
@property
@pulumi.getter(name="failureAction")
def failure_action(self) -> Optional[pulumi.Input[Union[str, 'ArmUpgradeFailureAction']]]:
"""
        The compensating action to perform when a Monitored upgrade encounters monitoring policy or health policy violations.
"""
return pulumi.get(self, "failure_action")
@failure_action.setter
def failure_action(self, value: Optional[pulumi.Input[Union[str, 'ArmUpgradeFailureAction']]]):
pulumi.set(self, "failure_action", value)
@property
@pulumi.getter(name="healthCheckRetryTimeout")
def health_check_retry_timeout(self) -> Optional[pulumi.Input[str]]:
"""
The amount of time to retry health evaluation when the application or cluster is unhealthy before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "health_check_retry_timeout")
@health_check_retry_timeout.setter
def health_check_retry_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_retry_timeout", value)
@property
@pulumi.getter(name="healthCheckStableDuration")
def health_check_stable_duration(self) -> Optional[pulumi.Input[str]]:
"""
The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "health_check_stable_duration")
@health_check_stable_duration.setter
def health_check_stable_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_stable_duration", value)
@property
@pulumi.getter(name="healthCheckWaitDuration")
def health_check_wait_duration(self) -> Optional[pulumi.Input[str]]:
"""
The amount of time to wait after completing an upgrade domain before applying health policies. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "health_check_wait_duration")
@health_check_wait_duration.setter
def health_check_wait_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_wait_duration", value)
@property
@pulumi.getter(name="upgradeDomainTimeout")
def upgrade_domain_timeout(self) -> Optional[pulumi.Input[str]]:
"""
The amount of time each upgrade domain has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "upgrade_domain_timeout")
@upgrade_domain_timeout.setter
def upgrade_domain_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "upgrade_domain_timeout", value)
@property
@pulumi.getter(name="upgradeTimeout")
def upgrade_timeout(self) -> Optional[pulumi.Input[str]]:
"""
The amount of time the overall upgrade has to complete before FailureAction is executed. It is first interpreted as a string representing an ISO 8601 duration. If that fails, then it is interpreted as a number representing the total number of milliseconds.
"""
return pulumi.get(self, "upgrade_timeout")
@upgrade_timeout.setter
def upgrade_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "upgrade_timeout", value)
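# A minimal usage sketch (not part of the generated API surface): it builds an
# ArmRollingUpgradeMonitoringPolicyArgs using ISO 8601 duration strings, which the
# parameter documentation above says are accepted. The failure action value
# 'Rollback' and the chosen durations are illustrative assumptions.
def _example_rolling_upgrade_monitoring_policy() -> ArmRollingUpgradeMonitoringPolicyArgs:
    return ArmRollingUpgradeMonitoringPolicyArgs(
        failure_action='Rollback',
        health_check_wait_duration='PT0H5M0S',
        health_check_stable_duration='PT0H5M0S',
        health_check_retry_timeout='PT0H10M0S',
        upgrade_domain_timeout='PT1H0M0S',
        upgrade_timeout='PT2H0M0S',
    )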
@pulumi.input_type
class ArmServiceTypeHealthPolicyArgs:
def __init__(__self__, *,
max_percent_unhealthy_partitions_per_service: Optional[pulumi.Input[int]] = None,
max_percent_unhealthy_replicas_per_partition: Optional[pulumi.Input[int]] = None,
max_percent_unhealthy_services: Optional[pulumi.Input[int]] = None):
"""
Represents the health policy used to evaluate the health of services belonging to a service type.
:param pulumi.Input[int] max_percent_unhealthy_partitions_per_service: The maximum percentage of partitions per service allowed to be unhealthy before your application is considered in error.
:param pulumi.Input[int] max_percent_unhealthy_replicas_per_partition: The maximum percentage of replicas per partition allowed to be unhealthy before your application is considered in error.
:param pulumi.Input[int] max_percent_unhealthy_services: The maximum percentage of services allowed to be unhealthy before your application is considered in error.
"""
if max_percent_unhealthy_partitions_per_service is None:
max_percent_unhealthy_partitions_per_service = 0
if max_percent_unhealthy_partitions_per_service is not None:
pulumi.set(__self__, "max_percent_unhealthy_partitions_per_service", max_percent_unhealthy_partitions_per_service)
if max_percent_unhealthy_replicas_per_partition is None:
max_percent_unhealthy_replicas_per_partition = 0
if max_percent_unhealthy_replicas_per_partition is not None:
pulumi.set(__self__, "max_percent_unhealthy_replicas_per_partition", max_percent_unhealthy_replicas_per_partition)
if max_percent_unhealthy_services is None:
max_percent_unhealthy_services = 0
if max_percent_unhealthy_services is not None:
pulumi.set(__self__, "max_percent_unhealthy_services", max_percent_unhealthy_services)
@property
@pulumi.getter(name="maxPercentUnhealthyPartitionsPerService")
def max_percent_unhealthy_partitions_per_service(self) -> Optional[pulumi.Input[int]]:
"""
The maximum percentage of partitions per service allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_partitions_per_service")
@max_percent_unhealthy_partitions_per_service.setter
def max_percent_unhealthy_partitions_per_service(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_percent_unhealthy_partitions_per_service", value)
@property
@pulumi.getter(name="maxPercentUnhealthyReplicasPerPartition")
def max_percent_unhealthy_replicas_per_partition(self) -> Optional[pulumi.Input[int]]:
"""
The maximum percentage of replicas per partition allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_replicas_per_partition")
@max_percent_unhealthy_replicas_per_partition.setter
def max_percent_unhealthy_replicas_per_partition(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_percent_unhealthy_replicas_per_partition", value)
@property
@pulumi.getter(name="maxPercentUnhealthyServices")
def max_percent_unhealthy_services(self) -> Optional[pulumi.Input[int]]:
"""
The maximum percentage of services allowed to be unhealthy before your application is considered in error.
"""
return pulumi.get(self, "max_percent_unhealthy_services")
@max_percent_unhealthy_services.setter
def max_percent_unhealthy_services(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_percent_unhealthy_services", value)
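# A minimal usage sketch (not part of the generated API surface): it composes an
# ArmApplicationHealthPolicyArgs with a default service type health policy and a
# per-service-type override map, using the ArmServiceTypeHealthPolicyArgs class
# defined above. The service type name 'StatelessServiceType' and all percentages
# are illustrative assumptions.
def _example_application_health_policy() -> ArmApplicationHealthPolicyArgs:
    default_policy = ArmServiceTypeHealthPolicyArgs(
        max_percent_unhealthy_partitions_per_service=0,
        max_percent_unhealthy_replicas_per_partition=0,
        max_percent_unhealthy_services=0,
    )
    return ArmApplicationHealthPolicyArgs(
        consider_warning_as_error=False,
        max_percent_unhealthy_deployed_applications=0,
        default_service_type_health_policy=default_policy,
        service_type_health_policy_map={
            'StatelessServiceType': ArmServiceTypeHealthPolicyArgs(
                max_percent_unhealthy_services=10,
            ),
        },
    )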
@pulumi.input_type
class AzureActiveDirectoryArgs:
def __init__(__self__, *,
client_application: Optional[pulumi.Input[str]] = None,
cluster_application: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None):
"""
The settings to enable AAD authentication on the cluster.
:param pulumi.Input[str] client_application: Azure active directory client application id.
:param pulumi.Input[str] cluster_application: Azure active directory cluster application id.
:param pulumi.Input[str] tenant_id: Azure active directory tenant id.
"""
if client_application is not None:
pulumi.set(__self__, "client_application", client_application)
if cluster_application is not None:
pulumi.set(__self__, "cluster_application", cluster_application)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="clientApplication")
def client_application(self) -> Optional[pulumi.Input[str]]:
"""
Azure active directory client application id.
"""
return pulumi.get(self, "client_application")
@client_application.setter
def client_application(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_application", value)
@property
@pulumi.getter(name="clusterApplication")
def cluster_application(self) -> Optional[pulumi.Input[str]]:
"""
Azure active directory cluster application id.
"""
return pulumi.get(self, "cluster_application")
@cluster_application.setter
def cluster_application(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_application", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
Azure active directory tenant id.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
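# A minimal usage sketch (not part of the generated API surface): it fills the AAD
# settings with placeholder GUIDs; the real client application, cluster application
# and tenant identifiers must come from your own Azure Active Directory setup.
def _example_azure_active_directory() -> AzureActiveDirectoryArgs:
    return AzureActiveDirectoryArgs(
        client_application='00000000-0000-0000-0000-000000000001',   # placeholder GUID
        cluster_application='00000000-0000-0000-0000-000000000002',  # placeholder GUID
        tenant_id='00000000-0000-0000-0000-000000000003',            # placeholder GUID
    )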
@pulumi.input_type
class CertificateDescriptionArgs:
def __init__(__self__, *,
thumbprint: pulumi.Input[str],
thumbprint_secondary: Optional[pulumi.Input[str]] = None,
x509_store_name: Optional[pulumi.Input[str]] = None):
"""
Describes the certificate details.
:param pulumi.Input[str] thumbprint: Thumbprint of the primary certificate.
:param pulumi.Input[str] thumbprint_secondary: Thumbprint of the secondary certificate.
:param pulumi.Input[str] x509_store_name: The local certificate store location.
"""
pulumi.set(__self__, "thumbprint", thumbprint)
if thumbprint_secondary is not None:
pulumi.set(__self__, "thumbprint_secondary", thumbprint_secondary)
if x509_store_name is not None:
pulumi.set(__self__, "x509_store_name", x509_store_name)
@property
@pulumi.getter
def thumbprint(self) -> pulumi.Input[str]:
"""
Thumbprint of the primary certificate.
"""
return pulumi.get(self, "thumbprint")
@thumbprint.setter
def thumbprint(self, value: pulumi.Input[str]):
pulumi.set(self, "thumbprint", value)
@property
@pulumi.getter(name="thumbprintSecondary")
def thumbprint_secondary(self) -> Optional[pulumi.Input[str]]:
"""
Thumbprint of the secondary certificate.
"""
return pulumi.get(self, "thumbprint_secondary")
@thumbprint_secondary.setter
def thumbprint_secondary(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "thumbprint_secondary", value)
@property
@pulumi.getter(name="x509StoreName")
def x509_store_name(self) -> Optional[pulumi.Input[str]]:
"""
The local certificate store location.
"""
return pulumi.get(self, "x509_store_name")
@x509_store_name.setter
def x509_store_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "x509_store_name", value)
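# A minimal usage sketch (not part of the generated API surface): only the primary
# thumbprint is required; the secondary thumbprint and store name are optional.
# The thumbprint below is a placeholder, and 'My' is assumed to be a valid local
# certificate store name.
def _example_certificate_description() -> CertificateDescriptionArgs:
    return CertificateDescriptionArgs(
        thumbprint='0000000000000000000000000000000000000000',  # placeholder thumbprint
        x509_store_name='My',
    )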
@pulumi.input_type
class ClientCertificateCommonNameArgs:
def __init__(__self__, *,
certificate_common_name: pulumi.Input[str],
certificate_issuer_thumbprint: pulumi.Input[str],
is_admin: pulumi.Input[bool]):
"""
Describes the client certificate details using common name.
:param pulumi.Input[str] certificate_common_name: The common name of the client certificate.
:param pulumi.Input[str] certificate_issuer_thumbprint: The issuer thumbprint of the client certificate.
        :param pulumi.Input[bool] is_admin: Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
pulumi.set(__self__, "certificate_common_name", certificate_common_name)
pulumi.set(__self__, "certificate_issuer_thumbprint", certificate_issuer_thumbprint)
pulumi.set(__self__, "is_admin", is_admin)
@property
@pulumi.getter(name="certificateCommonName")
def certificate_common_name(self) -> pulumi.Input[str]:
"""
The common name of the client certificate.
"""
return pulumi.get(self, "certificate_common_name")
@certificate_common_name.setter
def certificate_common_name(self, value: pulumi.Input[str]):
pulumi.set(self, "certificate_common_name", value)
@property
@pulumi.getter(name="certificateIssuerThumbprint")
def certificate_issuer_thumbprint(self) -> pulumi.Input[str]:
"""
The issuer thumbprint of the client certificate.
"""
return pulumi.get(self, "certificate_issuer_thumbprint")
@certificate_issuer_thumbprint.setter
def certificate_issuer_thumbprint(self, value: pulumi.Input[str]):
pulumi.set(self, "certificate_issuer_thumbprint", value)
@property
@pulumi.getter(name="isAdmin")
def is_admin(self) -> pulumi.Input[bool]:
"""
        Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
return pulumi.get(self, "is_admin")
@is_admin.setter
def is_admin(self, value: pulumi.Input[bool]):
pulumi.set(self, "is_admin", value)
@pulumi.input_type
class ClientCertificateThumbprintArgs:
def __init__(__self__, *,
certificate_thumbprint: pulumi.Input[str],
is_admin: pulumi.Input[bool]):
"""
Describes the client certificate details using thumbprint.
:param pulumi.Input[str] certificate_thumbprint: The thumbprint of the client certificate.
        :param pulumi.Input[bool] is_admin: Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
pulumi.set(__self__, "certificate_thumbprint", certificate_thumbprint)
pulumi.set(__self__, "is_admin", is_admin)
@property
@pulumi.getter(name="certificateThumbprint")
def certificate_thumbprint(self) -> pulumi.Input[str]:
"""
The thumbprint of the client certificate.
"""
return pulumi.get(self, "certificate_thumbprint")
@certificate_thumbprint.setter
def certificate_thumbprint(self, value: pulumi.Input[str]):
pulumi.set(self, "certificate_thumbprint", value)
@property
@pulumi.getter(name="isAdmin")
def is_admin(self) -> pulumi.Input[bool]:
"""
        Indicates if the client certificate has admin access to the cluster. Non-admin clients can perform only read-only operations on the cluster.
"""
return pulumi.get(self, "is_admin")
@is_admin.setter
def is_admin(self, value: pulumi.Input[bool]):
pulumi.set(self, "is_admin", value)
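# A minimal usage sketch (not part of the generated API surface): it shows the two
# ways of describing client certificates defined above, one by common name and one
# by thumbprint. The names and thumbprints are placeholders.
def _example_client_certificates():
    by_common_name = ClientCertificateCommonNameArgs(
        certificate_common_name='contoso-admin-client',                            # placeholder
        certificate_issuer_thumbprint='0000000000000000000000000000000000000000',  # placeholder
        is_admin=True,
    )
    by_thumbprint = ClientCertificateThumbprintArgs(
        certificate_thumbprint='1111111111111111111111111111111111111111',         # placeholder
        is_admin=False,  # read-only client
    )
    return by_common_name, by_thumbprint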
@pulumi.input_type
class ClusterHealthPolicyArgs:
def __init__(__self__, *,
max_percent_unhealthy_applications: Optional[pulumi.Input[int]] = None,
max_percent_unhealthy_nodes: Optional[pulumi.Input[int]] = None):
"""
Defines a health policy used to evaluate the health of the cluster or of a cluster node.
:param pulumi.Input[int] max_percent_unhealthy_applications: The maximum allowed percentage of unhealthy applications before reporting an error. For example, to allow 10% of applications to be unhealthy, this value would be 10.
:param pulumi.Input[int] max_percent_unhealthy_nodes: The maximum allowed percentage of unhealthy nodes before reporting an error. For example, to allow 10% of nodes to be unhealthy, this value would be 10.
"""
if max_percent_unhealthy_applications is not None:
pulumi.set(__self__, "max_percent_unhealthy_applications", max_percent_unhealthy_applications)
if max_percent_unhealthy_nodes is not None:
pulumi.set(__self__, "max_percent_unhealthy_nodes", max_percent_unhealthy_nodes)
@property
@pulumi.getter(name="maxPercentUnhealthyApplications")
def max_percent_unhealthy_applications(self) -> Optional[pulumi.Input[int]]:
"""
The maximum allowed percentage of unhealthy applications before reporting an error. For example, to allow 10% of applications to be unhealthy, this value would be 10.
"""
return pulumi.get(self, "max_percent_unhealthy_applications")
@max_percent_unhealthy_applications.setter
def max_percent_unhealthy_applications(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_percent_unhealthy_applications", value)
@property
@pulumi.getter(name="maxPercentUnhealthyNodes")
def max_percent_unhealthy_nodes(self) -> Optional[pulumi.Input[int]]:
"""
The maximum allowed percentage of unhealthy nodes before reporting an error. For example, to allow 10% of nodes to be unhealthy, this value would be 10.
"""
return pulumi.get(self, "max_percent_unhealthy_nodes")
@max_percent_unhealthy_nodes.setter
def max_percent_unhealthy_nodes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_percent_unhealthy_nodes", value)
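# A minimal usage sketch (not part of the generated API surface): it allows up to
# 10% unhealthy applications and 10% unhealthy nodes before the cluster is reported
# as being in error; the percentages are illustrative.
def _example_cluster_health_policy() -> ClusterHealthPolicyArgs:
    return ClusterHealthPolicyArgs(
        max_percent_unhealthy_applications=10,
        max_percent_unhealthy_nodes=10,
    )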
@pulumi.input_type
class ClusterUpgradeDeltaHealthPolicyArgs:
def __init__(__self__, *,
max_percent_delta_unhealthy_applications: pulumi.Input[int],
max_percent_delta_unhealthy_nodes: pulumi.Input[int],
max_percent_upgrade_domain_delta_unhealthy_nodes: pulumi.Input[int]):
"""
Describes the delta health policies for the cluster upgrade.
        :param pulumi.Input[int] max_percent_delta_unhealthy_applications: The maximum allowed percentage of application health degradation during cluster upgrades. The delta is measured between the state of the applications at the beginning of upgrade and the state of the applications at the time of the health evaluation. The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits. System services are not included in this.
        :param pulumi.Input[int] max_percent_delta_unhealthy_nodes: The maximum allowed percentage of node health degradation during cluster upgrades. The delta is measured between the state of the nodes at the beginning of upgrade and the state of the nodes at the time of the health evaluation. The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits.
        :param pulumi.Input[int] max_percent_upgrade_domain_delta_unhealthy_nodes: The maximum allowed percentage of upgrade domain node health degradation during cluster upgrades. The delta is measured between the state of the upgrade domain nodes at the beginning of upgrade and the state of the upgrade domain nodes at the time of the health evaluation. The check is performed after every upgrade domain upgrade completion for all completed upgrade domains to make sure the state of the upgrade domains is within tolerated limits.
"""
pulumi.set(__self__, "max_percent_delta_unhealthy_applications", max_percent_delta_unhealthy_applications)
pulumi.set(__self__, "max_percent_delta_unhealthy_nodes", max_percent_delta_unhealthy_nodes)
pulumi.set(__self__, "max_percent_upgrade_domain_delta_unhealthy_nodes", max_percent_upgrade_domain_delta_unhealthy_nodes)
@property
@pulumi.getter(name="maxPercentDeltaUnhealthyApplications")
def max_percent_delta_unhealthy_applications(self) -> pulumi.Input[int]:
"""
        The maximum allowed percentage of application health degradation during cluster upgrades. The delta is measured between the state of the applications at the beginning of upgrade and the state of the applications at the time of the health evaluation. The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits. System services are not included in this.
"""
return pulumi.get(self, "max_percent_delta_unhealthy_applications")
@max_percent_delta_unhealthy_applications.setter
def max_percent_delta_unhealthy_applications(self, value: pulumi.Input[int]):
pulumi.set(self, "max_percent_delta_unhealthy_applications", value)
@property
@pulumi.getter(name="maxPercentDeltaUnhealthyNodes")
def max_percent_delta_unhealthy_nodes(self) -> pulumi.Input[int]:
"""
        The maximum allowed percentage of node health degradation during cluster upgrades. The delta is measured between the state of the nodes at the beginning of upgrade and the state of the nodes at the time of the health evaluation. The check is performed after every upgrade domain upgrade completion to make sure the global state of the cluster is within tolerated limits.
"""
return pulumi.get(self, "max_percent_delta_unhealthy_nodes")
@max_percent_delta_unhealthy_nodes.setter
def max_percent_delta_unhealthy_nodes(self, value: pulumi.Input[int]):
pulumi.set(self, "max_percent_delta_unhealthy_nodes", value)
@property
@pulumi.getter(name="maxPercentUpgradeDomainDeltaUnhealthyNodes")
def max_percent_upgrade_domain_delta_unhealthy_nodes(self) -> pulumi.Input[int]:
"""
        The maximum allowed percentage of upgrade domain node health degradation during cluster upgrades. The delta is measured between the state of the upgrade domain nodes at the beginning of upgrade and the state of the upgrade domain nodes at the time of the health evaluation. The check is performed after every upgrade domain upgrade completion for all completed upgrade domains to make sure the state of the upgrade domains is within tolerated limits.
"""
return pulumi.get(self, "max_percent_upgrade_domain_delta_unhealthy_nodes")
@max_percent_upgrade_domain_delta_unhealthy_nodes.setter
def max_percent_upgrade_domain_delta_unhealthy_nodes(self, value: pulumi.Input[int]):
pulumi.set(self, "max_percent_upgrade_domain_delta_unhealthy_nodes", value)
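# A minimal usage sketch (not part of the generated API surface): all three delta
# percentages are required by the constructor above; the values are illustrative.
def _example_cluster_upgrade_delta_health_policy() -> ClusterUpgradeDeltaHealthPolicyArgs:
    return ClusterUpgradeDeltaHealthPolicyArgs(
        max_percent_delta_unhealthy_applications=0,
        max_percent_delta_unhealthy_nodes=0,
        max_percent_upgrade_domain_delta_unhealthy_nodes=0,
    )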
@pulumi.input_type
class ClusterUpgradePolicyArgs:
def __init__(__self__, *,
health_check_retry_timeout: pulumi.Input[str],
health_check_stable_duration: pulumi.Input[str],
health_check_wait_duration: pulumi.Input[str],
health_policy: pulumi.Input['ClusterHealthPolicyArgs'],
upgrade_domain_timeout: pulumi.Input[str],
upgrade_replica_set_check_timeout: pulumi.Input[str],
upgrade_timeout: pulumi.Input[str],
delta_health_policy: Optional[pulumi.Input['ClusterUpgradeDeltaHealthPolicyArgs']] = None,
force_restart: Optional[pulumi.Input[bool]] = None):
"""
Describes the policy used when upgrading the cluster.
:param pulumi.Input[str] health_check_retry_timeout: The amount of time to retry health evaluation when the application or cluster is unhealthy before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param pulumi.Input[str] health_check_stable_duration: The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param pulumi.Input[str] health_check_wait_duration: The length of time to wait after completing an upgrade domain before performing health checks. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param pulumi.Input['ClusterHealthPolicyArgs'] health_policy: The cluster health policy used when upgrading the cluster.
:param pulumi.Input[str] upgrade_domain_timeout: The amount of time each upgrade domain has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param pulumi.Input[str] upgrade_replica_set_check_timeout: The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param pulumi.Input[str] upgrade_timeout: The amount of time the overall upgrade has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
:param pulumi.Input['ClusterUpgradeDeltaHealthPolicyArgs'] delta_health_policy: The delta health policy used when upgrading the cluster.
:param pulumi.Input[bool] force_restart: If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
"""
pulumi.set(__self__, "health_check_retry_timeout", health_check_retry_timeout)
pulumi.set(__self__, "health_check_stable_duration", health_check_stable_duration)
pulumi.set(__self__, "health_check_wait_duration", health_check_wait_duration)
pulumi.set(__self__, "health_policy", health_policy)
pulumi.set(__self__, "upgrade_domain_timeout", upgrade_domain_timeout)
pulumi.set(__self__, "upgrade_replica_set_check_timeout", upgrade_replica_set_check_timeout)
pulumi.set(__self__, "upgrade_timeout", upgrade_timeout)
if delta_health_policy is not None:
pulumi.set(__self__, "delta_health_policy", delta_health_policy)
if force_restart is not None:
pulumi.set(__self__, "force_restart", force_restart)
@property
@pulumi.getter(name="healthCheckRetryTimeout")
def health_check_retry_timeout(self) -> pulumi.Input[str]:
"""
The amount of time to retry health evaluation when the application or cluster is unhealthy before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "health_check_retry_timeout")
@health_check_retry_timeout.setter
def health_check_retry_timeout(self, value: pulumi.Input[str]):
pulumi.set(self, "health_check_retry_timeout", value)
@property
@pulumi.getter(name="healthCheckStableDuration")
def health_check_stable_duration(self) -> pulumi.Input[str]:
"""
The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "health_check_stable_duration")
@health_check_stable_duration.setter
def health_check_stable_duration(self, value: pulumi.Input[str]):
pulumi.set(self, "health_check_stable_duration", value)
@property
@pulumi.getter(name="healthCheckWaitDuration")
def health_check_wait_duration(self) -> pulumi.Input[str]:
"""
The length of time to wait after completing an upgrade domain before performing health checks. The duration can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "health_check_wait_duration")
@health_check_wait_duration.setter
def health_check_wait_duration(self, value: pulumi.Input[str]):
pulumi.set(self, "health_check_wait_duration", value)
@property
@pulumi.getter(name="healthPolicy")
def health_policy(self) -> pulumi.Input['ClusterHealthPolicyArgs']:
"""
The cluster health policy used when upgrading the cluster.
"""
return pulumi.get(self, "health_policy")
@health_policy.setter
def health_policy(self, value: pulumi.Input['ClusterHealthPolicyArgs']):
pulumi.set(self, "health_policy", value)
@property
@pulumi.getter(name="upgradeDomainTimeout")
def upgrade_domain_timeout(self) -> pulumi.Input[str]:
"""
The amount of time each upgrade domain has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "upgrade_domain_timeout")
@upgrade_domain_timeout.setter
def upgrade_domain_timeout(self, value: pulumi.Input[str]):
pulumi.set(self, "upgrade_domain_timeout", value)
@property
@pulumi.getter(name="upgradeReplicaSetCheckTimeout")
def upgrade_replica_set_check_timeout(self) -> pulumi.Input[str]:
"""
The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "upgrade_replica_set_check_timeout")
@upgrade_replica_set_check_timeout.setter
def upgrade_replica_set_check_timeout(self, value: pulumi.Input[str]):
pulumi.set(self, "upgrade_replica_set_check_timeout", value)
@property
@pulumi.getter(name="upgradeTimeout")
def upgrade_timeout(self) -> pulumi.Input[str]:
"""
The amount of time the overall upgrade has to complete before the upgrade rolls back. The timeout can be in either hh:mm:ss or in d.hh:mm:ss.ms format.
"""
return pulumi.get(self, "upgrade_timeout")
@upgrade_timeout.setter
def upgrade_timeout(self, value: pulumi.Input[str]):
pulumi.set(self, "upgrade_timeout", value)
@property
@pulumi.getter(name="deltaHealthPolicy")
def delta_health_policy(self) -> Optional[pulumi.Input['ClusterUpgradeDeltaHealthPolicyArgs']]:
"""
The delta health policy used when upgrading the cluster.
"""
return pulumi.get(self, "delta_health_policy")
@delta_health_policy.setter
def delta_health_policy(self, value: Optional[pulumi.Input['ClusterUpgradeDeltaHealthPolicyArgs']]):
pulumi.set(self, "delta_health_policy", value)
@property
@pulumi.getter(name="forceRestart")
def force_restart(self) -> Optional[pulumi.Input[bool]]:
"""
If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data).
"""
return pulumi.get(self, "force_restart")
@force_restart.setter
def force_restart(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_restart", value)
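# A minimal usage sketch (not part of the generated API surface): it combines the
# required hh:mm:ss timeouts with a cluster health policy and an optional delta
# health policy, all defined above. The durations are illustrative assumptions,
# not recommended settings.
def _example_cluster_upgrade_policy() -> ClusterUpgradePolicyArgs:
    return ClusterUpgradePolicyArgs(
        health_check_retry_timeout='00:45:00',
        health_check_stable_duration='00:05:00',
        health_check_wait_duration='00:05:00',
        health_policy=ClusterHealthPolicyArgs(
            max_percent_unhealthy_applications=0,
            max_percent_unhealthy_nodes=0,
        ),
        upgrade_domain_timeout='02:00:00',
        upgrade_replica_set_check_timeout='00:10:00',
        upgrade_timeout='12:00:00',
        delta_health_policy=ClusterUpgradeDeltaHealthPolicyArgs(
            max_percent_delta_unhealthy_applications=0,
            max_percent_delta_unhealthy_nodes=0,
            max_percent_upgrade_domain_delta_unhealthy_nodes=0,
        ),
        force_restart=False,
    )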
@pulumi.input_type
class ClusterVersionDetailsArgs:
def __init__(__self__, *,
code_version: Optional[pulumi.Input[str]] = None,
environment: Optional[pulumi.Input[str]] = None,
support_expiry_utc: Optional[pulumi.Input[str]] = None):
"""
The detail of the Service Fabric runtime version result
:param pulumi.Input[str] code_version: The Service Fabric runtime version of the cluster.
        :param pulumi.Input[str] environment: Indicates if this version is for the Windows or Linux operating system.
        :param pulumi.Input[str] support_expiry_utc: The date when support for this version expires.
"""
if code_version is not None:
pulumi.set(__self__, "code_version", code_version)
if environment is not None:
pulumi.set(__self__, "environment", environment)
if support_expiry_utc is not None:
pulumi.set(__self__, "support_expiry_utc", support_expiry_utc)
@property
@pulumi.getter(name="codeVersion")
def code_version(self) -> Optional[pulumi.Input[str]]:
"""
The Service Fabric runtime version of the cluster.
"""
return pulumi.get(self, "code_version")
@code_version.setter
def code_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "code_version", value)
@property
@pulumi.getter
def environment(self) -> Optional[pulumi.Input[str]]:
"""
        Indicates if this version is for the Windows or Linux operating system.
"""
return pulumi.get(self, "environment")
@environment.setter
def environment(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "environment", value)
@property
@pulumi.getter(name="supportExpiryUtc")
def support_expiry_utc(self) -> Optional[pulumi.Input[str]]:
"""
        The date when support for this version expires.
"""
return pulumi.get(self, "support_expiry_utc")
@support_expiry_utc.setter
def support_expiry_utc(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "support_expiry_utc", value)
@pulumi.input_type
class DiagnosticsStorageAccountConfigArgs:
def __init__(__self__, *,
blob_endpoint: pulumi.Input[str],
protected_account_key_name: pulumi.Input[str],
queue_endpoint: pulumi.Input[str],
storage_account_name: pulumi.Input[str],
table_endpoint: pulumi.Input[str]):
"""
The storage account information for storing Service Fabric diagnostic logs.
:param pulumi.Input[str] blob_endpoint: The blob endpoint of the azure storage account.
:param pulumi.Input[str] protected_account_key_name: The protected diagnostics storage key name.
:param pulumi.Input[str] queue_endpoint: The queue endpoint of the azure storage account.
:param pulumi.Input[str] storage_account_name: The Azure storage account name.
:param pulumi.Input[str] table_endpoint: The table endpoint of the azure storage account.
"""
pulumi.set(__self__, "blob_endpoint", blob_endpoint)
pulumi.set(__self__, "protected_account_key_name", protected_account_key_name)
pulumi.set(__self__, "queue_endpoint", queue_endpoint)
pulumi.set(__self__, "storage_account_name", storage_account_name)
pulumi.set(__self__, "table_endpoint", table_endpoint)
@property
@pulumi.getter(name="blobEndpoint")
def blob_endpoint(self) -> pulumi.Input[str]:
"""
The blob endpoint of the azure storage account.
"""
return pulumi.get(self, "blob_endpoint")
@blob_endpoint.setter
def blob_endpoint(self, value: pulumi.Input[str]):
pulumi.set(self, "blob_endpoint", value)
@property
@pulumi.getter(name="protectedAccountKeyName")
def protected_account_key_name(self) -> pulumi.Input[str]:
"""
The protected diagnostics storage key name.
"""
return pulumi.get(self, "protected_account_key_name")
@protected_account_key_name.setter
def protected_account_key_name(self, value: pulumi.Input[str]):
pulumi.set(self, "protected_account_key_name", value)
@property
@pulumi.getter(name="queueEndpoint")
def queue_endpoint(self) -> pulumi.Input[str]:
"""
The queue endpoint of the azure storage account.
"""
return pulumi.get(self, "queue_endpoint")
@queue_endpoint.setter
def queue_endpoint(self, value: pulumi.Input[str]):
pulumi.set(self, "queue_endpoint", value)
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> pulumi.Input[str]:
"""
The Azure storage account name.
"""
return pulumi.get(self, "storage_account_name")
@storage_account_name.setter
def storage_account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "storage_account_name", value)
@property
@pulumi.getter(name="tableEndpoint")
def table_endpoint(self) -> pulumi.Input[str]:
"""
The table endpoint of the azure storage account.
"""
return pulumi.get(self, "table_endpoint")
@table_endpoint.setter
def table_endpoint(self, value: pulumi.Input[str]):
pulumi.set(self, "table_endpoint", value)
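# A minimal usage sketch (not part of the generated API surface): all five fields
# are required by the constructor above. The storage account name, key name and
# endpoint URLs are placeholders for values taken from your own storage account.
def _example_diagnostics_storage_account_config() -> DiagnosticsStorageAccountConfigArgs:
    return DiagnosticsStorageAccountConfigArgs(
        storage_account_name='sfdiagstorage',                          # placeholder
        protected_account_key_name='StorageAccountKey1',               # placeholder
        blob_endpoint='https://sfdiagstorage.blob.core.windows.net/',  # placeholder
        queue_endpoint='https://sfdiagstorage.queue.core.windows.net/',  # placeholder
        table_endpoint='https://sfdiagstorage.table.core.windows.net/',  # placeholder
    )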
@pulumi.input_type
class EndpointRangeDescriptionArgs:
def __init__(__self__, *,
end_port: pulumi.Input[int],
start_port: pulumi.Input[int]):
"""
Port range details
:param pulumi.Input[int] end_port: End port of a range of ports
:param pulumi.Input[int] start_port: Starting port of a range of ports
"""
pulumi.set(__self__, "end_port", end_port)
pulumi.set(__self__, "start_port", start_port)
@property
@pulumi.getter(name="endPort")
def end_port(self) -> pulumi.Input[int]:
"""
End port of a range of ports
"""
return pulumi.get(self, "end_port")
@end_port.setter
def end_port(self, value: pulumi.Input[int]):
pulumi.set(self, "end_port", value)
@property
@pulumi.getter(name="startPort")
def start_port(self) -> pulumi.Input[int]:
"""
Starting port of a range of ports
"""
return pulumi.get(self, "start_port")
@start_port.setter
def start_port(self, value: pulumi.Input[int]):
pulumi.set(self, "start_port", value)
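# A minimal usage sketch (not part of the generated API surface): a port range is
# described by its start and end ports; the range below is illustrative.
def _example_endpoint_range() -> EndpointRangeDescriptionArgs:
    return EndpointRangeDescriptionArgs(start_port=20000, end_port=30000)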
@pulumi.input_type
class NamedPartitionSchemeDescriptionArgs:
def __init__(__self__, *,
count: pulumi.Input[int],
names: pulumi.Input[Sequence[pulumi.Input[str]]],
partition_scheme: pulumi.Input[str]):
"""
Describes the named partition scheme of the service.
:param pulumi.Input[int] count: The number of partitions.
:param pulumi.Input[Sequence[pulumi.Input[str]]] names: Array of size specified by the ‘Count’ parameter, for the names of the partitions.
:param pulumi.Input[str] partition_scheme: Specifies how the service is partitioned.
Expected value is 'Named'.
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "names", names)
pulumi.set(__self__, "partition_scheme", 'Named')
@property
@pulumi.getter
def count(self) -> pulumi.Input[int]:
"""
The number of partitions.
"""
return pulumi.get(self, "count")
@count.setter
def count(self, value: pulumi.Input[int]):
pulumi.set(self, "count", value)
@property
@pulumi.getter
def names(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
Array of size specified by the ‘Count’ parameter, for the names of the partitions.
"""
return pulumi.get(self, "names")
@names.setter
def names(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "names", value)
@property
@pulumi.getter(name="partitionScheme")
def partition_scheme(self) -> pulumi.Input[str]:
"""
Specifies how the service is partitioned.
Expected value is 'Named'.
"""
return pulumi.get(self, "partition_scheme")
@partition_scheme.setter
def partition_scheme(self, value: pulumi.Input[str]):
pulumi.set(self, "partition_scheme", value)
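# A minimal usage sketch (not part of the generated API surface): the number of
# entries in 'names' matches 'count', and 'partition_scheme' must still be passed
# even though the constructor above always stores the literal 'Named'.
def _example_named_partition_scheme() -> NamedPartitionSchemeDescriptionArgs:
    return NamedPartitionSchemeDescriptionArgs(
        count=3,
        names=['Partition0', 'Partition1', 'Partition2'],
        partition_scheme='Named',
    )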
@pulumi.input_type
class NodeTypeDescriptionArgs:
def __init__(__self__, *,
client_connection_endpoint_port: pulumi.Input[int],
http_gateway_endpoint_port: pulumi.Input[int],
is_primary: pulumi.Input[bool],
name: pulumi.Input[str],
vm_instance_count: pulumi.Input[int],
application_ports: Optional[pulumi.Input['EndpointRangeDescriptionArgs']] = None,
capacities: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
durability_level: Optional[pulumi.Input[str]] = None,
ephemeral_ports: Optional[pulumi.Input['EndpointRangeDescriptionArgs']] = None,
placement_properties: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
reverse_proxy_endpoint_port: Optional[pulumi.Input[int]] = None):
"""
        Describes a node type in the cluster; each node type represents a subset of nodes in the cluster.
:param pulumi.Input[int] client_connection_endpoint_port: The TCP cluster management endpoint port.
:param pulumi.Input[int] http_gateway_endpoint_port: The HTTP cluster management endpoint port.
        :param pulumi.Input[bool] is_primary: The node type on which system services will run. Only one node type should be marked as primary. The primary node type cannot be deleted or changed for existing clusters.
:param pulumi.Input[str] name: The name of the node type.
:param pulumi.Input[int] vm_instance_count: The number of nodes in the node type. This count should match the capacity property in the corresponding VirtualMachineScaleSet resource.
        :param pulumi.Input['EndpointRangeDescriptionArgs'] application_ports: The range of ports from which the cluster assigns ports to Service Fabric applications.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] capacities: The capacity tags applied to the nodes in the node type; the cluster resource manager uses these tags to understand how much resource a node has.
:param pulumi.Input[str] durability_level: The durability level of the node type. Learn about [DurabilityLevel](https://docs.microsoft.com/azure/service-fabric/service-fabric-cluster-capacity).
- Bronze - No privileges. This is the default.
- Silver - The infrastructure jobs can be paused for a duration of 30 minutes per UD.
- Gold - The infrastructure jobs can be paused for a duration of 2 hours per UD. Gold durability can be enabled only on full node VM skus like D15_V2, G5 etc.
:param pulumi.Input['EndpointRangeDescriptionArgs'] ephemeral_ports: The range of ephemeral ports that nodes in this node type should be configured with.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] placement_properties: The placement tags applied to nodes in the node type, which can be used to indicate where certain services (workload) should run.
:param pulumi.Input[int] reverse_proxy_endpoint_port: The endpoint used by reverse proxy.
"""
pulumi.set(__self__, "client_connection_endpoint_port", client_connection_endpoint_port)
pulumi.set(__self__, "http_gateway_endpoint_port", http_gateway_endpoint_port)
pulumi.set(__self__, "is_primary", is_primary)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "vm_instance_count", vm_instance_count)
if application_ports is not None:
pulumi.set(__self__, "application_ports", application_ports)
if capacities is not None:
pulumi.set(__self__, "capacities", capacities)
if durability_level is not None:
pulumi.set(__self__, "durability_level", durability_level)
if ephemeral_ports is not None:
pulumi.set(__self__, "ephemeral_ports", ephemeral_ports)
if placement_properties is not None:
pulumi.set(__self__, "placement_properties", placement_properties)
if reverse_proxy_endpoint_port is not None:
pulumi.set(__self__, "reverse_proxy_endpoint_port", reverse_proxy_endpoint_port)
@property
@pulumi.getter(name="clientConnectionEndpointPort")
def client_connection_endpoint_port(self) -> pulumi.Input[int]:
"""
The TCP cluster management endpoint port.
"""
return pulumi.get(self, "client_connection_endpoint_port")
@client_connection_endpoint_port.setter
def client_connection_endpoint_port(self, value: pulumi.Input[int]):
pulumi.set(self, "client_connection_endpoint_port", value)
@property
@pulumi.getter(name="httpGatewayEndpointPort")
def http_gateway_endpoint_port(self) -> pulumi.Input[int]:
"""
The HTTP cluster management endpoint port.
"""
return pulumi.get(self, "http_gateway_endpoint_port")
@http_gateway_endpoint_port.setter
def http_gateway_endpoint_port(self, value: pulumi.Input[int]):
pulumi.set(self, "http_gateway_endpoint_port", value)
@property
@pulumi.getter(name="isPrimary")
def is_primary(self) -> pulumi.Input[bool]:
"""
        The node type on which system services will run. Only one node type should be marked as primary. The primary node type cannot be deleted or changed for existing clusters.
"""
return pulumi.get(self, "is_primary")
@is_primary.setter
def is_primary(self, value: pulumi.Input[bool]):
pulumi.set(self, "is_primary", value)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the node type.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="vmInstanceCount")
def vm_instance_count(self) -> pulumi.Input[int]:
"""
The number of nodes in the node type. This count should match the capacity property in the corresponding VirtualMachineScaleSet resource.
"""
return pulumi.get(self, "vm_instance_count")
@vm_instance_count.setter
def vm_instance_count(self, value: pulumi.Input[int]):
pulumi.set(self, "vm_instance_count", value)
@property
@pulumi.getter(name="applicationPorts")
def application_ports(self) -> Optional[pulumi.Input['EndpointRangeDescriptionArgs']]:
"""
        The range of ports from which the cluster assigns ports to Service Fabric applications.
"""
return pulumi.get(self, "application_ports")
@application_ports.setter
def application_ports(self, value: Optional[pulumi.Input['EndpointRangeDescriptionArgs']]):
pulumi.set(self, "application_ports", value)
@property
@pulumi.getter
def capacities(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
        The capacity tags applied to the nodes in the node type; the cluster resource manager uses these tags to understand how much resource a node has.
"""
return pulumi.get(self, "capacities")
@capacities.setter
def capacities(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "capacities", value)
@property
@pulumi.getter(name="durabilityLevel")
def durability_level(self) -> Optional[pulumi.Input[str]]:
"""
The durability level of the node type. Learn about [DurabilityLevel](https://docs.microsoft.com/azure/service-fabric/service-fabric-cluster-capacity).
- Bronze - No privileges. This is the default.
- Silver - The infrastructure jobs can be paused for a duration of 30 minutes per UD.
- Gold - The infrastructure jobs can be paused for a duration of 2 hours per UD. Gold durability can be enabled only on full node VM skus like D15_V2, G5 etc.
"""
return pulumi.get(self, "durability_level")
@durability_level.setter
def durability_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "durability_level", value)
@property
@pulumi.getter(name="ephemeralPorts")
def ephemeral_ports(self) -> Optional[pulumi.Input['EndpointRangeDescriptionArgs']]:
"""
The range of ephemeral ports that nodes in this node type should be configured with.
"""
return pulumi.get(self, "ephemeral_ports")
@ephemeral_ports.setter
def ephemeral_ports(self, value: Optional[pulumi.Input['EndpointRangeDescriptionArgs']]):
pulumi.set(self, "ephemeral_ports", value)
@property
@pulumi.getter(name="placementProperties")
def placement_properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
The placement tags applied to nodes in the node type, which can be used to indicate where certain services (workload) should run.
"""
return pulumi.get(self, "placement_properties")
@placement_properties.setter
def placement_properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "placement_properties", value)
@property
@pulumi.getter(name="reverseProxyEndpointPort")
def reverse_proxy_endpoint_port(self) -> Optional[pulumi.Input[int]]:
"""
The endpoint used by reverse proxy.
"""
return pulumi.get(self, "reverse_proxy_endpoint_port")
@reverse_proxy_endpoint_port.setter
def reverse_proxy_endpoint_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "reverse_proxy_endpoint_port", value)
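# A minimal usage sketch (not part of the generated API surface): it describes a
# primary node type. The management ports (19000/19080), the application and
# ephemeral port ranges, the durability level and the instance count are commonly
# used illustrative values, not requirements of this module.
def _example_node_type_description() -> NodeTypeDescriptionArgs:
    return NodeTypeDescriptionArgs(
        name='nt1',
        is_primary=True,
        vm_instance_count=5,
        client_connection_endpoint_port=19000,
        http_gateway_endpoint_port=19080,
        application_ports=EndpointRangeDescriptionArgs(start_port=20000, end_port=30000),
        ephemeral_ports=EndpointRangeDescriptionArgs(start_port=49152, end_port=65534),
        durability_level='Silver',
        placement_properties={'NodeColor': 'green'},  # placeholder placement tag
    )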
@pulumi.input_type
class ServiceCorrelationDescriptionArgs:
def __init__(__self__, *,
scheme: pulumi.Input[Union[str, 'ServiceCorrelationScheme']],
service_name: pulumi.Input[str]):
"""
Creates a particular correlation between services.
:param pulumi.Input[Union[str, 'ServiceCorrelationScheme']] scheme: The ServiceCorrelationScheme which describes the relationship between this service and the service specified via ServiceName.
:param pulumi.Input[str] service_name: The name of the service that the correlation relationship is established with.
"""
pulumi.set(__self__, "scheme", scheme)
pulumi.set(__self__, "service_name", service_name)
@property
@pulumi.getter
def scheme(self) -> pulumi.Input[Union[str, 'ServiceCorrelationScheme']]:
"""
The ServiceCorrelationScheme which describes the relationship between this service and the service specified via ServiceName.
"""
return pulumi.get(self, "scheme")
@scheme.setter
def scheme(self, value: pulumi.Input[Union[str, 'ServiceCorrelationScheme']]):
pulumi.set(self, "scheme", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Input[str]:
"""
The name of the service that the correlation relationship is established with.
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: pulumi.Input[str]):
pulumi.set(self, "service_name", value)
@pulumi.input_type
class ServiceLoadMetricDescriptionArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
default_load: Optional[pulumi.Input[int]] = None,
primary_default_load: Optional[pulumi.Input[int]] = None,
secondary_default_load: Optional[pulumi.Input[int]] = None,
weight: Optional[pulumi.Input[Union[str, 'ServiceLoadMetricWeight']]] = None):
"""
Specifies a metric to load balance a service during runtime.
:param pulumi.Input[str] name: The name of the metric. If the service chooses to report load during runtime, the load metric name should match the name that is specified in Name exactly. Note that metric names are case sensitive.
:param pulumi.Input[int] default_load: Used only for Stateless services. The default amount of load, as a number, that this service creates for this metric.
:param pulumi.Input[int] primary_default_load: Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Primary replica.
:param pulumi.Input[int] secondary_default_load: Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Secondary replica.
:param pulumi.Input[Union[str, 'ServiceLoadMetricWeight']] weight: The service load metric relative weight, compared to other metrics configured for this service, as a number.
"""
pulumi.set(__self__, "name", name)
if default_load is not None:
pulumi.set(__self__, "default_load", default_load)
if primary_default_load is not None:
pulumi.set(__self__, "primary_default_load", primary_default_load)
if secondary_default_load is not None:
pulumi.set(__self__, "secondary_default_load", secondary_default_load)
if weight is not None:
pulumi.set(__self__, "weight", weight)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the metric. If the service chooses to report load during runtime, the load metric name should match the name that is specified in Name exactly. Note that metric names are case sensitive.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="defaultLoad")
def default_load(self) -> Optional[pulumi.Input[int]]:
"""
Used only for Stateless services. The default amount of load, as a number, that this service creates for this metric.
"""
return pulumi.get(self, "default_load")
@default_load.setter
def default_load(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_load", value)
@property
@pulumi.getter(name="primaryDefaultLoad")
def primary_default_load(self) -> Optional[pulumi.Input[int]]:
"""
Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Primary replica.
"""
return pulumi.get(self, "primary_default_load")
@primary_default_load.setter
def primary_default_load(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "primary_default_load", value)
@property
@pulumi.getter(name="secondaryDefaultLoad")
def secondary_default_load(self) -> Optional[pulumi.Input[int]]:
"""
Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Secondary replica.
"""
return pulumi.get(self, "secondary_default_load")
@secondary_default_load.setter
def secondary_default_load(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "secondary_default_load", value)
@property
@pulumi.getter
def weight(self) -> Optional[pulumi.Input[Union[str, 'ServiceLoadMetricWeight']]]:
"""
The service load metric relative weight, compared to other metrics configured for this service, as a number.
"""
return pulumi.get(self, "weight")
@weight.setter
def weight(self, value: Optional[pulumi.Input[Union[str, 'ServiceLoadMetricWeight']]]):
pulumi.set(self, "weight", value)
@pulumi.input_type
class ServicePlacementPolicyDescriptionArgs:
def __init__(__self__, *,
type: pulumi.Input[Union[str, 'ServicePlacementPolicyType']]):
"""
Describes the policy to be used for placement of a Service Fabric service.
:param pulumi.Input[Union[str, 'ServicePlacementPolicyType']] type: The type of placement policy for a service fabric service. Following are the possible values.
"""
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def type(self) -> pulumi.Input[Union[str, 'ServicePlacementPolicyType']]:
"""
The type of placement policy for a service fabric service. Following are the possible values.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[Union[str, 'ServicePlacementPolicyType']]):
pulumi.set(self, "type", value)
@pulumi.input_type
class SettingsParameterDescriptionArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
value: pulumi.Input[str]):
"""
Describes a parameter in fabric settings of the cluster.
:param pulumi.Input[str] name: The parameter name of fabric setting.
:param pulumi.Input[str] value: The parameter value of fabric setting.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The parameter name of fabric setting.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def value(self) -> pulumi.Input[str]:
"""
The parameter value of fabric setting.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: pulumi.Input[str]):
pulumi.set(self, "value", value)
@pulumi.input_type
class SettingsSectionDescriptionArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
parameters: pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]]):
"""
Describes a section in the fabric settings of the cluster.
:param pulumi.Input[str] name: The section name of the fabric settings.
:param pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]] parameters: The collection of parameters in the section.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "parameters", parameters)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The section name of the fabric settings.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def parameters(self) -> pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]]:
"""
The collection of parameters in the section.
"""
return pulumi.get(self, "parameters")
@parameters.setter
def parameters(self, value: pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]]):
pulumi.set(self, "parameters", value)
@pulumi.input_type
class SingletonPartitionSchemeDescriptionArgs:
def __init__(__self__, *,
partition_scheme: pulumi.Input[str]):
"""
Describes the partition scheme of a singleton-partitioned, or non-partitioned service.
:param pulumi.Input[str] partition_scheme: Specifies how the service is partitioned.
Expected value is 'Singleton'.
"""
pulumi.set(__self__, "partition_scheme", 'Singleton')
@property
@pulumi.getter(name="partitionScheme")
def partition_scheme(self) -> pulumi.Input[str]:
"""
Specifies how the service is partitioned.
Expected value is 'Singleton'.
"""
return pulumi.get(self, "partition_scheme")
@partition_scheme.setter
def partition_scheme(self, value: pulumi.Input[str]):
pulumi.set(self, "partition_scheme", value)
@pulumi.input_type
class UniformInt64RangePartitionSchemeDescriptionArgs:
def __init__(__self__, *,
count: pulumi.Input[int],
high_key: pulumi.Input[str],
low_key: pulumi.Input[str],
partition_scheme: pulumi.Input[str]):
"""
Describes a partitioning scheme where an integer range is allocated evenly across a number of partitions.
:param pulumi.Input[int] count: The number of partitions.
:param pulumi.Input[str] high_key: String indicating the upper bound of the partition key range that
should be split between the partition ‘Count’
:param pulumi.Input[str] low_key: String indicating the lower bound of the partition key range that
should be split between the partition ‘Count’
:param pulumi.Input[str] partition_scheme: Specifies how the service is partitioned.
Expected value is 'UniformInt64Range'.
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "high_key", high_key)
pulumi.set(__self__, "low_key", low_key)
pulumi.set(__self__, "partition_scheme", 'UniformInt64Range')
@property
@pulumi.getter
def count(self) -> pulumi.Input[int]:
"""
The number of partitions.
"""
return pulumi.get(self, "count")
@count.setter
def count(self, value: pulumi.Input[int]):
pulumi.set(self, "count", value)
@property
@pulumi.getter(name="highKey")
def high_key(self) -> pulumi.Input[str]:
"""
String indicating the upper bound of the partition key range that
should be split between the partition ‘Count’
"""
return pulumi.get(self, "high_key")
@high_key.setter
def high_key(self, value: pulumi.Input[str]):
pulumi.set(self, "high_key", value)
@property
@pulumi.getter(name="lowKey")
def low_key(self) -> pulumi.Input[str]:
"""
String indicating the lower bound of the partition key range that
should be split between the partition ‘Count’
"""
return pulumi.get(self, "low_key")
@low_key.setter
def low_key(self, value: pulumi.Input[str]):
pulumi.set(self, "low_key", value)
@property
@pulumi.getter(name="partitionScheme")
def partition_scheme(self) -> pulumi.Input[str]:
"""
Specifies how the service is partitioned.
Expected value is 'UniformInt64Range'.
"""
return pulumi.get(self, "partition_scheme")
@partition_scheme.setter
def partition_scheme(self, value: pulumi.Input[str]):
pulumi.set(self, "partition_scheme", value)
|
PypiClean
|
/isedit-0.3.0.tar.gz/isedit-0.3.0/js/node_modules/vexflow/build/esm/src/tickable.js
|
import { Element } from './element.js';
import { Fraction } from './fraction.js';
import { Tables } from './tables.js';
import { defined, RuntimeError } from './util.js';
export class Tickable extends Element {
constructor() {
super();
this._preFormatted = false;
this._postFormatted = false;
this.ticks = new Fraction(0, 1);
this.intrinsicTicks = 0;
this.tickMultiplier = new Fraction(1, 1);
this.width = 0;
this.x_shift = 0;
this.modifiers = [];
this.tupletStack = [];
this.align_center = false;
this.center_x_shift = 0;
this.ignore_ticks = false;
this.formatterMetrics = {
freedom: { left: 0, right: 0 },
duration: '',
iterations: 0,
space: {
used: 0,
mean: 0,
deviation: 0,
},
};
}
static get CATEGORY() {
return "Tickable";
}
reset() {
return this;
}
getTicks() {
return this.ticks;
}
shouldIgnoreTicks() {
return this.ignore_ticks;
}
setIgnoreTicks(flag) {
this.ignore_ticks = flag;
return this;
}
setWidth(width) {
this.width = width;
}
getWidth() {
if (!this._preFormatted) {
throw new RuntimeError('UnformattedNote', "Can't call GetWidth on an unformatted note.");
}
return this.width + (this.modifierContext ? this.modifierContext.getWidth() : 0);
}
setXShift(x) {
this.x_shift = x;
return this;
}
getXShift() {
return this.x_shift;
}
getX() {
const tickContext = this.checkTickContext(`Can't getX() without a TickContext.`);
return tickContext.getX() + this.x_shift;
}
getFormatterMetrics() {
return this.formatterMetrics;
}
getCenterXShift() {
if (this.isCenterAligned()) {
return this.center_x_shift;
}
return 0;
}
setCenterXShift(centerXShift) {
this.center_x_shift = centerXShift;
return this;
}
isCenterAligned() {
return this.align_center;
}
setCenterAlignment(align_center) {
this.align_center = align_center;
return this;
}
getVoice() {
return defined(this.voice, 'NoVoice', 'Tickable has no voice.');
}
setVoice(voice) {
this.voice = voice;
}
getTuplet() {
return this.tuplet;
}
getTupletStack() {
return this.tupletStack;
}
resetTuplet(tuplet) {
let noteCount;
let notesOccupied;
if (tuplet) {
const i = this.tupletStack.indexOf(tuplet);
if (i !== -1) {
this.tupletStack.splice(i, 1);
noteCount = tuplet.getNoteCount();
notesOccupied = tuplet.getNotesOccupied();
this.applyTickMultiplier(noteCount, notesOccupied);
}
return this;
}
while (this.tupletStack.length) {
tuplet = this.tupletStack.pop();
noteCount = tuplet.getNoteCount();
notesOccupied = tuplet.getNotesOccupied();
this.applyTickMultiplier(noteCount, notesOccupied);
}
return this;
}
setTuplet(tuplet) {
if (tuplet) {
this.tupletStack.push(tuplet);
const noteCount = tuplet.getNoteCount();
const notesOccupied = tuplet.getNotesOccupied();
this.applyTickMultiplier(notesOccupied, noteCount);
}
this.tuplet = tuplet;
return this;
}
addToModifierContext(mc) {
this.modifierContext = mc;
for (let i = 0; i < this.modifiers.length; ++i) {
this.modifierContext.addMember(this.modifiers[i]);
}
this.modifierContext.addMember(this);
this._preFormatted = false;
return this;
}
addModifier(modifier, index = 0) {
this.modifiers.push(modifier);
this._preFormatted = false;
return this;
}
getModifiers() {
return this.modifiers;
}
setTickContext(tc) {
this.tickContext = tc;
this._preFormatted = false;
}
checkTickContext(message = 'Tickable has no tick context.') {
return defined(this.tickContext, 'NoTickContext', message);
}
preFormat() {
if (this._preFormatted)
return;
this.width = 0;
if (this.modifierContext) {
this.modifierContext.preFormat();
this.width += this.modifierContext.getWidth();
}
}
set preFormatted(value) {
this._preFormatted = value;
}
get preFormatted() {
return this._preFormatted;
}
postFormat() {
if (this._postFormatted)
return this;
this._postFormatted = true;
return this;
}
set postFormatted(value) {
this._postFormatted = value;
}
get postFormatted() {
return this._postFormatted;
}
getIntrinsicTicks() {
return this.intrinsicTicks;
}
setIntrinsicTicks(intrinsicTicks) {
this.intrinsicTicks = intrinsicTicks;
this.ticks = this.tickMultiplier.clone().multiply(this.intrinsicTicks);
}
getTickMultiplier() {
return this.tickMultiplier;
}
applyTickMultiplier(numerator, denominator) {
this.tickMultiplier.multiply(numerator, denominator);
this.ticks = this.tickMultiplier.clone().multiply(this.intrinsicTicks);
}
setDuration(duration) {
const ticks = duration.numerator * (Tables.RESOLUTION / duration.denominator);
this.ticks = this.tickMultiplier.clone().multiply(ticks);
this.intrinsicTicks = this.ticks.value();
}
getAbsoluteX() {
const tickContext = this.checkTickContext(`Can't getAbsoluteX() without a TickContext.`);
return tickContext.getX();
}
setModifierContext(mc) {
this.modifierContext = mc;
return this;
}
getModifierContext() {
return this.modifierContext;
}
checkModifierContext() {
return defined(this.modifierContext, 'NoModifierContext', 'No modifier context attached to this tickable.');
}
}
|
PypiClean
|
/commondatamodel_objectmodel-1.7.3-py3-none-any.whl/cdm/persistence/cdmfolder/types/__init__.py
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
from .argument import Argument
from .attribute_context import AttributeContext
from .attribute_group import AttributeGroup
from .attribute_group_reference import AttributeGroupReference
from .attribute_resolution_guidance import AttributeResolutionGuidance
from .cdm_import import Import
from .cdm_json_type import CdmJsonType
from .constant_entity import ConstantEntity
from .data_partition import DataPartition
from .data_partition_pattern import DataPartitionPattern
from .data_type import DataType
from .data_type_reference import DataTypeReference
from .document_content import DocumentContent
from .e2e_relationship import E2ERelationship
from .entity import Entity
from .entity_attribute import EntityAttribute
from .entity_reference import EntityReference
from .file_status import FileStatus
from .folder import Folder
from .local_entity_declaration import LocalEntityDeclaration
from .manifest_content import ManifestContent
from .manifest_declaration import ManifestDeclaration
from .projections.operation_add_count_attribute import OperationAddCountAttribute
from .projections.operation_add_supporting_attribute import OperationAddSupportingAttribute
from .projections.operation_add_type_attribute import OperationAddTypeAttribute
from .projections.operation_exclude_attributes import OperationExcludeAttributes
from .projections.operation_array_expansion import OperationArrayExpansion
from .projections.operation_combine_attributes import OperationCombineAttributes
from .projections.operation_rename_attributes import OperationRenameAttributes
from .projections.operation_replace_as_foreign_key import OperationReplaceAsForeignKey
from .projections.operation_include_attributes import OperationIncludeAttributes
from .projections.operation_add_attribute_group import OperationAddAttributeGroup
from .projections.operation_alter_traits import OperationAlterTraits
from .projections.operation_add_artifact_attribute import OperationAddArtifactAttribute
from .parameter import Parameter
from .projections.projection import Projection
from .purpose import Purpose
from .purpose_reference import PurposeReference
from .referenced_entity_declaration import ReferencedEntityDeclaration
from .trait import Trait
from .trait_group import TraitGroup
from .trait_reference import TraitReference
from .trait_group_reference import TraitGroupReference
from .type_attribute import TypeAttribute
__all__ = [
'Argument',
'AttributeContext',
'AttributeGroup',
'AttributeGroupReference',
'AttributeResolutionGuidance',
'CdmJsonType',
'ConstantEntity',
'DataPartition',
'DataPartitionPattern',
'DataType',
'DataTypeReference',
'DocumentContent',
'E2ERelationship',
'Entity',
'EntityAttribute',
'EntityReference',
'FileStatus',
'Folder',
'Import',
'LocalEntityDeclaration',
'ManifestContent',
'ManifestDeclaration',
'OperationAddCountAttribute',
'OperationAddSupportingAttribute',
'OperationAddTypeAttribute',
'OperationExcludeAttributes',
'OperationArrayExpansion',
'OperationCombineAttributes',
'OperationRenameAttributes',
'OperationReplaceAsForeignKey',
'OperationIncludeAttributes',
'OperationAddAttributeGroup',
'OperationAlterTraits',
'OperationAddArtifactAttribute',
'Parameter',
'Projection',
'Purpose',
'PurposeReference',
'ReferencedEntityDeclaration',
'Trait',
'TraitReference',
'TraitGroup',
'TraitGroupReference',
'TypeAttribute'
]
|
PypiClean
|
/fcs-sru-server-1.1.1.tar.gz/fcs-sru-server-1.1.1/README.md
|
FCS SRU Server
==============
<!-- START: BADGES -->
[](https://github.com/psf/black)
[](https://pycqa.github.io/isort/)
[](https://github.com/PyCQA/flake8)
[](https://www.sphinx-doc.org/en/master/usage/index.html)
[](https://google.github.io/styleguide/pyguide.html#s3.8-comments-and-docstrings)
[](https://pypi.python.org/pypi/fcs-sru-server)
[](https://github.com/Querela/fcs-sru-server-python/commits/main)
[](https://fcs-sru-server-python.readthedocs.io/en/latest/?badge=latest)
<!-- END: BADGES -->
- Based on [Java](https://github.com/clarin-eric/fcs-sru-server/) implementation
_git commit: `0091fca0a4add134c478beed422dd1399a5364e3`_
- Differences:
- a bit more pythonic (naming, interfaces, enums etc.)
- no auth stuff yet
- WIP: output buffering; the server framework might not allow this,
  so there is no streaming and everything stays in memory until it is sent
- server framework choice (wsgi, asgi), for now [`werkzeug`](https://werkzeug.palletsprojects.com)
- TODO: refactoring to allow async variants for streaming responses (large resources),
e.g. with [`starlette`](https://www.starlette.io/)
## Summary
This package implements the server-side part of the SRU/CQL protocol (SRU/S)
and conforms to SRU versions 1.1 and 1.2. SRU version 2.0 is mostly implemented
but might be missing some more obscure features.
The library will handle most of the protocol-related tasks for you, and you'll
only need to implement a few classes to connect your search engine. However, the
library will not save you from doing your SRU/CQL homework (i.e. you'll need to
have at least some understanding of the protocol and adhere to its
semantics). Furthermore, you need at least a basic understanding of
Python web application development (wsgi in particular) to use this library.
More Information about SRU/CQL:
http://www.loc.gov/standards/sru/
The implementation is designed to make very minimal assumptions about the
environment it's deployed in. For interfacing with your search engine, you
need to implement the `SRUSearchEngine` interface. At a minimum, you'll need
to implement the `search()` method; a rough sketch follows below. Please check
the Python API documentation for further details about this interface.
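A minimal sketch of such an implementation is shown below. The import path, the base class location and the exact `search()` signature are assumptions here (the Python port only loosely mirrors the Java API), so treat it as an illustration of the idea and check the API documentation for the real names:
```python
# Illustrative sketch only -- the import path and the search() signature are
# assumptions; consult the fcs-sru-server API documentation for the real names.
from clarin.sru.server import SRUSearchEngine  # assumed import path

class MySearchEngine(SRUSearchEngine):
    """Bridges the SRU protocol layer to a custom search backend."""

    def search(self, config, request, diagnostics):
        # Translate the parsed CQL query from the SRU request into whatever the
        # backend understands, run it, and wrap the hits in a result-set object
        # that the server can serialize.
        query = request.query                  # assumed attribute
        hits = run_backend_query(str(query))   # hypothetical backend call
        return MyResultSet(diagnostics, hits)  # your result-set subclass
```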
The `SRUServer` implements the SRU protocol and talks to your search engine
through the search engine implementation you supply. The `SRUServer` is configured
using a `SRUServerConfig` instance. The `SRUServerConfig` reads an XML document,
which contains the (static) server configuration. It must conform to the
`sru-server-config.xsd` schema in the [`src/clarin/sru/xml/`](src/clarin/sru/xml/)
directory.
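Wiring the pieces together then follows the flow: configuration file -> `SRUServerConfig` -> `SRUServer` -> WSGI application. The names used below (`SRUServerConfig.parse`, the `SRUServer` constructor arguments, the werkzeug glue) are assumptions based on the description above, not the verified API:
```python
# Hypothetical wiring sketch -- every name below is an assumption; it only
# illustrates the flow XML config -> SRUServerConfig -> SRUServer -> WSGI app.
config = SRUServerConfig.parse("sru-server-config.xml")  # assumed factory method
server = SRUServer(config, MySearchEngine())             # assumed constructor
# The package's werkzeug-based glue would then expose `server` as a WSGI app.
```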
## Installation
```bash
# from github/source
python3 -m pip install 'fcs-sru-server @ git+https://github.com/Querela/fcs-sru-server-python.git'
# (locally) built package
python3 -m pip install dist/fcs_sru_server-<version>-py2.py3-none-any.whl
# or
python3 -m pip install dist/fcs-sru-server-<version>.tar.gz
# for local development
python3 -m pip install -e .
```
In `setup.cfg`:
```ini
[options]
install_requires =
fcs-sru-server @ git+https://github.com/Querela/fcs-sru-server-python.git
```
## Build source/binary distribution
```bash
python3 -m pip install build
python3 -m build
```
## Development
* Uses `pytest` (with coverage, clarity and randomly plugins).
```bash
python3 -m pip install -e .[test]
pytest
```
Run style checks:
```bash
# general style checks
python3 -m pip install -e .[style]
black --check .
flake8 . --show-source --statistics
isort --check --diff .
mypy .
# building the package and check metadata
python3 -m pip install -e .[build]
python3 -m build
twine check --strict dist/*
# build documentation and check links ...
python3 -m pip install -e .[docs]
sphinx-build -b html docs dist/docs
sphinx-build -b linkcheck docs dist/docs
```
## Build documentation
```bash
python3 -m pip install -r ./docs/requirements.txt
# or
python3 -m pip install -e .[docs]
sphinx-build -b html docs dist/docs
sphinx-build -b linkcheck docs dist/docs
```
## See also
- [clarin-eric/fcs-sru-server](https://github.com/clarin-eric/fcs-sru-server/)
- [clarin-eric/fcs-sru-client](https://github.com/clarin-eric/fcs-sru-client/)
|
PypiClean
|
/aliyun-python-sdk-rds-2.7.43.tar.gz/aliyun-python-sdk-rds-2.7.43/aliyunsdkrds/request/v20140815/ModifyDBInstanceConfigRequest.py
|
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class ModifyDBInstanceConfigRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'ModifyDBInstanceConfig')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_ConfigName(self): # String
return self.get_query_params().get('ConfigName')
def set_ConfigName(self, ConfigName): # String
self.add_query_param('ConfigName', ConfigName)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_ConfigValue(self): # String
return self.get_query_params().get('ConfigValue')
def set_ConfigValue(self, ConfigValue): # String
self.add_query_param('ConfigValue', ConfigValue)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
|
PypiClean
|
/numericube-twistranet-2.0.0.zip/numericube-twistranet-2.0.0/twistranet/themes/twistheme/static/js/tiny_mce/langs/no.js
|
tinyMCE.addI18n({no:{
common:{
edit_confirm:"Vil du bruke WYSIWYG-editoren for dette tekstfeltet?",
apply:"Legg til",
insert:"Sett inn",
update:"Oppdater",
cancel:"Avbryt",
close:"Stopp",
browse:"Bla gjennom",
class_name:"Klasse",
not_set:"--ikke sett--",
clipboard_msg:"Klipp ut / Kopier /Lim inn fungerer ikke i Mozilla og Firefox. Vil du vite mer om dette?",
clipboard_no_support:"For tiden ikke st\u00F8ttet av nettleseren din, bruk tastatursnarveier i stedet.",
popup_blocked:"Beklager, det er registrert at du har popup-sperre aktivert i nettleseren. Du m\u00E5 oppheve popup-sperren for nettstedet for \u00E5 f\u00E5 tilgang til dette verkt\u00F8yet",
invalid_data:"Feil: Ugyldige verdier er skrevet inn, disse er merket med r\u00F8dt",
more_colors:"Flere farger"
},
contextmenu:{
align:"Justering",
left:"Venstre",
center:"Midtstill",
right:"H\u00F8yre",
full:"Full"
},
insertdatetime:{
date_fmt:"%d-%m-%Y",
time_fmt:"%H:%M:%S",
insertdate_desc:"Lim inn dato",
inserttime_desc:"Lim inn tid",
months_long:"januar,februar,mars,april,mai,juni,juli,august,september,oktober,november,desember",
months_short:"jan,feb,mar,apr,mai,jun,jul,aug,sep,oct,nov,des",
day_long:"s\u00F8ndag,mandag,tirsdag,onsdag,torsdag,fredag,l\u00F8rdag,s\u00F8ndag",
day_short:"s\u00F8n,man,tir,ons,tor,fre,l\u00F8r,s\u00F8n"
},
print:{
print_desc:"Skriv ut"
},
preview:{
preview_desc:"Forh\u00E5ndsvisning"
},
directionality:{
ltr_desc:"Retning venstre mot h\u00F8yre",
rtl_desc:"Retning h\u00F8yre mot venstre"
},
layer:{
insertlayer_desc:"Sett inn nytt lag",
forward_desc:"Flytt framover",
backward_desc:"Flytt bakover",
absolute_desc:"Sl\u00E5 p\u00E5/av absolutt plassering",
content:"Nytt lag..."
},
save:{
save_desc:"Lagre",
cancel_desc:"Kanseller alle endringer"
},
nonbreaking:{
nonbreaking_desc:"Sett inn hardt mellomrom"
},
iespell:{
iespell_desc:"Kontroller rettskriving",
download:"ieSpell ikke funnet. \u00D8nsker du \u00E5 installere ieSpell?"
},
advhr:{
advhr_desc:"Horisontal linje"
},
emotions:{
emotions_desc:"Hum\u00F8rfjes"
},
searchreplace:{
search_desc:"S\u00F8k",
replace_desc:"S\u00F8k/Erstatt"
},
advimage:{
image_desc:"Sett inn / endre bilde"
},
advlink:{
link_desc:"Sett inn / endre lenke"
},
xhtmlxtras:{
cite_desc:"Sitat",
abbr_desc:"Forkortning",
acronym_desc:"Akronym",
del_desc:"Sletting",
ins_desc:"Innsetting",
attribs_desc:"Sett inn / Endre egenskaper"
},
style:{
desc:"Rediger CSS-stil"
},
paste:{
paste_text_desc:"Lim inn som vanlig tekst",
paste_word_desc:"Lim inn fra Word",
selectall_desc:"Marker alt",
plaintext_mode_sticky:"Paste is now in plain text mode. Click again to toggle back to regular paste mode. After you paste something you will be returned to regular paste mode.",
plaintext_mode:"Paste is now in plain text mode. Click again to toggle back to regular paste mode."
},
paste_dlg:{
text_title:"Bruk CTRL+V p\u00E5 tastaturet for \u00E5 lime inn i dette vinduet.",
text_linebreaks:"Behold tekstbryting",
word_title:"Bruk CTRL+V p\u00E5 tastaturet for \u00E5 lime inn i dette vinduet."
},
table:{
desc:"Sett inn en ny tabell",
row_before_desc:"Sett inn rad framfor",
row_after_desc:"Sett inn rad etter",
delete_row_desc:"Fjern rad",
col_before_desc:"Sett inn kolonne framfor",
col_after_desc:"Sett inn kolonne etter",
delete_col_desc:"Fjern kolonne",
split_cells_desc:"Del celler",
merge_cells_desc:"Sl\u00E5 sammen celler",
row_desc:"Radegenskaper",
cell_desc:"Celleegenskaper",
props_desc:"Tabellegenskaper",
paste_row_before_desc:"Lim inn rad framfor",
paste_row_after_desc:"Lim inn rad etter",
cut_row_desc:"Fjern rad",
copy_row_desc:"Kopier rad",
del:"Slett tabell",
row:"Rad",
col:"Kolonne",
cell:"Celle"
},
autosave:{
unload_msg:"Endringene du gjorde g\u00E5r tapt om du forlater denne siden!",
restore_content:"Restore auto-saved content.",
warning_message:"If you restore the saved content, you will lose all the content that is currently in the editor.\n\nAre you sure you want to restore the saved content?."
},
fullscreen:{
desc:"Skift til/fra fullskjermmodus"
},
media:{
desc:"Sett inn / rediger inkludert objekt",
edit:"Endre innsatt objekt"
},
fullpage:{
desc:"Dokumentegenskaper"
},
template:{
desc:"Sett inn forh\u00E5ndsdefinert malinnhold"
},
visualchars:{
desc:"Visuelle konktrolltegn p\u00E5/av"
},
spellchecker:{
desc:"Stavekontroll p\u00E5/av",
menu:"Vis meny",
ignore_word:"Ignorer ord",
ignore_words:"Ignorer alt",
langs:"Spr\u00E5k",
wait:"Vennligst vent...",
sug:"Framlegg",
no_sug:"Ingen framlegg",
no_mpell:"Ingen stavefeil funnet."
},
pagebreak:{
desc:"Sett inn sideskift"
},
advlist:{
types:"Types",
def:"Default",
lower_alpha:"Lower alpha",
lower_greek:"Lower greek",
lower_roman:"Lower roman",
upper_alpha:"Upper alpha",
upper_roman:"Upper roman",
circle:"Circle",
disc:"Disc",
square:"Square"
}}});
|
PypiClean
|
/biopal-0.3.0rc0-py3-none-any.whl/arepytools/geometry/_geocoding/_direct_geocoding.py
|
import numpy as np
from arepytools.geometry import conversions as conv
from arepytools.geometry.wgs84 import WGS84
import arepytools.constants as cst
from arepytools.geometry._geocoding import _newton
from arepytools import _utils
_LOOK_SIGN = {"RIGHT": 1, "LEFT": -1}
def _mid_range_distance(range_axis: np.ndarray):
return range_axis[(range_axis.size + 1) // 2 - 1] * cst.LIGHT_SPEED / 2
def direct_geocoding_monostatic(sensor_position: np.ndarray, sensor_velocity: np.ndarray,
range_times: np.ndarray, look_direction: str, geodetic_altitude: float,
frequency_doppler_centroid: np.ndarray, wavelength: float,
initial_guess: np.ndarray = None) -> np.ndarray:
"""Perform monostatic direct geocoding
:param sensor_position: position of the sensor
:param sensor_velocity: velocity of the sensor
:param range_times: range axis (N, 1) vector
:param look_direction: either RIGHT or LEFT
:param geodetic_altitude: the altitude over wgs84
:param frequency_doppler_centroid: array with frequency doppler centroid values (N,1) vector
:param wavelength: the wavelength
:param initial_guess: initial guess for newton iterations. If not provided a guess will be computed [optional]
:return: a 3xN matrix with the xyz coordinates of the points
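Example (illustrative only; argument shapes follow the parameter descriptions
above, and the 'RIGHT' look direction and 0.0 geodetic altitude are placeholder
values):
xyz_points = direct_geocoding_monostatic(sensor_position, sensor_velocity,
range_times, 'RIGHT', 0.0, frequency_doppler_centroid, wavelength)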
"""
# Input validation
_utils.check_type(wavelength, float)
_utils.check_type(geodetic_altitude, float)
sensor_position = _utils.input_data_to_numpy_array_with_checks(sensor_position, dtype=float, shape=(3,))
sensor_velocity = _utils.input_data_to_numpy_array_with_checks(sensor_velocity, dtype=float, shape=(3,))
# Optional computation of the initial guess
if initial_guess is None:
initial_guess = _direct_geocoding_monostatic_init(sensor_position, sensor_velocity,
_mid_range_distance(range_times),
_LOOK_SIGN[look_direction])
# Geocoding
output = np.zeros((3, range_times.size))
for i_rg, (rg_time, f_dc) in enumerate(zip(range_times, frequency_doppler_centroid)):
output[:, i_rg] = _direct_geocoding_monostatic_core(initial_guess, sensor_position, sensor_velocity, rg_time,
wavelength, f_dc, geodetic_altitude)
initial_guess = output[:, i_rg].copy()
return output
def direct_geocoding_bistatic(sensor_position_rx: np.ndarray, sensor_velocity_rx: np.ndarray,
sensor_positions_tx: np.ndarray, sensor_velocities_tx: np.ndarray,
range_times: np.ndarray,
look_direction: str, geodetic_altitude: float,
frequency_doppler_centroid: np.ndarray, wavelength: float,
initial_guess: np.ndarray = None) -> np.ndarray:
"""Perform bistatic direct geocoding
:param sensor_position_rx: position of the sensor rx
:param sensor_velocity_rx: velocity of the sensor rx
:param sensor_positions_tx: positions of the sensor tx (one for each range time (3, N))
:param sensor_velocities_tx: velocities of the sensor tx (one for each range time (3, N))
:param range_times: range axis (N, 1) vector
:param look_direction: either RIGHT or LEFT
:param geodetic_altitude: the altitude over wgs84
:param frequency_doppler_centroid: array with frequency doppler centroid values (N,1) vector
:param wavelength: the wavelength
:param initial_guess: initial guess for newton iterations. If not provided a guess will be computed [optional]
:return: a 3xN matrix with the xyz coordinates of the points
"""
# input validation
_utils.check_type(wavelength, float)
_utils.check_type(geodetic_altitude, float)
sensor_position_rx = _utils.input_data_to_numpy_array_with_checks(sensor_position_rx, dtype=float, shape=(3,))
sensor_velocity_rx = _utils.input_data_to_numpy_array_with_checks(sensor_velocity_rx, dtype=float, shape=(3,))
sensor_positions_tx = _utils.input_data_to_numpy_array_with_checks(sensor_positions_tx, dtype=float,
first_axis_size=3)
sensor_velocities_tx = _utils.input_data_to_numpy_array_with_checks(sensor_velocities_tx, dtype=float,
first_axis_size=3)
# Optional initial guess
if initial_guess is None:
initial_guess = _direct_geocoding_monostatic_init(sensor_position_rx, sensor_velocity_rx,
_mid_range_distance(range_times),
_LOOK_SIGN[look_direction])
output = np.zeros((3, range_times.size))
for i_rg, (rg_time, pos_tx, vel_tx, f_dc) in enumerate(
zip(range_times, sensor_positions_tx.T, sensor_velocities_tx.T, frequency_doppler_centroid)):
output[:, i_rg] = _direct_geocoding_bistatic_core(initial_guess, sensor_position_rx, sensor_velocity_rx, pos_tx,
vel_tx, rg_time, wavelength, f_dc,
geodetic_altitude)
initial_guess = output[:, i_rg].copy()
return output
def _direct_geocoding_monostatic_init(sat_position, sat_velocity, range_distance, look_sign):
satellite_distance_from_center = np.linalg.norm(sat_position) # Satellite Position Norm
llh_sat = conv.xyz2llh(sat_position)
earth_radius = np.linalg.norm(conv.llh2xyz(np.asarray([llh_sat[0], llh_sat[1], 0], dtype=float)))
# Check earth radius vs range compatibility
if range_distance < satellite_distance_from_center - earth_radius:
raise RuntimeError("Cannot find initial guess for direct geocoding")
ux = sat_position / satellite_distance_from_center
uy = np.cross(sat_position, sat_velocity)
uy = uy / np.linalg.norm(uy)
uz = np.cross(ux, uy)
# x-coordinate
x = (satellite_distance_from_center ** 2 + earth_radius ** 2 - range_distance ** 2) / (
2 * satellite_distance_from_center)
# Circle radius
ro = np.sqrt(earth_radius ** 2 - x ** 2)
# Project velocity on ref frame
vx = np.dot(sat_velocity, ux)
vz = np.dot(sat_velocity, uz)
# Find first solution
z = (satellite_distance_from_center - x) * vx / vz
y = np.sqrt(ro ** 2 - z ** 2)
if look_sign * y > 0:
y = -y
return x * ux + y * uy + z * uz
def _direct_geocoding_monostatic_core(initial_guess, sat_position, sat_velocity, range_time, wavelength,
frequency_doppler_centroid, geodetic_altitude):
d_range_square = (cst.LIGHT_SPEED * range_time / 2.) ** 2
geoid_r_min = WGS84.semi_minor_axis + geodetic_altitude
geoid_r_max = WGS84.semi_major_axis + geodetic_altitude
r_ep2 = geoid_r_min ** 2
r_ee2 = geoid_r_max ** 2
def direct_geocoding_function(x):
sat2point, distance_square, distance, pv_scalar = _basic_data(x, sat_position, sat_velocity)
range_equation = distance_square - d_range_square
grad_range_equation = - 2 * sat2point
doppler_equation, grad_doppler_equation = _doppler_equation(wavelength, pv_scalar, distance,
frequency_doppler_centroid, sat_velocity,
sat2point)
fun = [range_equation, _ellipse_equation(x, r_ee2, r_ep2), doppler_equation]
jacobian = [[grad_range_equation[k], _der_ellipse_equation_xi(x, k, r_ee2, r_ep2), grad_doppler_equation[k]]
for
k in range(3)]
return fun, jacobian
return _newton.newton_for_geocoding(direct_geocoding_function, initial_guess)
def _direct_geocoding_bistatic_core(initial_guess, sat_position_rx, sat_velocity_rx, sat_position_tx,
sat_velocity_tx,
range_time, wavelength, frequency_doppler_centroid, geodetic_altitude):
d_range_square = (cst.LIGHT_SPEED * range_time) ** 2 # two-way distance
geoid_r_min = WGS84.semi_minor_axis + geodetic_altitude
geoid_r_max = WGS84.semi_major_axis + geodetic_altitude
r_ep2 = geoid_r_min ** 2
r_ee2 = geoid_r_max ** 2
def direct_geocoding_function(x):
sat2point_rx, _, distance_rx, pv_scalar_rx = _basic_data(x, sat_position_rx,
sat_velocity_rx)
sat2point_tx, _, distance_tx, pv_scalar_tx = _basic_data(x, sat_position_tx,
sat_velocity_tx)
# range equation
distance = distance_rx + distance_tx
range_equation = distance ** 2 - d_range_square
grad_range_equation = - 2 * distance * (sat2point_rx / distance_rx + sat2point_tx / distance_tx)
# doppler equation
doppler_equation_rx, grad_doppler_equation_rx = _doppler_equation(wavelength, pv_scalar_rx, distance_rx,
frequency_doppler_centroid,
sat_velocity_rx,
sat2point_rx)
doppler_equation_tx, grad_doppler_equation_tx = _doppler_equation(wavelength, pv_scalar_tx, distance_tx,
frequency_doppler_centroid,
sat_velocity_tx,
sat2point_tx)
doppler_equation = (doppler_equation_rx + doppler_equation_tx) / 2
grad_doppler_equation = (grad_doppler_equation_rx + grad_doppler_equation_tx) / 2
fun = [range_equation, _ellipse_equation(x, r_ee2, r_ep2), doppler_equation]
jacobian = [[grad_range_equation[k], _der_ellipse_equation_xi(x, k, r_ee2, r_ep2), grad_doppler_equation[k]]
for
k in range(3)]
return fun, jacobian
return _newton.newton_for_geocoding(direct_geocoding_function, initial_guess)
def _ellipse_equation(x, r_ee2, r_ep2):
return (x[0] * x[0] + x[1] * x[1]) / r_ee2 + x[2] * x[2] / r_ep2 - 1.0
def _der_ellipse_equation_xi(x, i_coord, r_ee2, r_ep2):
r2 = r_ee2 if i_coord < 2 else r_ep2
return 2 * x[i_coord] / r2
def _doppler_equation(wavelength, pv_scalar, distance, frequency_doppler_centroid, sat_velocity, sat2point):
c = 2. / wavelength / distance
doppler_equation = c * pv_scalar + frequency_doppler_centroid
grad_doppler_equation = c * (- sat_velocity + pv_scalar * sat2point / distance ** 2)
return doppler_equation, grad_doppler_equation
def _basic_data(x, sat_position, sat_velocity):
sat2point = sat_position - x
distance_square = np.dot(sat2point, sat2point)
distance = np.sqrt(distance_square)
pv_scalar = np.dot(sat_velocity, sat2point)
return sat2point, distance_square, distance, pv_scalar
|
PypiClean
|
/tensorflow_edwin-2.10.1-cp38-cp38-win_amd64.whl/tensorflow/python/distribute/mirrored_run.py
|
"""Class MirroredStrategy implementing tf.distribute.Strategy."""
import contextlib
import functools
import threading
import weakref
from tensorflow.python import pywrap_tfe
from tensorflow.python.autograph.core import ag_ctx as autograph_ctx
from tensorflow.python.autograph.impl import api as autograph
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import shared_variable_creator
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import coordinator
from tensorflow.python.util import traceback_utils
def _is_gpu_device(device):
return tf_device.DeviceSpec.from_string(device).device_type == "GPU"
def call_for_each_replica(strategy, fn, args=None, kwargs=None):
"""Call `fn` on each worker devices(replica).
It's highly recommended to wrap the call to this function inside a
`tf.function`, otherwise the performance is poor.
Args:
strategy: `tf.distribute.Strategy`.
fn: function to call on each worker device.
args: positional arguments to `fn`.
kwargs: keyword arguments to `fn`.
Returns:
Wrapped returned value of `fn` from all replicas.
"""
if args is None:
args = ()
if kwargs is None:
kwargs = {}
if isinstance(fn, def_function.Function):
# Don't lift up the tf.function decoration if `fn` is compiled with XLA
# and all devices are GPU. In this case we will use collectives to do
# cross-device communication, thus no merge_call is in the path.
if fn._jit_compile and all( # pylint: disable=protected-access
[_is_gpu_device(d) for d in strategy.extended.worker_devices]):
return _call_for_each_replica(strategy, fn, args, kwargs)
if strategy not in _cfer_fn_cache:
_cfer_fn_cache[strategy] = weakref.WeakKeyDictionary()
wrapped = _cfer_fn_cache[strategy].get(fn)
if wrapped is None:
# We need to wrap fn such that it triggers _call_for_each_replica inside
# the tf.function. We use _clone() instead of @tf.function wrapped
# call_for_each_replica() because we would like to retain the arguments to
# the @tf.function decorator of fn.
wrapped = fn._clone( # pylint: disable=protected-access
python_function=functools.partial(call_for_each_replica, strategy,
fn.python_function))
_cfer_fn_cache[strategy][fn] = wrapped
return wrapped(args, kwargs)
if context.executing_eagerly():
logging.log_first_n(
logging.WARN, "Using %s eagerly has significant "
"overhead currently. We will be working on improving "
"this in the future, but for now please wrap "
"`call_for_each_replica` or `experimental_run` or "
"`run` inside a tf.function to get "
"the best performance." % strategy.__class__.__name__, 5)
else:
# When a tf.function is wrapped to trigger _call_for_each_replica (see
# the other branch above), AutoGraph stops conversion at
# _call_for_each_replica itself (TF library functions are allowlisted).
# This makes sure that the Python function that originally passed to
# the tf.function is still converted.
fn = autograph.tf_convert(fn, autograph_ctx.control_status_ctx())
return _call_for_each_replica(strategy, fn, args, kwargs)
# Per strategy cache for call_for_each_replica def_function.Function objects.
_cfer_fn_cache = weakref.WeakKeyDictionary()
@contextlib.contextmanager
def _enter_graph(g, eager, creator_stack=None):
"""Context manager for selecting a graph and maybe eager mode."""
if eager:
with g.as_default(), context.eager_mode():
if creator_stack is not None:
g._variable_creator_stack = creator_stack # pylint: disable=protected-access
yield
else:
with g.as_default():
if creator_stack is not None:
g._variable_creator_stack = creator_stack # pylint: disable=protected-access
yield
@contextlib.contextmanager
def _maybe_enter_eager_mode(eager):
if eager:
with context.eager_mode():
yield
else:
yield
def _cpu_device(device):
cpu_device = tf_device.DeviceSpec.from_string(device)
cpu_device = cpu_device.replace(device_type="CPU", device_index=0)
return cpu_device.to_string()
class _RequestedStop(Exception): # pylint: disable=g-bad-exception-name
pass
def _get_thread_local_configuration_callable():
if traceback_utils.is_traceback_filtering_enabled():
thread_local_callables = {traceback_utils.enable_traceback_filtering}
else:
thread_local_callables = {traceback_utils.disable_traceback_filtering}
return thread_local_callables
def _call_for_each_replica(distribution, fn, args, kwargs):
"""Run `fn` in separate threads, once per replica/worker device.
Args:
distribution: the DistributionStrategy object.
fn: function to run (will be run once per replica, each in its own thread).
args: positional arguments for `fn`
kwargs: keyword arguments for `fn`.
Returns:
Merged return value of `fn` across all replicas.
Raises:
RuntimeError: If fn() calls get_replica_context().merge_call() a different
number of times on different replicas (devices).
"""
# TODO(josh11b): Add this option once we add synchronization to variable
# creation. Until then, this is pretty unsafe to use.
run_concurrently = False
if not context.executing_eagerly():
# Needed for per-thread device, etc. contexts in graph mode.
ops.get_default_graph().switch_to_thread_local()
coord = coordinator.Coordinator(clean_stop_exception_types=(_RequestedStop,))
shared_variable_store = {}
devices = distribution.extended.worker_devices
thread_local_callables = _get_thread_local_configuration_callable()
# TODO(isaprykin): Create these threads once instead of during every call.
threads = []
for index in range(len(devices)):
variable_creator_fn = shared_variable_creator.make_fn(
shared_variable_store, index)
t = _MirroredReplicaThread(distribution, coord, index, devices,
variable_creator_fn, fn,
distribute_utils.caching_scope_local,
distribute_utils.select_replica(index, args),
distribute_utils.select_replica(index, kwargs),
thread_local_callables)
threads.append(t)
for t in threads:
t.start()
# When `fn` starts `should_run` event is set on _MirroredReplicaThread
# (`MRT`) threads. The execution waits until
# `MRT.has_paused` is set, which indicates that either `fn` is
# complete or a `get_replica_context().merge_call()` is called. If `fn` is
# complete, then `MRT.done` is set to True. Otherwise, arguments
# of `get_replica_context().merge_call` from all paused threads are grouped
# and the `merge_fn` is performed. Results of the
# `get_replica_context().merge_call` are then set to `MRT.merge_result`.
# Each such `get_replica_context().merge_call` call returns the
# `MRT.merge_result` for that thread when `MRT.should_run` event
# is reset again. Execution of `fn` resumes.
try:
with coord.stop_on_exception():
all_done = False
while not all_done and not coord.should_stop():
done = []
if run_concurrently:
for t in threads:
t.should_run.set()
for t in threads:
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
else:
for t in threads:
t.should_run.set()
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
if coord.should_stop():
return None
all_done = all(done)
if not all_done:
if any(done):
raise RuntimeError("Some replicas made a different number of "
"replica_context().merge_call() calls.")
# get_replica_context().merge_call() case
merge_args = distribute_utils.regroup(
tuple(t.merge_args for t in threads))
merge_kwargs = distribute_utils.regroup(
tuple(t.merge_kwargs for t in threads))
# We capture the name_scope of the MRT when we call merge_fn
# to ensure that if we have opened a name scope in the MRT,
# it will be respected when executing the merge function. We only
# capture the name_scope from the first MRT and assume it is
# the same for all other MRTs.
mtt_captured_name_scope = threads[0].captured_name_scope
mtt_captured_var_scope = threads[0].captured_var_scope
# Capture and merge the control dependencies from all the threads.
mtt_captured_control_deps = set()
for t in threads:
mtt_captured_control_deps.update(t.captured_control_deps)
# Control is transferred from _MirroredReplicaThread (MRT) to the main
# thread, i.e., here, to perform `merge_fn`, and thus we preserve the
# name scope, control dependencies, etc. from MRT at the time
# `merge_call` is made.
# One special case is that the `merge_call` is made under an
# `tf.init_scope` in the MRT. `tf.init_scope` will clear control
# dependencies, pause gradient tape, and enter the lowest context on
# the `context_stack` that is not building a graph function. Entering
# the lowest context could be one of the two things: installation of a
# graph as the default graph or switch into eager mode. If the former
# is done and causes `merge_call` to be called in a different graph
# from the one in which `call_for_each_replica` is called, we do not
# allow this case (see comment in `_merge_call`) and we would not have
# arrived here due to the assertion in `_merge_call`. However, if the
# latter is done, we want to make sure the main thread enter an eager
# mode scope as well so that `merge_fn` does not have trouble
# accessing resources defined in MRT under the same context.
with ops.name_scope(
mtt_captured_name_scope), ops.control_dependencies(
mtt_captured_control_deps), variable_scope.variable_scope(
mtt_captured_var_scope), _maybe_enter_eager_mode(
threads[0].merge_call_entered_in_eager):
merge_result = threads[0].merge_fn(distribution, *merge_args,
**merge_kwargs)
for r, t in enumerate(threads):
t.merge_result = distribute_utils.select_replica(r, merge_result)
finally:
for t in threads:
t.should_run.set()
coord.join(threads)
return distribute_utils.regroup(tuple(t.main_result for t in threads))
class _MirroredReplicaThread(threading.Thread):
"""A thread that runs() a function on a device."""
def __init__(self, dist, coord, replica_id, devices, variable_creator_fn, fn,
caching_scope, args, kwargs, thread_local_callables=None):
super(_MirroredReplicaThread, self).__init__()
self.coord = coord
self.distribution = dist
self.devices = devices
self.replica_id = replica_id
self.replica_id_in_sync_group = (
dist.extended._get_replica_id_in_sync_group(replica_id)) # pylint: disable=protected-access
self.variable_creator_fn = variable_creator_fn
# State needed to run and return the results of `fn`.
self.main_fn = fn
self.main_args = args
self.main_kwargs = kwargs
self.main_result = None
self.done = False
# State needed to run the next merge_call() (if any) requested via
# ReplicaContext.
self.merge_fn = None
self.merge_args = None
self.merge_kwargs = None
self.merge_result = None
self.captured_name_scope = None
self.captured_var_scope = None
try:
self.caching_scope_entered = caching_scope.new_cache_scope_count
self.caching_scope_exited = caching_scope.cache_scope_exited_count
except AttributeError:
self.caching_scope_entered = None
self.caching_scope_exited = None
# We use a thread.Event for the main thread to signal when this
# thread should start running (`should_run`), and another for
# this thread to transfer control back to the main thread
# (`has_paused`, either when it gets to a
# `get_replica_context().merge_call` or when `fn` returns). In
# either case the event starts cleared, is signaled by calling
# set(). The receiving thread waits for the signal by calling
# wait() and then immediately clearing the event using clear().
self.should_run = threading.Event()
self.has_paused = threading.Event()
# These fields have to do with inheriting various contexts from the
# parent thread:
context.ensure_initialized()
ctx = context.context()
self.in_eager = ctx.executing_eagerly()
self.record_thread_local_summary_state()
self.record_thread_local_eager_context_state()
self.context_device_policy = (
pywrap_tfe.TFE_ContextGetDevicePlacementPolicy(
ctx._context_handle)) # pylint: disable=protected-access
self.graph = ops.get_default_graph()
with ops.init_scope():
self._init_in_eager = context.executing_eagerly()
self._init_graph = ops.get_default_graph()
self._variable_creator_stack = self.graph._variable_creator_stack[:] # pylint: disable=protected-access
self._var_scope = variable_scope.get_variable_scope()
# Adding a "/" at end lets us re-enter this scope later.
self._name_scope = self.graph.get_name_scope()
if self._name_scope:
self._name_scope += "/"
if self.replica_id > 0:
if not self._name_scope:
self._name_scope = ""
self._name_scope += "replica_%d/" % self.replica_id
self._thread_local_callables = thread_local_callables
def run(self):
self.should_run.wait()
self.should_run.clear()
try:
if self.coord.should_stop():
return
self.restore_thread_local_summary_state()
self.restore_thread_local_callable()
self.restore_thread_local_eager_context_state()
if (self.caching_scope_entered is not None and
self.caching_scope_exited is not None):
distribute_utils.caching_scope_local.new_cache_scope_count = self.caching_scope_entered
distribute_utils.caching_scope_local.cache_scope_exited_count = self.caching_scope_exited
# TODO(josh11b): Use current logical device instead of 0 here.
with self.coord.stop_on_exception(), \
_enter_graph(self._init_graph, self._init_in_eager), \
_enter_graph(self.graph, self.in_eager,
self._variable_creator_stack), \
context.device_policy(self.context_device_policy), \
_MirroredReplicaContext(self.distribution,
self.replica_id_in_sync_group), \
ops.device(self.devices[self.replica_id]), \
ops.name_scope(self._name_scope), \
variable_scope.variable_scope(
self._var_scope, reuse=self.replica_id > 0), \
variable_scope.variable_creator_scope(self.variable_creator_fn):
self.main_result = self.main_fn(*self.main_args, **self.main_kwargs)
self.done = True
finally:
self.has_paused.set()
def record_thread_local_summary_state(self):
"""Record the thread local summary state in self."""
# TODO(slebedev): is this still relevant? the referenced bug is closed.
summary_state = summary_ops_v2._summary_state # pylint: disable=protected-access
self._summary_step = summary_state.step
self._summary_writer = summary_state.writer
self._summary_recording = summary_state.is_recording
self._summary_recording_distribution_strategy = (
summary_state.is_recording_distribution_strategy)
def restore_thread_local_summary_state(self):
"""Restore thread local summary state from self."""
# TODO(slebedev): is this still relevant? the referenced bug is closed.
summary_state = summary_ops_v2._summary_state # pylint: disable=protected-access
summary_state.step = self._summary_step
summary_state.writer = self._summary_writer
summary_state.is_recording = self._summary_recording
summary_state.is_recording_distribution_strategy = (
self._summary_recording_distribution_strategy)
def record_thread_local_eager_context_state(self):
ctx = context.context()
eager_context_state = ctx._thread_local_data # pylint: disable=protected-access
self._eager_context_op_callbacks = eager_context_state.op_callbacks
# TODO(b/125892694): record other fields in EagerContext.
def restore_thread_local_eager_context_state(self):
ctx = context.context()
eager_context_state = ctx._thread_local_data # pylint: disable=protected-access
eager_context_state.op_callbacks = self._eager_context_op_callbacks
# TODO(b/125892694): record other fields in EagerContext.
def restore_thread_local_callable(self):
if self._thread_local_callables:
for fn in self._thread_local_callables:
fn()
class _MirroredReplicaContext(distribute_lib.ReplicaContext):
"""ReplicaContext for synchronized replica."""
def _merge_call(self, fn, args, kwargs):
"""`merge_call()` implementation for synchronized replica.
This pauses the current replica thread and passes `fn` and its arguments to
the main thread. The main thread will wait until all replicas pause, then
invoke `fn` with grouped arguments. The current replica thread will continue
after `fn` completes.
See `_call_for_each_replica` for the logic in the main thread.
Args:
fn: a function that is called in cross-replica context with grouped
arguments from each replica. `fn` should return grouped values.
args: positional arguments to `fn`.
kwargs: keyword arguments to `fn`.
Returns:
Return value of `fn` for the current replica.
Raises:
RuntimeError: when merge_call happens in a different graph, e.g. in a
different tf.function, which is not supported now.
_RequestedStop: when stop is requested.
"""
t = threading.current_thread()
assert isinstance(t, _MirroredReplicaThread)
t.merge_fn = fn
t.merge_args = args
t.merge_kwargs = kwargs
t.captured_name_scope = t.graph.get_name_scope()
# Adding a "/" at end lets us re-enter this scope later.
if t.captured_name_scope:
t.captured_name_scope += "/"
t.captured_var_scope = variable_scope.get_variable_scope()
t.captured_control_deps = t.graph._current_control_dependencies() # pylint: disable=protected-access
t.merge_call_entered_in_eager = context.context().executing_eagerly()
# It is problematic if `merge_call` is called under a graph different from
# the one that `_call_for_each_replica` is called under; there are
# 3 cases in which this can happen:
#
# 1. The `fn` passed to `_call_for_each_replica` is decorated with
# `tf.function` and there is a `merge_call` in `fn`. Since
# MirroredStrategy traces a separate function per thread (per device),
# and each trace takes a shared lock, the lock is never released by the
# first thread and subsequent replica threads cannot proceed to trace
# their own functions. This issue is addressed by always converting
# `_call_for_each_replica(tf.function(f))` to
# ``tf.function(_call_for_each_replica(f))`.` in
# `MirroredStrategy._call_for_each_replica`.
#
# 2. The `fn` passed to `_call_for_each_replica` contains a nested
# `tf.function`, and there is a `merge_call` in the nested `tf.function`.
# In this case each thread can successfully trace its own function, but
# since the `merge_fn` passed to `merge_call` is executed in the main
# thread (where `_call_for_each_replica` is executed), it can't access
# the tensors that come from different graphs.
#
# 3. The `fn` passed to `_call_for_each_replica` contains a control-flow
# statement, and there is a `merge_call` inside the control-flow body,
# `fn` or `_call_for_each_replica` is decorated with `tf.function`.
# Control flow statement creates a separate graph for its body, similar
# to #2, `merge_fn` executed in the main thread can't access the
# tensors that come from different graphs.
#
# We raise an error for #2 and #3.
if ops.get_default_graph() != t.graph:
raise RuntimeError(
"`merge_call` called while defining a new graph or a tf.function."
" This can often happen if the function `fn` passed to"
" `strategy.run()` contains a nested `@tf.function`, and the nested "
"`@tf.function` contains a synchronization point, such as aggregating"
" gradients (e.g, optimizer.apply_gradients), or if the function `fn`"
" uses a control flow statement which contains a synchronization"
" point in the body. Such behaviors are not yet supported. Instead,"
" please avoid nested `tf.function`s or control flow statements that"
" may potentially cross a synchronization boundary, for example,"
" wrap the `fn` passed to `strategy.run` or the entire `strategy.run`"
" inside a `tf.function` or move the control flow out of `fn`. If"
" you are subclassing a `tf.keras.Model`, please avoid decorating"
" overridden methods `test_step` and `train_step` in `tf.function`.")
t.has_paused.set()
t.should_run.wait()
t.should_run.clear()
if t.coord.should_stop():
raise _RequestedStop()
t.merge_call_entered_in_eager = None
return t.merge_result
@property
def devices(self):
distribute_lib.require_replica_context(self)
return [
self._strategy.extended.worker_devices_by_replica[
self._replica_id_in_sync_group]
]
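# Illustrative sketch (not part of this module): the RuntimeError above is hit
# when a `merge_call` (e.g. via `tf.distribute.get_replica_context().merge_call`)
# is reached from a nested `tf.function` inside the `fn` passed to `strategy.run`.
# The suggested workaround is to wrap the outer call instead of nesting; the
# shapes and values below are placeholders.
def _merge_call_workaround_sketch():
    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()

    def fn(x):
        # A synchronization point. If `fn` itself were wrapped in a nested
        # `tf.function`, reaching this line would raise the error above.
        def merge_fn(strategy, value):
            return value
        return tf.distribute.get_replica_context().merge_call(merge_fn, args=(x,))

    @tf.function  # wrap the whole `strategy.run(...)`, not pieces inside `fn`
    def step(x):
        return strategy.run(fn, args=(x,))

    return step(tf.constant(1.0))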
|
PypiClean
|
/parsec_cloud-2.16.0rc2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/parsec/core/ipcinterface.py
|
from __future__ import annotations
import sys
from contextlib import asynccontextmanager, contextmanager
from enum import Enum
from functools import partial
from pathlib import Path
from typing import Any, AsyncIterator, Awaitable, Callable, Iterator, Mapping
import trio
from structlog import get_logger
from trio.abc import Stream
from parsec.serde import (
BaseSchema,
MsgpackSerializer,
OneOfSchema,
SerdeError,
Unpacker,
fields,
packb,
)
from parsec.utils import open_service_nursery
logger = get_logger()
CommandHandlerCallback = Callable[[dict[str, object]], Awaitable[dict[str, Any]]]
class IPCCommand(Enum):
FOREGROUND = "foreground"
NEW_INSTANCE = "new_instance"
class IPCServerError(Exception):
pass
class IPCServerBadResponse(IPCServerError):
def __init__(self, rep: object) -> None:
self.rep = rep
def __repr__(self) -> str:
return f"Bad response from IPC server: {self.rep}"
class IPCServerNotRunning(IPCServerError):
pass
class IPCServerAlreadyRunning(IPCServerError):
pass
class ForegroundReqSchema(BaseSchema):
cmd = fields.EnumCheckedConstant(IPCCommand.FOREGROUND, required=True)
class NewInstanceReqSchema(BaseSchema):
cmd = fields.EnumCheckedConstant(IPCCommand.NEW_INSTANCE, required=True)
start_arg = fields.String(allow_none=True)
class CommandReqSchema(OneOfSchema):
type_field = "cmd"
type_schemas = {
IPCCommand.FOREGROUND: ForegroundReqSchema,
IPCCommand.NEW_INSTANCE: NewInstanceReqSchema,
}
def get_obj_type(self, obj: Mapping[str, object]) -> object:
return obj["cmd"]
class CommandRepSchema(BaseSchema):
status = fields.String(required=True)
reason = fields.String(allow_none=True)
cmd_req_serializer = MsgpackSerializer(CommandReqSchema)
cmd_rep_serializer = MsgpackSerializer(CommandRepSchema)
DEFAULT_WIN32_MUTEX_NAME = "parsec-cloud"
@contextmanager
def _install_win32_mutex(mutex_name: str) -> Iterator[None]:
    # mypy: the `sys.platform` check makes mypy type-check this block only on Windows
    # (avoiding undefined-symbol errors on Linux and macOS). This function is not meant
    # to be called on other platforms.
if sys.platform == "win32":
from parsec.win32 import ERROR_ALREADY_EXISTS, CloseHandle, CreateMutex, GetLastError
try:
mutex = CreateMutex(None, False, mutex_name)
except OSError as exc:
raise IPCServerError(f"Cannot create mutex `{mutex_name}`: {exc}") from exc
status = GetLastError()
if status == ERROR_ALREADY_EXISTS:
CloseHandle(mutex)
raise IPCServerAlreadyRunning(f"Mutex `{mutex_name}` already exists")
try:
yield
finally:
CloseHandle(mutex)
else:
raise RuntimeError("_install_win32_mutex called on a platform different than `win32`")
@contextmanager
def _install_posix_file_lock(socket_file: Path) -> Iterator[None]:
import fcntl
try:
socket_file.parent.mkdir(mode=0o700, parents=True, exist_ok=True)
with open(socket_file, "a") as fd:
try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except OSError as exc:
raise IPCServerAlreadyRunning(f"Cannot lock file `{socket_file}`: {exc}") from exc
yield
# Lock is released on file descriptor closing
except OSError as exc:
raise IPCServerError(f"Cannot create lock file `{socket_file}`: {exc}") from exc
@asynccontextmanager
async def run_ipc_server(
cmd_handler: CommandHandlerCallback,
socket_file: Path,
win32_mutex_name: str = DEFAULT_WIN32_MUTEX_NAME,
) -> AsyncIterator[None]:
if sys.platform == "win32":
with _install_win32_mutex(win32_mutex_name):
async with _run_tcp_server(socket_file, cmd_handler):
yield
else:
with _install_posix_file_lock(socket_file):
async with _run_tcp_server(socket_file, cmd_handler):
yield
@asynccontextmanager
async def _run_tcp_server(
socket_file: Path, cmd_handler: CommandHandlerCallback
) -> AsyncIterator[None]:
async def _client_handler(stream: Stream) -> None:
# General exception handling
try:
# Stream handling
try:
unpacker = Unpacker()
async for raw in stream:
unpacker.feed(raw)
for cmd in unpacker:
cmd = cmd_req_serializer.load(cmd)
rep = await cmd_handler(cmd)
raw_rep = cmd_rep_serializer.dumps(rep)
logger.info("Command processed", cmd=cmd["cmd"], rep_status=rep["status"])
await stream.send_all(raw_rep)
except SerdeError as exc:
await stream.send_all(packb({"status": "invalid_format", "reason": str(exc)}))
finally:
await stream.aclose()
except trio.BrokenResourceError:
pass # Peer has closed the connection while we were sending a response
except Exception:
logger.exception("Unexpected crash")
try:
async with open_service_nursery() as nursery:
listeners = await nursery.start(
partial(trio.serve_tcp, _client_handler, 0, host="127.0.0.1")
)
port = listeners[0].socket.getsockname()[1]
# Make sure the path exists and write the socket file
socket_file.parent.mkdir(mode=0o700, parents=True, exist_ok=True)
socket_file.write_text(str(port))
logger.info("IPC server ready", port=port)
try:
yield
finally:
nursery.cancel_scope.cancel()
except OSError as exc:
raise IPCServerError(f"Cannot start IPC server: {exc}") from exc
async def send_to_ipc_server(socket_file: Path, cmd: IPCCommand, **kwargs: Any) -> dict[str, Any]:
try:
socket_port = int(socket_file.read_text().strip())
except (ValueError, OSError) as exc:
raise IPCServerNotRunning("Invalid IPC socket file") from exc
try:
stream = await trio.open_tcp_stream("127.0.0.1", socket_port)
raw_req = cmd_req_serializer.dumps({"cmd": cmd, **kwargs})
await stream.send_all(raw_req)
unpacker = Unpacker(exc_cls=IPCServerError)
while True:
raw = await stream.receive_some(1000)
if not raw:
raise IPCServerError(f"IPC server has closed the connection unexpectedly")
unpacker.feed(raw)
raw_rep = next(unpacker, None)
assert raw_rep is not None
rep = cmd_rep_serializer.load(raw_rep)
if rep:
if rep["status"] != "ok":
raise IPCServerBadResponse(rep)
return rep
except SerdeError as exc:
raise IPCServerError(f"Invalid message format: {exc}") from exc
except (OSError, trio.BrokenResourceError) as exc:
raise IPCServerNotRunning(f"Impossible to connect to IPC server: {exc}") from exc
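# Illustrative sketch (not part of this module): one way the pieces above can be
# wired together. The handler contract is inferred from `_client_handler` and
# `send_to_ipc_server`: the handler receives the decoded command dict and must
# return a dict with a "status" key ("ok" on success).
async def _ipc_roundtrip_sketch(socket_file: Path) -> None:
    async def handler(cmd: dict[str, object]) -> dict[str, Any]:
        if cmd["cmd"] == IPCCommand.FOREGROUND:
            return {"status": "ok"}
        return {"status": "unknown_command", "reason": str(cmd["cmd"])}

    async with run_ipc_server(handler, socket_file):
        rep = await send_to_ipc_server(socket_file, IPCCommand.FOREGROUND)
        assert rep["status"] == "ok"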
|
PypiClean
|
/tiamat_pip-1.11.0-py3-none-any.whl/tiamatpip/ext/pip/_vendor/tenacity/_asyncio.py
|
import functools
import sys
import typing
from asyncio import sleep
from pip._vendor.tenacity import AttemptManager
from pip._vendor.tenacity import BaseRetrying
from pip._vendor.tenacity import DoAttempt
from pip._vendor.tenacity import DoSleep
from pip._vendor.tenacity import RetryCallState
WrappedFn = typing.TypeVar("WrappedFn", bound=typing.Callable)
_RetValT = typing.TypeVar("_RetValT")
class AsyncRetrying(BaseRetrying):
def __init__(self, sleep: typing.Callable[[float], typing.Awaitable] = sleep, **kwargs: typing.Any) -> None:
super().__init__(**kwargs)
self.sleep = sleep
async def __call__( # type: ignore # Change signature from supertype
self,
fn: typing.Callable[..., typing.Awaitable[_RetValT]],
*args: typing.Any,
**kwargs: typing.Any,
) -> _RetValT:
self.begin()
retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
while True:
do = self.iter(retry_state=retry_state)
if isinstance(do, DoAttempt):
try:
result = await fn(*args, **kwargs)
except BaseException: # noqa: B902
retry_state.set_exception(sys.exc_info())
else:
retry_state.set_result(result)
elif isinstance(do, DoSleep):
retry_state.prepare_for_next_attempt()
await self.sleep(do)
else:
return do
def __aiter__(self) -> "AsyncRetrying":
self.begin()
self._retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
return self
async def __anext__(self) -> typing.Union[AttemptManager, typing.Any]:
while True:
do = self.iter(retry_state=self._retry_state)
if do is None:
raise StopAsyncIteration
elif isinstance(do, DoAttempt):
return AttemptManager(retry_state=self._retry_state)
elif isinstance(do, DoSleep):
self._retry_state.prepare_for_next_attempt()
await self.sleep(do)
else:
return do
def wraps(self, fn: WrappedFn) -> WrappedFn:
fn = super().wraps(fn)
# Ensure wrapper is recognized as a coroutine function.
@functools.wraps(fn)
async def async_wrapped(*args: typing.Any, **kwargs: typing.Any) -> typing.Any:
return await fn(*args, **kwargs)
# Preserve attributes
async_wrapped.retry = fn.retry
async_wrapped.retry_with = fn.retry_with
return async_wrapped
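# Illustrative sketch (not part of this module): the async-iterator protocol
# implemented above (`__aiter__`/`__anext__`) is typically consumed like this.
# `stop_after_attempt` is assumed to be exported by the same vendored tenacity
# package.
async def _async_retrying_sketch() -> str:
    from pip._vendor.tenacity import stop_after_attempt

    attempts = 0
    async for attempt in AsyncRetrying(stop=stop_after_attempt(3)):
        with attempt:  # AttemptManager records success/failure for `iter`
            attempts += 1
            if attempts < 2:
                raise RuntimeError("transient failure")
    return "succeeded after %d attempts" % attempts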
|
PypiClean
|
/gamma_desk-0.7.0-py3-none-any.whl/gdesk/panels/ndim/widget.py
|
import pathlib
import numpy as np
from qtpy.QtCore import Qt
from qtpy import QtWidgets, QtGui, QtCore
from ... import config
#
respath = pathlib.Path(config['respath'])
class NdimWidget(QtWidgets.QWidget):
"""Main widget for the ndim panel
It lets the user select the rows, column and optional color dim
and lets him slide through the other dims or do some calculations on them.
"""
DIM_CALC = dict(step=None, mean=np.mean, min=np.min, max=np.max, std=np.std, var=np.var)
def __init__(self, parent=None):
super().__init__(parent=parent)
self.panel = parent
self.setMinimumHeight(80)
self.setMinimumWidth(200)
self.vbox = QtWidgets.QVBoxLayout()
self.setLayout(self.vbox)
h = QtWidgets.QHBoxLayout()
self._row_col_color_labels = list()
self._row_col_color_labels.append(QtWidgets.QLabel("Row/y-dim: ", self))
h.addWidget(self._row_col_color_labels[-1])
self.rows = QtWidgets.QComboBox(self)
self.rows.setToolTip("Select which dimension is the row/y dim.")
self.rows.setMinimumHeight(15)
h.addWidget(self.rows)
self.vbox.addLayout(h)
h = QtWidgets.QHBoxLayout()
self._row_col_color_labels.append(QtWidgets.QLabel("Column/x-dim: ", self))
h.addWidget(self._row_col_color_labels[-1])
self.cols = QtWidgets.QComboBox(self)
self.cols.setMinimumHeight(15)
self.cols.setToolTip("Select which dimension is the col/x dim.")
h.addWidget(self.cols)
self.vbox.addLayout(h)
h = QtWidgets.QHBoxLayout()
self._row_col_color_labels.append(QtWidgets.QLabel("Color-dim: ", self))
h.addWidget(self._row_col_color_labels[-1])
self.color = QtWidgets.QComboBox(self)
self.color.setMinimumHeight(15)
self.color.setToolTip("Select which dimension is the color dim. Leave None if mono.")
h.addWidget(self.color)
self.vbox.addLayout(h)
self._collaps_label = QtWidgets.QLabel("▲")
self._collaps_label.mousePressEvent = lambda _: self.hide_row_column_color_selection()
self.vbox.addWidget(self._collaps_label)
self.vbox.addSpacerItem(QtWidgets.QSpacerItem(0, 0, QtWidgets.QSizePolicy.Minimum,
QtWidgets.QSizePolicy.Expanding))
self.rows.activated.connect(self.update_sliders)
self.cols.activated.connect(self.update_sliders)
self.color.activated.connect(self.update_sliders)
self.slider_widget = QtWidgets.QWidget(self)
self.vbox.addWidget(self.slider_widget)
self.data = None
self.data_name = None
self._color_dim_options = None
self._sliders = None
self._spin_boxes = None
self._dim_combos = None
self._dim_line_edits = None
self._dim_scale_labels = None
self.dim_names = None
self.dim_scales = None
self._cycling_dims = list()
self._play_labels = None
self.play_icon = QtGui.QPixmap(str(respath / 'icons' / 'px16' / 'control_play.png'))
self.pause_icon = QtGui.QPixmap(str(respath / 'icons' / 'px16' / 'control_pause.png'))
def hide_row_column_color_selection(self):
self.rows.hide()
self.cols.hide()
self.color.hide()
for l in self._row_col_color_labels:
l.hide()
self._collaps_label.setText("▼")
self._collaps_label.mousePressEvent = lambda _: self.show_row_column_color_selection()
def show_row_column_color_selection(self):
self.rows.show()
self.cols.show()
self.color.show()
for l in self._row_col_color_labels:
l.show()
self._collaps_label.setText("▲")
self._collaps_label.mousePressEvent = lambda _: self.hide_row_column_color_selection()
def load(self, data, name=None, dim_names=None, dim_scales=None):
"""Load a new ndim array
This data can come from code or from the gui that loads a new file
:param data: numpy nd array with the multi dim data
:param name: optional name of this data, is used when saving to hdf5
:param dim_names: optional list of length ndim with names per dimensions
:param dim_scales: optional list with (name, scale) tuples with scale values for each entry in a dim
:return: None
"""
self.data = data
self.data_name = name
if self.data is None:
self.dim_names = None
self.dim_scales = None
self.rows.clear()
self.color.clear()
self.cols.clear()
self._color_dim_options = {'None': None}
else:
# guess some defaults
if self.data.ndim > 3 and self.data.shape[-1] in (3, 4):
def_row = -3
def_column = -2
def_color = -1
else:
def_row = -2
def_column = -1
def_color = None
if dim_names is None:
dim_names = [None] * data.ndim
if dim_scales is None:
dim_scales = [(None, None)] * data.ndim
self.dim_names = dim_names
self.dim_scales = dim_scales
items = list()
for i, d in enumerate(self.data.shape):
if dim_names[i] is None or dim_names[i] == '':
dim_names[i] = f"dim-{i}"
item = f"{dim_names[i]}: [{d}]"
name, scale = dim_scales[i]
if name is not None or scale is not None:
item += f" - "
if name is not None:
item += name
if scale is not None:
item += f" [{scale[0]} - {scale[-1]}]"
items.append(item)
self.rows.clear()
self.rows.addItems(items)
self.rows.setCurrentIndex(self.data.ndim + def_row)
self.cols.clear()
self.cols.addItems(items)
self.cols.setCurrentIndex(self.data.ndim + def_column)
self.color.clear()
self.color.addItem("None")
self._color_dim_options = {'None': None}
for i, d in enumerate(self.data.shape):
if 3 <= d <= 4:
text = items[i]
self.color.addItem(text)
self._color_dim_options[text] = i
if def_color is None:
self.color.setCurrentIndex(0)
else:
self.color.setCurrentIndex(self.color.count() + def_color)
self.update_sliders()
def update_data(self, data):
"""Only update the data but leave the rest as is
If data is still None or the shape of the current and new data is not the same then the load method is called.
"""
if self.data is not None and self.data.shape != data.shape:
self.load(data)
if self.data is None:
self.load(data)
self.data = data
self._update_image()
def update_sliders(self):
"""Update the sliders after the x, y and color dims have changed"""
self._sliders = dict()
self._spin_boxes = dict()
self._dim_combos = dict()
self._dim_line_edits = dict()
self._dim_scale_labels = dict()
self._play_labels = dict()
self._cycling_dims = list()
self.vbox.removeWidget(self.slider_widget)
self.slider_widget.deleteLater()
self.slider_widget = QtWidgets.QWidget(self)
slider_lay = QtWidgets.QVBoxLayout()
if self.data is None:
ndim = 0
else:
ndim = self.data.ndim
for dim in range(ndim):
if dim in (self.row_dim, self.column_dim, self.color_dim):
continue
h = QtWidgets.QHBoxLayout()
dim_name = QtWidgets.QLineEdit(self.dim_names[dim])
dim_name.setMaximumWidth(80)
dim_name.setMinimumHeight(15)
self._dim_line_edits[dim] = dim_name
h.addWidget(dim_name)
scale_name, scale = self.dim_scales[dim]
if scale is not None:
if scale_name is not None:
scale_name_label = QtWidgets.QLabel(scale_name)
scale_name_label.setMinimumHeight(15)
h.addWidget(scale_name_label)
scale_label = QtWidgets.QLabel(f"{scale[0]:.4g}")
self._dim_scale_labels[dim] = scale_label
h.addWidget(scale_label)
h.addSpacerItem(QtWidgets.QSpacerItem(0, 0, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum))
slider_lay.addLayout(h)
h = QtWidgets.QHBoxLayout()
play = QtWidgets.QLabel(self)
play.setPixmap(self.play_icon)
play.mousePressEvent = lambda *args, d=dim: self._cycle_dim(dim=d)
self._play_labels[dim] = play
play.setMinimumHeight(15)
h.addWidget(play)
slider = QtWidgets.QSlider(Qt.Horizontal, self)
slider.setMinimum(0)
slider.setMaximum(self.data.shape[dim]-1)
slider.setTickInterval(1)
slider.setSingleStep(1)
slider.setPageStep(1)
slider.setMinimumHeight(15)
self._sliders[dim] = slider
slider.valueChanged.connect(self._update_image)
h.addWidget(slider)
spin = QtWidgets.QSpinBox(self)
spin.setValue(0)
spin.setMinimum(0)
spin.setMaximum(self.data.shape[dim]-1)
spin.setMinimumWidth(40)
spin.valueChanged.connect(slider.setValue)
spin.setMinimumHeight(15)
slider.valueChanged.connect(spin.setValue)
self._spin_boxes[dim] = spin
h.addWidget(spin)
lab = QtWidgets.QLabel(f"| {self.data.shape[dim]}")
lab.setMinimumWidth(20)
lab.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
lab.setMinimumHeight(15)
h.addWidget(lab)
dim_combo = QtWidgets.QComboBox(self)
dim_combo.addItems(list(self.DIM_CALC.keys()))
dim_combo.setCurrentIndex(0)
dim_combo.setMinimumHeight(15)
dim_combo.activated.connect(lambda *args, d=dim: self._combo_changed(d))
self._dim_combos[dim] = dim_combo
h.addWidget(dim_combo)
slider_lay.addLayout(h)
slider_lay.addSpacerItem(QtWidgets.QSpacerItem(0, 0, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding))
self.slider_widget.setLayout(slider_lay)
self.slider_widget.setMinimumHeight(10 + 25 * len(self._sliders))
self.vbox.addWidget(self.slider_widget)
self._update_image()
@property
def row_dim(self):
return self.rows.currentIndex()
@property
def column_dim(self):
return self.cols.currentIndex()
@property
def color_dim(self):
return self._color_dim_options[self.color.currentText()]
def _update_image(self):
"""Update output image after sliders or calc options have changed"""
if self.data is None:
return
# get the indexes right
indexes = [0] * self.data.ndim
indexes[self.row_dim] = slice(None)
indexes[self.column_dim] = slice(None)
if self.color_dim is not None:
indexes[self.color_dim] = slice(None)
for dim, slider in self._sliders.items():
if self._dim_combos[dim].currentText() == 'step':
                indexes[dim] = slice(slider.value(), slider.value()+1)  # use slicing to not (yet) lose the dim
else:
indexes[dim] = slice(None) # for calculations, we need all the data
# do the actual indexing
im = self.data[tuple(indexes)]
# perform any calculations if needed
for dim, combo in reversed(self._dim_combos.items()):
combo_text = combo.currentText()
if combo_text == 'step':
pass
else:
im = self.DIM_CALC[combo_text](im, axis=dim)
# get rid of any dims with len 1
im = im.squeeze()
# move around the axis until we have row/col and optionally color in this particular order
if self.color_dim is None:
if self.row_dim > self.column_dim:
im = np.moveaxis(im, 0, 1)
else:
if not self.row_dim < self.column_dim < self.color_dim:
im = np.moveaxis(im, np.argsort(np.array((self.row_dim, self.column_dim, self.color_dim))),
(0, 1, 2))
# send to any image viewer panel that is connected
for panel in self.parent().targetPanels('image'):
panel.show_array(im, zoomFitHist=False, log=False)
# update the scales
for dim, scale_label in self._dim_scale_labels.items():
scale_label.setText(f"{self.dim_scales[dim][1][self._sliders[dim].value()]:.4g}")
def _combo_changed(self, dim):
"""Update the sliders behavior based on the combo selection
The sliders are disabled when 'step' is not selected and the image is refreshed
"""
if self._dim_combos[dim].currentText() == 'step':
self._sliders[dim].setEnabled(True)
self._spin_boxes[dim].setEnabled(True)
else:
self._sliders[dim].setEnabled(False)
self._spin_boxes[dim].setEnabled(False)
if dim in self._cycling_dims:
self._cycle_dim(dim=dim)
self._update_image()
def get_save_data(self):
"""Get all possible relevant data to save to file"""
# update dim names with possible user adjusts
for dim, line_edit in self._dim_line_edits.items():
self.dim_names[dim] = line_edit.text()
return dict(data=self.data, data_name=self.data_name, dim_names=self.dim_names, dim_scales=self.dim_scales)
def _cycle_dim(self, dim):
"""handle the automatic cycling of a dim (play/pause button)"""
if dim in self._cycling_dims:
self._cycling_dims.remove(dim)
self._play_labels[dim].setPixmap(self.play_icon)
else:
if self._dim_combos[dim].currentText() != 'step':
return
self._play_labels[dim].setPixmap(self.pause_icon)
self._cycling_dims.append(dim)
if len(self._cycling_dims):
self._cycle_timer = QtCore.QTimer()
self._cycle_timer.timeout.connect(lambda: self._advance_dim(0))
self._cycle_timer.start(250)
else:
self._cycle_timer.stop()
def _advance_dim(self, ind):
"""Recursive method to advance in the automatic cycling
        If one cycle overflows, it advances the next dim in the list and resets the current one.
The order in which the dims are added to the _cycling_dims is used for this.
"""
try:
dim = self._cycling_dims[ind]
except IndexError:
return
val = self._spin_boxes[dim].value()
if val == self._spin_boxes[dim].maximum():
self._advance_dim(ind+1)
self._spin_boxes[dim].setValue(0)
self._sliders[dim].setValue(0)
else:
self._spin_boxes[dim].setValue(val+1)
self._sliders[dim].setValue(val+1)
def clear_data(self):
"""Clear current data and put back in startup state"""
self.data = None
self.load(data=None)
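# Illustrative sketch (not part of this module): feeding `load` a 4-D stack,
# assuming the widget is already embedded in its ndim panel (the panel provides
# `targetPanels('image')`, which `_update_image` relies on). Shapes, names and
# scales below are placeholders.
def _load_example(widget: NdimWidget):
    data = np.random.rand(10, 5, 256, 320)  # e.g. time, z, rows, cols
    widget.load(
        data,
        name='demo_stack',
        dim_names=['time', 'z', 'row', 'col'],
        dim_scales=[('t [s]', np.arange(10) * 0.1),
                    (None, None), (None, None), (None, None)],
    )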
|
PypiClean
|
/poeem-1.2.2.tar.gz/poeem-1.2.2/src/python/embedding.py
|
import tensorflow as tf
from tensorflow.estimator import ModeKeys
import contextlib
from poeem.ops.python import clustering
def compute_distortion(x, x_tau):
# x, x_tau : [n_batch, d]
return tf.reduce_mean(tf.sqrt(tf.reduce_mean(tf.square(x - x_tau), axis=1)))
def compute_rotation(x, x_tau):
"""
    Solve the orthogonal Procrustes problem via its closed-form solution;
    see the paper "Optimized Product Quantization for Approximate Nearest Neighbor Search".
    Basically,
min_R ||RX - Y||_F^2
s.t. R^T R = I
the solution to R can be obtained as follows.
XY^T = USV^T
R = VU^T
The below code is for transposed X and Y
min_R ||XR - Y||_F^2
s.t. R^T R = I
the solution to R can be obtained as follows.
X^T Y = USV^T
R = U V^T
"""
M = tf.matmul(x, x_tau, transpose_a=True)
s, u, v = tf.svd(M)
R = tf.matmul(u, v, transpose_b=True)
return R
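# Illustrative NumPy check (not part of this module) of the closed-form solution
# documented above: for X, Y with X^T Y = U S V^T, the orthogonal R = U V^T
# minimizes ||X R - Y||_F. NumPy is used so the check runs without a TF graph.
def _procrustes_numpy_check(n=1000, d=8, seed=0):
    import numpy as np
    rng = np.random.RandomState(seed)
    x = rng.randn(n, d)
    q, _ = np.linalg.qr(rng.randn(d, d))  # ground-truth rotation
    y = x @ q                             # rotated copy of x
    u, s, vt = np.linalg.svd(x.T @ y)
    r = u @ vt                            # closed-form Procrustes solution
    return np.allclose(x @ r, y, atol=1e-6)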
class CayleyTransformer:
def __init__(self, d):
self._d = d
indices = tf.where(tf.ones([self._d, self._d]))
x, y = tf.unstack(indices, axis=1)
self._upper_triangle_indices = tf.boolean_mask(indices, tf.less(x, y))
self._eye = tf.eye(self._d)
def param_to_matrix(self, cayley_param):
shape = [self._d, self._d]
ret = tf.scatter_nd(self._upper_triangle_indices, cayley_param, shape)
return ret - tf.transpose(ret)
def cayley_transform(self, cayley_param):
"""Maps an n choose 2 dimensional vector to a rotation matrix."""
A = self.param_to_matrix(cayley_param)
return tf.matmul(tf.linalg.inv(self._eye + A), self._eye - A)
def skew_symmetric_matrix_to_param(self, skew_sym_mat):
return tf.gather_nd(skew_sym_mat, self._upper_triangle_indices)
def inverse_cayley(self, rotation_matrix):
"""Maps a rotation matrix without +/-1 eigenvalues to an (n choose 2)
dimensional vector."""
rot = rotation_matrix
skew_sym_mat = tf.matmul(tf.linalg.inv(self._eye + rot), self._eye - rot)
return self.skew_symmetric_matrix_to_param(skew_sym_mat)
def gradient_transform(self, cayley_param, cayley_grad):
"""Compute InvCay( CayTrans(A)^{-1} d CayTrans_A (H) )."""
A, H = map(self.param_to_matrix, [cayley_param, cayley_grad])
# d Cay_A(H) = 2 (I + A)^{-1} H (I + A)^{-1}, see
# https://math.stackexchange.com/questions/3099219. Note they map A to -A.
# Thus Cay(A)^{-1} d Cay_A(H) = (I - A)^{-1} H (I + A)^{-1}.
return 2 * tf.linalg.inv(self._eye - A) @ H @ tf.linalg.inv(self._eye + A)
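# Illustrative sketch (not part of this module): a quick orthogonality check of
# the Cayley transform above. For a skew-symmetric A built from the (d choose 2)
# parameter vector, (I + A)^{-1} (I - A) is orthogonal. Assumes eager execution
# so the tensors can be evaluated directly.
def _cayley_orthogonality_check(d=4):
    import tensorflow as tf
    ct = CayleyTransformer(d)
    param = tf.random.normal([d * (d - 1) // 2])
    R = ct.cayley_transform(param)
    # max deviation of R R^T from the identity should be close to 0
    return tf.reduce_max(tf.abs(tf.matmul(R, R, transpose_b=True) - tf.eye(d)))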
class PoeemQuantizer(object):
def __init__(self, coarse_K, K, D, d, rotate, buffer_size, svd_steps=200):
"""
Args:
coarse_K: int, size of coarse quantization
K, D: int, size of KD code.
d: dim of continuous input for each of D axis.
rotate: int, if we want to learn a rotation to minimize distortion
buffer_size: data buffer to initialize centroids
"""
self._coarse_K = coarse_K
self._K = K
self._D = D
self._d = d
self._sub_d = d // D
self._buffer_size = buffer_size
self._rotate = rotate
self._svd_steps = svd_steps
        assert self._sub_d >= 1, '_sub_d must be >= 1; this happens when D is greater than the embedding dimension d'
self._sample_buffer = tf.Variable(
name='sample_buffer',
initial_value=tf.zeros([0, self._d]),
shape=[None, self._d],
use_resource=True, # necessary to have undetermined dimension size.
trainable=False,
aggregation=tf.VariableAggregation.SUM)
self._sample_size = tf.get_variable(
name='sample_size',
shape=[],
dtype=tf.int32,
initializer=tf.zeros_initializer(),
use_resource=True, # necessary if variable is used in tf.cond condition
trainable=False,
aggregation=tf.VariableAggregation.SUM)
self._initialized = tf.get_variable(
name='initialized',
shape=[],
dtype=tf.int32,
initializer=tf.zeros_initializer(),
use_resource=True, # necessary if variable is used in tf.cond condition
trainable=False,
aggregation=tf.VariableAggregation.SUM)
self._coarse_centroids = tf.get_variable(
name='coarse_centroids',
shape=[self._coarse_K, self._d],
aggregation=tf.VariableAggregation.SUM) if self._coarse_K > 0 else None
self._centroids = tf.get_variable(
name="centroids_k",
shape=[self._D, self._K, self._sub_d],
aggregation=tf.VariableAggregation.SUM)
self._rotation_matrix = tf.get_variable(
name="rotation_matrix",
shape=[self._d, self._d],
initializer=tf.keras.initializers.Identity(),
trainable=True,
aggregation=tf.VariableAggregation.SUM) if rotate else None
def coarse_l2_distance(self, x, coarse_centroids):
n_batch = tf.shape(x)[0]
norm_1 = tf.tile(
tf.reduce_sum(x**2, axis=-1, keep_dims=True), # n_batch x coarse_K
[1, self._coarse_K])
norm_2 = tf.tile(
tf.transpose(
tf.reduce_sum(coarse_centroids**2, axis=-1, keep_dims=True),
[1, 0]), # n_batch x coarse_K
[n_batch, 1])
# x = tf.Print(x, [tf.shape(x), tf.shape(coarse_centroids)], message='before dot ')
dot = tf.matmul(x, coarse_centroids, transpose_b=True) # n_batch x coarse_K
# dot = tf.Print(dot, [tf.shape(x), tf.shape(coarse_centroids), tf.shape(dot)], message='after dot ')
# norm_1 = tf.Print(norm_1, [tf.shape(norm_1), tf.shape(norm_2), tf.shape(dot)], message='before l2_sqr ')
l2_sqr = norm_1 + norm_2 - 2 * dot # n_batch x coarse_K
# l2_sqr = tf.Print(l2_sqr, [tf.shape(norm_1), tf.shape(norm_2), tf.shape(dot)], message='after l2_sqr ')
return l2_sqr
def pq_l2_distance(self, x, centroids):
n_batch = tf.shape(x)[0]
x = tf.reshape(x, [n_batch, self._D, self._sub_d])
norm_1 = tf.reduce_sum(x**2, -1, keep_dims=True) # (n_batch, D, 1)
norm_2 = tf.expand_dims(tf.reduce_sum(centroids**2, -1), 0) # (1, D, K)
dot = tf.matmul(tf.transpose(x, perm=[1, 0, 2]),
tf.transpose(centroids, perm=[0, 2, 1])) # (D, n_batch, K)
l2_sqr = norm_1 + norm_2 - 2 * tf.transpose(dot, perm=[1, 0, 2]) # (n_batch, D, K)
return l2_sqr
def quantize(self, x, coarse_centroids, centroids):
# x: (batch_size, d)
n_batch = tf.shape(x)[0]
coarse_code, coarse_output = None, None
if self._coarse_K > 0:
# coarse_K > 0 means use residual
coarse_l2_sqr = self.coarse_l2_distance(x, coarse_centroids)
coarse_code = tf.argmin(coarse_l2_sqr, -1)
coarse_output = tf.nn.embedding_lookup(
coarse_centroids, coarse_code)
# compute residual
x = x - coarse_output
l2_sqr = self.pq_l2_distance(x, centroids) # (n_batch, D, K)
code = tf.argmin(l2_sqr, -1) # (n_batch, D)
neighbor_idxs = code
# Compute the outputs, which has shape (batch_size, D, sub_d)
D_base = tf.convert_to_tensor(
[self._K*d for d in range(self._D)], dtype=tf.int64)
neighbor_idxs += tf.expand_dims(D_base, 0) # (batch_size, D)
neighbor_idxs = tf.reshape(neighbor_idxs, [-1]) # (batch_size * D)
centroids = tf.reshape(centroids, [-1, self._sub_d])
output = tf.nn.embedding_lookup(centroids, neighbor_idxs)
output = tf.reshape(output, [n_batch, self._d])
if self._coarse_K > 0:
x_tau = coarse_output + output
else:
x_tau = output
return x_tau, coarse_code, code
def compute_centroids(self, x, max_iter=100, change_pct_thr=0.01):
coarse_centroids = None
# run coarse clustering and clustering in parallel
if self._coarse_K > 0:
coarse_centroids, _ = clustering.kmeans_raw(
x, self._coarse_K,
max_iter=max_iter, change_percentage_thr=change_pct_thr, verbose=1)
coarse_centroids = tf.reshape(coarse_centroids, [self._coarse_K, self._d])
centroids_list = []
for i in range(self._D):
ctrd, _ = clustering.kmeans_raw(
x[:, i*self._sub_d:(i+1)*self._sub_d], self._K,
max_iter=max_iter, change_percentage_thr=change_pct_thr, verbose=1)
centroids_list.append(ctrd)
centroids = tf.stack(centroids_list, axis=0)
centroids = tf.reshape(centroids, [self._D, self._K, self._sub_d])
return coarse_centroids, centroids
def forward(self, x, rotation_matrix=None):
"""Rotate x, compute quantized x_tau, then rotate x_tau back."""
with tf.name_scope("Poeem_forward"):
if self._rotate > 0:
assert rotation_matrix is not None
x = tf.matmul(x, rotation_matrix)
x_tau, coarse_code, code = self.quantize(
x, self._coarse_centroids, self._centroids)
x_tau = tf.matmul(x_tau, tf.transpose(rotation_matrix, [1, 0]))
else:
x_tau, coarse_code, code = self.quantize(
x, self._coarse_centroids, self._centroids)
return x_tau, coarse_code, code
def accumulate(self, x):
# cutoff data to fit in sample buffer
batch_size = tf.minimum(
tf.shape(x)[0],
self._buffer_size - self._sample_size)
x_sub = tf.slice(x, [0, 0], [batch_size, -1])
assign_tensor = self._sample_buffer.assign(
tf.concat([self._sample_buffer, x_sub], axis=0))
sample_size_tensor = self._sample_size.assign_add(batch_size)
return assign_tensor, sample_size_tensor
def init_centroids(self, max_iter=None, change_pct_thr=0.01):
assign_ops = []
if self._rotate > 0:
def loop_body(i, R, data):
x = tf.matmul(data, R)
coarse_centroids, centroids = self.compute_centroids(
x, max_iter=max_iter or 10, change_pct_thr=change_pct_thr)
x_tau, coarse_code, code = self.quantize(x, coarse_centroids, centroids)
distortion = compute_distortion(x, x_tau)
x_tau = tf.Print(x_tau, [i, distortion], message='distortion = ')
R = compute_rotation(data, x_tau)
i = tf.add(i, 1)
return i, R, data
def condition(i, R, data):
return tf.less(i, self._svd_steps)
# find the optimized rotation to minimize distortion, by alternative minimization
i = tf.constant(0)
R = tf.eye(self._d)
data = tf.reshape(self._sample_buffer, [self._buffer_size , self._d]) # necessary to avoid dimension inconsistency error
# run while_loop in sequence
_, R, _ = tf.while_loop(
condition, loop_body, [i, R, data],
shape_invariants=[
tf.TensorShape([]),
tf.TensorShape([self._d, self._d]),
tf.TensorShape([self._buffer_size, self._d])])
# apply the rotation
x = tf.matmul(self._sample_buffer, R)
coarse_centroids, centroids = self.compute_centroids(
x, max_iter=max_iter or 100, change_pct_thr=change_pct_thr)
assign_ops.append(self._rotation_matrix.assign(R))
else:
coarse_centroids, centroids = self.compute_centroids(self._sample_buffer,
max_iter=max_iter or 100, change_pct_thr=change_pct_thr)
if self._coarse_K > 0:
assign_ops.append(self._coarse_centroids.assign(coarse_centroids))
assign_ops.append(self._centroids.assign(centroids))
assign_ops.append(self._initialized.assign(1))
return assign_ops
def clear_sample_buffer(self):
buffer_assign = tf.assign(self._sample_buffer, tf.zeros([0, self._d]), validate_shape=False)
return buffer_assign,
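# Illustrative NumPy sketch (not part of this module) of the product-quantization
# bookkeeping performed by `PoeemQuantizer.pq_l2_distance`/`quantize` above: each
# d-dim vector is split into D sub-vectors of size sub_d = d // D, each sub-vector
# is snapped to the nearest of K per-subspace centroids, and the reconstruction
# concatenates the chosen centroids.
def _pq_numpy_sketch(x, centroids):
    # x: (n, d) array; centroids: (D, K, sub_d) array with d == D * sub_d
    import numpy as np
    n, d = x.shape
    D, K, sub_d = centroids.shape
    assert d == D * sub_d
    xs = x.reshape(n, D, sub_d)
    # squared distance from every sub-vector to every centroid: (n, D, K)
    dist = ((xs[:, :, None, :] - centroids[None, :, :, :]) ** 2).sum(-1)
    code = dist.argmin(-1)  # (n, D) KD code, like `code` returned by `quantize`
    x_tau = np.empty_like(xs)
    for j in range(D):
        x_tau[:, j] = centroids[j][code[:, j]]
    return code, x_tau.reshape(n, d)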
class PoeemEmbed(object):
def __init__(self, emb_size, warmup_steps=1024, buffer_size=8192, mode=ModeKeys.PREDICT,
hparams=None, name="poeem_emb", initializer=None, svd_steps=200):
if hparams is None:
hparams = PoeemHparam()
self._warmup_steps = warmup_steps
self._buffer_size = buffer_size
self._hparams = hparams
self._mode = mode
self._d, self._K, self._D, self._coarse_K, self._rotate = (
emb_size, hparams.K, hparams.D, hparams.coarse_K, hparams.rotate)
self._quantizer = PoeemQuantizer(self._coarse_K, self._K, self._D, self._d,
self._rotate, buffer_size, svd_steps=svd_steps)
def forward(self, x):
def forward_layer(x):
n_batch = tf.shape(x)[0]
x_tau, coarse_code, code = (
# the existence of this op is necessary to enable control dependencies working
tf.identity(x),
tf.zeros([n_batch], dtype=tf.int64),
tf.zeros([n_batch, self._D], dtype=tf.int64))
regularizer = 0.0
return x_tau, coarse_code, code, regularizer
def pq_layer(x):
def accumulate_layer(x):
with tf.device('/device:CPU:0') if self._hparams.kmeans_on_cpu else (
contextlib.suppress()):
deps = self._quantizer.accumulate(x)
with tf.control_dependencies(deps):
return forward_layer(x)
def init_layer(x):
with tf.device('/device:CPU:0') if self._hparams.kmeans_on_cpu else (
contextlib.suppress()):
deps = self._quantizer.init_centroids(
max_iter=self._hparams.kmeans_max_iter,
change_pct_thr=self._hparams.kmeans_change_pct_thr)
with tf.control_dependencies(deps):
clear_deps = self._quantizer.clear_sample_buffer()
with tf.control_dependencies(clear_deps):
return internal_pq_layer(x, init=True)
def internal_pq_layer(x, init=False):
rotation_matrix=self._quantizer._rotation_matrix
if init and rotation_matrix is not None:
rotation_matrix = tf.stop_gradient(rotation_matrix)
x_tau, coarse_code, code = self._quantizer.forward(x, rotation_matrix)
regularizer = tf.reduce_sum((x_tau - tf.stop_gradient(x))**2)
if coarse_code is None: # dummy placeholder for tf.cond.
coarse_code = tf.zeros([tf.shape(x)[0]], dtype=tf.int64)
return x_tau, coarse_code, code, regularizer
if self._quantizer._buffer_size < 0 or self._mode != ModeKeys.TRAIN:
return internal_pq_layer(x) # skip kmeans/svd initialization.
return tf.cond(tf.less(self._quantizer._sample_size, self._quantizer._buffer_size),
lambda: accumulate_layer(x), # accumulate data
lambda: tf.cond(
tf.equal(self._quantizer._initialized, 0),
lambda: init_layer(x), # init centroids
lambda: internal_pq_layer(x)))
if self._mode == ModeKeys.TRAIN:
# forward_layer: 1. steps from 0 to _warmup_steps, train model without pq
# pq_layer: 2. steps from _warmup_steps to _buffer_size/batch_size,
# train model without pq and accumulate input data in _sample_buffer
# 3. clustering input data in _sample_buffer to init centroids and start to train pq model
step = tf.cast(tf.train.get_global_step(), tf.int32)
if self._warmup_steps > 0:
x_tau, coarse_code, code, regularizer = tf.cond(
tf.less_equal(step, self._warmup_steps),
true_fn=lambda: forward_layer(x),
false_fn=lambda: pq_layer(x))
else:
x_tau, coarse_code, code, regularizer = pq_layer(x)
update_ops = self.metrics(x, x_tau, coarse_code, code)
with tf.control_dependencies(update_ops):
x_tau = tf.identity(x_tau)
return x_tau, coarse_code, code, regularizer
def metrics(self, x, x_tau, coarse_code, code):
distortion = compute_distortion(x, x_tau)
tf.summary.scalar('distortion', distortion)
update_ops = []
def full_summary(dim, code, name):
v, idx, counts = tf.unique_with_counts(code)
tf.summary.scalar(
'code_distribution/unique_%s_count' % name, tf.shape(v)[0])
tf.summary.histogram(
'code_distribution/%s_histogram' % name, code)
tally_var = tf.Variable(tf.zeros([dim], dtype=tf.int64),
name=name + '_cumulative_count', dtype=tf.int64, trainable=False,
aggregation=tf.VariableAggregation.SUM)
update_ops.append(tf.scatter_add(tally_var, v, tf.to_int64(counts)))
tally_var = tf.to_float(tally_var)
cum_frac = (tally_var + 1e-8) / (tf.reduce_sum(tally_var) + 1e-8 * dim)
kl = tf.keras.losses.KLDivergence(reduction=tf.compat.v1.losses.Reduction.NONE)
# ideally cluster sizes should be uniform, so track distances to uniform.
uniform = tf.ones([dim], dtype=tf.float32) / dim
def tv(dist1, dist2): # total variation distance of two prob. measures.
return tf.reduce_sum(tf.abs(dist1 - dist2)) / 2.0
tf.summary.scalar('code_distribution/%s_kld' % name, kl(cum_frac, uniform))
tf.summary.scalar('code_distribution/%s_tvd' % name, tv(cum_frac, uniform))
if self._coarse_K > 0:
full_summary(self._coarse_K, coarse_code, 'coarse_code')
code_part0 = code[:, 0]
full_summary(self._K, code_part0, 'code0')
return update_ops
class PoeemHparam(object):
# A default Poeem parameter setting (demo)
def __repr__(self):
return str({k: getattr(self, k) for k in dir(self) if
not k.startswith('_') and 'built-in' not in str(getattr(self, k))})
def __init__(self,
coarse_K=128,
K=16,
D=32,
rotate=0,
kmeans_max_iter=100,
kmeans_change_pct_thr=1e-2,
kmeans_on_cpu=False):
"""
        Args:
          coarse_K: int, size of the coarse quantization (0 disables the coarse/residual stage).
          K, D: int, size of the KD code (D sub-spaces with K centroids each).
          rotate: int, whether to learn a rotation that minimizes distortion.
          kmeans_max_iter: int, maximum number of k-means iterations when initializing centroids.
          kmeans_change_pct_thr: float, k-means early-stop threshold on the centroid change percentage.
          kmeans_on_cpu: bool, run k-means on CPU (k-means on GPU may run out of memory).
"""
self.coarse_K = coarse_K
self.K = K
self.D = D
self.rotate = rotate
self.kmeans_max_iter = kmeans_max_iter
self.kmeans_change_pct_thr = kmeans_change_pct_thr
self.kmeans_on_cpu = kmeans_on_cpu # gpu may run OOM.
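# Illustrative sketch (not part of this module): wiring PoeemEmbed into a model
# function. The warmup/accumulate/init phasing implemented in `forward` only
# applies under ModeKeys.TRAIN; the embedding size and hyper-parameters below
# are placeholders.
def _poeem_embed_sketch(user_emb, mode=ModeKeys.PREDICT):
    # user_emb: (batch, 64) float tensor from the embedding tower
    hparams = PoeemHparam(coarse_K=0, K=16, D=8)  # no coarse/residual stage
    poeem = PoeemEmbed(emb_size=64, warmup_steps=1024, buffer_size=8192,
                       mode=mode, hparams=hparams)
    emb_quantized, coarse_code, code, regularizer = poeem.forward(user_emb)
    # `regularizer` is typically added to the training loss
    return emb_quantized, regularizer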
|
PypiClean
|
/user_discord-2.0.6.tar.gz/user_discord-2.0.6/user_discord/discord/calls.py
|
from __future__ import annotations
import datetime
from typing import Callable, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
from . import utils
from .errors import ClientException
from .utils import cached_slot_property
from .voice_client import VoiceClient
if TYPE_CHECKING:
from . import abc
from .abc import T as ConnectReturn
from .channel import DMChannel, GroupChannel
from .client import Client
from .member import VoiceState
from .message import Message
from .state import ConnectionState
from .user import BaseUser, User
_PrivateChannel = Union[abc.DMChannel, abc.GroupChannel]
__all__ = (
'CallMessage',
'PrivateCall',
'GroupCall',
)
def _running_only(func: Callable):
def decorator(self: Call, *args, **kwargs):
if self._ended:
raise ClientException('Call is over')
else:
return func(self, *args, **kwargs)
return decorator
class CallMessage:
"""Represents a group call message from Discord.
This is only received in cases where the message type is equivalent to
:attr:`MessageType.call`.
Attributes
-----------
ended_timestamp: Optional[:class:`datetime.datetime`]
An aware UTC datetime object that represents the time that the call has ended.
participants: List[:class:`User`]
A list of users that participated in the call.
message: :class:`Message`
The message associated with this call message.
"""
__slots__ = ('message', 'ended_timestamp', 'participants')
def __init__(self, message: Message, *, participants: List[User], ended_timestamp: Optional[str]) -> None:
self.message = message
self.ended_timestamp = utils.parse_time(ended_timestamp)
self.participants = participants
@property
def call_ended(self) -> bool:
""":class:`bool`: Indicates if the call has ended."""
return self.ended_timestamp is not None
@property
def initiator(self) -> User:
""":class:`.abc.User`: Returns the user that started the call."""
return self.message.author # type: ignore # Cannot be a Member in private messages
@property
def channel(self) -> _PrivateChannel:
""":class:`.abc.PrivateChannel`: The private channel associated with this message."""
return self.message.channel # type: ignore # Can only be a private channel here
@property
def duration(self) -> datetime.timedelta:
"""Queries the duration of the call.
If the call has not ended then the current duration will
be returned.
Returns
---------
:class:`datetime.timedelta`
The timedelta object representing the duration.
"""
if self.ended_timestamp is None:
return utils.utcnow() - self.message.created_at
else:
return self.ended_timestamp - self.message.created_at
class PrivateCall:
"""Represents the actual group call from Discord.
This is accompanied with a :class:`CallMessage` denoting the information.
.. versionadded:: 1.9
Attributes
-----------
channel: :class:`DMChannel`
The channel the call is in.
unavailable: :class:`bool`
Denotes if this call is unavailable.
region: :class:`str`
The region the call is being hosted at.
.. versionchanged:: 2.0
The type of this attribute has changed to :class:`str`.
"""
__slots__ = ('_state', '_ended', 'channel', '_cs_message', '_ringing', '_message_id', 'region', 'unavailable')
if TYPE_CHECKING:
channel: DMChannel
def __init__(
self,
*,
data: dict,
state: ConnectionState,
message: Optional[Message],
channel: abc.PrivateChannel,
) -> None:
self._state = state
self._cs_message = message
self.channel = channel # type: ignore # Will always be a DMChannel here
self._ended: bool = False
self._update(data)
def _delete(self) -> None:
self._ringing = tuple()
self._ended = True
def _get_recipients(self) -> Tuple[BaseUser, ...]:
channel = self.channel
return channel.me, channel.recipient
def _is_participating(self, user: BaseUser) -> bool:
state = self.voice_state_for(user)
return bool(state and state.channel and state.channel.id == self.channel.id)
def _update(self, data) -> None:
self._message_id = int(data['message_id'])
self.unavailable = data.get('unavailable', False)
try:
self.region: str = data['region']
except KeyError:
pass
channel = self.channel
recipients = self._get_recipients()
lookup = {u.id: u for u in recipients}
self._ringing = tuple(filter(None, map(lookup.get, data.get('ringing', []))))
for vs in data.get('voice_states', []):
self._state._update_voice_state(vs, channel.id)
@property
def ringing(self) -> List[BaseUser]:
"""List[:class:`.abc.User`]: A list of users that are currently being rung to join the call."""
return list(self._ringing)
@property
def initiator(self) -> Optional[User]:
"""Optional[:class:`.abc.User`]: Returns the user that started the call. Returns ``None`` if the message is not cached."""
return getattr(self.message, 'author', None)
@property
def connected(self) -> bool:
""":class:`bool`: Returns whether you're in the call (this does not mean you're in the call through the library)."""
return self._is_participating(self.channel.me)
@property
def members(self) -> List[BaseUser]:
"""List[:class:`.abc.User`]: Returns all users that are currently in this call."""
recipients = self._get_recipients()
return [u for u in recipients if self._is_participating(u)]
@property
def voice_states(self) -> Dict[int, VoiceState]:
"""Mapping[:class:`int`, :class:`VoiceState`]: Returns a mapping of user IDs who have voice states in this call."""
return {
k: v for k, v in self._state._voice_states.items() if bool(v and v.channel and v.channel.id == self.channel.id)
}
@cached_slot_property('_cs_message')
def message(self) -> Optional[Message]:
"""Optional[:class:`Message`]: The message associated with this call. Sometimes may not be cached."""
return self._state._get_message(self._message_id)
async def fetch_message(self) -> Message:
"""|coro|
Fetches and caches the message associated with this call.
Raises
-------
HTTPException
Retrieving the message failed.
Returns
-------
:class:`Message`
The message associated with this call.
"""
message = await self.channel.fetch_message(self._message_id)
state = self._state
if self.message is None:
if state._messages is not None:
state._messages.append(message)
self._cs_message = message
return message
async def change_region(self, region: str) -> None:
"""|coro|
Changes the channel's voice region.
Parameters
-----------
region: :class:`str`
A region to change the voice region to.
.. versionchanged:: 2.0
The type of this parameter has changed to :class:`str`.
Raises
-------
HTTPException
Failed to change the channel's voice region.
"""
await self._state.http.change_call_voice_region(self.channel.id, region)
@_running_only
async def ring(self) -> None:
"""|coro|
Rings the other recipient.
Raises
-------
Forbidden
Not allowed to ring the other recipient.
HTTPException
Ringing failed.
ClientException
The call has ended.
"""
channel = self.channel
await self._state.http.ring(channel.id)
@_running_only
async def stop_ringing(self) -> None:
"""|coro|
Stops ringing the other recipient.
Raises
-------
HTTPException
Stopping the ringing failed.
ClientException
The call has ended.
"""
channel = self.channel
await self._state.http.stop_ringing(channel.id, channel.recipient.id)
@_running_only
async def connect(
self,
*,
timeout: float = 60.0,
reconnect: bool = True,
cls: Callable[[Client, abc.VocalChannel], ConnectReturn] = VoiceClient,
) -> ConnectReturn:
"""|coro|
Connects to voice and creates a :class:`~discord.VoiceClient` to establish
your connection to the voice server.
There is an alias of this called :attr:`join`.
Parameters
-----------
timeout: :class:`float`
The timeout in seconds to wait for the voice endpoint.
reconnect: :class:`bool`
Whether the bot should automatically attempt
a reconnect if a part of the handshake fails
or the gateway goes down.
cls: Type[:class:`~discord.VoiceProtocol`]
A type that subclasses :class:`~discord.VoiceProtocol` to connect with.
Defaults to :class:`~discord.VoiceClient`.
Raises
-------
asyncio.TimeoutError
Could not connect to the voice channel in time.
~discord.ClientException
You are already connected to a voice channel.
~discord.opus.OpusNotLoaded
The opus library has not been loaded.
Returns
--------
:class:`~discord.VoiceProtocol`
A voice client that is fully connected to the voice server.
"""
return await self.channel.connect(timeout=timeout, reconnect=reconnect, cls=cls, ring=False)
@_running_only
async def join(
self,
*,
timeout: float = 60.0,
reconnect: bool = True,
cls: Callable[[Client, abc.VocalChannel], ConnectReturn] = VoiceClient,
) -> ConnectReturn:
"""|coro|
Connects to voice and creates a :class:`~discord.VoiceClient` to establish
your connection to the voice server.
This is an alias of :attr:`connect`.
Parameters
-----------
timeout: :class:`float`
The timeout in seconds to wait for the voice endpoint.
reconnect: :class:`bool`
Whether the bot should automatically attempt
a reconnect if a part of the handshake fails
or the gateway goes down.
cls: Type[:class:`~discord.VoiceProtocol`]
A type that subclasses :class:`~discord.VoiceProtocol` to connect with.
Defaults to :class:`~discord.VoiceClient`.
Raises
-------
asyncio.TimeoutError
Could not connect to the voice channel in time.
~discord.ClientException
You are already connected to a voice channel.
~discord.opus.OpusNotLoaded
The opus library has not been loaded.
Returns
--------
:class:`~discord.VoiceProtocol`
A voice client that is fully connected to the voice server.
"""
return await self.connect(timeout=timeout, reconnect=reconnect, cls=cls)
@_running_only
async def disconnect(self, force: bool = False) -> None:
"""|coro|
Disconnects this voice client from voice.
There is an alias of this called :attr:`leave`.
"""
state = self._state
if not (client := state._get_voice_client(self.channel.me.id)):
return
return await client.disconnect(force=force)
@_running_only
async def leave(self, force: bool = False) -> None:
"""|coro|
Disconnects this voice client from voice.
This is an alias of :attr:`disconnect`.
"""
return await self.disconnect(force=force)
def voice_state_for(self, user: abc.Snowflake) -> Optional[VoiceState]:
"""Retrieves the :class:`VoiceState` for a specified :class:`User`.
If the :class:`User` has no voice state then this function returns
``None``.
Parameters
------------
user: :class:`User`
The user to retrieve the voice state for.
Returns
--------
Optional[:class:`VoiceState`]
The voice state associated with this user.
"""
return self._state._voice_state_for(user.id)
class GroupCall(PrivateCall):
"""Represents a Discord group call.
This is accompanied with a :class:`CallMessage` denoting the information.
Attributes
-----------
channel: :class:`GroupChannel`
The channel the group call is in.
unavailable: :class:`bool`
Denotes if this group call is unavailable.
region: :class:`str`
The region the group call is being hosted in.
.. versionchanged:: 2.0
The type of this attribute has changed to :class:`str`.
"""
__slots__ = ()
if TYPE_CHECKING:
channel: GroupChannel
def _get_recipients(self) -> Tuple[BaseUser, ...]:
channel = self.channel
return *channel.recipients, channel.me
@_running_only
async def ring(self, *recipients: abc.Snowflake) -> None:
r"""|coro|
Rings the specified recipients.
Parameters
-----------
\*recipients: :class:`User`
The recipients to ring. The default is to ring all recipients.
Raises
-------
HTTPException
            Ringing failed.
ClientException
The call has ended.
"""
await self._state.http.ring(self.channel.id, *{r.id for r in recipients})
@_running_only
async def stop_ringing(self, *recipients: abc.Snowflake) -> None:
r"""|coro|
Stops ringing the specified recipients.
Parameters
-----------
\*recipients: :class:`User`
The recipients to stop ringing.
Raises
-------
HTTPException
            Stopping the ringing failed.
ClientException
The call has ended.
"""
channel = self.channel
await self._state.http.stop_ringing(channel.id, *{r.id for r in recipients or channel.recipients})
Call = Union[PrivateCall, GroupCall]
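# Illustrative sketch (not part of this module): a typical lifecycle for the call
# objects above, e.g. from an event handler that received a PrivateCall. Error
# handling and the surrounding client setup are omitted.
async def _private_call_sketch(call: PrivateCall) -> None:
    if not call.connected:
        await call.connect()            # alias: call.join()
        await call.ring()               # ring the other recipient
    if call.message is None:
        await call.fetch_message()      # cache the associated CallMessage
    await call.disconnect(force=False)  # alias: call.leave()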
|
PypiClean
|
/collective.taxonomysupport-1.4.1.tar.gz/collective.taxonomysupport-1.4.1/collective/taxonomysupport/browser/taxonomy_utils.py
|
from Products.Five.browser import BrowserView
from collective.taxonomysupport.interfaces import ITaxonomyLevel
from Products.statusmessages.interfaces import IStatusMessage
from zope.interface import alsoProvides, noLongerProvides
from collective.taxonomysupport import taxonomysupportMessageFactory as _
class CheckTaxonomyAction(BrowserView):
def __init__(self, context, request):
self.context = context
self.request = request
def get_canonical(self):
pcs = self.context.restrictedTraverse('@@plone_context_state')
return pcs.canonical_object()
def check_taxonomy_action_add(self):
obj = self.get_canonical()
return not ITaxonomyLevel.providedBy(obj)
def check_taxonomy_action_remove(self):
obj = self.get_canonical()
return ITaxonomyLevel.providedBy(obj)
class ToggleMarkTaxonomyRoot(BrowserView):
def __init__(self, context, request):
self.context = context
self.request = request
def get_canonical(self):
pcs = self.context.restrictedTraverse('@@plone_context_state')
return pcs.canonical_object()
def add_interface(self):
obj = self.get_canonical()
messages = IStatusMessage(self.request)
if not ITaxonomyLevel.providedBy(obj):
alsoProvides(obj, ITaxonomyLevel)
obj.reindexObject()
messages.addStatusMessage(_('label_content_marked_as_taxonomyroot',
default=u"Content marked as taxonomy root"),
type='info')
else:
messages.addStatusMessage(_('label_content_already_taxonomyroot',
default=u"Content already marked as taxonomy root"),
type='warning')
self.request.response.redirect(obj.absolute_url())
def remove_interface(self):
obj = self.get_canonical()
messages = IStatusMessage(self.request)
if ITaxonomyLevel.providedBy(obj):
noLongerProvides(obj, ITaxonomyLevel)
obj.reindexObject()
messages.addStatusMessage(_('label_content_unmarked_as_taxonomyroot',
default=u"Content unmarked as taxonomy root"),
type='info')
else:
messages.addStatusMessage(_('label_content_already_unmarked_taxonomyroot',
default=u"Content was not marked as taxonomy root"),
type='warning')
self.request.response.redirect(obj.absolute_url())
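# Illustrative sketch (not part of this module): the same marker toggling the
# browser views above perform, applied directly to a content object (e.g. from a
# setup handler or a test). The `folder` argument is a placeholder content item.
def _mark_as_taxonomy_root(folder):
    if not ITaxonomyLevel.providedBy(folder):
        alsoProvides(folder, ITaxonomyLevel)
        folder.reindexObject()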
|
PypiClean
|