ext (stringclasses, 9 values) | sha (stringlengths, 40 to 40) | content (stringlengths, 3 to 1.04M) |
---|---|---|
py
|
1a5c1bae888f7310d0936d62d8431c1a63a70995
|
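# NOTE: this looks like a test case for a Python-to-C++ transpiler rather than plain Python:
# the module docstring names the expected C++ type (std::map<KEY, VALUE>), `with stack:`
# appears to mark stack-allocated containers, `with get as 'std::get<%s>(*%s)':` rebinds a
# helper to a C++ template call, and `map[string]int{...}` is Go-style typed-map syntax.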
'''
std::map<KEY, VALUE>
'''
mymap = {
    'key1' : [1,2,3],
    'key2' : [4,5,6,7]
}
FOO = 1.1
tuplemap = {
    'A': ([1.0, 2.0, 3.0], [4.0, 5.0, 6.0], FOO),
    'B': ([7.0, 8.0, 9.0], [0.0, 0.0, 0.0], FOO*2.2)
}
class MyClass:
    def mymethod(self):
        return 99
    def somefloat(self):
        return 1.1
    def somestring(self):
        return 'hi'
with stack:
    tuplemap_stack = {
        'A': ([1.0, 2.0, 3.0], [4.0, 5.0, 6.0], FOO),
        'B': ([7.0, 8.0, 9.0], [0.0, 0.0, 0.0], FOO*2.2)
    }
def test_stack():
    print 'stack test...'
    m1 = {
        'K1' : [0,1],
        'K2' : [2,3]
    }
    assert m1['K1'][0]==0
    assert m1['K1'][1]==1
    assert m1['K2'][0]==2
    assert m1['K2'][1]==3
    vecx = tuple.get( tuplemap_stack['A'], 0 )
    assert vecx[0]==1.0
def test_heap():
    print 'heap test...'
    m1 = {
        'K1' : [0,1],
        'K2' : [2,3]
    }
    assert m1['K1'][0]==0
    assert m1['K1'][1]==1
    assert m1['K2'][0]==2
    assert m1['K2'][1]==3
    with get as 'std::get<%s>(*%s)':
        vec = get(0, tuplemap['A'])
        assert vec[0]==1.0
    vecx = tuple.get( tuplemap['A'], 0 )
    assert vecx[0]==1.0
    print 'testing loop over map keys'
    for key in m1:
        print 'key:', key
    print 'testing loop over map keys and values'
    for (key,val) in m1:
        print 'key:', key
        print 'value:', val
    keys = dict.keys(m1)
    assert len(keys)==2
    for key in keys:
        print key
    assert 'K1' in keys
    assert 'invalid-key' not in keys
    values = dict.values(m1)
    assert len(values)==2
    ## golang style maps ##
    mymap = map[string]int{
        'foo':10,
        'bar':100
    }
    assert mymap['foo']==10
    assert mymap['bar']==100
    obmap = map[string]MyClass{ 'xx': MyClass() }
    #obmap = map[string]MyClass{}
    #obmap['xx'] = MyClass()
    assert obmap['xx'].mymethod()==99
    print obmap['xx'].somefloat()
    print obmap['xx'].somestring()
def main():
    print mymap
    assert mymap['key1'][0]==1
    assert mymap['key2'][1]==5
    test_heap()
    test_stack()
    print 'OK'
|
py
|
1a5c1c47d625384a212d952bc8364854b66f646a
|
# coding: utf-8
"""
Xenia Python Client Library
Python Client Library to interact with the Xenia API. # noqa: E501
The version of the OpenAPI document: v2.1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from xenia_python_client_library.configuration import Configuration
class ModelRequestList(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'success': 'bool',
'result': 'object',
'error_message': 'str'
}
attribute_map = {
'success': 'success',
'result': 'result',
'error_message': 'error_message'
}
def __init__(self, success=None, result=None, error_message=None, local_vars_configuration=None): # noqa: E501
"""ModelRequestList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._success = None
self._result = None
self._error_message = None
self.discriminator = None
self.success = success
self.result = result
self.error_message = error_message
@property
def success(self):
"""Gets the success of this ModelRequestList. # noqa: E501
:return: The success of this ModelRequestList. # noqa: E501
:rtype: bool
"""
return self._success
@success.setter
def success(self, success):
"""Sets the success of this ModelRequestList.
:param success: The success of this ModelRequestList. # noqa: E501
:type: bool
"""
if self.local_vars_configuration.client_side_validation and success is None: # noqa: E501
raise ValueError("Invalid value for `success`, must not be `None`") # noqa: E501
self._success = success
@property
def result(self):
"""Gets the result of this ModelRequestList. # noqa: E501
:return: The result of this ModelRequestList. # noqa: E501
:rtype: object
"""
return self._result
@result.setter
def result(self, result):
"""Sets the result of this ModelRequestList.
:param result: The result of this ModelRequestList. # noqa: E501
:type: object
"""
self._result = result
@property
def error_message(self):
"""Gets the error_message of this ModelRequestList. # noqa: E501
:return: The error_message of this ModelRequestList. # noqa: E501
:rtype: str
"""
return self._error_message
@error_message.setter
def error_message(self, error_message):
"""Sets the error_message of this ModelRequestList.
:param error_message: The error_message of this ModelRequestList. # noqa: E501
:type: str
"""
self._error_message = error_message
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ModelRequestList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ModelRequestList):
return True
return self.to_dict() != other.to_dict()
|
py
|
1a5c1cc615f5aaaca8e7eeb3580ac766b76b1ef7
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ec2_asg
short_description: Create or delete AWS Autoscaling Groups
description:
- Can create or delete AWS Autoscaling Groups
- Can be used with the ec2_lc module to manage Launch Configurations
version_added: "1.6"
author: "Gareth Rushgrove (@garethr)"
requirements: [ "boto3", "botocore" ]
options:
state:
description:
- register or deregister the instance
choices: ['present', 'absent']
default: present
name:
description:
- Unique name for group to be created or deleted
required: true
load_balancers:
description:
- List of ELB names to use for the group. Use for classic load balancers.
target_group_arns:
description:
- List of target group ARNs to use for the group. Use for application load balancers.
version_added: "2.4"
availability_zones:
description:
- List of availability zone names in which to create the group. Defaults to all the availability zones in the region if vpc_zone_identifier is not set.
launch_config_name:
description:
- Name of the Launch configuration to use for the group. See the ec2_lc module for managing these.
If unspecified then the current group value will be used. One of launch_config_name or launch_template must be provided.
launch_template:
description:
- Dictionary describing the Launch Template to use
suboptions:
version:
description:
- The version number of the launch template to use. Defaults to latest version if not provided.
default: "latest"
launch_template_name:
description:
- The name of the launch template. Only one of launch_template_name or launch_template_id is required.
launch_template_id:
description:
- The id of the launch template. Only one of launch_template_name or launch_template_id is required.
version_added: "2.8"
min_size:
description:
- Minimum number of instances in group, if unspecified then the current group value will be used.
max_size:
description:
- Maximum number of instances in group, if unspecified then the current group value will be used.
placement_group:
description:
- Physical location of your cluster placement group created in Amazon EC2.
version_added: "2.3"
desired_capacity:
description:
- Desired number of instances in group, if unspecified then the current group value will be used.
replace_all_instances:
description:
- In a rolling fashion, replace all instances that used the old launch configuration with one from the new launch configuration.
It increases the ASG size by C(replace_batch_size), waits for the new instances to be up and running.
After that, it terminates a batch of old instances, waits for the replacements, and repeats, until all old instances are replaced.
Once that's done the ASG size is reduced back to the expected size.
version_added: "1.8"
default: 'no'
type: bool
replace_batch_size:
description:
- Number of instances you'd like to replace at a time. Used with replace_all_instances.
required: false
version_added: "1.8"
default: 1
replace_instances:
description:
- List of instance_ids belonging to the named ASG that you would like to terminate and be replaced with instances matching the current launch
configuration.
version_added: "1.8"
lc_check:
description:
- Check to make sure instances that are being replaced with replace_instances do not already have the current launch_config.
version_added: "1.8"
default: 'yes'
type: bool
lt_check:
description:
- Check to make sure instances that are being replaced with replace_instances do not already have the current launch_template or launch_template version.
version_added: "2.8"
default: 'yes'
type: bool
vpc_zone_identifier:
description:
- List of VPC subnets to use
tags:
description:
- A list of tags to add to the Auto Scaling Group. Optional key is 'propagate_at_launch', which defaults to true.
version_added: "1.7"
health_check_period:
description:
- Length of time in seconds after a new EC2 instance comes into service that Auto Scaling starts checking its health.
required: false
default: 300 seconds
version_added: "1.7"
health_check_type:
description:
- The service you want the health status from, Amazon EC2 or Elastic Load Balancer.
required: false
default: EC2
version_added: "1.7"
choices: ['EC2', 'ELB']
default_cooldown:
description:
- The number of seconds after a scaling activity completes before another can begin.
default: 300 seconds
version_added: "2.0"
wait_timeout:
description:
- How long to wait for instances to become viable when replaced. If you experience the error "Waited too long for ELB instances to be healthy",
try increasing this value.
default: 300
version_added: "1.8"
wait_for_instances:
description:
- Wait for the ASG instances to be in a ready state before exiting. If instances are behind an ELB, it will wait until the ELB determines all
instances have a lifecycle_state of "InService" and a health_status of "Healthy".
version_added: "1.9"
default: 'yes'
type: bool
termination_policies:
description:
- An ordered list of criteria used for selecting instances to be removed from the Auto Scaling group when reducing capacity.
- For 'Default', when used to create a new autoscaling group, the "Default" value is used. When used to change an existing autoscaling group, the
current termination policies are maintained.
default: Default
choices: ['OldestInstance', 'NewestInstance', 'OldestLaunchConfiguration', 'ClosestToNextInstanceHour', 'Default']
version_added: "2.0"
notification_topic:
description:
- A SNS topic ARN to send auto scaling notifications to.
version_added: "2.2"
notification_types:
description:
- A list of auto scaling events to trigger notifications on.
default:
- 'autoscaling:EC2_INSTANCE_LAUNCH'
- 'autoscaling:EC2_INSTANCE_LAUNCH_ERROR'
- 'autoscaling:EC2_INSTANCE_TERMINATE'
- 'autoscaling:EC2_INSTANCE_TERMINATE_ERROR'
required: false
version_added: "2.2"
suspend_processes:
description:
- A list of scaling processes to suspend.
default: []
choices: ['Launch', 'Terminate', 'HealthCheck', 'ReplaceUnhealthy', 'AZRebalance', 'AlarmNotification', 'ScheduledActions', 'AddToLoadBalancer']
version_added: "2.3"
metrics_collection:
description:
- Enable ASG metrics collection
type: bool
default: 'no'
version_added: "2.6"
metrics_granularity:
description:
- When metrics_collection is enabled this will determine the granularity of metrics collected by CloudWatch
default: "1minute"
version_added: "2.6"
metrics_list:
description:
- List of autoscaling metrics to collect when enabling metrics_collection
default:
- 'GroupMinSize'
- 'GroupMaxSize'
- 'GroupDesiredCapacity'
- 'GroupInServiceInstances'
- 'GroupPendingInstances'
- 'GroupStandbyInstances'
- 'GroupTerminatingInstances'
- 'GroupTotalInstances'
version_added: "2.6"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = '''
# Basic configuration with Launch Configuration
- ec2_asg:
name: special
load_balancers: [ 'lb1', 'lb2' ]
availability_zones: [ 'eu-west-1a', 'eu-west-1b' ]
launch_config_name: 'lc-1'
min_size: 1
max_size: 10
desired_capacity: 5
vpc_zone_identifier: [ 'subnet-abcd1234', 'subnet-1a2b3c4d' ]
tags:
- environment: production
propagate_at_launch: no
# Rolling ASG Updates
# Below is an example of how to assign a new launch config to an ASG and terminate old instances.
#
# All instances in "myasg" that do not have the launch configuration named "my_new_lc" will be terminated in
# a rolling fashion with instances using the current launch configuration, "my_new_lc".
#
# This could also be considered a rolling deploy of a pre-baked AMI.
#
# If this is a newly created group, the instances will not be replaced since all instances
# will have the current launch configuration.
- name: create launch config
ec2_lc:
name: my_new_lc
image_id: ami-lkajsf
key_name: mykey
region: us-east-1
security_groups: sg-23423
instance_type: m1.small
assign_public_ip: yes
- ec2_asg:
name: myasg
launch_config_name: my_new_lc
health_check_period: 60
health_check_type: ELB
replace_all_instances: yes
min_size: 5
max_size: 5
desired_capacity: 5
region: us-east-1
# To only replace a couple of instances instead of all of them, supply a list
# to "replace_instances":
- ec2_asg:
name: myasg
launch_config_name: my_new_lc
health_check_period: 60
health_check_type: ELB
replace_instances:
- i-b345231
- i-24c2931
min_size: 5
max_size: 5
desired_capacity: 5
region: us-east-1
# Basic Configuration with Launch Template
- ec2_asg:
name: special
load_balancers: [ 'lb1', 'lb2' ]
availability_zones: [ 'eu-west-1a', 'eu-west-1b' ]
launch_template:
version: '1'
launch_template_name: 'lt-example'
launch_template_id: 'lt-123456'
min_size: 1
max_size: 10
desired_capacity: 5
vpc_zone_identifier: [ 'subnet-abcd1234', 'subnet-1a2b3c4d' ]
tags:
- environment: production
propagate_at_launch: no
'''
RETURN = '''
---
auto_scaling_group_name:
description: The unique name of the auto scaling group
returned: success
type: str
sample: "myasg"
auto_scaling_group_arn:
description: The unique ARN of the autoscaling group
returned: success
type: str
sample: "arn:aws:autoscaling:us-east-1:123456789012:autoScalingGroup:6a09ad6d-eeee-1234-b987-ee123ced01ad:autoScalingGroupName/myasg"
availability_zones:
description: The availability zones for the auto scaling group
returned: success
type: list
sample: [
"us-east-1d"
]
created_time:
description: Timestamp of create time of the auto scaling group
returned: success
type: str
sample: "2017-11-08T14:41:48.272000+00:00"
default_cooldown:
description: The default cooldown time in seconds.
returned: success
type: int
sample: 300
desired_capacity:
description: The number of EC2 instances that should be running in this group.
returned: success
type: int
sample: 3
healthcheck_period:
description: Length of time in seconds after a new EC2 instance comes into service that Auto Scaling starts checking its health.
returned: success
type: int
sample: 30
healthcheck_type:
description: The service you want the health status from, one of "EC2" or "ELB".
returned: success
type: str
sample: "ELB"
healthy_instances:
description: Number of instances in a healthy state
returned: success
type: int
sample: 5
in_service_instances:
description: Number of instances in service
returned: success
type: int
sample: 3
instance_facts:
description: Dictionary of EC2 instances and their status as it relates to the ASG.
returned: success
type: dict
sample: {
"i-0123456789012": {
"health_status": "Healthy",
"launch_config_name": "public-webapp-production-1",
"lifecycle_state": "InService"
}
}
instances:
description: list of instance IDs in the ASG
returned: success
type: list
sample: [
"i-0123456789012"
]
launch_config_name:
description: >
Name of launch configuration associated with the ASG. Same as launch_configuration_name,
provided for compatibility with ec2_asg module.
returned: success
type: str
sample: "public-webapp-production-1"
load_balancers:
description: List of load balancer names attached to the ASG.
returned: success
type: list
sample: ["elb-webapp-prod"]
max_size:
description: Maximum size of group
returned: success
type: int
sample: 3
min_size:
description: Minimum size of group
returned: success
type: int
sample: 1
pending_instances:
description: Number of instances in pending state
returned: success
type: int
sample: 1
tags:
description: List of tags for the ASG, and whether or not each tag propagates to instances at launch.
returned: success
type: list
sample: [
{
"key": "Name",
"value": "public-webapp-production-1",
"resource_id": "public-webapp-production-1",
"resource_type": "auto-scaling-group",
"propagate_at_launch": "true"
},
{
"key": "env",
"value": "production",
"resource_id": "public-webapp-production-1",
"resource_type": "auto-scaling-group",
"propagate_at_launch": "true"
}
]
target_group_arns:
description: List of ARNs of the target groups that the ASG populates
returned: success
type: list
sample: [
"arn:aws:elasticloadbalancing:ap-southeast-2:123456789012:targetgroup/target-group-host-hello/1a2b3c4d5e6f1a2b",
"arn:aws:elasticloadbalancing:ap-southeast-2:123456789012:targetgroup/target-group-path-world/abcd1234abcd1234"
]
target_group_names:
description: List of names of the target groups that the ASG populates
returned: success
type: list
sample: [
"target-group-host-hello",
"target-group-path-world"
]
termination_policies:
description: A list of termination policies for the group.
returned: success
type: str
sample: ["Default"]
unhealthy_instances:
description: Number of instances in an unhealthy state
returned: success
type: int
sample: 0
viable_instances:
description: Number of instances in a viable state
returned: success
type: int
sample: 1
vpc_zone_identifier:
description: VPC zone ID / subnet id for the auto scaling group
returned: success
type: str
sample: "subnet-a31ef45f"
metrics_collection:
description: List of enabled AutoScalingGroup metrics
returned: success
type: list
sample: [
{
"Granularity": "1Minute",
"Metric": "GroupInServiceInstances"
}
]
'''
import time
import traceback
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import boto3_conn, ec2_argument_spec, HAS_BOTO3, camel_dict_to_snake_dict, get_aws_connection_info, AWSRetry
try:
import botocore
except ImportError:
pass # will be detected by imported HAS_BOTO3
ASG_ATTRIBUTES = ('AvailabilityZones', 'DefaultCooldown', 'DesiredCapacity',
'HealthCheckGracePeriod', 'HealthCheckType', 'LaunchConfigurationName',
'LoadBalancerNames', 'MaxSize', 'MinSize', 'AutoScalingGroupName', 'PlacementGroup',
'TerminationPolicies', 'VPCZoneIdentifier')
INSTANCE_ATTRIBUTES = ('instance_id', 'health_status', 'lifecycle_state', 'launch_config_name')
backoff_params = dict(tries=10, delay=3, backoff=1.5)
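# The helpers below are thin wrappers around the boto3 AutoScaling/ELB calls used by this
# module; AWSRetry.backoff retries each call with the settings above (10 tries, 3s delay,
# 1.5x backoff), and delete_asg additionally retries on ScalingActivityInProgress errors.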
@AWSRetry.backoff(**backoff_params)
def describe_autoscaling_groups(connection, group_name):
pg = connection.get_paginator('describe_auto_scaling_groups')
return pg.paginate(AutoScalingGroupNames=[group_name]).build_full_result().get('AutoScalingGroups', [])
@AWSRetry.backoff(**backoff_params)
def deregister_lb_instances(connection, lb_name, instance_id):
connection.deregister_instances_from_load_balancer(LoadBalancerName=lb_name, Instances=[dict(InstanceId=instance_id)])
@AWSRetry.backoff(**backoff_params)
def describe_instance_health(connection, lb_name, instances):
params = dict(LoadBalancerName=lb_name)
if instances:
params.update(Instances=instances)
return connection.describe_instance_health(**params)
@AWSRetry.backoff(**backoff_params)
def describe_target_health(connection, target_group_arn, instances):
return connection.describe_target_health(TargetGroupArn=target_group_arn, Targets=instances)
@AWSRetry.backoff(**backoff_params)
def suspend_asg_processes(connection, asg_name, processes):
connection.suspend_processes(AutoScalingGroupName=asg_name, ScalingProcesses=processes)
@AWSRetry.backoff(**backoff_params)
def resume_asg_processes(connection, asg_name, processes):
connection.resume_processes(AutoScalingGroupName=asg_name, ScalingProcesses=processes)
@AWSRetry.backoff(**backoff_params)
def describe_launch_configurations(connection, launch_config_name):
pg = connection.get_paginator('describe_launch_configurations')
return pg.paginate(LaunchConfigurationNames=[launch_config_name]).build_full_result()
@AWSRetry.backoff(**backoff_params)
def describe_launch_templates(connection, launch_template):
if launch_template['launch_template_id'] is not None:
try:
lt = connection.describe_launch_templates(LaunchTemplateIds=[launch_template['launch_template_id']])
return lt
except (botocore.exceptions.ClientError) as e:
module.fail_json(msg="No launch template found matching: %s" % launch_template)
else:
try:
lt = connection.describe_launch_templates(LaunchTemplateNames=[launch_template['launch_template_name']])
return lt
except (botocore.exceptions.ClientError) as e:
module.fail_json(msg="No launch template found matching: %s" % launch_template)
@AWSRetry.backoff(**backoff_params)
def create_asg(connection, **params):
connection.create_auto_scaling_group(**params)
@AWSRetry.backoff(**backoff_params)
def put_notification_config(connection, asg_name, topic_arn, notification_types):
connection.put_notification_configuration(
AutoScalingGroupName=asg_name,
TopicARN=topic_arn,
NotificationTypes=notification_types
)
@AWSRetry.backoff(**backoff_params)
def del_notification_config(connection, asg_name, topic_arn):
connection.delete_notification_configuration(
AutoScalingGroupName=asg_name,
TopicARN=topic_arn
)
@AWSRetry.backoff(**backoff_params)
def attach_load_balancers(connection, asg_name, load_balancers):
connection.attach_load_balancers(AutoScalingGroupName=asg_name, LoadBalancerNames=load_balancers)
@AWSRetry.backoff(**backoff_params)
def detach_load_balancers(connection, asg_name, load_balancers):
connection.detach_load_balancers(AutoScalingGroupName=asg_name, LoadBalancerNames=load_balancers)
@AWSRetry.backoff(**backoff_params)
def attach_lb_target_groups(connection, asg_name, target_group_arns):
connection.attach_load_balancer_target_groups(AutoScalingGroupName=asg_name, TargetGroupARNs=target_group_arns)
@AWSRetry.backoff(**backoff_params)
def detach_lb_target_groups(connection, asg_name, target_group_arns):
connection.detach_load_balancer_target_groups(AutoScalingGroupName=asg_name, TargetGroupARNs=target_group_arns)
@AWSRetry.backoff(**backoff_params)
def update_asg(connection, **params):
connection.update_auto_scaling_group(**params)
@AWSRetry.backoff(catch_extra_error_codes=['ScalingActivityInProgress'], **backoff_params)
def delete_asg(connection, asg_name, force_delete):
connection.delete_auto_scaling_group(AutoScalingGroupName=asg_name, ForceDelete=force_delete)
@AWSRetry.backoff(**backoff_params)
def terminate_asg_instance(connection, instance_id, decrement_capacity):
connection.terminate_instance_in_auto_scaling_group(InstanceId=instance_id,
ShouldDecrementDesiredCapacity=decrement_capacity)
def enforce_required_arguments_for_create():
''' As many arguments are not required for autoscale group deletion
they cannot be mandatory arguments for the module, so we enforce
them here '''
missing_args = []
if module.params.get('launch_config_name') is None and module.params.get('launch_template') is None:
module.fail_json(msg="Missing either launch_config_name or launch_template for autoscaling group create")
for arg in ('min_size', 'max_size'):
if module.params[arg] is None:
missing_args.append(arg)
if missing_args:
module.fail_json(msg="Missing required arguments for autoscaling group create: %s" % ",".join(missing_args))
def get_properties(autoscaling_group):
properties = dict()
properties['healthy_instances'] = 0
properties['in_service_instances'] = 0
properties['unhealthy_instances'] = 0
properties['pending_instances'] = 0
properties['viable_instances'] = 0
properties['terminating_instances'] = 0
instance_facts = dict()
autoscaling_group_instances = autoscaling_group.get('Instances')
if autoscaling_group_instances:
properties['instances'] = [i['InstanceId'] for i in autoscaling_group_instances]
for i in autoscaling_group_instances:
if i.get('LaunchConfigurationName'):
instance_facts[i['InstanceId']] = {'health_status': i['HealthStatus'],
'lifecycle_state': i['LifecycleState'],
'launch_config_name': i['LaunchConfigurationName']}
elif i.get('LaunchTemplate'):
instance_facts[i['InstanceId']] = {'health_status': i['HealthStatus'],
'lifecycle_state': i['LifecycleState'],
'launch_template': i['LaunchTemplate']}
else:
instance_facts[i['InstanceId']] = {'health_status': i['HealthStatus'],
'lifecycle_state': i['LifecycleState']}
if i['HealthStatus'] == 'Healthy' and i['LifecycleState'] == 'InService':
properties['viable_instances'] += 1
if i['HealthStatus'] == 'Healthy':
properties['healthy_instances'] += 1
else:
properties['unhealthy_instances'] += 1
if i['LifecycleState'] == 'InService':
properties['in_service_instances'] += 1
if i['LifecycleState'] == 'Terminating':
properties['terminating_instances'] += 1
if i['LifecycleState'] == 'Pending':
properties['pending_instances'] += 1
else:
properties['instances'] = []
properties['auto_scaling_group_name'] = autoscaling_group.get('AutoScalingGroupName')
properties['auto_scaling_group_arn'] = autoscaling_group.get('AutoScalingGroupARN')
properties['availability_zones'] = autoscaling_group.get('AvailabilityZones')
properties['created_time'] = autoscaling_group.get('CreatedTime')
properties['instance_facts'] = instance_facts
properties['load_balancers'] = autoscaling_group.get('LoadBalancerNames')
if autoscaling_group.get('LaunchConfigurationName'):
properties['launch_config_name'] = autoscaling_group.get('LaunchConfigurationName')
else:
properties['launch_template'] = autoscaling_group.get('LaunchTemplate')
properties['tags'] = autoscaling_group.get('Tags')
properties['min_size'] = autoscaling_group.get('MinSize')
properties['max_size'] = autoscaling_group.get('MaxSize')
properties['desired_capacity'] = autoscaling_group.get('DesiredCapacity')
properties['default_cooldown'] = autoscaling_group.get('DefaultCooldown')
properties['healthcheck_grace_period'] = autoscaling_group.get('HealthCheckGracePeriod')
properties['healthcheck_type'] = autoscaling_group.get('HealthCheckType')
properties['default_cooldown'] = autoscaling_group.get('DefaultCooldown')
properties['termination_policies'] = autoscaling_group.get('TerminationPolicies')
properties['target_group_arns'] = autoscaling_group.get('TargetGroupARNs')
properties['vpc_zone_identifier'] = autoscaling_group.get('VPCZoneIdentifier')
properties['metrics_collection'] = autoscaling_group.get('EnabledMetrics')
if properties['target_group_arns']:
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
elbv2_connection = boto3_conn(module,
conn_type='client',
resource='elbv2',
region=region,
endpoint=ec2_url,
**aws_connect_params)
tg_paginator = elbv2_connection.get_paginator('describe_target_groups')
tg_result = tg_paginator.paginate(TargetGroupArns=properties['target_group_arns']).build_full_result()
target_groups = tg_result['TargetGroups']
else:
target_groups = []
properties['target_group_names'] = [tg['TargetGroupName'] for tg in target_groups]
return properties
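# Resolve the launch_config_name or launch_template module parameters into the
# LaunchConfigurationName / LaunchTemplate structure expected by the create/update ASG calls.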
def get_launch_object(connection, ec2_connection):
launch_object = dict()
launch_config_name = module.params.get('launch_config_name')
launch_template = module.params.get('launch_template')
if launch_config_name is None and launch_template is None:
return launch_object
elif launch_config_name:
try:
launch_configs = describe_launch_configurations(connection, launch_config_name)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to describe launch configurations",
exception=traceback.format_exc())
if len(launch_configs['LaunchConfigurations']) == 0:
module.fail_json(msg="No launch config found with name %s" % launch_config_name)
launch_object = {"LaunchConfigurationName": launch_configs['LaunchConfigurations'][0]['LaunchConfigurationName']}
return launch_object
elif launch_template:
lt = describe_launch_templates(ec2_connection, launch_template)['LaunchTemplates'][0]
if launch_template['version'] is not None:
launch_object = {"LaunchTemplate": {"LaunchTemplateId": lt['LaunchTemplateId'], "Version": launch_template['version']}}
else:
launch_object = {"LaunchTemplate": {"LaunchTemplateId": lt['LaunchTemplateId'], "Version": str(lt['LatestVersionNumber'])}}
return launch_object
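# Deregister one instance from every classic ELB attached to the group (only when the group
# uses ELB health checks) and poll until it leaves the InService state or wait_timeout expires.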
def elb_dreg(asg_connection, group_name, instance_id):
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
as_group = describe_autoscaling_groups(asg_connection, group_name)[0]
wait_timeout = module.params.get('wait_timeout')
count = 1
if as_group['LoadBalancerNames'] and as_group['HealthCheckType'] == 'ELB':
elb_connection = boto3_conn(module,
conn_type='client',
resource='elb',
region=region,
endpoint=ec2_url,
**aws_connect_params)
else:
return
for lb in as_group['LoadBalancerNames']:
deregister_lb_instances(elb_connection, lb, instance_id)
module.debug("De-registering %s from ELB %s" % (instance_id, lb))
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and count > 0:
count = 0
for lb in as_group['LoadBalancerNames']:
lb_instances = describe_instance_health(elb_connection, lb, [])
for i in lb_instances['InstanceStates']:
if i['InstanceId'] == instance_id and i['State'] == "InService":
count += 1
module.debug("%s: %s, %s" % (i['InstanceId'], i['State'], i['Description']))
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for instance to deregister. {0}".format(time.asctime()))
def elb_healthy(asg_connection, elb_connection, group_name):
healthy_instances = set()
as_group = describe_autoscaling_groups(asg_connection, group_name)[0]
props = get_properties(as_group)
# get healthy, inservice instances from ASG
instances = []
for instance, settings in props['instance_facts'].items():
if settings['lifecycle_state'] == 'InService' and settings['health_status'] == 'Healthy':
instances.append(dict(InstanceId=instance))
module.debug("ASG considers the following instances InService and Healthy: %s" % instances)
module.debug("ELB instance status:")
lb_instances = list()
for lb in as_group.get('LoadBalancerNames'):
# we catch a race condition that sometimes happens if the instance exists in the ASG
# but has not yet shown up in the ELB
try:
lb_instances = describe_instance_health(elb_connection, lb, instances)
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == 'InvalidInstance':
return None
module.fail_json(msg="Failed to get load balancer.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to get load balancer.",
exception=traceback.format_exc())
for i in lb_instances.get('InstanceStates'):
if i['State'] == "InService":
healthy_instances.add(i['InstanceId'])
module.debug("ELB Health State %s: %s" % (i['InstanceId'], i['State']))
return len(healthy_instances)
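# Target-group counterpart of elb_healthy: count instances the attached target groups report
# as healthy.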
def tg_healthy(asg_connection, elbv2_connection, group_name):
healthy_instances = set()
as_group = describe_autoscaling_groups(asg_connection, group_name)[0]
props = get_properties(as_group)
# get healthy, inservice instances from ASG
instances = []
for instance, settings in props['instance_facts'].items():
if settings['lifecycle_state'] == 'InService' and settings['health_status'] == 'Healthy':
instances.append(dict(Id=instance))
module.debug("ASG considers the following instances InService and Healthy: %s" % instances)
module.debug("Target Group instance status:")
tg_instances = list()
for tg in as_group.get('TargetGroupARNs'):
# we catch a race condition that sometimes happens if the instance exists in the ASG
# but has not yet shown up in the target group
try:
tg_instances = describe_target_health(elbv2_connection, tg, instances)
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == 'InvalidInstance':
return None
module.fail_json(msg="Failed to get target group.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to get target group.",
exception=traceback.format_exc())
for i in tg_instances.get('TargetHealthDescriptions'):
if i['TargetHealth']['State'] == "healthy":
healthy_instances.add(i['Target']['Id'])
module.debug("Target Group Health State %s: %s" % (i['Target']['Id'], i['TargetHealth']['State']))
return len(healthy_instances)
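# When the group uses ELB health checks, poll the classic ELBs directly until at least MinSize
# instances are InService or wait_timeout expires.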
def wait_for_elb(asg_connection, group_name):
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
wait_timeout = module.params.get('wait_timeout')
# if the health_check_type is ELB, we want to query the ELBs directly for instance
# status so as to avoid the health check grace period that is granted to ASG instances
as_group = describe_autoscaling_groups(asg_connection, group_name)[0]
if as_group.get('LoadBalancerNames') and as_group.get('HealthCheckType') == 'ELB':
module.debug("Waiting for ELB to consider instances healthy.")
elb_connection = boto3_conn(module,
conn_type='client',
resource='elb',
region=region,
endpoint=ec2_url,
**aws_connect_params)
wait_timeout = time.time() + wait_timeout
healthy_instances = elb_healthy(asg_connection, elb_connection, group_name)
while healthy_instances < as_group.get('MinSize') and wait_timeout > time.time():
healthy_instances = elb_healthy(asg_connection, elb_connection, group_name)
module.debug("ELB thinks %s instances are healthy." % healthy_instances)
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for ELB instances to be healthy. %s" % time.asctime())
module.debug("Waiting complete. ELB thinks %s instances are healthy." % healthy_instances)
def wait_for_target_group(asg_connection, group_name):
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
wait_timeout = module.params.get('wait_timeout')
# if the health_check_type is ELB, we want to query the target groups directly for instance
# status so as to avoid the health check grace period that is granted to ASG instances
as_group = describe_autoscaling_groups(asg_connection, group_name)[0]
if as_group.get('TargetGroupARNs') and as_group.get('HealthCheckType') == 'ELB':
module.debug("Waiting for Target Group to consider instances healthy.")
elbv2_connection = boto3_conn(module,
conn_type='client',
resource='elbv2',
region=region,
endpoint=ec2_url,
**aws_connect_params)
wait_timeout = time.time() + wait_timeout
healthy_instances = tg_healthy(asg_connection, elbv2_connection, group_name)
while healthy_instances < as_group.get('MinSize') and wait_timeout > time.time():
healthy_instances = tg_healthy(asg_connection, elbv2_connection, group_name)
module.debug("Target Group thinks %s instances are healthy." % healthy_instances)
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for ELB instances to be healthy. %s" % time.asctime())
module.debug("Waiting complete. Target Group thinks %s instances are healthy." % healthy_instances)
def suspend_processes(ec2_connection, as_group):
suspend_processes = set(module.params.get('suspend_processes'))
try:
suspended_processes = set([p['ProcessName'] for p in as_group['SuspendedProcesses']])
except AttributeError:
# New ASG being created, no suspended_processes defined yet
suspended_processes = set()
if suspend_processes == suspended_processes:
return False
resume_processes = list(suspended_processes - suspend_processes)
if resume_processes:
resume_asg_processes(ec2_connection, module.params.get('name'), resume_processes)
if suspend_processes:
suspend_asg_processes(ec2_connection, module.params.get('name'), list(suspend_processes))
return True
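# Create the ASG if it does not exist, otherwise update it in place (tags, load balancers,
# target groups, sizes, launch config/template, metrics and notifications), optionally waiting
# for instance and ELB/target group health. Returns (changed, asg_properties).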
def create_autoscaling_group(connection):
group_name = module.params.get('name')
load_balancers = module.params['load_balancers']
target_group_arns = module.params['target_group_arns']
availability_zones = module.params['availability_zones']
launch_config_name = module.params.get('launch_config_name')
launch_template = module.params.get('launch_template')
min_size = module.params['min_size']
max_size = module.params['max_size']
placement_group = module.params.get('placement_group')
desired_capacity = module.params.get('desired_capacity')
vpc_zone_identifier = module.params.get('vpc_zone_identifier')
set_tags = module.params.get('tags')
health_check_period = module.params.get('health_check_period')
health_check_type = module.params.get('health_check_type')
default_cooldown = module.params.get('default_cooldown')
wait_for_instances = module.params.get('wait_for_instances')
wait_timeout = module.params.get('wait_timeout')
termination_policies = module.params.get('termination_policies')
notification_topic = module.params.get('notification_topic')
notification_types = module.params.get('notification_types')
metrics_collection = module.params.get('metrics_collection')
metrics_granularity = module.params.get('metrics_granularity')
metrics_list = module.params.get('metrics_list')
try:
as_groups = describe_autoscaling_groups(connection, group_name)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to describe auto scaling groups.",
exception=traceback.format_exc())
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
ec2_connection = boto3_conn(module,
conn_type='client',
resource='ec2',
region=region,
endpoint=ec2_url,
**aws_connect_params)
if vpc_zone_identifier:
vpc_zone_identifier = ','.join(vpc_zone_identifier)
asg_tags = []
for tag in set_tags:
for k, v in tag.items():
if k != 'propagate_at_launch':
asg_tags.append(dict(Key=k,
Value=to_native(v),
PropagateAtLaunch=bool(tag.get('propagate_at_launch', True)),
ResourceType='auto-scaling-group',
ResourceId=group_name))
if not as_groups:
if not vpc_zone_identifier and not availability_zones:
availability_zones = module.params['availability_zones'] = [zone['ZoneName'] for
zone in ec2_connection.describe_availability_zones()['AvailabilityZones']]
enforce_required_arguments_for_create()
if desired_capacity is None:
desired_capacity = min_size
ag = dict(
AutoScalingGroupName=group_name,
MinSize=min_size,
MaxSize=max_size,
DesiredCapacity=desired_capacity,
Tags=asg_tags,
HealthCheckGracePeriod=health_check_period,
HealthCheckType=health_check_type,
DefaultCooldown=default_cooldown,
TerminationPolicies=termination_policies)
if vpc_zone_identifier:
ag['VPCZoneIdentifier'] = vpc_zone_identifier
if availability_zones:
ag['AvailabilityZones'] = availability_zones
if placement_group:
ag['PlacementGroup'] = placement_group
if load_balancers:
ag['LoadBalancerNames'] = load_balancers
if target_group_arns:
ag['TargetGroupARNs'] = target_group_arns
launch_object = get_launch_object(connection, ec2_connection)
if 'LaunchConfigurationName' in launch_object:
ag['LaunchConfigurationName'] = launch_object['LaunchConfigurationName']
elif 'LaunchTemplate' in launch_object:
ag['LaunchTemplate'] = launch_object['LaunchTemplate']
else:
module.fail_json(msg="Missing LaunchConfigurationName or LaunchTemplate",
exception=traceback.format_exc())
try:
create_asg(connection, **ag)
if metrics_collection:
connection.enable_metrics_collection(AutoScalingGroupName=group_name, Granularity=metrics_granularity, Metrics=metrics_list)
all_ag = describe_autoscaling_groups(connection, group_name)
if len(all_ag) == 0:
module.fail_json(msg="No auto scaling group found with the name %s" % group_name)
as_group = all_ag[0]
suspend_processes(connection, as_group)
if wait_for_instances:
wait_for_new_inst(connection, group_name, wait_timeout, desired_capacity, 'viable_instances')
if load_balancers:
wait_for_elb(connection, group_name)
# Wait for target group health if target group(s) defined
if target_group_arns:
wait_for_target_group(connection, group_name)
if notification_topic:
put_notification_config(connection, group_name, notification_topic, notification_types)
as_group = describe_autoscaling_groups(connection, group_name)[0]
asg_properties = get_properties(as_group)
changed = True
return changed, asg_properties
except botocore.exceptions.ClientError as e:
module.fail_json(msg="Failed to create Autoscaling Group.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to create Autoscaling Group.",
exception=traceback.format_exc())
else:
as_group = as_groups[0]
initial_asg_properties = get_properties(as_group)
changed = False
if suspend_processes(connection, as_group):
changed = True
# process tag changes
if len(set_tags) > 0:
have_tags = as_group.get('Tags')
want_tags = asg_tags
dead_tags = []
have_tag_keyvals = [x['Key'] for x in have_tags]
want_tag_keyvals = [x['Key'] for x in want_tags]
for dead_tag in set(have_tag_keyvals).difference(want_tag_keyvals):
changed = True
dead_tags.append(dict(ResourceId=as_group['AutoScalingGroupName'],
ResourceType='auto-scaling-group', Key=dead_tag))
have_tags = [have_tag for have_tag in have_tags if have_tag['Key'] != dead_tag]
if dead_tags:
connection.delete_tags(Tags=dead_tags)
zipped = zip(have_tags, want_tags)
if len(have_tags) != len(want_tags) or not all(x == y for x, y in zipped):
changed = True
connection.create_or_update_tags(Tags=asg_tags)
# Handle load balancer attachments/detachments
# Attach load balancers if they are specified but none currently exist
if load_balancers and not as_group['LoadBalancerNames']:
changed = True
try:
attach_load_balancers(connection, group_name, load_balancers)
except botocore.exceptions.ClientError as e:
module.fail_json(msg="Failed to update Autoscaling Group.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to update Autoscaling Group.",
exception=traceback.format_exc())
# Update load balancers if they are specified and one or more already exists
elif as_group['LoadBalancerNames']:
change_load_balancers = load_balancers is not None
# Get differences
if not load_balancers:
load_balancers = list()
wanted_elbs = set(load_balancers)
has_elbs = set(as_group['LoadBalancerNames'])
# check if all requested are already existing
if has_elbs - wanted_elbs and change_load_balancers:
# if wanted contains less than existing, then we need to delete some
elbs_to_detach = has_elbs.difference(wanted_elbs)
if elbs_to_detach:
changed = True
try:
detach_load_balancers(connection, group_name, list(elbs_to_detach))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to detach load balancers %s: %s." % (elbs_to_detach, to_native(e)),
exception=traceback.format_exc())
if wanted_elbs - has_elbs:
# if has contains less than wanted, then we need to add some
elbs_to_attach = wanted_elbs.difference(has_elbs)
if elbs_to_attach:
changed = True
try:
attach_load_balancers(connection, group_name, list(elbs_to_attach))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to attach load balancers %s: %s." % (elbs_to_attach, to_native(e)),
exception=traceback.format_exc())
# Handle target group attachments/detachments
# Attach target groups if they are specified but none currently exist
if target_group_arns and not as_group['TargetGroupARNs']:
changed = True
try:
attach_lb_target_groups(connection, group_name, target_group_arns)
except botocore.exceptions.ClientError as e:
module.fail_json(msg="Failed to update Autoscaling Group.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to update Autoscaling Group.",
exception=traceback.format_exc())
# Update target groups if they are specified and one or more already exists
elif target_group_arns is not None and as_group['TargetGroupARNs']:
# Get differences
wanted_tgs = set(target_group_arns)
has_tgs = set(as_group['TargetGroupARNs'])
# check if all requested are already existing
if has_tgs.issuperset(wanted_tgs):
# if wanted contains less than existing, then we need to delete some
tgs_to_detach = has_tgs.difference(wanted_tgs)
if tgs_to_detach:
changed = True
try:
detach_lb_target_groups(connection, group_name, list(tgs_to_detach))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to detach load balancer target groups %s: %s" % (tgs_to_detach, to_native(e)),
exception=traceback.format_exc())
if wanted_tgs.issuperset(has_tgs):
# if has contains less than wanted, then we need to add some
tgs_to_attach = wanted_tgs.difference(has_tgs)
if tgs_to_attach:
changed = True
try:
attach_lb_target_groups(connection, group_name, list(tgs_to_attach))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to attach load balancer target groups %s: %s" % (tgs_to_attach, to_native(e)),
exception=traceback.format_exc())
# check for attributes that aren't required for updating an existing ASG
# check if min_size/max_size/desired capacity have been specified and if not use ASG values
if min_size is None:
min_size = as_group['MinSize']
if max_size is None:
max_size = as_group['MaxSize']
if desired_capacity is None:
desired_capacity = as_group['DesiredCapacity']
ag = dict(
AutoScalingGroupName=group_name,
MinSize=min_size,
MaxSize=max_size,
DesiredCapacity=desired_capacity,
HealthCheckGracePeriod=health_check_period,
HealthCheckType=health_check_type,
DefaultCooldown=default_cooldown,
TerminationPolicies=termination_policies)
# Get the launch object (config or template) if one is provided in args or use the existing one attached to ASG if not.
launch_object = get_launch_object(connection, ec2_connection)
if 'LaunchConfigurationName' in launch_object:
ag['LaunchConfigurationName'] = launch_object['LaunchConfigurationName']
elif 'LaunchTemplate' in launch_object:
ag['LaunchTemplate'] = launch_object['LaunchTemplate']
else:
try:
ag['LaunchConfigurationName'] = as_group['LaunchConfigurationName']
except Exception:
launch_template = as_group['LaunchTemplate']
# Prefer LaunchTemplateId over Name as it's more specific. Only one can be used for update_asg.
ag['LaunchTemplate'] = {"LaunchTemplateId": launch_template['LaunchTemplateId'], "Version": launch_template['Version']}
if availability_zones:
ag['AvailabilityZones'] = availability_zones
if vpc_zone_identifier:
ag['VPCZoneIdentifier'] = vpc_zone_identifier
try:
update_asg(connection, **ag)
if metrics_collection:
connection.enable_metrics_collection(AutoScalingGroupName=group_name, Granularity=metrics_granularity, Metrics=metrics_list)
else:
connection.disable_metrics_collection(AutoScalingGroupName=group_name, Metrics=metrics_list)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json(msg="Failed to update autoscaling group: %s" % to_native(e),
exception=traceback.format_exc())
if notification_topic:
try:
put_notification_config(connection, group_name, notification_topic, notification_types)
except botocore.exceptions.ClientError as e:
module.fail_json(msg="Failed to update Autoscaling Group notifications.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to update Autoscaling Group notifications.",
exception=traceback.format_exc())
if wait_for_instances:
wait_for_new_inst(connection, group_name, wait_timeout, desired_capacity, 'viable_instances')
# Wait for ELB health if ELB(s) defined
if load_balancers:
module.debug('\tWAITING FOR ELB HEALTH')
wait_for_elb(connection, group_name)
# Wait for target group health if target group(s) defined
if target_group_arns:
module.debug('\tWAITING FOR TG HEALTH')
wait_for_target_group(connection, group_name)
try:
as_group = describe_autoscaling_groups(connection, group_name)[0]
asg_properties = get_properties(as_group)
if asg_properties != initial_asg_properties:
changed = True
except botocore.exceptions.ClientError as e:
module.fail_json(msg="Failed to read existing Autoscaling Groups.",
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except botocore.exceptions.BotoCoreError as e:
module.fail_json(msg="Failed to read existing Autoscaling Groups.",
exception=traceback.format_exc())
return changed, asg_properties
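# Delete the ASG. Unless wait_for_instances is disabled, first scale the group to zero and
# wait for its instances to terminate; fail if wait_timeout is exceeded.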
def delete_autoscaling_group(connection):
group_name = module.params.get('name')
notification_topic = module.params.get('notification_topic')
wait_for_instances = module.params.get('wait_for_instances')
wait_timeout = module.params.get('wait_timeout')
if notification_topic:
del_notification_config(connection, group_name, notification_topic)
groups = describe_autoscaling_groups(connection, group_name)
if groups:
wait_timeout = time.time() + wait_timeout
if not wait_for_instances:
delete_asg(connection, group_name, force_delete=True)
else:
updated_params = dict(AutoScalingGroupName=group_name, MinSize=0, MaxSize=0, DesiredCapacity=0)
update_asg(connection, **updated_params)
instances = True
while instances and wait_for_instances and wait_timeout >= time.time():
tmp_groups = describe_autoscaling_groups(connection, group_name)
if tmp_groups:
tmp_group = tmp_groups[0]
if not tmp_group.get('Instances'):
instances = False
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for old instances to terminate. %s" % time.asctime())
delete_asg(connection, group_name, force_delete=False)
while describe_autoscaling_groups(connection, group_name) and wait_timeout >= time.time():
time.sleep(5)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for ASG to delete. %s" % time.asctime())
return True
return False
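# Yield successive batches of n instance IDs for the rolling-replacement loop.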
def get_chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i + n]
def update_size(connection, group, max_size, min_size, dc):
module.debug("setting ASG sizes")
module.debug("minimum size: %s, desired_capacity: %s, max size: %s" % (min_size, dc, max_size))
updated_group = dict()
updated_group['AutoScalingGroupName'] = group['AutoScalingGroupName']
updated_group['MinSize'] = min_size
updated_group['MaxSize'] = max_size
updated_group['DesiredCapacity'] = dc
update_asg(connection, **updated_group)
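# Rolling replacement: temporarily grow the group by replace_batch_size, terminate instances
# on the old launch configuration/template batch by batch (waiting for replacements and for
# ELB/target group health in between), then restore the original sizes.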
def replace(connection):
batch_size = module.params.get('replace_batch_size')
wait_timeout = module.params.get('wait_timeout')
group_name = module.params.get('name')
max_size = module.params.get('max_size')
min_size = module.params.get('min_size')
desired_capacity = module.params.get('desired_capacity')
launch_config_name = module.params.get('launch_config_name')
# Required to maintain the default value being set to 'true'
if launch_config_name:
lc_check = module.params.get('lc_check')
else:
lc_check = False
# Mirror above behaviour for Launch Templates
launch_template = module.params.get('launch_template')
if launch_template:
lt_check = module.params.get('lt_check')
else:
lt_check = False
replace_instances = module.params.get('replace_instances')
replace_all_instances = module.params.get('replace_all_instances')
as_group = describe_autoscaling_groups(connection, group_name)[0]
if desired_capacity is None:
desired_capacity = as_group['DesiredCapacity']
wait_for_new_inst(connection, group_name, wait_timeout, as_group['MinSize'], 'viable_instances')
props = get_properties(as_group)
instances = props['instances']
if replace_all_instances:
# If replacing all instances, then set replace_instances to current set
# This allows replace_instances and replace_all_instances to behave the same
replace_instances = instances
if replace_instances:
instances = replace_instances
# check to see if instances are replaceable if checking launch configs
if launch_config_name:
new_instances, old_instances = get_instances_by_launch_config(props, lc_check, instances)
elif launch_template:
new_instances, old_instances = get_instances_by_launch_template(props, lt_check, instances)
num_new_inst_needed = desired_capacity - len(new_instances)
if lc_check or lt_check:
if num_new_inst_needed == 0 and old_instances:
module.debug("No new instances needed, but old instances are present. Removing old instances")
terminate_batch(connection, old_instances, instances, True)
as_group = describe_autoscaling_groups(connection, group_name)[0]
props = get_properties(as_group)
changed = True
return(changed, props)
# we don't want to spin up extra instances if not necessary
if num_new_inst_needed < batch_size:
module.debug("Overriding batch size to %s" % num_new_inst_needed)
batch_size = num_new_inst_needed
if not old_instances:
changed = False
return(changed, props)
# check if min_size/max_size/desired capacity have been specified and if not use ASG values
if min_size is None:
min_size = as_group['MinSize']
if max_size is None:
max_size = as_group['MaxSize']
# set temporary settings and wait for them to be reached
# This should get overwritten if the number of instances left is less than the batch size.
as_group = describe_autoscaling_groups(connection, group_name)[0]
update_size(connection, as_group, max_size + batch_size, min_size + batch_size, desired_capacity + batch_size)
wait_for_new_inst(connection, group_name, wait_timeout, as_group['MinSize'] + batch_size, 'viable_instances')
wait_for_elb(connection, group_name)
wait_for_target_group(connection, group_name)
as_group = describe_autoscaling_groups(connection, group_name)[0]
props = get_properties(as_group)
instances = props['instances']
if replace_instances:
instances = replace_instances
module.debug("beginning main loop")
for i in get_chunks(instances, batch_size):
# break out of this loop if we have enough new instances
break_early, desired_size, term_instances = terminate_batch(connection, i, instances, False)
wait_for_term_inst(connection, term_instances)
wait_for_new_inst(connection, group_name, wait_timeout, desired_size, 'viable_instances')
wait_for_elb(connection, group_name)
wait_for_target_group(connection, group_name)
as_group = describe_autoscaling_groups(connection, group_name)[0]
if break_early:
module.debug("breaking loop")
break
update_size(connection, as_group, max_size, min_size, desired_capacity)
as_group = describe_autoscaling_groups(connection, group_name)[0]
asg_properties = get_properties(as_group)
module.debug("Rolling update complete.")
changed = True
return(changed, asg_properties)
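# Split the group's instances into those already on the current launch configuration ("new")
# and everything else ("old"); without lc_check, anything not in initial_instances counts as
# new. The launch-template variant below mirrors this logic.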
def get_instances_by_launch_config(props, lc_check, initial_instances):
new_instances = []
old_instances = []
# old instances are those that have the old launch config
if lc_check:
for i in props['instances']:
# Check if migrating from launch_template to launch_config first
if 'launch_template' in props['instance_facts'][i]:
old_instances.append(i)
elif props['instance_facts'][i]['launch_config_name'] == props['launch_config_name']:
new_instances.append(i)
else:
old_instances.append(i)
else:
module.debug("Comparing initial instances with current: %s" % initial_instances)
for i in props['instances']:
if i not in initial_instances:
new_instances.append(i)
else:
old_instances.append(i)
module.debug("New instances: %s, %s" % (len(new_instances), new_instances))
module.debug("Old instances: %s, %s" % (len(old_instances), old_instances))
return new_instances, old_instances
def get_instances_by_launch_template(props, lt_check, initial_instances):
new_instances = []
old_instances = []
# old instances are those that have the old launch template or version of the same launch template
if lt_check:
for i in props['instances']:
# Check if migrating from launch_config_name to launch_template_name first
if 'launch_config_name' in props['instance_facts'][i]:
old_instances.append(i)
elif props['instance_facts'][i]['launch_template'] == props['launch_template']:
new_instances.append(i)
else:
old_instances.append(i)
else:
module.debug("Comparing initial instances with current: %s" % initial_instances)
for i in props['instances']:
if i not in initial_instances:
new_instances.append(i)
else:
old_instances.append(i)
module.debug("New instances: %s, %s" % (len(new_instances), new_instances))
module.debug("Old instances: %s, %s" % (len(old_instances), old_instances))
return new_instances, old_instances
def list_purgeable_instances(props, lc_check, lt_check, replace_instances, initial_instances):
instances_to_terminate = []
instances = (inst_id for inst_id in replace_instances if inst_id in props['instances'])
# check to make sure instances given are actually in the given ASG
# and they have a non-current launch config
if module.params.get('launch_config_name'):
if lc_check:
for i in instances:
if 'launch_template' in props['instance_facts'][i]:
instances_to_terminate.append(i)
elif props['instance_facts'][i]['launch_config_name'] != props['launch_config_name']:
instances_to_terminate.append(i)
else:
for i in instances:
if i in initial_instances:
instances_to_terminate.append(i)
elif module.params.get('launch_template'):
if lt_check:
for i in instances:
if 'launch_config_name' in props['instance_facts'][i]:
instances_to_terminate.append(i)
elif props['instance_facts'][i]['launch_template'] != props['launch_template']:
instances_to_terminate.append(i)
else:
for i in instances:
if i in initial_instances:
instances_to_terminate.append(i)
return instances_to_terminate
def terminate_batch(connection, replace_instances, initial_instances, leftovers=False):
batch_size = module.params.get('replace_batch_size')
min_size = module.params.get('min_size')
desired_capacity = module.params.get('desired_capacity')
group_name = module.params.get('name')
lc_check = module.params.get('lc_check')
lt_check = module.params.get('lt_check')
decrement_capacity = False
break_loop = False
as_group = describe_autoscaling_groups(connection, group_name)[0]
if desired_capacity is None:
desired_capacity = as_group['DesiredCapacity']
props = get_properties(as_group)
desired_size = as_group['MinSize']
if module.params.get('launch_config_name'):
new_instances, old_instances = get_instances_by_launch_config(props, lc_check, initial_instances)
else:
new_instances, old_instances = get_instances_by_launch_template(props, lt_check, initial_instances)
num_new_inst_needed = desired_capacity - len(new_instances)
# check to make sure instances given are actually in the given ASG
# and they have a non-current launch config
instances_to_terminate = list_purgeable_instances(props, lc_check, lt_check, replace_instances, initial_instances)
module.debug("new instances needed: %s" % num_new_inst_needed)
module.debug("new instances: %s" % new_instances)
module.debug("old instances: %s" % old_instances)
module.debug("batch instances: %s" % ",".join(instances_to_terminate))
if num_new_inst_needed == 0:
decrement_capacity = True
if as_group['MinSize'] != min_size:
if min_size is None:
min_size = as_group['MinSize']
updated_params = dict(AutoScalingGroupName=as_group['AutoScalingGroupName'], MinSize=min_size)
update_asg(connection, **updated_params)
module.debug("Updating minimum size back to original of %s" % min_size)
        # if there are some leftover old instances, but we are already at capacity with new ones
# we don't want to decrement capacity
if leftovers:
decrement_capacity = False
break_loop = True
instances_to_terminate = old_instances
desired_size = min_size
module.debug("No new instances needed")
if num_new_inst_needed < batch_size and num_new_inst_needed != 0:
instances_to_terminate = instances_to_terminate[:num_new_inst_needed]
decrement_capacity = False
break_loop = False
module.debug("%s new instances needed" % num_new_inst_needed)
module.debug("decrementing capacity: %s" % decrement_capacity)
for instance_id in instances_to_terminate:
elb_dreg(connection, group_name, instance_id)
module.debug("terminating instance: %s" % instance_id)
terminate_asg_instance(connection, instance_id, decrement_capacity)
# we wait to make sure the machines we marked as Unhealthy are
# no longer in the list
return break_loop, desired_size, instances_to_terminate
def wait_for_term_inst(connection, term_instances):
wait_timeout = module.params.get('wait_timeout')
group_name = module.params.get('name')
as_group = describe_autoscaling_groups(connection, group_name)[0]
count = 1
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and count > 0:
module.debug("waiting for instances to terminate")
count = 0
as_group = describe_autoscaling_groups(connection, group_name)[0]
props = get_properties(as_group)
instance_facts = props['instance_facts']
instances = (i for i in instance_facts if i in term_instances)
for i in instances:
lifecycle = instance_facts[i]['lifecycle_state']
health = instance_facts[i]['health_status']
module.debug("Instance %s has state of %s,%s" % (i, lifecycle, health))
if lifecycle.startswith('Terminating') or health == 'Unhealthy':
count += 1
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for old instances to terminate. %s" % time.asctime())
def wait_for_new_inst(connection, group_name, wait_timeout, desired_size, prop):
# make sure we have the latest stats after that last loop.
as_group = describe_autoscaling_groups(connection, group_name)[0]
props = get_properties(as_group)
module.debug("Waiting for %s = %s, currently %s" % (prop, desired_size, props[prop]))
# now we make sure that we have enough instances in a viable state
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and desired_size > props[prop]:
module.debug("Waiting for %s = %s, currently %s" % (prop, desired_size, props[prop]))
time.sleep(10)
as_group = describe_autoscaling_groups(connection, group_name)[0]
props = get_properties(as_group)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="Waited too long for new instances to become viable. %s" % time.asctime())
module.debug("Reached %s: %s" % (prop, desired_size))
return props
def asg_exists(connection):
group_name = module.params.get('name')
as_group = describe_autoscaling_groups(connection, group_name)
return bool(len(as_group))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(required=True, type='str'),
load_balancers=dict(type='list'),
target_group_arns=dict(type='list'),
availability_zones=dict(type='list'),
launch_config_name=dict(type='str'),
launch_template=dict(type='dict',
default=None,
options=dict(
version=dict(type='str'),
launch_template_name=dict(type='str'),
launch_template_id=dict(type='str'),
),
),
min_size=dict(type='int'),
max_size=dict(type='int'),
placement_group=dict(type='str'),
desired_capacity=dict(type='int'),
vpc_zone_identifier=dict(type='list'),
replace_batch_size=dict(type='int', default=1),
replace_all_instances=dict(type='bool', default=False),
replace_instances=dict(type='list', default=[]),
lc_check=dict(type='bool', default=True),
lt_check=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=300),
state=dict(default='present', choices=['present', 'absent']),
tags=dict(type='list', default=[]),
health_check_period=dict(type='int', default=300),
health_check_type=dict(default='EC2', choices=['EC2', 'ELB']),
default_cooldown=dict(type='int', default=300),
wait_for_instances=dict(type='bool', default=True),
termination_policies=dict(type='list', default='Default'),
notification_topic=dict(type='str', default=None),
notification_types=dict(type='list', default=[
'autoscaling:EC2_INSTANCE_LAUNCH',
'autoscaling:EC2_INSTANCE_LAUNCH_ERROR',
'autoscaling:EC2_INSTANCE_TERMINATE',
'autoscaling:EC2_INSTANCE_TERMINATE_ERROR'
]),
suspend_processes=dict(type='list', default=[]),
metrics_collection=dict(type='bool', default=False),
metrics_granularity=dict(type='str', default='1Minute'),
metrics_list=dict(type='list', default=[
'GroupMinSize',
'GroupMaxSize',
'GroupDesiredCapacity',
'GroupInServiceInstances',
'GroupPendingInstances',
'GroupStandbyInstances',
'GroupTerminatingInstances',
'GroupTotalInstances'
])
),
)
global module
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[
['replace_all_instances', 'replace_instances'],
['launch_config_name', 'launch_template']]
)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
state = module.params.get('state')
replace_instances = module.params.get('replace_instances')
replace_all_instances = module.params.get('replace_all_instances')
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
connection = boto3_conn(module,
conn_type='client',
resource='autoscaling',
region=region,
endpoint=ec2_url,
**aws_connect_params)
changed = create_changed = replace_changed = False
exists = asg_exists(connection)
if state == 'present':
create_changed, asg_properties = create_autoscaling_group(connection)
elif state == 'absent':
changed = delete_autoscaling_group(connection)
module.exit_json(changed=changed)
# Only replace instances if asg existed at start of call
if exists and (replace_all_instances or replace_instances) and (module.params.get('launch_config_name') or module.params.get('launch_template')):
replace_changed, asg_properties = replace(connection)
if create_changed or replace_changed:
changed = True
module.exit_json(changed=changed, **asg_properties)
if __name__ == '__main__':
main()
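# Illustrative sketch (comments only, not part of the module): the rolling replace()
# flow above temporarily inflates the group by one batch, waits for the extra capacity
# to become viable, then terminates old instances batch by batch. With assumed values:
#
#   min_size, max_size, desired_capacity, batch_size = 2, 6, 4, 2
#   update_size(...)  ->  MinSize=4, MaxSize=8, DesiredCapacity=6   # during the update
#   update_size(...)  ->  MinSize=2, MaxSize=6, DesiredCapacity=4   # restored at the end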
|
py
|
1a5c1d10aed18c4f31dd8847242ae47c05386975
|
# -*- coding: utf-8 -*-
import logging
from operator import mod
from typing import List
from nodedge.blocks.block import Block
from nodedge.blocks.block_config import BLOCKS_ICONS_PATH, registerNode
from nodedge.blocks.block_exception import EvaluationError
from nodedge.socket_type import SocketType
_LOG = logging.getLogger(__name__)
try:
from nodedge.blocks.block_config import OP_NODE_MODULO
except NameError:
_LOG.warning(f"Not registered block: {__name__}")
op_block_string = -1
@registerNode(OP_NODE_MODULO)
class ModBlock(Block):
icon = f"{BLOCKS_ICONS_PATH}/percentage_100.png"
operationCode = OP_NODE_MODULO
operationTitle = "Modulo"
contentLabel = "%"
contentLabelObjectName = "BlockBackground"
evalString = "mod"
library = "operator"
inputSocketTypes: List[SocketType] = [
SocketType.Number,
SocketType.Number,
]
outputSocketTypes: List[SocketType] = [
SocketType.Number,
]
def evalImplementation(self):
inputs = []
for i in range(len(self.inputSockets)):
inputs.append(self.inputNodeAt(i))
try:
evaluatedInputs = [str(currentInput.eval()) for currentInput in inputs]
operation = f"{ModBlock.evalString}({', '.join(evaluatedInputs)})"
result = eval(operation)
except TypeError as e:
raise EvaluationError(e)
self.value = result
return self.value
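# Minimal sketch of what evalImplementation() produces (assuming two upstream blocks
# that evaluate to 7 and 3): evaluatedInputs == ['7', '3'], operation == "mod(7, 3)",
# and eval(operation) resolves to operator.mod imported above, so self.value == 1.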
|
py
|
1a5c1e585fe1f56b0ded377a5634c1b30abcc210
|
def atomicdictionary():
atomic = {"H":"Hydrogen","He":"Helium","Li":"Lithium","Be":"Berrylium","B":"Boron","C":"Carbon","N":"Nitrogen","F":"Fluorine","Ne":"Neon"}
print(atomic)
sym = input("Enter an existing symbol: ")
name = input("Enter an element name: ")
atomic[sym] = name
print(atomic)
sym = input("Enter a new symbol: ")
name = input("Enter a new element name: ")
atomic[sym] = name
print(atomic)
print("No. of elements: ",len(atomic))
    ele = input("Enter element to search: ")
    if ele in atomic:
        print("Element found!")
    else:
        print("Element not found!")
|
py
|
1a5c1f8310c481638ee012c42e2116d8c34cb90a
|
import os, time
from datetime import datetime
from panda3d.core import *
from direct.distributed.MsgTypes import *
from direct.gui.DirectGui import *
from direct.gui.DirectGuiGlobals import NO_FADE_SORT_INDEX
from direct.fsm import StateData
from direct.fsm import ClassicFSM
from direct.fsm import State
from direct.directnotify import DirectNotifyGlobal
from direct.task import Task
from otp.otpgui import OTPDialog
from otp.otpbase import OTPLocalizer
from otp.otpbase import OTPGlobals
from otp.uberdog.AccountDetailRecord import AccountDetailRecord, SubDetailRecord
import TTAccount
import GuiScreen
class LoginScreen(StateData.StateData, GuiScreen.GuiScreen):
AutoLoginName = base.config.GetString('%s-auto-login%s' % (game.name, os.getenv('otp_client', '')), '')
AutoLoginPassword = base.config.GetString('%s-auto-password%s' % (game.name, os.getenv('otp_client', '')), '')
notify = DirectNotifyGlobal.directNotify.newCategory('LoginScreen')
ActiveEntryColor = Vec4(1, 1, 1, 1)
InactiveEntryColor = Vec4(0.80000000000000004, 0.80000000000000004, 0.80000000000000004, 1)
def __init__(self, cr, doneEvent):
self.notify.debug('__init__')
StateData.StateData.__init__(self, doneEvent)
GuiScreen.GuiScreen.__init__(self)
self.cr = cr
self.loginInterface = self.cr.loginInterface
self.userName = ''
self.password = ''
self.fsm = ClassicFSM.ClassicFSM('LoginScreen', [
State.State('off', self.enterOff, self.exitOff, [
'login',
'waitForLoginResponse']),
State.State('login', self.enterLogin, self.exitLogin, [
'waitForLoginResponse',
'login',
'showLoginFailDialog']),
State.State('showLoginFailDialog', self.enterShowLoginFailDialog, self.exitShowLoginFailDialog, [
'login',
'showLoginFailDialog']),
State.State('waitForLoginResponse', self.enterWaitForLoginResponse, self.exitWaitForLoginResponse, [
'login',
'showLoginFailDialog',
'showConnectionProblemDialog']),
State.State('showConnectionProblemDialog', self.enterShowConnectionProblemDialog, self.exitShowConnectionProblemDialog, [
'login'])], 'off', 'off')
self.fsm.enterInitialState()
def load(self):
self.notify.debug('load')
masterScale = 0.80000000000000004
textScale = 0.10000000000000001 * masterScale
entryScale = 0.080000000000000002 * masterScale
lineHeight = 0.20999999999999999 * masterScale
buttonScale = 1.1499999999999999 * masterScale
buttonLineHeight = 0.14000000000000001 * masterScale
self.frame = DirectFrame(parent = aspect2d, relief = None, sortOrder = 20)
self.frame.hide()
linePos = -0.26000000000000001
self.nameLabel = DirectLabel(parent = self.frame, relief = None, pos = (-0.20999999999999999, 0, linePos), text = OTPLocalizer.LoginScreenUserName, text_scale = textScale, text_align = TextNode.ARight)
self.nameEntry = DirectEntry(parent = self.frame, relief = DGG.SUNKEN, borderWidth = (0.10000000000000001, 0.10000000000000001), scale = entryScale, pos = (-0.125, 0.0, linePos), width = OTPGlobals.maxLoginWidth, numLines = 1, focus = 0, cursorKeys = 1)
linePos -= lineHeight
self.passwordLabel = DirectLabel(parent = self.frame, relief = None, pos = (-0.20999999999999999, 0, linePos), text = OTPLocalizer.LoginScreenPassword, text_scale = textScale, text_align = TextNode.ARight)
self.passwordEntry = DirectEntry(parent = self.frame, relief = DGG.SUNKEN, borderWidth = (0.10000000000000001, 0.10000000000000001), scale = entryScale, pos = (-0.125, 0.0, linePos), width = OTPGlobals.maxLoginWidth, numLines = 1, focus = 0, cursorKeys = 1, obscured = 1, command = self._LoginScreen__handleLoginPassword)
linePos -= lineHeight
buttonImageScale = (1.7, 1.1000000000000001, 1.1000000000000001)
self.loginButton = DirectButton(parent = self.frame, relief = DGG.RAISED, borderWidth = (0.01, 0.01), pos = (0, 0, linePos), scale = buttonScale, text = OTPLocalizer.LoginScreenLogin, text_scale = 0.059999999999999998, text_pos = (0, -0.02), command = self._LoginScreen__handleLoginButton)
linePos -= buttonLineHeight
self.createAccountButton = DirectButton(parent = self.frame, relief = DGG.RAISED, borderWidth = (0.01, 0.01), pos = (0, 0, linePos), scale = buttonScale, text = OTPLocalizer.LoginScreenCreateAccount, text_scale = 0.059999999999999998, text_pos = (0, -0.02), command = self._LoginScreen__handleCreateAccount)
linePos -= buttonLineHeight
self.quitButton = DirectButton(parent = self.frame, relief = DGG.RAISED, borderWidth = (0.01, 0.01), pos = (0, 0, linePos), scale = buttonScale, text = OTPLocalizer.LoginScreenQuit, text_scale = 0.059999999999999998, text_pos = (0, -0.02), command = self._LoginScreen__handleQuit)
linePos -= buttonLineHeight
self.dialogDoneEvent = 'loginDialogAck'
dialogClass = OTPGlobals.getGlobalDialogClass()
self.dialog = dialogClass(dialogName = 'loginDialog', doneEvent = self.dialogDoneEvent, message = '', style = OTPDialog.Acknowledge, sortOrder = NO_FADE_SORT_INDEX + 100)
self.dialog.hide()
self.failDialog = DirectFrame(parent = aspect2dp, relief = DGG.RAISED, borderWidth = (0.01, 0.01), pos = (0, 0.10000000000000001, 0), text = '', text_scale = 0.080000000000000002, text_pos = (0.0, 0.29999999999999999), text_wordwrap = 15, sortOrder = NO_FADE_SORT_INDEX)
linePos = -0.050000000000000003
self.failTryAgainButton = DirectButton(parent = self.failDialog, relief = DGG.RAISED, borderWidth = (0.01, 0.01), pos = (0, 0, linePos), scale = 0.90000000000000002, text = OTPLocalizer.LoginScreenTryAgain, text_scale = 0.059999999999999998, text_pos = (0, -0.02), command = self._LoginScreen__handleFailTryAgain)
linePos -= buttonLineHeight
self.failCreateAccountButton = DirectButton(parent = self.failDialog, relief = DGG.RAISED, borderWidth = (0.01, 0.01), pos = (0, 0, linePos), scale = 0.90000000000000002, text = OTPLocalizer.LoginScreenCreateAccount, text_scale = 0.059999999999999998, text_pos = (0, -0.02), command = self._LoginScreen__handleFailCreateAccount)
linePos -= buttonLineHeight
self.failDialog.hide()
self.connectionProblemDialogDoneEvent = 'loginConnectionProblemDlgAck'
dialogClass = OTPGlobals.getGlobalDialogClass()
self.connectionProblemDialog = dialogClass(dialogName = 'connectionProblemDialog', doneEvent = self.connectionProblemDialogDoneEvent, message = '', style = OTPDialog.Acknowledge, sortOrder = NO_FADE_SORT_INDEX + 100)
self.connectionProblemDialog.hide()
def unload(self):
self.notify.debug('unload')
self.nameEntry.destroy()
self.passwordEntry.destroy()
self.failTryAgainButton.destroy()
self.failCreateAccountButton.destroy()
self.createAccountButton.destroy()
self.loginButton.destroy()
self.quitButton.destroy()
self.dialog.cleanup()
del self.dialog
self.failDialog.destroy()
del self.failDialog
self.connectionProblemDialog.cleanup()
del self.connectionProblemDialog
self.frame.destroy()
del self.fsm
del self.loginInterface
del self.cr
def enter(self):
if self.cr.playToken:
self.userName = '*'
self.password = self.cr.playToken
self.fsm.request('waitForLoginResponse')
else:
self.fsm.request('login')
def exit(self):
self.frame.hide()
self.ignore(self.dialogDoneEvent)
self.fsm.requestFinalState()
def enterOff(self):
pass
def exitOff(self):
pass
def enterLogin(self):
self.cr.resetPeriodTimer(None)
self.userName = ''
self.password = ''
self.userName = launcher.getLastLogin()
if self.userName and self.nameEntry.get():
if self.userName != self.nameEntry.get():
self.userName = ''
self.frame.show()
self.nameEntry.enterText(self.userName)
self.passwordEntry.enterText(self.password)
self.focusList = [
self.nameEntry,
self.passwordEntry]
focusIndex = 0
if self.userName:
focusIndex = 1
self.startFocusMgmt(startFocus = focusIndex)
def exitLogin(self):
self.stopFocusMgmt()
def enterShowLoginFailDialog(self, msg):
base.transitions.fadeScreen(0.5)
self.failDialog['text'] = msg
self.failDialog.show()
def _LoginScreen__handleFailTryAgain(self):
self.fsm.request('login')
def _LoginScreen__handleFailCreateAccount(self):
messenger.send(self.doneEvent, [
{
'mode': 'createAccount' }])
def _LoginScreen__handleFailNoNewAccountsAck(self):
self.dialog.hide()
self.fsm.request('showLoginFailDialog', [
self.failDialog['text']])
def exitShowLoginFailDialog(self):
base.transitions.noTransitions()
self.failDialog.hide()
def _LoginScreen__handleLoginPassword(self, password):
if password != '':
if self.nameEntry.get() != '':
self._LoginScreen__handleLoginButton()
def _LoginScreen__handleLoginButton(self):
self.removeFocus()
self.userName = self.nameEntry.get()
self.password = self.passwordEntry.get()
if self.userName == '':
self.dialog.setMessage(OTPLocalizer.LoginScreenLoginPrompt)
self.dialog.show()
self.acceptOnce(self.dialogDoneEvent, self._LoginScreen__handleEnterLoginAck)
else:
self.fsm.request('waitForLoginResponse')
def _LoginScreen__handleQuit(self):
self.removeFocus()
messenger.send(self.doneEvent, [
{
'mode': 'quit' }])
def _LoginScreen__handleCreateAccount(self):
self.removeFocus()
messenger.send(self.doneEvent, [
{
'mode': 'createAccount' }])
def enterWaitForLoginResponse(self):
self.cr.handler = self.handleWaitForLoginResponse
self.cr.userName = self.userName
self.cr.password = self.password
try:
error = self.loginInterface.authorize(self.userName, self.password)
        except TTAccount.TTAccountException as e:
self.fsm.request('showConnectionProblemDialog', [
str(e)])
return None
if error:
self.notify.info(error)
freeTimeExpired = self.loginInterface.getErrorCode() == 10
if freeTimeExpired:
self.cr.logAccountInfo()
messenger.send(self.doneEvent, [
{
'mode': 'freeTimeExpired' }])
else:
self.fsm.request('showLoginFailDialog', [
error])
else:
self.loginInterface.sendLoginMsg()
self.waitForDatabaseTimeout(requestName = 'WaitForLoginResponse')
def exitWaitForLoginResponse(self):
self.cleanupWaitingForDatabase()
self.cr.handler = None
def enterShowConnectionProblemDialog(self, msg):
self.connectionProblemDialog.setMessage(msg)
self.connectionProblemDialog.show()
self.acceptOnce(self.connectionProblemDialogDoneEvent, self._LoginScreen__handleConnectionProblemAck)
def _LoginScreen__handleConnectionProblemAck(self):
self.connectionProblemDialog.hide()
self.fsm.request('login')
def exitShowConnectionProblemDialog(self):
pass
def handleWaitForLoginResponse(self, msgType, di):
if msgType == CLIENT_LOGIN_2_RESP:
self.handleLoginResponseMsg2(di)
elif msgType == CLIENT_LOGIN_RESP:
self.handleLoginResponseMsg(di)
elif msgType == CLIENT_LOGIN_3_RESP:
self.handleLoginResponseMsg3(di)
elif msgType == CLIENT_LOGIN_TOONTOWN_RESP:
self.handleLoginToontownResponse(di)
else:
self.cr.handleMessageType(msgType, di)
def getExtendedErrorMsg(self, errorString):
prefix = 'Bad DC Version Compare'
if len(errorString) < len(prefix):
return errorString
if errorString[:len(prefix)] == prefix:
return '%s%s' % (errorString, ', address=%s' % base.cr.getServerAddress())
return errorString
def handleLoginResponseMsg3(self, di):
now = time.time()
returnCode = di.getInt8()
errorString = self.getExtendedErrorMsg(di.getString())
self.notify.info('Login response return code %s' % returnCode)
if returnCode != 0:
self.notify.info('Login failed: %s' % errorString)
messenger.send(self.doneEvent, [
{
'mode': 'reject' }])
return None
accountDetailRecord = AccountDetailRecord()
accountDetailRecord.openChatEnabled = di.getString() == 'YES'
accountDetailRecord.createFriendsWithChat = di.getString() == 'YES'
chatCodeCreation = di.getString()
accountDetailRecord.chatCodeCreation = chatCodeCreation == 'YES'
parentControlledChat = chatCodeCreation == 'PARENT'
access = di.getString()
if access == 'VELVET':
access = OTPGlobals.AccessVelvetRope
elif access == 'FULL':
access = OTPGlobals.AccessFull
else:
self.notify.warning('Unknown access: %s' % access)
access = OTPGlobals.AccessUnknown
accountDetailRecord.piratesAccess = access
accountDetailRecord.familyAccountId = di.getInt32()
accountDetailRecord.playerAccountId = di.getInt32()
accountDetailRecord.playerName = di.getString()
accountDetailRecord.playerNameApproved = di.getInt8()
accountDetailRecord.maxAvatars = di.getInt32()
self.cr.openChatAllowed = accountDetailRecord.openChatEnabled
if not accountDetailRecord.chatCodeCreation:
pass
self.cr.secretChatAllowed = parentControlledChat
self.cr.setIsPaid(accountDetailRecord.piratesAccess)
self.userName = accountDetailRecord.playerName
self.cr.userName = accountDetailRecord.playerName
accountDetailRecord.numSubs = di.getUint16()
for i in range(accountDetailRecord.numSubs):
subDetailRecord = SubDetailRecord()
subDetailRecord.subId = di.getUint32()
subDetailRecord.subOwnerId = di.getUint32()
subDetailRecord.subName = di.getString()
subDetailRecord.subActive = di.getString()
access = di.getString()
if access == 'VELVET':
access = OTPGlobals.AccessVelvetRope
elif access == 'FULL':
access = OTPGlobals.AccessFull
else:
access = OTPGlobals.AccessUnknown
subDetailRecord.subAccess = access
subDetailRecord.subLevel = di.getUint8()
subDetailRecord.subNumAvatars = di.getUint8()
subDetailRecord.subNumConcur = di.getUint8()
subDetailRecord.subFounder = di.getString() == 'YES'
accountDetailRecord.subDetails[subDetailRecord.subId] = subDetailRecord
accountDetailRecord.WLChatEnabled = di.getString() == 'YES'
if accountDetailRecord.WLChatEnabled:
self.cr.whiteListChatEnabled = 1
else:
self.cr.whiteListChatEnabled = 0
self.notify.info('End of DISL token parse')
self.notify.info('accountDetailRecord: %s' % accountDetailRecord)
self.cr.accountDetailRecord = accountDetailRecord
self._LoginScreen__handleLoginSuccess()
def handleLoginResponseMsg2(self, di):
self.notify.debug('handleLoginResponseMsg2')
if self.notify.getDebug():
dgram = di.getDatagram()
dgram.dumpHex(ostream)
now = time.time()
returnCode = di.getUint8()
errorString = self.getExtendedErrorMsg(di.getString())
self.userName = di.getString()
self.cr.userName = self.userName
accountDetailRecord = AccountDetailRecord()
self.cr.accountDetailRecord = accountDetailRecord
canChat = di.getUint8()
self.cr.secretChatAllowed = canChat
self.notify.info('Chat from game server login: %s' % canChat)
sec = di.getUint32()
usec = di.getUint32()
serverTime = sec + usec / 1000000.0
self.cr.serverTimeUponLogin = serverTime
self.cr.clientTimeUponLogin = now
self.cr.globalClockRealTimeUponLogin = globalClock.getRealTime()
if hasattr(self.cr, 'toontownTimeManager'):
self.cr.toontownTimeManager.updateLoginTimes(serverTime, now, self.cr.globalClockRealTimeUponLogin)
serverDelta = serverTime - now
self.cr.setServerDelta(serverDelta)
self.notify.setServerDelta(serverDelta, 28800)
self.isPaid = di.getUint8()
self.cr.setIsPaid(self.isPaid)
if self.isPaid:
launcher.setPaidUserLoggedIn()
self.notify.info('Paid from game server login: %s' % self.isPaid)
self.cr.resetPeriodTimer(None)
if di.getRemainingSize() >= 4:
minutesRemaining = di.getInt32()
self.notify.info('Minutes remaining from server %s' % minutesRemaining)
if minutesRemaining >= 0:
self.notify.info('Spawning period timer')
self.cr.resetPeriodTimer(minutesRemaining * 60)
elif self.isPaid:
self.notify.warning('Negative minutes remaining for paid user (?)')
else:
self.notify.warning('Not paid, but also negative minutes remaining (?)')
else:
self.notify.info('Minutes remaining not returned from server; not spawning period timer')
familyStr = di.getString()
WhiteListResponse = di.getString()
if WhiteListResponse == 'YES':
self.cr.whiteListChatEnabled = 1
else:
self.cr.whiteListChatEnabled = 0
if di.getRemainingSize() > 0:
self.cr.accountDays = self.parseAccountDays(di.getInt32())
else:
self.cr.accountDays = 100000
if di.getRemainingSize() > 0:
self.lastLoggedInStr = di.getString()
self.notify.info('last logged in = %s' % self.lastLoggedInStr)
else:
self.lastLoggedInStr = ''
self.cr.lastLoggedIn = datetime.now()
if hasattr(self.cr, 'toontownTimeManager'):
self.cr.lastLoggedIn = self.cr.toontownTimeManager.convertStrToToontownTime(self.lastLoggedInStr)
self.cr.withParentAccount = False
self.notify.info('Login response return code %s' % returnCode)
if returnCode == 0:
self._LoginScreen__handleLoginSuccess()
elif returnCode == -13:
self.notify.info('Period Time Expired')
self.fsm.request('showLoginFailDialog', [
OTPLocalizer.LoginScreenPeriodTimeExpired])
else:
self.notify.info('Login failed: %s' % errorString)
messenger.send(self.doneEvent, [
{
'mode': 'reject' }])
def handleLoginResponseMsg(self, di):
self.notify.debug('handleLoginResponseMsg1')
if self.notify.getDebug():
dgram = di.getDatagram()
|
py
|
1a5c1fa75e341506a165201c3e40f833cc470dde
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The hypervisors admin extension."""
import webob.exc
from nova.api.openstack import extensions
from nova import compute
from nova import exception
from nova.i18n import _
from nova import servicegroup
authorize = extensions.extension_authorizer('compute', 'hypervisors')
class HypervisorsController(object):
"""The Hypervisors API controller for the OpenStack API."""
def __init__(self, ext_mgr):
self.host_api = compute.HostAPI()
self.servicegroup_api = servicegroup.API()
super(HypervisorsController, self).__init__()
self.ext_mgr = ext_mgr
def _view_hypervisor(self, hypervisor, service, detail, servers=None,
**kwargs):
hyp_dict = {
'id': hypervisor.id,
'hypervisor_hostname': hypervisor.hypervisor_hostname,
}
ext_status_loaded = self.ext_mgr.is_loaded('os-hypervisor-status')
if ext_status_loaded:
alive = self.servicegroup_api.service_is_up(service)
hyp_dict['state'] = 'up' if alive else "down"
hyp_dict['status'] = (
'disabled' if service.disabled else 'enabled')
if detail and not servers:
fields = ('vcpus', 'memory_mb', 'local_gb', 'vcpus_used',
'memory_mb_used', 'local_gb_used',
'hypervisor_type', 'hypervisor_version',
'free_ram_mb', 'free_disk_gb', 'current_workload',
'running_vms', 'cpu_info', 'disk_available_least')
ext_loaded = self.ext_mgr.is_loaded('os-extended-hypervisors')
if ext_loaded:
fields += ('host_ip',)
for field in fields:
hyp_dict[field] = hypervisor[field]
hyp_dict['service'] = {
'id': service.id,
'host': hypervisor.host,
}
if ext_status_loaded:
hyp_dict['service'].update(
disabled_reason=service.disabled_reason)
if servers:
hyp_dict['servers'] = [dict(name=serv['name'], uuid=serv['uuid'])
for serv in servers]
# Add any additional info
if kwargs:
hyp_dict.update(kwargs)
return hyp_dict
def index(self, req):
context = req.environ['nova.context']
authorize(context)
compute_nodes = self.host_api.compute_node_get_all(context)
req.cache_db_compute_nodes(compute_nodes)
return dict(hypervisors=[self._view_hypervisor(
hyp,
self.host_api.service_get_by_compute_host(
context, hyp.host),
False)
for hyp in compute_nodes])
def detail(self, req):
context = req.environ['nova.context']
authorize(context)
compute_nodes = self.host_api.compute_node_get_all(context)
req.cache_db_compute_nodes(compute_nodes)
return dict(hypervisors=[self._view_hypervisor(
hyp,
self.host_api.service_get_by_compute_host(
context, hyp.host),
True)
for hyp in compute_nodes])
def show(self, req, id):
context = req.environ['nova.context']
authorize(context)
try:
hyp = self.host_api.compute_node_get(context, id)
req.cache_db_compute_node(hyp)
except (ValueError, exception.ComputeHostNotFound):
msg = _("Hypervisor with ID '%s' could not be found.") % id
raise webob.exc.HTTPNotFound(explanation=msg)
service = self.host_api.service_get_by_compute_host(context, hyp.host)
return dict(hypervisor=self._view_hypervisor(hyp, service, True))
def uptime(self, req, id):
context = req.environ['nova.context']
authorize(context)
try:
hyp = self.host_api.compute_node_get(context, id)
req.cache_db_compute_node(hyp)
except (ValueError, exception.ComputeHostNotFound):
msg = _("Hypervisor with ID '%s' could not be found.") % id
raise webob.exc.HTTPNotFound(explanation=msg)
# Get the uptime
try:
host = hyp.host
uptime = self.host_api.get_host_uptime(context, host)
except NotImplementedError:
msg = _("Virt driver does not implement uptime function.")
raise webob.exc.HTTPNotImplemented(explanation=msg)
service = self.host_api.service_get_by_compute_host(context, host)
return dict(hypervisor=self._view_hypervisor(hyp, service, False,
uptime=uptime))
def search(self, req, id):
context = req.environ['nova.context']
authorize(context)
hypervisors = self.host_api.compute_node_search_by_hypervisor(
context, id)
if hypervisors:
return dict(hypervisors=[self._view_hypervisor(
hyp,
self.host_api.service_get_by_compute_host(
context, hyp.host),
False)
for hyp in hypervisors])
else:
msg = _("No hypervisor matching '%s' could be found.") % id
raise webob.exc.HTTPNotFound(explanation=msg)
def servers(self, req, id):
context = req.environ['nova.context']
authorize(context)
compute_nodes = self.host_api.compute_node_search_by_hypervisor(
context, id)
if not compute_nodes:
msg = _("No hypervisor matching '%s' could be found.") % id
raise webob.exc.HTTPNotFound(explanation=msg)
hypervisors = []
for compute_node in compute_nodes:
instances = self.host_api.instance_get_all_by_host(context,
compute_node.host)
service = self.host_api.service_get_by_compute_host(
context, compute_node.host)
hyp = self._view_hypervisor(compute_node, service, False,
instances)
hypervisors.append(hyp)
return dict(hypervisors=hypervisors)
def statistics(self, req):
context = req.environ['nova.context']
authorize(context)
stats = self.host_api.compute_node_statistics(context)
return dict(hypervisor_statistics=stats)
class Hypervisors(extensions.ExtensionDescriptor):
"""Admin-only hypervisor administration."""
name = "Hypervisors"
alias = "os-hypervisors"
namespace = "http://docs.openstack.org/compute/ext/hypervisors/api/v1.1"
updated = "2012-06-21T00:00:00Z"
def get_resources(self):
resources = [extensions.ResourceExtension('os-hypervisors',
HypervisorsController(self.ext_mgr),
collection_actions={'detail': 'GET',
'statistics': 'GET'},
member_actions={'uptime': 'GET',
'search': 'GET',
'servers': 'GET'})]
return resources
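# For reference (assumed routing, derived from the collection/member actions above):
#   GET /os-hypervisors                      -> index()
#   GET /os-hypervisors/detail               -> detail()
#   GET /os-hypervisors/statistics           -> statistics()
#   GET /os-hypervisors/{id}                 -> show()
#   GET /os-hypervisors/{id}/uptime          -> uptime()
#   GET /os-hypervisors/{hostname}/search    -> search()
#   GET /os-hypervisors/{hostname}/servers   -> servers()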
|
py
|
1a5c1fac7e5611257dd313f322b5ef8ac15da495
|
# Copyright Action Without Borders, Inc., the Alfajor authors and contributors.
# All rights reserved. See AUTHORS.
#
# This file is part of 'Alfajor' and is distributed under the BSD license.
# See LICENSE for more details.
"""An in-process browser that acts as a WSGI server."""
from __future__ import absolute_import
import cookielib
from cookielib import Cookie
import dummy_threading
from cStringIO import StringIO
from logging import getLogger
import os.path
from urlparse import urljoin, urlparse, urlunparse
from time import time
import urllib2
from wsgiref.util import request_uri
from blinker import signal
from werkzeug import (
BaseResponse,
FileStorage,
MultiDict,
create_environ,
parse_cookie,
run_wsgi_app,
url_encode,
)
from werkzeug.test import encode_multipart
from alfajor.browsers._lxml import (
ButtonElement,
DOMElement,
DOMMixin,
FormElement,
InputElement,
SelectElement,
TextareaElement,
html_parser_for,
)
from alfajor.browsers._waitexpr import WaitExpression
from alfajor.utilities import lazy_property, to_pairs
from alfajor._compat import property
__all__ = ['WSGI']
logger = getLogger('tests.browser')
after_browser_activity = signal('after_browser_activity')
before_browser_activity = signal('before_browser_activity')
class WSGI(DOMMixin):
capabilities = [
'in-process',
'cookies',
'headers',
'status',
]
wait_expression = WaitExpression
_wsgi_server = {
'multithread': False,
'multiprocess': False,
'run_once': False,
}
user_agent = {
'browser': 'wsgi',
'platform': 'python',
'version': '1.0',
}
def __init__(self, wsgi_app, base_url=None):
# accept additional request headers? (e.g. user agent)
self._wsgi_app = wsgi_app
self._base_url = base_url
self._referrer = None
self._request_environ = None
self._cookie_jar = CookieJar()
self._charset = 'utf-8'
self.status_code = 0
self.status = ''
self.response = None
self.headers = ()
def open(self, url, wait_for=None, timeout=0):
"""Open web page at *url*."""
self._open(url, refer=False)
def reset(self):
self._cookie_jar = CookieJar()
@property
def location(self):
if not self._request_environ:
return None
return request_uri(self._request_environ)
def wait_for(self, condition, timeout=None):
pass
def sync_document(self):
"""The document is always synced."""
_sync_document = DOMMixin.sync_document
@property
def cookies(self):
if not (self._cookie_jar and self.location):
return {}
request = urllib2.Request(self.location)
policy = self._cookie_jar._policy
policy._now = int(time())
        # return_ok will only return a cookie if the following attrs are set
        # correctly: "version", "verifiability", "secure", "expires",
        # "port", "domain"
return dict((c.name, c.value.strip('"'))
for c in self._cookie_jar if policy.return_ok(c, request))
def set_cookie(self, name, value, domain=None, path=None,
session=True, expires=None, port=None, request=None):
"""
:param expires: Seconds from epoch
:param port: must match request port
        :param domain: the fully qualified domain name (FQDN) of your server
"""
# Cookie(version, name, value, port, port_specified,
# domain, domain_specified, domain_initial_dot,
# path, path_specified, secure, expires,
# discard, comment, comment_url, rest,
# rfc2109=False):
cookie = Cookie(0, name, value, port, bool(port),
domain or '', bool(domain),
(domain and domain.startswith('.')),
path or '', bool(path), False, expires,
session, None, None, {}, False)
self._cookie_jar.set_cookie(cookie)
def delete_cookie(self, name, domain=None, path=None):
try:
self._cookie_jar.clear(domain, path, name)
except KeyError:
pass
# Internal methods
@lazy_property
def _lxml_parser(self):
return html_parser_for(self, wsgi_elements)
def _open(self, url, method='GET', data=None, refer=True, content_type=None):
before_browser_activity.send(self)
open_started = time()
environ = self._create_environ(url, method, data, refer, content_type)
# keep a copy, the app may mutate the environ
request_environ = dict(environ)
logger.info('%s(%s) == %s', method, url, request_uri(environ))
request_started = time()
rv = run_wsgi_app(self._wsgi_app, environ)
response = BaseResponse(*rv)
# TODO:
# response.make_sequence() # werkzeug 0.6+
# For now, must:
response.response = list(response.response)
if hasattr(rv[0], 'close'):
rv[0].close()
# end TODO
# request is complete after the app_iter (rv[0]) has been fully read +
# closed down.
request_ended = time()
self._request_environ = request_environ
self._cookie_jar.extract_from_werkzeug(response, environ)
self.status_code = response.status_code
# Automatically follow redirects
if 301 <= self.status_code <= 302:
logger.debug("Redirect to %s", response.headers['Location'])
after_browser_activity.send(self)
self._open(response.headers['Location'])
return
# redirects report the original referrer
self._referrer = request_uri(environ)
self.status = response.status
self.headers = response.headers
# TODO: unicodify
self.response = response.data
self._sync_document()
# TODO: what does a http-equiv redirect report for referrer?
if 'meta[http-equiv=refresh]' in self.document:
refresh = self.document['meta[http-equiv=refresh]'][0]
if 'content' in refresh.attrib:
parts = refresh.get('content').split(';url=', 1)
if len(parts) == 2:
logger.debug("HTTP-EQUIV Redirect to %s", parts[1])
after_browser_activity.send(self)
self._open(parts[1])
return
open_ended = time()
request_time = request_ended - request_started
logger.info("Fetched %s in %0.3fsec + %0.3fsec browser overhead",
url, request_time,
open_ended - open_started - request_time)
after_browser_activity.send(self)
def _create_environ(self, url, method, data, refer, content_type=None):
"""Return an environ to request *url*, including cookies."""
environ_args = dict(self._wsgi_server, method=method)
base_url = self._referrer if refer else self._base_url
environ_args.update(self._canonicalize_url(url, base_url))
environ_args.update(self._prep_input(method, data, content_type))
environ = create_environ(**environ_args)
if refer and self._referrer:
environ['HTTP_REFERER'] = self._referrer
environ.setdefault('REMOTE_ADDR', '127.0.0.1')
self._cookie_jar.export_to_environ(environ)
return environ
def _canonicalize_url(self, url, base_url):
"""Return fully qualified URL components formatted for environ."""
if '?' in url:
url, query_string = url.split('?', 1)
else:
query_string = None
canonical = {'query_string': query_string}
# canonicalize against last request (add host/port, resolve
# relative paths)
if base_url:
url = urljoin(base_url, url)
parsed = urlparse(url)
if not parsed.scheme:
raise RuntimeError(
"No base url available for resolving relative url %r" % url)
canonical['path'] = urlunparse((
'', '', parsed.path, parsed.params, '', ''))
canonical['base_url'] = urlunparse((
parsed.scheme, parsed.netloc, '', '', '', ''))
return canonical
def _prep_input(self, method, data, content_type):
"""Return encoded and packed POST data."""
if data is None or method != 'POST':
prepped = {
'input_stream': None,
'content_length': None,
'content_type': None,
}
if method == 'GET' and data:
qs = MultiDict()
for key, value in to_pairs(data):
qs.setlistdefault(key).append(value)
prepped['query_string'] = url_encode(qs)
return prepped
else:
payload = url_encode(MultiDict(to_pairs(data)))
content_type = 'application/x-www-form-urlencoded'
return {
'input_stream': StringIO(payload),
'content_length': len(payload),
'content_type': content_type
}
def _wrap_file(filename, content_type):
"""Open the file *filename* and wrap in a FileStorage object."""
assert os.path.isfile(filename), "File does not exist."
return FileStorage(
stream=open(filename, 'rb'),
filename=os.path.basename(filename),
content_type=content_type
)
class FormElement(FormElement):
"""A <form/> that can be submitted."""
def submit(self, wait_for=None, timeout=0, _extra_values=()):
"""Submit the form's values.
Equivalent to hitting 'return' in a browser form: the data is
submitted without the submit button's key/value pair.
"""
if _extra_values and hasattr(_extra_values, 'items'):
_extra_values = _extra_values.items()
values = self.form_values()
values.extend(_extra_values)
method = self.method or 'GET'
if self.action:
action = self.action
elif self.browser._referrer:
action = urlparse(self.browser._referrer).path
else:
action = '/'
self.browser._open(action, method=method, data=values,
content_type=self.get('enctype'))
class InputElement(InputElement):
"""An <input/> tag."""
# Toss aside checkbox code present in the base lxml @value
@property
def value(self):
return self.get('value')
@value.setter
def value(self, value):
self.set('value', value)
@value.deleter
def value(self):
if 'value' in self.attrib:
del self.attrib['value']
def click(self, wait_for=None, timeout=None):
if self.checkable:
self.checked = not self.checked
return
if self.type != 'submit':
super(InputElement, self).click(wait_for, timeout)
return
for element in self.iterancestors():
if element.tag == 'form':
break
else:
# Not in a form: clicking does nothing.
# TODO: probably not true
return
extra = ()
if 'name' in self.attrib:
extra = [[self.attrib['name'], self.attrib.get('value', 'Submit')]]
element.submit(wait_for=wait_for, timeout=timeout, _extra_values=extra)
class ButtonElement(object):
"""Buttons that can be .click()ed."""
def click(self, wait_for=None, timeout=0):
# TODO: process type=submit|reset|button?
for element in self.iterancestors():
if element.tag == 'form':
break
else:
# Not in a form: clicking does nothing.
return
pairs = []
name = self.attrib.get('name', False)
if name:
pairs.append((name, self.attrib.get('value', '')))
return element.submit(_extra_values=pairs)
class LinkElement(object):
"""Links that can be .click()ed."""
def click(self, wait_for=None, timeout=0):
try:
link = self.attrib['href']
        except KeyError:  # attrib lookup raises KeyError when the link has no href
pass
else:
self.browser._open(link, 'GET')
wsgi_elements = {
'*': DOMElement,
'a': LinkElement,
'button': ButtonElement,
'form': FormElement,
'input': InputElement,
'select': SelectElement,
'textarea': TextareaElement,
}
class CookieJar(cookielib.CookieJar):
"""A lock-less CookieJar that can clone itself."""
def __init__(self, policy=None):
if policy is None:
policy = cookielib.DefaultCookiePolicy()
self._policy = policy
self._cookies = {}
self._cookies_lock = dummy_threading.RLock()
def export_to_environ(self, environ):
if len(self):
u_request = _WSGI_urllib2_request(environ)
self.add_cookie_header(u_request)
def extract_from_werkzeug(self, response, request_environ):
headers = response.headers
if 'Set-Cookie' in headers or 'Set-Cookie2' in headers:
u_response = _Werkzeug_urlib2_response(response)
u_request = _WSGI_urllib2_request(request_environ)
self.extract_cookies(u_response, u_request)
class _Duck(object):
"""Has arbitrary attributes assigned at construction time."""
def __init__(self, **kw):
for attr, value in kw.iteritems():
setattr(self, attr, value)
class _Werkzeug_urlib2_response(object):
__slots__ = 'response',
def __init__(self, response):
self.response = response
def info(self):
return _Duck(getallmatchingheaders=self.response.headers.getlist,
getheaders=self.response.headers.getlist)
class _WSGI_urllib2_request(object):
def __init__(self, environ):
self.environ = environ
self.url = request_uri(self.environ)
self.url_parts = urlparse(self.url)
def get_full_url(self):
return self.url
def get_host(self):
return self.url_parts.hostname
def get_type(self):
return self.url_parts.scheme
def is_unverifiable(self):
return False
def get_origin_req_host(self):
raise Exception('fixme need previous request')
def has_header(self, header):
key = header.replace('-', '_').upper()
return key in self.environ or 'HTTP_%s' % key in self.environ
def get_header(self, header):
return self.environ.get('HTTP_%s' % header.replace('-', '_').upper())
def header_items(self):
items = []
for key, value in self.environ.iteritems():
if ((key.startswith('HTTP_') or key.startswith('CONTENT_')) and
isinstance(value, basestring)):
if key.startswith('HTTP_'):
key = key[5:]
key = key.replace('_', '-').title()
items.append((key, value))
return items
def add_unredirected_header(self, key, value):
if key == 'Cookie':
            self.environ['HTTP_COOKIE'] = value
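# Rough usage sketch (hedged; `make_app` is a placeholder for your own WSGI callable):
#
#   browser = WSGI(make_app(), base_url='http://localhost/')
#   browser.open('/login')
#   form = browser.document['form'][0]
#   form.submit()
#   print browser.status_code, browser.cookies
#
# The browser runs the app in-process via run_wsgi_app(), keeps cookies in the
# lock-less CookieJar above, and follows 301/302 redirects automatically.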
|
py
|
1a5c228ce01a47e000e9604906a9d442940aebee
|
from file_system.api import file
urlpatterns = file.urlpatterns
|
py
|
1a5c23cc3c3ad19780ad8a602666fd82a11d4990
|
## Include this file in the target folder
## Import it from another file to extend sys.path
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
this_dir = os.path.dirname(__file__)
folder_name = os.path.join(this_dir, '..', 'folder_name')
add_path(folder_name)
|
py
|
1a5c24f44f9375c588cecbacc9f9fa19ff44a998
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyMplhepData(PythonPackage):
"""Font (Data) sub-package for mplhep"""
homepage = "https://github.com/Scikit-HEP/mplhep_data"
pypi = "mplhep_data/mplhep_data-0.0.3.tar.gz"
version('0.0.3', sha256='b54d257f3f53c93a442cda7a6681ce267277e09173c0b41fd78820f78321772f')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-setuptools@42:', type='build')
depends_on('[email protected]:+toml', type='build')
|
py
|
1a5c258d6a5ef76302ea3281d5bb2ef79c1d4c34
|
#!/usr/bin/python
#
# Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
#
from optparse import OptionParser
import subprocess
import os
import glob
import platform
import ConfigParser
import socket
import requests
from StringIO import StringIO
from lxml import etree
from sandesh_common.vns.constants import ServiceHttpPortMap, \
NodeUVEImplementedServices, ServicesDefaultConfigurationFile, \
BackupImplementedServices
DPDK_NETLINK_TCP_PORT = 20914
try:
subprocess.check_call(["dpkg-vendor", "--derives-from", "debian"])
distribution = 'debian'
except (OSError, subprocess.CalledProcessError):
distribution = 'centos'
class EtreeToDict(object):
"""Converts the xml etree to dictionary/list of dictionary."""
def __init__(self, xpath):
self.xpath = xpath
#end __init__
def _handle_list(self, elems):
"""Handles the list object in etree."""
a_list = []
for elem in elems.getchildren():
rval = self._get_one(elem, a_list)
if 'element' in rval.keys():
a_list.append(rval['element'])
elif 'list' in rval.keys():
a_list.append(rval['list'])
else:
a_list.append(rval)
if not a_list:
return None
return a_list
#end _handle_list
def _get_one(self, xp, a_list=None):
"""Recrusively looks for the entry in etree and converts to dictionary.
Returns a dictionary.
"""
val = {}
child = xp.getchildren()
if not child:
val.update({xp.tag: xp.text})
return val
for elem in child:
if elem.tag == 'list':
val.update({xp.tag: self._handle_list(elem)})
else:
rval = self._get_one(elem, a_list)
if elem.tag in rval.keys():
val.update({elem.tag: rval[elem.tag]})
else:
val.update({elem.tag: rval})
return val
#end _get_one
def get_all_entry(self, path):
"""All entries in the etree is converted to the dictionary
Returns the list of dictionary/didctionary.
"""
xps = path.xpath(self.xpath)
if type(xps) is not list:
return self._get_one(xps)
val = []
for xp in xps:
val.append(self._get_one(xp))
return val
#end get_all_entry
def find_entry(self, path, match):
"""Looks for a particular entry in the etree.
Returns the element looked for/None.
"""
xp = path.xpath(self.xpath)
f = filter(lambda x: x.text == match, xp)
if len(f):
return f[0].text
return None
#end find_entry
#end class EtreeToDict
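# Small illustration (hypothetical input, not from a real introspect page): with
# xpath './/NodeStatus' and XML such as
#   <NodeStatus><process_status><list>...</list></process_status></NodeStatus>
# get_all_entry() recurses through the children and returns a dict (or a list of
# dicts), with <list> elements flattened into Python lists by _handle_list().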
class IntrospectUtil(object):
def __init__(self, ip, port, debug, timeout):
self._ip = ip
self._port = port
self._debug = debug
self._timeout = timeout
#end __init__
def _mk_url_str(self, path):
return "http://%s:%d/%s" % (self._ip, self._port, path)
#end _mk_url_str
def _load(self, path):
url = self._mk_url_str(path)
resp = requests.get(url, timeout=self._timeout)
if resp.status_code == requests.codes.ok:
return etree.fromstring(resp.text)
else:
if self._debug:
print 'URL: %s : HTTP error: %s' % (url, str(resp.status_code))
return None
#end _load
def get_uve(self, tname):
path = 'Snh_SandeshUVECacheReq?x=%s' % (tname)
xpath = './/' + tname
p = self._load(path)
if p is not None:
return EtreeToDict(xpath).get_all_entry(p)
else:
if self._debug:
print 'UVE: %s : not found' % (path)
return None
#end get_uve
#end class IntrospectUtil
def service_installed(svc, initd_svc):
if distribution == 'debian':
if initd_svc:
return os.path.exists('/etc/init.d/' + svc)
cmd = 'initctl show-config ' + svc
else:
cmd = 'chkconfig --list ' + svc
with open(os.devnull, "w") as fnull:
return not subprocess.call(cmd.split(), stdout=fnull, stderr=fnull)
def service_bootstatus(svc, initd_svc):
if distribution == 'debian':
# On ubuntu/debian there does not seem to be an easy way to find
# the boot status for init.d services without going through the
# /etc/rcX.d level
if initd_svc:
if glob.glob('/etc/rc*.d/S*' + svc):
return ''
else:
return ' (disabled on boot)'
cmd = 'initctl show-config ' + svc
cmdout = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE).communicate()[0]
if cmdout.find(' start on') != -1:
return ''
else:
return ' (disabled on boot)'
else:
cmd = 'chkconfig ' + svc
with open(os.devnull, "w") as fnull:
if not subprocess.call(cmd.split(), stdout=fnull, stderr=fnull):
return ''
else:
return ' (disabled on boot)'
def service_status(svc, initd_svc):
cmd = 'service ' + svc + ' status'
p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
cmdout = p.communicate()[0]
if initd_svc:
if p.returncode == 0 or 'Active: active' in cmdout:
return 'active'
else:
return 'inactive'
if cmdout.find('running') != -1:
return 'active'
else:
return 'inactive'
def check_svc(svc, initd_svc=False):
psvc = svc + ':'
if service_installed(svc, initd_svc):
bootstatus = service_bootstatus(svc, initd_svc)
status = service_status(svc, initd_svc)
else:
bootstatus = ' (disabled on boot)'
status='inactive'
print '%-30s%s%s' %(psvc, status, bootstatus)
_DEFAULT_CONF_FILE_DIR = '/etc/contrail/'
_DEFAULT_CONF_FILE_EXTENSION = '.conf'
def get_http_server_port_from_conf(svc_name, debug):
# Open and extract conf file
if svc_name in ServicesDefaultConfigurationFile:
default_conf_file = ServicesDefaultConfigurationFile[svc_name]
else:
default_conf_file = _DEFAULT_CONF_FILE_DIR + svc_name + \
_DEFAULT_CONF_FILE_EXTENSION
try:
fp = open(default_conf_file)
except IOError as e:
if debug:
print '{0}: Could not read filename {1}'.format(\
svc_name, default_conf_file)
return -1
else:
data = StringIO('\n'.join(line.strip() for line in fp))
# Parse conf file
parser = ConfigParser.SafeConfigParser()
try:
parser.readfp(data)
except ConfigParser.ParsingError as e:
fp.close()
if debug:
print '{0}: Parsing error: {1}'.format(svc_name, \
str(e))
return -1
# Read DEFAULT.http_server_port from the conf file. If that fails try
# DEFAULTS.http_server_port (for python daemons)
try:
http_server_port = parser.getint('DEFAULT', 'http_server_port')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError, \
ValueError) as de:
try:
http_server_port = parser.getint('DEFAULTS', 'http_server_port')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as dse:
fp.close()
if debug:
print '{0}: DEFAULT/S.http_server_port not present'.format(
svc_name)
return -1
else:
fp.close()
return http_server_port
else:
fp.close()
return http_server_port
def get_default_http_server_port(svc_name, debug):
if svc_name in ServiceHttpPortMap:
return ServiceHttpPortMap[svc_name]
else:
if debug:
print '{0}: Introspect port not found'.format(svc_name)
return -1
def get_http_server_port(svc_name, debug):
http_server_port = get_http_server_port_from_conf(svc_name, debug)
if http_server_port == -1:
http_server_port = get_default_http_server_port(svc_name, debug)
return http_server_port
def get_svc_uve_status(svc_name, debug, timeout):
# Get the HTTP server (introspect) port for the service
http_server_port = get_http_server_port(svc_name, debug)
if http_server_port == -1:
return None, None
# Now check the NodeStatus UVE
svc_introspect = IntrospectUtil('localhost', http_server_port, debug,
timeout)
node_status = svc_introspect.get_uve('NodeStatus')
if node_status is None:
if debug:
print '{0}: NodeStatusUVE not found'.format(svc_name)
return None, None
node_status = [item for item in node_status if 'process_status' in item]
if not len(node_status):
if debug:
print '{0}: ProcessStatus not present in NodeStatusUVE'.format(
svc_name)
return None, None
process_status_info = node_status[0]['process_status']
if len(process_status_info) == 0:
if debug:
print '{0}: Empty ProcessStatus in NodeStatusUVE'.format(svc_name)
return None, None
return process_status_info[0]['state'], process_status_info[0]['description']
def check_svc_status(service_name, debug, detail, timeout):
service_sock = service_name.replace('-', '_')
service_sock = service_sock.replace('supervisor_', 'supervisord_') + '.sock'
cmd = 'supervisorctl -s unix:///tmp/' + service_sock + ' status'
cmdout = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE).communicate()[0]
if cmdout.find('refused connection') == -1:
cmdout = cmdout.replace(' STARTING', 'initializing')
cmdout = cmdout.replace(' RUNNING', 'active')
cmdout = cmdout.replace(' STOPPED', 'inactive')
cmdout = cmdout.replace(' FATAL', 'failed')
cmdoutlist = cmdout.split('\n')
if debug:
print '%s: %s' % (str(service_name), cmdoutlist)
for supervisor_svc_info_cmdout in cmdoutlist:
supervisor_svc_info = supervisor_svc_info_cmdout.split()
if len(supervisor_svc_info) >= 2:
svc_name = supervisor_svc_info[0]
svc_status = supervisor_svc_info[1]
svc_detail_info = ' '.join(supervisor_svc_info[2:])
# Extract UVE state only for running processes
svc_uve_description = None
if (svc_name in NodeUVEImplementedServices or
svc_name.rsplit('-', 1)[0] in NodeUVEImplementedServices) and svc_status == 'active':
try:
svc_uve_status, svc_uve_description = get_svc_uve_status(svc_name, debug, timeout)
                    except requests.ConnectionError as e:
if debug:
print 'Socket Connection error : %s' % (str(e))
svc_uve_status = "connection-error"
except (requests.Timeout, socket.timeout) as te:
if debug:
print 'Timeout error : %s' % (str(te))
svc_uve_status = "connection-timeout"
if svc_uve_status is not None:
if svc_uve_status == 'Non-Functional':
svc_status = 'initializing'
elif svc_uve_status == 'connection-error':
if svc_name in BackupImplementedServices:
svc_status = 'backup'
else:
svc_status = 'initializing'
elif svc_uve_status == 'connection-timeout':
svc_status = 'timeout'
else:
svc_status = 'initializing'
                    if svc_uve_description is not None and svc_uve_description != '':
svc_status = svc_status + ' (' + svc_uve_description + ')'
if not detail:
print '{0:<30}{1:<20}'.format(svc_name, svc_status)
else:
print '{0:<30}{1:<20}{2:<40}'.format(svc_name, svc_status, svc_detail_info)
print
def check_status(svc_name, options):
check_svc(svc_name)
check_svc_status(svc_name, options.debug, options.detail, options.timeout)
def supervisor_status(nodetype, options):
if nodetype == 'compute':
print "== Contrail vRouter =="
check_status('supervisor-vrouter', options)
elif nodetype == 'config':
print "== Contrail Config =="
check_status('supervisor-config', options)
elif nodetype == 'control':
print "== Contrail Control =="
check_status('supervisor-control', options)
elif nodetype == 'analytics':
print "== Contrail Analytics =="
check_status('supervisor-analytics', options)
elif nodetype == 'database':
print "== Contrail Database =="
check_svc('contrail-database', initd_svc=True)
check_status('supervisor-database', options)
elif nodetype == 'webui':
print "== Contrail Web UI =="
check_status('supervisor-webui', options)
elif nodetype == 'support-service':
print "== Contrail Support Services =="
check_status('supervisor-support-service', options)
def package_installed(pkg):
if distribution == 'debian':
cmd = "dpkg -l " + pkg
else:
cmd = "rpm -q " + pkg
with open(os.devnull, "w") as fnull:
return (not subprocess.call(cmd.split(), stdout=fnull, stderr=fnull))
def main():
parser = OptionParser()
parser.add_option('-d', '--detail', dest='detail',
default=False, action='store_true',
help="show detailed status")
parser.add_option('-x', '--debug', dest='debug',
default=False, action='store_true',
help="show debugging information")
parser.add_option('-t', '--timeout', dest='timeout', type="float",
default=2,
help="timeout in seconds to use for HTTP requests to services")
(options, args) = parser.parse_args()
if args:
parser.error("No arguments are permitted")
control = package_installed('contrail-control')
analytics = package_installed('contrail-analytics')
agent = package_installed('contrail-vrouter')
capi = package_installed('contrail-config')
cwebui = package_installed('contrail-web-controller')
cwebstorage = package_installed('contrail-web-storage')
database = (package_installed('contrail-openstack-database') or
package_installed('contrail-database'))
storage = package_installed('contrail-storage')
vr = False
lsmodout = subprocess.Popen('lsmod', stdout=subprocess.PIPE).communicate()[0]
lsofvrouter = (subprocess.Popen(['lsof', '-ni:{0}'.format(DPDK_NETLINK_TCP_PORT),
'-sTCP:LISTEN'], stdout=subprocess.PIPE).communicate()[0])
if lsmodout.find('vrouter') != -1:
vr = True
elif lsofvrouter:
vr = True
if agent:
if not vr:
print "vRouter is NOT PRESENT\n"
supervisor_status('compute', options)
else:
if vr:
print "vRouter is PRESENT\n"
if control:
supervisor_status('control', options)
if analytics:
supervisor_status('analytics', options)
if capi:
supervisor_status('config', options)
if cwebui or cwebstorage:
supervisor_status('webui', options)
if database:
supervisor_status('database', options)
if capi:
supervisor_status('support-service', options)
if storage:
print "== Contrail Storage =="
check_svc('contrail-storage-stats')
if len(glob.glob('/var/crashes/core.*')) != 0:
print "========Run time service failures============="
for file in glob.glob('/var/crashes/core.*'):
print file
if __name__ == '__main__':
main()
|
py
|
1a5c25e1eaea3d5024ec0a32f90abc01dc9ed845
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("DTT0Analyzer")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff")
process.GlobalTag.globaltag = ''
process.load("Configuration.StandardSequences.GeometryDB_cff")
process.load("Geometry.DTGeometry.dtGeometry_cfi")
process.DTGeometryESModule.applyAlignment = False
process.DTGeometryESModule.fromDDD = False
process.load("CondCore.DBCommon.CondDBSetup_cfi")
process.source = cms.Source("EmptySource",
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.dtT0Analyzer = cms.EDAnalyzer("DTT0Analyzer",
rootFileName = cms.untracked.string("")
)
process.p = cms.Path(process.dtT0Analyzer)
|
py
|
1a5c25fbb1ba9e6ce7a8f2453b95c8a877b679a2
|
#
# Module providing the `Process` class which emulates `threading.Thread`
#
# multiprocessing/process.py
#
# Copyright (c) 2006-2008, R Oudkerk --- see COPYING.txt
#
__all__ = ['Process', 'current_process', 'active_children']
#
# Imports
#
import os
import sys
import signal
import itertools
#
#
#
try:
ORIGINAL_DIR = os.path.abspath(os.getcwd())
except OSError:
ORIGINAL_DIR = None
#
# Public functions
#
def current_process():
'''
Return process object representing the current process
'''
return _current_process
def active_children():
'''
Return list of process objects corresponding to live child processes
'''
_cleanup()
return list(_current_process._children)
#
#
#
def _cleanup():
# check for processes which have finished
for p in list(_current_process._children):
if p._popen.poll() is not None:
_current_process._children.discard(p)
#
# The `Process` class
#
class Process(object):
'''
Process objects represent activity that is run in a separate process
    The class is analogous to `threading.Thread`
'''
_Popen = None
def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
assert group is None, 'group argument must be None for now'
count = next(_current_process._counter)
self._identity = _current_process._identity + (count,)
self._authkey = _current_process._authkey
self._daemonic = _current_process._daemonic
self._tempdir = _current_process._tempdir
self._parent_pid = os.getpid()
self._popen = None
self._target = target
self._args = tuple(args)
self._kwargs = dict(kwargs)
self._name = name or type(self).__name__ + '-' + \
':'.join(str(i) for i in self._identity)
def run(self):
'''
Method to be run in sub-process; can be overridden in sub-class
'''
if self._target:
self._target(*self._args, **self._kwargs)
def start(self):
'''
Start child process
'''
assert self._popen is None, 'cannot start a process twice'
assert self._parent_pid == os.getpid(), \
'can only start a process object created by current process'
assert not _current_process._daemonic, \
'daemonic processes are not allowed to have children'
_cleanup()
if self._Popen is not None:
Popen = self._Popen
else:
from .forking import Popen
self._popen = Popen(self)
_current_process._children.add(self)
def terminate(self):
'''
Terminate process; sends SIGTERM signal or uses TerminateProcess()
'''
self._popen.terminate()
def join(self, timeout=None):
'''
Wait until child process terminates
'''
assert self._parent_pid == os.getpid(), 'can only join a child process'
assert self._popen is not None, 'can only join a started process'
res = self._popen.wait(timeout)
if res is not None:
_current_process._children.discard(self)
def is_alive(self):
'''
Return whether process is alive
'''
if self is _current_process:
return True
assert self._parent_pid == os.getpid(), 'can only test a child process'
if self._popen is None:
return False
self._popen.poll()
return self._popen.returncode is None
@property
def name(self):
return self._name
@name.setter
def name(self, name):
assert isinstance(name, str), 'name must be a string'
self._name = name
@property
def daemon(self):
'''
Return whether process is a daemon
'''
return self._daemonic
@daemon.setter
def daemon(self, daemonic):
'''
Set whether process is a daemon
'''
assert self._popen is None, 'process has already started'
self._daemonic = daemonic
@property
def authkey(self):
return self._authkey
@authkey.setter
def authkey(self, authkey):
'''
Set authorization key of process
'''
self._authkey = AuthenticationString(authkey)
@property
def exitcode(self):
'''
Return exit code of process or `None` if it has yet to stop
'''
if self._popen is None:
return self._popen
return self._popen.poll()
@property
def ident(self):
'''
        Return identifier (PID) of process or `None` if it has yet to start
'''
if self is _current_process:
return os.getpid()
else:
return self._popen and self._popen.pid
pid = ident
def __repr__(self):
if self is _current_process:
status = 'started'
elif self._parent_pid != os.getpid():
status = 'unknown'
elif self._popen is None:
status = 'initial'
else:
if self._popen.poll() is not None:
status = self.exitcode
else:
status = 'started'
if type(status) is int:
if status == 0:
status = 'stopped'
else:
status = 'stopped[%s]' % _exitcode_to_name.get(status, status)
return '<%s(%s, %s%s)>' % (type(self).__name__, self._name,
status, self._daemonic and ' daemon' or '')
##
def _bootstrap(self):
from . import util
global _current_process
try:
self._children = set()
self._counter = itertools.count(1)
if sys.stdin is not None:
try:
sys.stdin.close()
sys.stdin = open(os.devnull)
except (OSError, ValueError):
pass
_current_process = self
util._finalizer_registry.clear()
util._run_after_forkers()
util.info('child process calling self.run()')
try:
self.run()
exitcode = 0
finally:
util._exit_function()
except SystemExit as e:
if not e.args:
exitcode = 1
elif type(e.args[0]) is int:
exitcode = e.args[0]
else:
sys.stderr.write(e.args[0] + '\n')
sys.stderr.flush()
exitcode = 1
except:
exitcode = 1
import traceback
sys.stderr.write('Process %s:\n' % self.name)
sys.stderr.flush()
traceback.print_exc()
util.info('process exiting with exitcode %d' % exitcode)
return exitcode
#
# We subclass bytes to avoid accidental transmission of auth keys over network
#
class AuthenticationString(bytes):
def __reduce__(self):
from .forking import Popen
if not Popen.thread_is_spawning():
raise TypeError(
'Pickling an AuthenticationString object is '
'disallowed for security reasons'
)
return AuthenticationString, (bytes(self),)
#
# Create object representing the main process
#
class _MainProcess(Process):
def __init__(self):
self._identity = ()
self._daemonic = False
self._name = 'MainProcess'
self._parent_pid = None
self._popen = None
self._counter = itertools.count(1)
self._children = set()
self._authkey = AuthenticationString(os.urandom(32))
self._tempdir = None
_current_process = _MainProcess()
del _MainProcess
#
# Give names to some return codes
#
_exitcode_to_name = {}
for name, signum in list(signal.__dict__.items()):
if name[:3]=='SIG' and '_' not in name:
_exitcode_to_name[-signum] = name
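#
# Illustrative usage sketch (not part of the original module). The Process API
# defined above mirrors threading.Thread; in normal use it is reached through
# the multiprocessing package rather than by running this file directly.
#
#   from multiprocessing import Process, current_process
#
#   def greet(name):
#       print('hello %s from %s' % (name, current_process().name))
#
#   if __name__ == '__main__':
#       p = Process(target=greet, args=('world',))
#       p.start()
#       p.join()
#       assert p.exitcode == 0
#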
|
py
|
1a5c266f061921536af533a41d76b81fad160516
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from . import geom
import math
import random
from math import sqrt, hypot
# Points are 3-tuples or 2-tuples of reals: (x,y,z) or (x,y)
# Faces are lists of integers (vertex indices into coord lists)
# After triangulation/quadrangulation, the tris and quads will
# be tuples instead of lists.
# Vmaps are lists taking vertex index -> Point
TOL = 1e-7 # a tolerance for fuzzy equality
GTHRESH = 75 # threshold above which greedy matching is used in _Quandrangulate
ANGFAC = 1.0 # weighting for angles in quad goodness measure
DEGFAC = 10.0 # weighting for degree in quad goodness measure
# Angle kind constants
Ang0 = 1
Angconvex = 2
Angreflex = 3
Angtangential = 4
Ang360 = 5
def TriangulateFace(face, points):
"""Triangulate the given face.
Uses an easy triangulation first, followed by a constrained delauney
triangulation to get better shaped triangles.
Args:
face: list of int - indices in points, assumed CCW-oriented
points: geom.Points - holds coordinates for vertices
Returns:
list of (int, int, int) - 3-tuples are CCW-oriented vertices of
triangles making up the triangulation
"""
if len(face) <= 3:
return [tuple(face)]
tris = EarChopTriFace(face, points)
bord = _BorderEdges([face])
triscdt = _CDT(tris, bord, points)
return triscdt
def TriangulateFaceWithHoles(face, holes, points):
"""Like TriangulateFace, but with holes inside the face.
Works by making one complex polygon that has segments to
and from the holes ("islands"), and then using the same method
as TriangulateFace.
Args:
face: list of int - indices in points, assumed CCW-oriented
holes: list of list of int - each sublist is like face
but CW-oriented and assumed to be inside face
points: geom.Points - holds coordinates for vertices
Returns:
list of (int, int, int) - 3-tuples are CCW-oriented vertices of
triangles making up the triangulation
"""
if len(holes) == 0:
return TriangulateFace(face, points)
allfaces = [face] + holes
sholes = [_SortFace(h, points) for h in holes]
joinedface = _JoinIslands(face, sholes, points)
tris = EarChopTriFace(joinedface, points)
bord = _BorderEdges(allfaces)
triscdt = _CDT(tris, bord, points)
return triscdt
def QuadrangulateFace(face, points):
"""Quadrangulate the face (subdivide into convex quads and tris).
Like TriangulateFace, but after triangulating, join as many pairs
of triangles as possible into convex quadrilaterals.
Args:
face: list of int - indices in points, assumed CCW-oriented
points: geom.Points - holds coordinates for vertices
Returns:
list of 3-tuples or 4-tuples of ints - CCW-oriented vertices of
quadrilaterals and triangles making up the quadrangulation.
"""
if len(face) <= 3:
return [tuple(face)]
tris = EarChopTriFace(face, points)
bord = _BorderEdges([face])
triscdt = _CDT(tris, bord, points)
qs = _Quandrangulate(triscdt, bord, points)
return qs
def QuadrangulateFaceWithHoles(face, holes, points):
"""Like QuadrangulateFace, but with holes inside the faces.
Args:
face: list of int - indices in points, assumed CCW-oriented
holes: list of list of int - each sublist is like face
but CW-oriented and assumed to be inside face
points: geom.Points - holds coordinates for vertices
Returns:
list of 3-tuples or 4-tuples of ints - CCW-oriented vertices of
quadrilaterals and triangles making up the quadrangulation.
"""
if len(holes) == 0:
return QuadrangulateFace(face, points)
allfaces = [face] + holes
sholes = [_SortFace(h, points) for h in holes]
joinedface = _JoinIslands(face, sholes, points)
tris = EarChopTriFace(joinedface, points)
bord = _BorderEdges(allfaces)
triscdt = _CDT(tris, bord, points)
qs = _Quandrangulate(triscdt, bord, points)
return qs
def _SortFace(face, points):
"""Rotate face so leftmost vertex is first, where face is
list of indices in points."""
n = len(face)
if n <= 1:
return face
lefti = 0
leftv = face[0]
for i in range(1, n):
# following comparison is lexicographic on n-tuple
# so sorts on x first, using lower y as tie breaker.
if points.pos[face[i]] < points.pos[leftv]:
lefti = i
leftv = face[i]
return face[lefti:] + face[0:lefti]
def EarChopTriFace(face, points):
"""Triangulate given face, with coords given by indexing into points.
Return list of faces, each of which will be a triangle.
Use the ear-chopping method."""
# start with lowest coord in 2d space to try
# to get a pleasing uniform triangulation if starting with
# a regular structure (like a grid)
start = _GetLeastIndex(face, points)
ans = []
incr = 1
n = len(face)
while n > 3:
i = _FindEar(face, n, start, incr, points)
vm1 = face[(i - 1) % n]
v0 = face[i]
v1 = face[(i + 1) % n]
face = _ChopEar(face, i)
n = len(face)
incr = - incr
if incr == 1:
start = i % n
else:
start = (i - 1) % n
ans.append((vm1, v0, v1))
ans.append(tuple(face))
return ans
def _GetLeastIndex(face, points):
"""Return index of coordinate that is leftmost, lowest in face."""
bestindex = 0
bestpos = points.pos[face[0]]
for i in range(1, len(face)):
pos = points.pos[face[i]]
if pos[0] < bestpos[0] or \
(pos[0] == bestpos[0] and pos[1] < bestpos[1]):
bestindex = i
bestpos = pos
return bestindex
def _FindEar(face, n, start, incr, points):
"""An ear of a polygon consists of three consecutive vertices
v(-1), v0, v1 such that v(-1) can connect to v1 without intersecting
the polygon.
Finds an ear, starting at index 'start' and moving
in direction incr. (We attempt to alternate directions, to find
'nice' triangulations for simple convex polygons.)
Returns index into faces of v0 (will always find one, because
uses a desperation mode if fails to find one with above rule)."""
angk = _ClassifyAngles(face, n, points)
for mode in range(0, 5):
i = start
while True:
if _IsEar(face, i, n, angk, points, mode):
return i
i = (i + incr) % n
if i == start:
break # try next higher desperation mode
def _IsEar(face, i, n, angk, points, mode):
"""Return true, false depending on ear status of vertices
with indices i-1, i, i+1.
mode is amount of desperation: 0 is Normal mode,
mode 1 allows degenerate triangles (with repeated vertices)
mode 2 allows local self crossing (folded) ears
mode 3 allows any convex vertex (should always be one)
mode 4 allows anything (just to be sure loop terminates!)"""
k = angk[i]
vm2 = face[(i - 2) % n]
vm1 = face[(i - 1) % n]
v0 = face[i]
v1 = face[(i + 1) % n]
v2 = face[(i + 2) % n]
if vm1 == v0 or v0 == v1:
return (mode > 0)
b = (k == Angconvex or k == Angtangential or k == Ang0)
c = _InCone(vm1, v0, v1, v2, angk[(i + 1) % n], points) and \
_InCone(v1, vm2, vm1, v0, angk[(i - 1) % n], points)
if b and c:
return _EarCheck(face, n, angk, vm1, v0, v1, points)
if mode < 2:
return False
if mode == 3:
return SegsIntersect(vm2, vm1, v0, v1, points)
if mode == 4:
return b
return True
def _EarCheck(face, n, angk, vm1, v0, v1, points):
"""Return True if the successive vertices vm1, v0, v1
forms an ear. We already know that it is not a reflex
Angle, and that the local cone containment is ok.
What remains to check is that the edge vm1-v1 doesn't
intersect any other edge of the face (besides vm1-v0
and v0-v1). Equivalently, there can't be a reflex Angle
inside the triangle vm1-v0-v1. (Well, there are
messy cases when other points of the face coincide with
v0 or touch various lines involved in the ear.)"""
for j in range(0, n):
fv = face[j]
k = angk[j]
b = (k == Angreflex or k == Ang360) \
and not(fv == vm1 or fv == v0 or fv == v1)
if b:
# Is fv inside closure of triangle (vm1,v0,v1)?
c = not(Ccw(v0, vm1, fv, points) \
or Ccw(vm1, v1, fv, points) \
or Ccw(v1, v0, fv, points))
fvm1 = face[(j - 1) % n]
fv1 = face[(j + 1) % n]
# To try to deal with some degenerate cases,
# also check to see if either segment attached to fv
# intersects either segment of potential ear.
d = SegsIntersect(fvm1, fv, vm1, v0, points) or \
SegsIntersect(fvm1, fv, v0, v1, points) or \
SegsIntersect(fv, fv1, vm1, v0, points) or \
SegsIntersect(fv, fv1, v0, v1, points)
if c or d:
return False
return True
def _ChopEar(face, i):
"""Return a copy of face (of length n), omitting element i."""
return face[0:i] + face[i + 1:]
def _InCone(vtest, a, b, c, bkind, points):
"""Return true if point with index vtest is in Cone of points with
indices a, b, c, where Angle ABC has AngleKind Bkind.
The Cone is the set of points inside the left face defined by
segments ab and bc, disregarding all other segments of polygon for
purposes of inside test."""
if bkind == Angreflex or bkind == Ang360:
if _InCone(vtest, c, b, a, Angconvex, points):
return False
return not((not(Ccw(b, a, vtest, points)) \
and not(Ccw(b, vtest, a, points)) \
and Ccw(b, a, vtest, points))
or
(not(Ccw(b, c, vtest, points)) \
and not(Ccw(b, vtest, c, points)) \
and Ccw(b, a, vtest, points)))
else:
return Ccw(a, b, vtest, points) and Ccw(b, c, vtest, points)
def _JoinIslands(face, holes, points):
"""face is a CCW face containing the CW faces in the holes list,
where each hole is sorted so the leftmost-lowest vertex is first.
faces and holes are given as lists of indices into points.
    The holes should be sorted by _SortFace.
Add edges to make a new face that includes the holes (a Ccw traversal
of the new face will have the inside always on the left),
and return the new face."""
while len(holes) > 0:
(hole, holeindex) = _LeftMostFace(holes, points)
holes = holes[0:holeindex] + holes[holeindex + 1:]
face = _JoinIsland(face, hole, points)
return face
def _JoinIsland(face, hole, points):
"""Return a modified version of face that splices in the
vertices of hole (which should be sorted)."""
if len(hole) == 0:
return face
hv0 = hole[0]
d = _FindDiag(face, hv0, points)
newface = face[0:d + 1] + hole + [hv0] + face[d:]
return newface
def _LeftMostFace(holes, points):
"""Return (hole,index of hole in holes) where hole has
the leftmost first vertex. To be able to handle empty
holes gracefully, call an empty hole 'leftmost'.
    Assumes holes are sorted by _SortFace."""
assert(len(holes) > 0)
lefti = 0
lefthole = holes[0]
if len(lefthole) == 0:
return (lefthole, lefti)
leftv = lefthole[0]
for i in range(1, len(holes)):
ihole = holes[i]
if len(ihole) == 0:
return (ihole, i)
iv = ihole[0]
if points.pos[iv] < points.pos[leftv]:
(lefti, lefthole, leftv) = (i, ihole, iv)
return (lefthole, lefti)
def _FindDiag(face, hv, points):
"""Find a vertex in face that can see vertex hv, if possible,
and return the index into face of that vertex.
Should be able to find a diagonal that connects a vertex of face
left of v to hv without crossing face, but try two
more desperation passes after that to get SOME diagonal, even if
it might cross some edge somewhere.
First desperation pass (mode == 1): allow points right of hv.
Second desperation pass (mode == 2): allow crossing boundary poly"""
besti = - 1
bestdist = 1e30
for mode in range(0, 3):
for i in range(0, len(face)):
v = face[i]
if mode == 0 and points.pos[v] > points.pos[hv]:
continue # in mode 0, only want points left of hv
dist = _DistSq(v, hv, points)
if dist < bestdist:
if _IsDiag(i, v, hv, face, points) or mode == 2:
(besti, bestdist) = (i, dist)
if besti >= 0:
break # found one, so don't need other modes
assert(besti >= 0)
return besti
def _IsDiag(i, v, hv, face, points):
"""Return True if vertex v (at index i in face) can see vertex hv.
v and hv are indices into points.
(v, hv) is a diagonal if hv is in the cone of the Angle at index i on face
and no segment in face intersects (h, hv).
"""
n = len(face)
vm1 = face[(i - 1) % n]
v1 = face[(i + 1) % n]
k = _AngleKind(vm1, v, v1, points)
if not _InCone(hv, vm1, v, v1, k, points):
return False
for j in range(0, n):
vj = face[j]
vj1 = face[(j + 1) % n]
if SegsIntersect(v, hv, vj, vj1, points):
return False
return True
def _DistSq(a, b, points):
"""Return distance squared between coords with indices a and b in points.
"""
diff = Sub2(points.pos[a], points.pos[b])
return Dot2(diff, diff)
def _BorderEdges(facelist):
"""Return a set of (u,v) where u and v are successive vertex indices
in some face in the list in facelist."""
ans = set()
for i in range(0, len(facelist)):
f = facelist[i]
for j in range(1, len(f)):
ans.add((f[j - 1], f[j]))
ans.add((f[-1], f[0]))
return ans
def _CDT(tris, bord, points):
"""Tris is a list of triangles ((a,b,c), CCW-oriented indices into points)
Bord is a set of border edges (u,v), oriented so that tris
is a triangulation of the left face of the border(s).
Make the triangulation "Constrained Delaunay" by flipping "reversed"
    quadrilaterals until no more can be flipped.
Return list of triangles in new triangulation."""
td = _TriDict(tris)
re = _ReveresedEdges(tris, td, bord, points)
ts = set(tris)
# reverse the reversed edges until done.
    # reversing an edge adds new edges, which may or
# may not be reversed or border edges, to re for
# consideration, but the process will stop eventually.
while len(re) > 0:
(a, b) = e = re.pop()
if e in bord or not _IsReversed(e, td, points):
continue
# rotate e in quad adbc to get other diagonal
erev = (b, a)
tl = td.get(e)
tr = td.get(erev)
if not tl or not tr:
continue # shouldn't happen
c = _OtherVert(tl, a, b)
d = _OtherVert(tr, a, b)
if c is None or d is None:
continue # shouldn't happen
newt1 = (c, d, b)
newt2 = (c, a, d)
del td[e]
del td[erev]
td[(c, d)] = newt1
td[(d, b)] = newt1
td[(b, c)] = newt1
td[(d, c)] = newt2
td[(c, a)] = newt2
td[(a, d)] = newt2
if tl in ts:
ts.remove(tl)
if tr in ts:
ts.remove(tr)
ts.add(newt1)
ts.add(newt2)
re.extend([(d, b), (b, c), (c, a), (a, d)])
return list(ts)
def _TriDict(tris):
"""tris is a list of triangles (a,b,c), CCW-oriented indices.
Return dict mapping all edges in the triangles to the containing
triangle list."""
ans = dict()
for i in range(0, len(tris)):
(a, b, c) = t = tris[i]
ans[(a, b)] = t
ans[(b, c)] = t
ans[(c, a)] = t
return ans
def _ReveresedEdges(tris, td, bord, points):
"""Return list of reversed edges in tris.
Only want edges not in bord, and only need one representative
of (u,v)/(v,u), so choose the one with u < v.
td is dictionary from _TriDict, and is used to find left and right
triangles of edges."""
ans = []
for i in range(0, len(tris)):
(a, b, c) = tris[i]
for e in [(a, b), (b, c), (c, a)]:
if e in bord:
continue
(u, v) = e
if u < v:
if _IsReversed(e, td, points):
ans.append(e)
return ans
def _IsReversed(e, td, points):
"""If e=(a,b) is a non-border edge, with left-face triangle tl and
right-face triangle tr, then it is 'reversed' if the circle through
a, b, and (say) the other vertex of tl contains the other vertex of tr.
td is a _TriDict, for finding triangles containing edges, and points
gives the coordinates for vertex indices used in edges."""
tl = td.get(e)
if not tl:
return False
(a, b) = e
tr = td.get((b, a))
if not tr:
return False
c = _OtherVert(tl, a, b)
d = _OtherVert(tr, a, b)
if c is None or d is None:
return False
return InCircle(a, b, c, d, points)
def _OtherVert(tri, a, b):
"""tri should be a tuple of 3 vertex indices, two of which are a and b.
Return the third index, or None if all vertices are a or b"""
for v in tri:
if v != a and v != b:
return v
return None
def _ClassifyAngles(face, n, points):
"""Return vector of anglekinds of the Angle around each point in face."""
return [_AngleKind(face[(i - 1) % n], face[i], face[(i + 1) % n], points) \
for i in list(range(0, n))]
def _AngleKind(a, b, c, points):
"""Return one of the Ang... constants to classify Angle formed by ABC,
in a counterclockwise traversal of a face,
where a, b, c are indices into points."""
if Ccw(a, b, c, points):
return Angconvex
elif Ccw(a, c, b, points):
return Angreflex
else:
vb = points.pos[b]
udotv = Dot2(Sub2(vb, points.pos[a]), Sub2(points.pos[c], vb))
if udotv > 0.0:
return Angtangential
else:
return Ang0 # to fix: return Ang360 if "inside" spur
def _Quandrangulate(tris, bord, points):
"""Tris is list of triangles, forming a triangulation of region whose
border edges are in set bord.
Combine adjacent triangles to make quads, trying for "good" quads where
possible. Some triangles will probably remain uncombined"""
(er, td) = _ERGraph(tris, bord, points)
if len(er) == 0:
return tris
if len(er) > GTHRESH:
match = _GreedyMatch(er)
else:
match = _MaxMatch(er)
return _RemoveEdges(tris, match)
def _RemoveEdges(tris, match):
"""tris is list of triangles.
er is as returned from _MaxMatch or _GreedyMatch.
Return list of (A,D,B,C) resulting from deleting edge (A,B) causing a merge
of two triangles; append to that list the remaining unmatched triangles."""
ans = []
triset = set(tris)
while len(match) > 0:
(_, e, tl, tr) = match.pop()
(a, b) = e
if tl in triset:
triset.remove(tl)
if tr in triset:
triset.remove(tr)
c = _OtherVert(tl, a, b)
d = _OtherVert(tr, a, b)
if c is None or d is None:
continue
ans.append((a, d, b, c))
return ans + list(triset)
def _ERGraph(tris, bord, points):
"""Make an 'Edge Removal Graph'.
Given a list of triangles, the 'Edge Removal Graph' is a graph whose
nodes are the triangles (think of a point in the center of them),
and whose edges go between adjacent triangles (they share a non-border
edge), such that it would be possible to remove the shared edge
and form a convex quadrilateral. Forming a quadrilateralization
is then a matter of finding a matching (set of edges that don't
share a vertex - remember, these are the 'face' vertices).
    For a better quadrilateralization, we'll make the Edge Removal Graph
edges have weights, with higher weights going to the edges that
are more desirable to remove. Then we want a maximum weight matching
in this graph.
We'll return the graph in a kind of implicit form, using edges of
the original triangles as a proxy for the edges between the faces
(i.e., the edge of the triangle is the shared edge). We'll arbitrarily
pick the triangle graph edge with lower-index start vertex.
Also, to aid in traversing the implicit graph, we'll keep the left
and right triangle triples with edge 'ER edge'.
Finally, since we calculate it anyway, we'll return a dictionary
mapping edges of the triangles to the triangle triples they're in.
Args:
tris: list of (int, int, int) giving a triple of vertex indices for
triangles, assumed CCW oriented
bord: set of (int, int) giving vertex indices for border edges
points: geom.Points - for mapping vertex indices to coords
Returns:
(list of (weight,e,tl,tr), dict)
where edge e=(a,b) is non-border edge
with left face tl and right face tr (each a triple (i,j,k)),
where removing the edge would form an "OK" quad (no concave angles),
with weight representing the desirability of removing the edge
The dict maps int pairs (a,b) to int triples (i,j,k), that is,
mapping edges to their containing triangles.
"""
td = _TriDict(tris)
dd = _DegreeDict(tris)
ans = []
ctris = tris[:] # copy, so argument not affected
while len(ctris) > 0:
(i, j, k) = tl = ctris.pop()
for e in [(i, j), (j, k), (k, i)]:
if e in bord:
continue
(a, b) = e
# just consider one of (a,b) and (b,a), to avoid dups
if a > b:
continue
erev = (b, a)
tr = td.get(erev)
if not tr:
continue
c = _OtherVert(tl, a, b)
d = _OtherVert(tr, a, b)
if c is None or d is None:
continue
# calculate amax, the max of the new angles that would
# be formed at a and b if tl and tr were combined
amax = max(Angle(c, a, b, points) + Angle(d, a, b, points),
Angle(c, b, a, points) + Angle(d, b, a, points))
if amax > 180.0:
continue
weight = ANGFAC * (180.0 - amax) + DEGFAC * (dd[a] + dd[b])
ans.append((weight, e, tl, tr))
return (ans, td)
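# Illustrative sketch (not part of the original module): one entry of the list
# returned by _ERGraph above, with made-up indices and weight. Removing edge
# (1, 3) would merge triangle (1, 3, 0) on its left with triangle (3, 1, 2) on
# its right into a single quad.
#
#   (weight, e, tl, tr) == (130.0, (1, 3), (1, 3, 0), (3, 1, 2))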
def _GreedyMatch(er):
"""er is list of (weight,e,tl,tr).
Find maximal set so that each triangle appears in at most
one member of set"""
# sort in order of decreasing weight
er.sort(key=lambda v: v[0], reverse=True)
match = set()
ans = []
while len(er) > 0:
(_, _, tl, tr) = q = er.pop()
if tl not in match and tr not in match:
match.add(tl)
match.add(tr)
ans.append(q)
return ans
def _MaxMatch(er):
"""Like _GreedyMatch, but use divide and conquer to find best possible set.
Args:
er: list of (weight,e,tl,tr) - see _ERGraph
Returns:
list that is a subset of er giving a maximum weight match
"""
(ans, _) = _DCMatch(er)
return ans
def _DCMatch(er):
"""Recursive helper for _MaxMatch.
Divide and Conquer approach to finding max weight matching.
If we're lucky, there's an edge in er that separates the edge removal
graph into (at least) two separate components. Then the max weight
is either one that includes that edge or excludes it - and we can
use a recursive call to _DCMatch to handle each component separately
on what remains of the graph after including/excluding the separating edge.
If we're not lucky, we fall back on _EMatch (see below).
Args:
er: list of (weight, e, tl, tr) (see _ERGraph)
Returns:
(list of (weight, e, tl, tr), float) - the subset forming a maximum
matching, and the total weight of the match.
"""
if not er:
return ([], 0.0)
if len(er) == 1:
return (er, er[0][0])
match = []
matchw = 0.0
for i in range(0, len(er)):
(nc, comp) = _FindComponents(er, i)
if nc == 1:
# er[i] doesn't separate er
continue
(wi, _, tl, tr) = er[i]
if comp[tl] != comp[tr]:
# case 1: er separates graph
# compare the matches that include er[i] versus
# those that exclude it
(a, b) = _PartitionComps(er, comp, i, comp[tl], comp[tr])
ax = _CopyExcluding(a, tl, tr)
bx = _CopyExcluding(b, tl, tr)
(axmatch, wax) = _DCMatch(ax)
(bxmatch, wbx) = _DCMatch(bx)
if len(ax) == len(a):
wa = wax
amatch = axmatch
else:
(amatch, wa) = _DCMatch(a)
if len(bx) == len(b):
wb = wbx
bmatch = bxmatch
else:
(bmatch, wb) = _DCMatch(b)
w = wa + wb
wx = wax + wbx + wi
if w > wx:
match = amatch + bmatch
matchw = w
else:
match = [er[i]] + axmatch + bxmatch
matchw = wx
else:
# case 2: er not needed to separate graph
(a, b) = _PartitionComps(er, comp, -1, 0, 0)
(amatch, wa) = _DCMatch(a)
(bmatch, wb) = _DCMatch(b)
match = amatch + bmatch
matchw = wa + wb
if match:
break
if not match:
return _EMatch(er)
return (match, matchw)
def _EMatch(er):
"""Exhaustive match helper for _MaxMatch.
This is the case when we were unable to find a single edge
separating the edge removal graph into two components.
So pick a single edge and try _DCMatch on the two cases of
including or excluding that edge. We may be lucky in these
subcases (say, if the graph is currently a simple cycle, so
only needs one more edge after the one we pick here to separate
it into components). Otherwise, we'll end up back in _EMatch
again, and the worse case will be exponential.
Pick a random edge rather than say, the first, to hopefully
avoid some pathological cases.
Args:
er: list of (weight, el, tl, tr) (see _ERGraph)
Returns:
(list of (weight, e, tl, tr), float) - the subset forming a maximum
matching, and the total weight of the match.
"""
if not er:
return ([], 0.0)
if len(er) == 1:
        return (er, er[0][0])
i = random.randint(0, len(er) - 1)
eri = (wi, _, tl, tr) = er[i]
# case a: include eri. exclude other edges that touch tl or tr
a = _CopyExcluding(er, tl, tr)
a.append(eri)
(amatch, wa) = _DCMatch(a)
wa += wi
if len(a) == len(er) - 1:
# if a excludes only eri, then er didn't touch anything else
# in the graph, and the best match will always include er
# and we can skip the call for case b
wb = -1.0
bmatch = []
else:
b = er[:i] + er[i + 1:]
(bmatch, wb) = _DCMatch(b)
if wa > wb:
match = amatch
match.append(eri)
matchw = wa
else:
match = bmatch
matchw = wb
return (match, matchw)
def _FindComponents(er, excepti):
"""Find connected components induced by edges, excluding one edge.
Args:
er: list of (weight, el, tl, tr) (see _ERGraph)
excepti: index in er of edge to be excluded
Returns:
(int, dict): int is number of connected components found,
dict maps triangle triple ->
connected component index (starting at 1)
"""
ncomps = 0
comp = dict()
for i in range(0, len(er)):
(_, _, tl, tr) = er[i]
for t in [tl, tr]:
if t not in comp:
ncomps += 1
_FCVisit(er, excepti, comp, t, ncomps)
return (ncomps, comp)
def _FCVisit(er, excepti, comp, t, compnum):
"""Helper for _FindComponents depth-first-search."""
comp[t] = compnum
for i in range(0, len(er)):
if i == excepti:
continue
(_, _, tl, tr) = er[i]
if tl == t or tr == t:
s = tl
if s == t:
s = tr
if s not in comp:
_FCVisit(er, excepti, comp, s, compnum)
def _PartitionComps(er, comp, excepti, compa, compb):
"""Partition the edges of er by component number, into two lists.
Generally, put odd components into first list and even into second,
except that component compa goes in the first and compb goes in the second,
and we ignore edge er[excepti].
Args:
er: list of (weight, el, tl, tr) (see _ERGraph)
comp: dict - mapping triangle triple -> connected component index
excepti: int - index in er to ignore (unless excepti==-1)
compa: int - component to go in first list of answer (unless 0)
compb: int - component to go in second list of answer (unless 0)
Returns:
(list, list) - a partition of er according to above rules
"""
parta = []
partb = []
for i in range(0, len(er)):
if i == excepti:
continue
tl = er[i][2]
c = comp[tl]
if c == compa or (c != compb and (c & 1) == 1):
parta.append(er[i])
else:
partb.append(er[i])
return (parta, partb)
def _CopyExcluding(er, s, t):
"""Return a copy of er, excluding all those involving triangles s and t.
Args:
er: list of (weight, e, tl, tr) - see _ERGraph
s: 3-tuple of int - a triangle
t: 3-tuple of int - a triangle
Returns:
Copy of er excluding those with tl or tr == s or t
"""
ans = []
for e in er:
(_, _, tl, tr) = e
if tl == s or tr == s or tl == t or tr == t:
continue
ans.append(e)
return ans
def _DegreeDict(tris):
"""Return a dictionary mapping vertices in tris to the number of triangles
    that they touch."""
ans = dict()
for t in tris:
for v in t:
if v in ans:
ans[v] = ans[v] + 1
else:
ans[v] = 1
return ans
def PolygonPlane(face, points):
"""Return a Normal vector for the face with 3d coords given by indexing
into points."""
if len(face) < 3:
return (0.0, 0.0, 1.0) # arbitrary, we really have no idea
else:
coords = [points.pos[i] for i in face]
return Normal(coords)
# This Normal appears to be on the CCW-traversing side of a polygon
def Normal(coords):
"""Return an average Normal vector for the point list, 3d coords."""
if len(coords) < 3:
return (0.0, 0.0, 1.0) # arbitrary
(ax, ay, az) = coords[0]
(bx, by, bz) = coords[1]
(cx, cy, cz) = coords[2]
if len(coords) == 3:
sx = (ay - by) * (az + bz) + \
(by - cy) * (bz + cz) + \
(cy - ay) * (cz + az)
sy = (az - bz) * (ax + bx) + \
(bz - cz) * (bx + cx) + \
(cz - az) * (cx + ax)
        sz = (ax - bx) * (ay + by) + \
(bx - cx) * (by + cy) + \
(cx - ax) * (cy + ay)
return Norm3(sx, sy, sz)
else:
sx = (ay - by) * (az + bz) + (by - cy) * (bz + cz)
sy = (az - bz) * (ax + bx) + (bz - cz) * (bx + cx)
sz = (ax - bx) * (ay + by) + (bx - cx) * (by + cy)
return _NormalAux(coords[3:], coords[0], sx, sy, sz)
def _NormalAux(rest, first, sx, sy, sz):
(ax, ay, az) = rest[0]
if len(rest) == 1:
(bx, by, bz) = first
else:
(bx, by, bz) = rest[1]
nx = sx + (ay - by) * (az + bz)
ny = sy + (az - bz) * (ax + bx)
nz = sz + (ax - bx) * (ay + by)
if len(rest) == 1:
return Norm3(nx, ny, nz)
else:
return _NormalAux(rest[1:], first, nx, ny, nz)
def Norm3(x, y, z):
"""Return vector (x,y,z) normalized by dividing by squared length.
Return (0.0, 0.0, 1.0) if the result is undefined."""
sqrlen = x * x + y * y + z * z
if sqrlen < 1e-100:
return (0.0, 0.0, 1.0)
else:
try:
d = sqrt(sqrlen)
return (x / d, y / d, z / d)
except:
return (0.0, 0.0, 1.0)
# We're using right-hand coord system, where
# forefinger=x, middle=y, thumb=z on right hand.
# Then, e.g., (1,0,0) x (0,1,0) = (0,0,1)
def Cross3(a, b):
"""Return the cross product of two vectors, a x b."""
(ax, ay, az) = a
(bx, by, bz) = b
return (ay * bz - az * by, az * bx - ax * bz, ax * by - ay * bx)
def Dot2(a, b):
"""Return the dot product of two 2d vectors, a . b."""
return a[0] * b[0] + a[1] * b[1]
def Perp2(a, b):
"""Return a sort of 2d cross product."""
return a[0] * b[1] - a[1] * b[0]
def Sub2(a, b):
"""Return difference of 2d vectors, a-b."""
return (a[0] - b[0], a[1] - b[1])
def Add2(a, b):
"""Return the sum of 2d vectors, a+b."""
return (a[0] + b[0], a[1] + b[1])
def Length2(v):
"""Return length of vector v=(x,y)."""
return hypot(v[0], v[1])
def LinInterp2(a, b, alpha):
"""Return the point alpha of the way from a to b."""
beta = 1 - alpha
return (beta * a[0] + alpha * b[0], beta * a[1] + alpha * b[1])
def Normalized2(p):
"""Return vector p normlized by dividing by its squared length.
Return (0.0, 1.0) if the result is undefined."""
(x, y) = p
sqrlen = x * x + y * y
if sqrlen < 1e-100:
return (0.0, 1.0)
else:
try:
d = sqrt(sqrlen)
return (x / d, y / d)
except:
return (0.0, 1.0)
def Angle(a, b, c, points):
"""Return Angle abc in degrees, in range [0,180),
where a,b,c are indices into points."""
u = Sub2(points.pos[c], points.pos[b])
v = Sub2(points.pos[a], points.pos[b])
n1 = Length2(u)
n2 = Length2(v)
if n1 == 0.0 or n2 == 0.0:
return 0.0
else:
costheta = Dot2(u, v) / (n1 * n2)
if costheta > 1.0:
costheta = 1.0
if costheta < - 1.0:
costheta = - 1.0
return math.acos(costheta) * 180.0 / math.pi
def SegsIntersect(ixa, ixb, ixc, ixd, points):
"""Return true if segment AB intersects CD,
false if they just touch. ixa, ixb, ixc, ixd are indices
into points."""
a = points.pos[ixa]
b = points.pos[ixb]
c = points.pos[ixc]
d = points.pos[ixd]
u = Sub2(b, a)
v = Sub2(d, c)
w = Sub2(a, c)
pp = Perp2(u, v)
if abs(pp) > TOL:
si = Perp2(v, w) / pp
ti = Perp2(u, w) / pp
return 0.0 < si < 1.0 and 0.0 < ti < 1.0
else:
# parallel or overlapping
if Dot2(u, u) == 0.0 or Dot2(v, v) == 0.0:
return False
else:
pp2 = Perp2(w, v)
if abs(pp2) > TOL:
return False # parallel, not collinear
z = Sub2(b, c)
(vx, vy) = v
(wx, wy) = w
(zx, zy) = z
if vx == 0.0:
(t0, t1) = (wy / vy, zy / vy)
else:
(t0, t1) = (wx / vx, zx / vx)
return 0.0 < t0 < 1.0 and 0.0 < t1 < 1.0
def Ccw(a, b, c, points):
"""Return true if ABC is a counterclockwise-oriented triangle,
where a, b, and c are indices into points.
    Returns false if not, or if collinear within TOL."""
(ax, ay) = (points.pos[a][0], points.pos[a][1])
(bx, by) = (points.pos[b][0], points.pos[b][1])
(cx, cy) = (points.pos[c][0], points.pos[c][1])
d = ax * by - bx * ay - ax * cy + cx * ay + bx * cy - cx * by
return d > TOL
def InCircle(a, b, c, d, points):
"""Return true if circle through points with indices a, b, c
contains point with index d (indices into points).
Except: if ABC forms a counterclockwise oriented triangle
then the test is reversed: return true if d is outside the circle.
Will get false, no matter what orientation, if d is cocircular, with TOL^2.
| xa ya xa^2+ya^2 1 |
| xb yb xb^2+yb^2 1 | > 0
| xc yc xc^2+yc^2 1 |
| xd yd xd^2+yd^2 1 |
"""
(xa, ya, za) = _Icc(points.pos[a])
(xb, yb, zb) = _Icc(points.pos[b])
(xc, yc, zc) = _Icc(points.pos[c])
(xd, yd, zd) = _Icc(points.pos[d])
det = xa * (yb * zc - yc * zb - yb * zd + yd * zb + yc * zd - yd * zc) \
- xb * (ya * zc - yc * za - ya * zd + yd * za + yc * zd - yd * zc) \
+ xc * (ya * zb - yb * za - ya * zd + yd * za + yb * zd - yd * zb) \
- xd * (ya * zb - yb * za - ya * zc + yc * za + yb * zc - yc * zb)
return det > TOL * TOL
def _Icc(p):
(x, y) = (p[0], p[1])
return (x, y, x * x + y * y)
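def _demo_triangulate():
    """Illustrative sketch (not part of the original module): triangulate and
    quadrangulate a unit square with the functions above. _DemoPoints is a
    stand-in exposing only the `pos` attribute this module reads; the real
    geom.Points class may offer more than that."""

    class _DemoPoints(object):
        def __init__(self, pos):
            self.pos = pos

    square = _DemoPoints([(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)])
    tris = TriangulateFace([0, 1, 2, 3], square)     # e.g. two CCW triangles
    quads = QuadrangulateFace([0, 1, 2, 3], square)  # ideally a single quad
    return tris, quads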
|
py
|
1a5c273577e1261081b4daa22c06d38eb7ac215d
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
from product_management import create_product, delete_product
from reference_image_management import (
create_reference_image, delete_reference_image, list_reference_images)
PROJECT_ID = os.getenv('GCLOUD_PROJECT')
LOCATION = 'us-west1'
PRODUCT_DISPLAY_NAME = 'fake_product_display_name_for_testing'
PRODUCT_CATEGORY = 'homegoods'
PRODUCT_ID = 'fake_product_id_for_testing'
REFERENCE_IMAGE_ID = 'fake_reference_image_id_for_testing'
GCS_URI = 'gs://python-docs-samples-tests/product_search/shoes_1.jpg'
@pytest.fixture
def product():
# set up
create_product(
PROJECT_ID, LOCATION, PRODUCT_ID,
PRODUCT_DISPLAY_NAME, PRODUCT_CATEGORY)
yield None
# tear down
delete_product(PROJECT_ID, LOCATION, PRODUCT_ID)
def test_create_reference_image(capsys, product):
list_reference_images(PROJECT_ID, LOCATION, PRODUCT_ID)
out, _ = capsys.readouterr()
assert REFERENCE_IMAGE_ID not in out
create_reference_image(
PROJECT_ID, LOCATION, PRODUCT_ID, REFERENCE_IMAGE_ID,
GCS_URI)
list_reference_images(PROJECT_ID, LOCATION, PRODUCT_ID)
out, _ = capsys.readouterr()
assert REFERENCE_IMAGE_ID in out
delete_product(PROJECT_ID, LOCATION, PRODUCT_ID)
def test_delete_reference_image(capsys, product):
create_reference_image(
PROJECT_ID, LOCATION, PRODUCT_ID, REFERENCE_IMAGE_ID,
GCS_URI)
list_reference_images(PROJECT_ID, LOCATION, PRODUCT_ID)
out, _ = capsys.readouterr()
assert REFERENCE_IMAGE_ID in out
delete_reference_image(
PROJECT_ID, LOCATION, PRODUCT_ID, REFERENCE_IMAGE_ID)
list_reference_images(PROJECT_ID, LOCATION, PRODUCT_ID)
out, _ = capsys.readouterr()
assert REFERENCE_IMAGE_ID not in out
delete_product(PROJECT_ID, LOCATION, PRODUCT_ID)
|
py
|
1a5c276a08091cb85639a3bdde8503952012f264
|
import math
from torch import Tensor
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.checkpoint import checkpoint
import random
def evaluate(model, crit, batches):
model.eval()
hidden = mem = None
with torch.no_grad():
postfix = {}
total_loss = 0
mem = hidden = None
pbar = tqdm(desc='eval', total=len(batches) // bptt, postfix=postfix)
for i in range(0, len(batches), bptt):
chunk = batches[i:i+1+bptt]
x, target = chunk[:-1], chunk[1:]
y, mem, hidden = model(x, mem, hidden)
loss = crit(y.flatten(end_dim=1), target.flatten())
total_loss += loss.item()
# progress bar
pbar.update(1)
cur_loss = total_loss / pbar.n
postfix['loss'] = f"{cur_loss:.3f}"
if cur_loss < 20:
postfix['ppl'] = f"{math.exp(cur_loss):.3f}"
postfix['bpc'] = f"{cur_loss / math.log(2):.3f}"
pbar.set_postfix(postfix)
pbar.close()
return total_loss / pbar.n
def train(model, crit, optim, sched, dataset, epochs):
hid,mem = None,None
for i in range(epochs):
model.train()
batches = dataset.train_data
postfix = {'lr': optim.param_groups[0]['lr']}
total_loss = 0
pbar = tqdm(desc=f"train[{i+1}]", total=len(batches) // bptt, postfix=postfix)
        pos = 0  # position in the batch stream for this epoch (kept separate from the epoch index i)
        while True:
            seq_len = random.randint(bptt - 5, bptt + 5)
            if pos + seq_len > len(batches):
                break
            chunk = batches[pos:pos+1+seq_len]
            x, target = chunk[:-1], chunk[1:]
            pos += seq_len
            if hid is None:
                y, mem, hid = model(x)
            else:
                y, mem, hid = model(x, mem, hid)
            mem = [m.detach() for m in mem]
            hid = [(h1.detach(), h2.detach()) for h1, h2 in hid]
loss = crit(y.flatten(end_dim=1), target.flatten())
# loss = 0
# for j in range(len(x)):
# y, mem, hidden = model.forward(x[j].unsqueeze(0), mem, hidden)
# loss += crit(y[-1], target[j])
if False:
loss.backward()
torch.nn.utils.clip_grad_norm_(model.parameters(), 0.25)
optim.step()
else:
scaler.scale(loss).backward()
scaler.unscale_(optim) # for clipping
torch.nn.utils.clip_grad_norm_(model.parameters(), 0.25)
scaler.step(optim)
scaler.update()
optim.zero_grad()
total_loss += loss.item()
# progress bar accounting
pbar.update(1)
cur_loss = total_loss / pbar.n
postfix['loss'] = f"{cur_loss:.3f}"
if cur_loss < 20:
postfix['ppl'] = f"{math.exp(cur_loss):.3f}"
postfix['bpc'] = f"{cur_loss / math.log(2):.3f}"
pbar.set_postfix(postfix)
pbar.close()
val_loss = evaluate(model, crit, dataset.valid_data)
sched.step(val_loss)
with open('model.pt', 'wb') as f:
torch.save(model, f)
if __name__ == '__main__':
from tqdm import tqdm
from model import SHARNN
from data import enwik8
fresh = True
cuda = True
distributed = False
bsz = 16
epochs = 40
bptt = 1024
device = 'cuda' if cuda else 'cpu'
if distributed:
torch.distributed.init_process_group(backend='nccl')
rank = torch.distributed.get_rank()
torch.cuda.set_device(rank)
dataset = enwik8()
if not fresh:
with open('model.pt', 'rb') as f:
model = torch.load(f)
else:
model = SHARNN(n_token=dataset.n_token, embed_dim=1024, hidden_dim=4096, ff_dim=2048, n_layers=4, heads=1, max_len=5000, dropout=0.1, tied=True)
model.to(device)
if distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[rank], output_device=rank, dim=1, find_unused_parameters=True)
# optim = torch.optim.Adam(model.parameters(), lr=0.002)
from pytorch_lamb import Lamb
optim = Lamb(model.parameters(), lr=0.002, min_trust=0.25)
crit = nn.CrossEntropyLoss().to(device)
# sched = torch.optim.lr_scheduler.CosineAnnealingLR(optim, epochs)
sched = torch.optim.lr_scheduler.ReduceLROnPlateau(optim, patience=2)
scaler = torch.cuda.amp.GradScaler()
if True:
train(model, crit, optim, sched, dataset, epochs)
test_loss = evaluate(model, crit, dataset.test_data)
print(f"Test | loss {test_loss:.3f} | ppl {math.exp(test_loss):.3f} | bpc {test_loss / math.log(2):.3f}")
exit()
|
py
|
1a5c279ce30c95968ae8e501269a759bad36ada2
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Dict, List, Optional, Tuple
from ax.core.types import TCandidateMetadata, TConfig
from ax.models.torch.alebo import ei_or_nei
from ax.models.torch.botorch import BotorchModel
from ax.models.torch.cbo_sac import generate_model_space_decomposition
from ax.models.torch_base import TorchModel
from ax.utils.common.docutils import copy_doc
from ax.utils.common.logger import get_logger
from botorch.fit import fit_gpytorch_model
from botorch.models.contextual import LCEAGP
from botorch.models.gpytorch import GPyTorchModel
from botorch.models.model_list_gp_regression import ModelListGP
from gpytorch.mlls.exact_marginal_log_likelihood import ExactMarginalLogLikelihood
from torch import Tensor
MIN_OBSERVED_NOISE_LEVEL = 1e-7
logger = get_logger(__name__)
def get_map_model(
train_X: Tensor,
train_Y: Tensor,
train_Yvar: Tensor,
decomposition: Dict[str, List[int]],
train_embedding: bool = True,
cat_feature_dict: Optional[Dict] = None,
embs_feature_dict: Optional[Dict] = None,
embs_dim_list: Optional[List[int]] = None,
context_weight_dict: Optional[Dict] = None,
) -> Tuple[LCEAGP, ExactMarginalLogLikelihood]:
"""Obtain MAP fitting of Latent Context Embedding Additive (LCE-A) GP."""
# assert train_X is non-batched
assert train_X.dim() < 3, "Don't support batch training"
model = LCEAGP(
train_X=train_X,
train_Y=train_Y,
train_Yvar=train_Yvar,
decomposition=decomposition,
train_embedding=train_embedding,
embs_dim_list=embs_dim_list,
cat_feature_dict=cat_feature_dict,
embs_feature_dict=embs_feature_dict,
context_weight_dict=context_weight_dict,
)
mll = ExactMarginalLogLikelihood(model.likelihood, model)
fit_gpytorch_model(mll)
return model, mll
class LCEABO(BotorchModel):
r"""Does Bayesian optimization with Latent Context Embedding Additive (LCE-A) GP.
The parameter space decomposition must be provided.
Args:
decomposition: Keys are context names. Values are the lists of parameter
names belong to the context, e.g.
{'context1': ['p1_c1', 'p2_c1'],'context2': ['p1_c2', 'p2_c2']}.
gp_model_args: Dictionary of kwargs to pass to GP model training.
            - train_embedding: Boolean. If true, we will train context embedding;
otherwise, we use pre-trained embeddings from embds_feature_dict only.
Default is True.
"""
def __init__(
self,
decomposition: Dict[str, List[str]],
cat_feature_dict: Optional[Dict] = None,
embs_feature_dict: Optional[Dict] = None,
context_weight_dict: Optional[Dict] = None,
embs_dim_list: Optional[List[int]] = None,
gp_model_args: Optional[Dict[str, Any]] = None,
) -> None:
# add validation for input decomposition
for param_list in list(decomposition.values()):
assert len(param_list) == len(
list(decomposition.values())[0]
), "Each Context should contain same number of parameters"
self.decomposition = decomposition
self.cat_feature_dict = cat_feature_dict
self.embs_feature_dict = embs_feature_dict
self.context_weight_dict = context_weight_dict
self.embs_dim_list = embs_dim_list
self.gp_model_args = gp_model_args if gp_model_args is not None else {}
self.feature_names: List[str] = []
self.train_embedding = self.gp_model_args.get("train_embedding", True)
super().__init__(
model_constructor=self.get_and_fit_model,
acqf_constructor=ei_or_nei, # pyre-ignore
)
@copy_doc(TorchModel.fit)
def fit(
self,
Xs: List[Tensor],
Ys: List[Tensor],
Yvars: List[Tensor],
bounds: List[Tuple[float, float]],
task_features: List[int],
feature_names: List[str],
metric_names: List[str],
fidelity_features: List[int],
candidate_metadata: Optional[List[List[TCandidateMetadata]]] = None,
) -> None:
if len(feature_names) == 0:
raise ValueError("feature names are required for LCEABO")
self.feature_names = feature_names
super().fit(
Xs=Xs,
Ys=Ys,
Yvars=Yvars,
bounds=bounds,
task_features=task_features,
feature_names=feature_names,
metric_names=metric_names,
fidelity_features=fidelity_features,
)
@copy_doc(TorchModel.best_point)
def best_point(
self,
bounds: List[Tuple[float, float]],
objective_weights: Tensor,
outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
fixed_features: Optional[Dict[int, float]] = None,
model_gen_options: Optional[TConfig] = None,
target_fidelities: Optional[Dict[int, float]] = None,
) -> Optional[Tensor]:
raise NotImplementedError
def get_and_fit_model(
self,
Xs: List[Tensor],
Ys: List[Tensor],
Yvars: List[Tensor],
task_features: List[int],
fidelity_features: List[int],
metric_names: List[str],
state_dict: Optional[Dict[str, Tensor]] = None,
fidelity_model_id: Optional[int] = None,
**kwargs: Any,
) -> GPyTorchModel:
"""Get a fitted LCEAGP model for each outcome.
Args:
Xs: X for each outcome.
Ys: Y for each outcome.
Yvars: Noise variance of Y for each outcome.
Returns: Fitted LCEAGP model.
"""
# generate model space decomposition dict
decomp_index = generate_model_space_decomposition(
decomposition=self.decomposition, feature_names=self.feature_names
)
models = []
for i, X in enumerate(Xs):
Yvar = Yvars[i].clamp_min_(MIN_OBSERVED_NOISE_LEVEL)
gp_m, _ = get_map_model(
train_X=X,
train_Y=Ys[i],
train_Yvar=Yvar,
decomposition=decomp_index,
train_embedding=self.train_embedding,
cat_feature_dict=self.cat_feature_dict,
embs_feature_dict=self.embs_feature_dict,
embs_dim_list=self.embs_dim_list,
context_weight_dict=self.context_weight_dict,
)
models.append(gp_m)
if len(models) == 1:
model = models[0]
else:
model = ModelListGP(*models)
model.to(Xs[0])
return model
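# Illustrative sketch (not part of the original module): constructing LCEABO
# with the decomposition format documented in the class docstring. The context
# and parameter names below are made up for illustration only.
#
#   model = LCEABO(
#       decomposition={
#           "context1": ["p1_c1", "p2_c1"],
#           "context2": ["p1_c2", "p2_c2"],
#       },
#       gp_model_args={"train_embedding": True},
#   )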
|
py
|
1a5c29f10ad5e7491d58c5fa9678c5524bc4088a
|
import unittest, random, sys, time, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e
def write_syn_dataset(csvPathname, rowCount, colCount, SEED):
    # 8 random generators, 1 per column
r1 = random.Random(SEED)
dsf = open(csvPathname, "w+")
for i in range(rowCount):
rowData = []
# just reuse the same col data, since we're just parsing
# don't want to compress?
# r = random.random()
r = random.randint(1,1500)
for j in range(colCount):
rowData.append(r)
rowDataCsv = ",".join(map(str,rowData))
dsf.write(rowDataCsv + "\n")
dsf.close()
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED, localhost
SEED = h2o.setup_random_seed()
localhost = h2o.decide_if_localhost()
java_extra_args='-XX:+PrintGCDetails'
if (localhost):
h2o.build_cloud(2, java_heap_GB=6, java_extra_args=java_extra_args)
else:
h2o_hosts.build_cloud_with_hosts(2, java_heap_GB=6, java_extra_args=java_extra_args)
@classmethod
def tearDownClass(cls):
## print "sleeping 3600"
# h2o.sleep(3600)
h2o.tear_down_cloud()
def test_parse_500_cols_fvec(self):
h2o.beta_features = True
SYNDATASETS_DIR = h2o.make_syn_dir()
tryList = [
(1000, 500, 'cA', 1800, 1800),
]
h2b.browseTheCloud()
for (rowCount, colCount, orig_hex_key, timeoutSecs, timeoutSecs2) in tryList:
SEEDPERFILE = random.randint(0, sys.maxint)
csvFilename = 'syn_' + str(SEEDPERFILE) + "_" + str(rowCount) + 'x' + str(colCount) + '.csv'
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
# create sym links
multifile = 1000
# there is already one file. assume it's the "0" case
for p in range(1, multifile):
csvPathnameLink = csvPathname + "_" + str(p)
os.symlink(csvFilename, csvPathnameLink)
print "\nCreating random", csvPathname
write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE)
for trial in range(10):
hex_key = orig_hex_key + str(trial)
start = time.time()
parseResult = h2i.import_parse(path=csvPathname + "*", schema='local', hex_key=hex_key, delete_on_done=1,
timeoutSecs=timeoutSecs, doSummary=False)
print "Parse:", parseResult['destination_key'], "took", time.time() - start, "seconds"
start = time.time()
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], timeoutSecs=timeoutSecs2)
print "Inspect:", parseResult['destination_key'], "took", time.time() - start, "seconds"
h2o_cmd.infoFromInspect(inspect, csvPathname)
print "\n" + csvPathname, \
" numRows:", "{:,}".format(inspect['numRows']), \
" numCols:", "{:,}".format(inspect['numCols'])
# should match # of cols in header or ??
self.assertEqual(inspect['numCols'], colCount,
"parse created result with the wrong number of cols %s %s" % (inspect['numCols'], colCount))
self.assertEqual(inspect['numRows'], rowCount * multifile,
"parse created result with the wrong number of rows (header shouldn't count) %s %s" % \
(inspect['numRows'], rowCount * multifile))
# h2i.delete_keys_at_all_nodes()
if __name__ == '__main__':
h2o.unit_main()
|
py
|
1a5c2a828939395da4c883e311caebea787eb6a6
|
#-*- coding:utf-8 -*-
# &Author AnFany
import pandas as pd
data = pd.read_csv(r'C:\Users\GWT9\Desktop\iris.csv')
# y值Softmax
ydata = data['Species'].values
# x值
xdata = data.iloc[:, 1:5].values
# 数据处理
import numpy as np
# Standardize the x data
handle_x_data = (xdata - np.mean(xdata, axis=0)) / np.std(xdata, axis=0)
# One-hot encode the y data
ydata = pd.get_dummies(data['Species']).values
# The classes in the file are grouped together, which makes training harder, so shuffle the data
# First stack the x data and y data side by side
xydata = np.hstack((handle_x_data, ydata))
# Shuffle the rows
np.random.shuffle(xydata)
# Split the data back apart
X_DATA = xydata[:, :4]
Y_DATA = xydata[:, 4:]
Data = [X_DATA, Y_DATA]
# Data shapes
# X_DATA.shape = (样本数, 特征数)
# Y_DATA.shape = (样本数, 类别数)
# Class encodings
# setosa [1,0,0]
# versicolor [0,1,0]
# virginica [0,0,1]
|
py
|
1a5c2bea6c679a5f6f5dafb9d2989195d17ce554
|
"""Tests for certbot.helpful_parser"""
import unittest
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock
from certbot import errors
from certbot._internal.cli import HelpfulArgumentParser
from certbot._internal.cli import _DomainsAction
from certbot._internal import constants
class TestScanningFlags(unittest.TestCase):
'''Test the prescan_for_flag method of HelpfulArgumentParser'''
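    # (Added note, inferred from the assertions below rather than from the parser's
    # source): prescan_for_flag(flag, topics) appears to return the word following
    # `flag` when that word is one of `topics`, True when the flag is present but not
    # followed by a known topic, and False when the flag is absent from the args.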
def test_prescan_no_help_flag(self):
arg_parser = HelpfulArgumentParser(['run'], {})
detected_flag = arg_parser.prescan_for_flag('--help',
['all', 'certonly'])
self.assertFalse(detected_flag)
detected_flag = arg_parser.prescan_for_flag('-h',
['all, certonly'])
self.assertFalse(detected_flag)
def test_prescan_unvalid_topic(self):
arg_parser = HelpfulArgumentParser(['--help', 'all'], {})
detected_flag = arg_parser.prescan_for_flag('--help',
['potato'])
self.assertIs(detected_flag, True)
detected_flag = arg_parser.prescan_for_flag('-h',
arg_parser.help_topics)
self.assertFalse(detected_flag)
def test_prescan_valid_topic(self):
arg_parser = HelpfulArgumentParser(['-h', 'all'], {})
detected_flag = arg_parser.prescan_for_flag('-h',
arg_parser.help_topics)
self.assertEqual(detected_flag, 'all')
detected_flag = arg_parser.prescan_for_flag('--help',
arg_parser.help_topics)
self.assertFalse(detected_flag)
class TestDetermineVerbs(unittest.TestCase):
'''Tests for determine_verb methods of HelpfulArgumentParser'''
def test_determine_verb_wrong_verb(self):
arg_parser = HelpfulArgumentParser(['potato'], {})
self.assertEqual(arg_parser.verb, "run")
self.assertEqual(arg_parser.args, ["potato"])
def test_determine_verb_help(self):
arg_parser = HelpfulArgumentParser(['--help', 'everything'], {})
self.assertEqual(arg_parser.verb, "help")
self.assertEqual(arg_parser.args, ["--help", "everything"])
arg_parser = HelpfulArgumentParser(['-d', 'some_domain', '--help',
'all'], {})
self.assertEqual(arg_parser.verb, "help")
self.assertEqual(arg_parser.args, ['-d', 'some_domain', '--help',
'all'])
def test_determine_verb(self):
arg_parser = HelpfulArgumentParser(['certonly'], {})
self.assertEqual(arg_parser.verb, 'certonly')
self.assertEqual(arg_parser.args, [])
arg_parser = HelpfulArgumentParser(['auth'], {})
self.assertEqual(arg_parser.verb, 'certonly')
self.assertEqual(arg_parser.args, [])
arg_parser = HelpfulArgumentParser(['everything'], {})
self.assertEqual(arg_parser.verb, 'run')
self.assertEqual(arg_parser.args, [])
class TestAdd(unittest.TestCase):
'''Tests for add method in HelpfulArgumentParser'''
def test_add_trivial_argument(self):
arg_parser = HelpfulArgumentParser(['run'], {})
arg_parser.add(None, "--hello-world")
parsed_args = arg_parser.parser.parse_args(['--hello-world',
'Hello World!'])
self.assertIs(parsed_args.hello_world, 'Hello World!')
self.assertFalse(hasattr(parsed_args, 'potato'))
def test_add_expected_argument(self):
arg_parser = HelpfulArgumentParser(['--help', 'run'], {})
arg_parser.add(
[None, "run", "certonly", "register"],
"--eab-kid", dest="eab_kid", action="store",
metavar="EAB_KID",
help="Key Identifier for External Account Binding")
parsed_args = arg_parser.parser.parse_args(["--eab-kid", None])
self.assertIs(parsed_args.eab_kid, None)
self.assertTrue(hasattr(parsed_args, 'eab_kid'))
class TestAddGroup(unittest.TestCase):
'''Test add_group method of HelpfulArgumentParser'''
def test_add_group_no_input(self):
arg_parser = HelpfulArgumentParser(['run'], {})
self.assertRaises(TypeError, arg_parser.add_group)
def test_add_group_topic_not_visible(self):
        # The user requests help on run. A group for a topic that was not the one
        # requested won't be added to the groups in the parser.
arg_parser = HelpfulArgumentParser(['--help', 'run'], {})
arg_parser.add_group("auth",
description="description of auth")
self.assertEqual(arg_parser.groups, {})
def test_add_group_topic_requested_help(self):
arg_parser = HelpfulArgumentParser(['--help', 'run'], {})
arg_parser.add_group("run",
description="description of run")
self.assertTrue(arg_parser.groups["run"])
arg_parser.add_group("certonly", description="description of certonly")
with self.assertRaises(KeyError):
self.assertFalse(arg_parser.groups["certonly"])
class TestParseArgsErrors(unittest.TestCase):
'''Tests for errors that should be met for some cases in parse_args method
in HelpfulArgumentParser'''
def test_parse_args_renew_force_interactive(self):
arg_parser = HelpfulArgumentParser(['renew', '--force-interactive'],
{})
arg_parser.add(
None, constants.FORCE_INTERACTIVE_FLAG, action="store_true")
with self.assertRaises(errors.Error):
arg_parser.parse_args()
def test_parse_args_non_interactive_and_force_interactive(self):
arg_parser = HelpfulArgumentParser(['--force-interactive',
'--non-interactive'], {})
arg_parser.add(
None, constants.FORCE_INTERACTIVE_FLAG, action="store_true")
arg_parser.add(
None, "--non-interactive", dest="noninteractive_mode",
action="store_true"
)
with self.assertRaises(errors.Error):
arg_parser.parse_args()
def test_parse_args_subset_names_wildcard_domain(self):
arg_parser = HelpfulArgumentParser(['--domain',
'*.example.com,potato.example.com',
'--allow-subset-of-names'], {})
# The following arguments are added because they have to be defined
# in order for arg_parser to run completely. They are not used for the
# test.
arg_parser.add(
None, constants.FORCE_INTERACTIVE_FLAG, action="store_true")
arg_parser.add(
None, "--non-interactive", dest="noninteractive_mode",
action="store_true")
arg_parser.add(
None, "--staging"
)
arg_parser.add(None, "--dry-run")
arg_parser.add(None, "--csr")
arg_parser.add(None, "--must-staple")
arg_parser.add(None, "--validate-hooks")
arg_parser.add(None, "-d", "--domain", dest="domains",
metavar="DOMAIN", action=_DomainsAction)
arg_parser.add(None, "--allow-subset-of-names")
# with self.assertRaises(errors.Error):
# arg_parser.parse_args()
def test_parse_args_hosts_and_auto_hosts(self):
arg_parser = HelpfulArgumentParser(['--hsts', '--auto-hsts'], {})
arg_parser.add(
None, "--hsts", action="store_true", dest="hsts")
arg_parser.add(
None, "--auto-hsts", action="store_true", dest="auto_hsts")
# The following arguments are added because they have to be defined
# in order for arg_parser to run completely. They are not used for the
# test.
arg_parser.add(
None, constants.FORCE_INTERACTIVE_FLAG, action="store_true")
arg_parser.add(
None, "--non-interactive", dest="noninteractive_mode",
action="store_true")
arg_parser.add(None, "--staging")
arg_parser.add(None, "--dry-run")
arg_parser.add(None, "--csr")
arg_parser.add(None, "--must-staple")
arg_parser.add(None, "--validate-hooks")
arg_parser.add(None, "--allow-subset-of-names")
with self.assertRaises(errors.Error):
arg_parser.parse_args()
class TestAddDeprecatedArgument(unittest.TestCase):
"""Tests for add_deprecated_argument method of HelpfulArgumentParser"""
@mock.patch.object(HelpfulArgumentParser, "modify_kwargs_for_default_detection")
def test_no_default_detection_modifications(self, mock_modify):
arg_parser = HelpfulArgumentParser(["run"], {}, detect_defaults=True)
arg_parser.add_deprecated_argument("--foo", 0)
arg_parser.parse_args()
mock_modify.assert_not_called()
if __name__ == '__main__':
unittest.main() # pragma: no cover
|
py
|
1a5c2bfacb45cb0c4c2640c588488d6d843d5b5b
|
import os
import win32api
import time
import win32gui
import sys
import platform
import ctypes
import winproc
from win32.lib import win32con
oFolds=[]
openedExe={}
isFolder=False;
def foChild(hwnd,cc):
global oFolds;
global isFolder
#print(hwnd," : "+win32gui.GetClassName(hwnd))
cname=win32gui.GetClassName(hwnd)
if cname=="ToolbarWindow32":
tname=win32gui.GetWindowText(hwnd)
#print("cname:"+tname)
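            # (Added comment) "地址: " is the "Address: " label that Windows Explorer's
            # toolbar shows on a Chinese-locale system; matching it lets the script
            # recover the path of the open folder. The locale assumption is inferred
            # from the code, not stated by the author.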
if tname.find("地址: ")>=0:
addr=tname.replace("地址: ","")
isFolder=True
#print("addr:"+addr);
oFolds.append(addr)
def findChild(hwnd,className):
tchild=1;
rst=hwnd;
index=0;
while tchild>0:
tchild=win32gui.FindWindowEx(hwnd,index,None,None)
index=tchild;
if tchild>0:
print("child:"+win32gui.GetClassName(tchild))
def foo(hwnd,mouse):
global isFolder
isFolder=False
clz=win32gui.GetClassName(hwnd)
if not clz=="CabinetWClass":
return;
#print(hwnd," : ")
if 1==1:
#print(win32gui.GetWindowText(hwnd))
win32gui.EnumChildWindows(hwnd,foChild ,None)
if isFolder:
print(win32gui.GetWindowText(hwnd))
win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0)
def getOpenFolds():
win32gui.EnumWindows(foo, 0)
if __name__ == "__main__":
print("args:",sys.argv)
args=sys.argv
getOpenFolds()
|
py
|
1a5c2c8b8c49d48f943373d7e22a291c56d9dcd4
|
# qubit number=3
# total number=83
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
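# (Added note) bitwise_xor returns its result bit-reversed relative to the inputs,
# e.g. bitwise_xor("011", "000") == "110"; presumably this is meant to match
# Qiskit's little-endian ordering of measured bit strings.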
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
    # invert the last qubit (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.h(input_qubit[1]) # number=70
prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
prog.h(input_qubit[1]) # number=33
prog.y(input_qubit[2]) # number=56
prog.cz(input_qubit[2],input_qubit[1]) # number=34
prog.h(input_qubit[1]) # number=35
prog.h(input_qubit[1]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_QC453.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_belem")
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
|
py
|
1a5c2d0b3a90e870b1d616e9caaf98dd6c866d61
|
#!/usr/bin/env python
'''
===============================================================================
Interactive Image Segmentation using GrabCut algorithm.
This sample shows interactive image segmentation using grabcut algorithm.
USAGE:
python grabcut.py <filename>
README FIRST:
Two windows will show up, one for input and one for output.
At first, in input window, draw a rectangle around the object using
mouse right button. Then press 'n' to segment the object (once or a few times)
For any finer touch-ups, you can press any of the keys below and draw lines on
the areas you want. Then again press 'n' for updating the output.
Key '0' - To select areas of sure background
Key '1' - To select areas of sure foreground
Key '2' - To select areas of probable background
Key '3' - To select areas of probable foreground
Key 'n' - To update the segmentation
Key 'r' - To reset the setup
Key 's' - To save the results
===============================================================================
'''
# Python 2/3 compatibility
from __future__ import print_function
import numpy as np
import cv2
import sys
BLUE = [255,0,0] # rectangle color
RED = [0,0,255] # PR BG
GREEN = [0,255,0] # PR FG
BLACK = [0,0,0] # sure BG
WHITE = [255,255,255] # sure FG
DRAW_BG = {'color' : BLACK, 'val' : 0}
DRAW_FG = {'color' : WHITE, 'val' : 1}
DRAW_PR_FG = {'color' : GREEN, 'val' : 3}
DRAW_PR_BG = {'color' : RED, 'val' : 2}
# setting up flags
rect = (0,0,1,1)
drawing = False # flag for drawing curves
rectangle = False # flag for drawing rect
rect_over = False # flag to check if rect drawn
rect_or_mask = 100 # flag for selecting rect or mask mode
value = DRAW_FG # drawing initialized to FG
thickness = 3 # brush thickness
def onmouse(event,x,y,flags,param):
global img,img2,drawing,value,mask,rectangle,rect,rect_or_mask,ix,iy,rect_over
# Draw Rectangle
if event == cv2.EVENT_RBUTTONDOWN:
rectangle = True
ix,iy = x,y
elif event == cv2.EVENT_MOUSEMOVE:
if rectangle == True:
img = img2.copy()
cv2.rectangle(img,(ix,iy),(x,y),BLUE,2)
rect = (min(ix,x),min(iy,y),abs(ix-x),abs(iy-y))
rect_or_mask = 0
elif event == cv2.EVENT_RBUTTONUP:
rectangle = False
rect_over = True
cv2.rectangle(img,(ix,iy),(x,y),BLUE,2)
rect = (min(ix,x),min(iy,y),abs(ix-x),abs(iy-y))
rect_or_mask = 0
print(" Now press the key 'n' a few times until no further change \n")
# draw touchup curves
if event == cv2.EVENT_LBUTTONDOWN:
if rect_over == False:
print("first draw rectangle \n")
else:
drawing = True
cv2.circle(img,(x,y),thickness,value['color'],-1)
cv2.circle(mask,(x,y),thickness,value['val'],-1)
elif event == cv2.EVENT_MOUSEMOVE:
if drawing == True:
cv2.circle(img,(x,y),thickness,value['color'],-1)
cv2.circle(mask,(x,y),thickness,value['val'],-1)
elif event == cv2.EVENT_LBUTTONUP:
if drawing == True:
drawing = False
cv2.circle(img,(x,y),thickness,value['color'],-1)
cv2.circle(mask,(x,y),thickness,value['val'],-1)
if __name__ == '__main__':
# print documentation
print(__doc__)
# Loading images
if len(sys.argv) == 2:
filename = sys.argv[1] # for drawing purposes
else:
print("No input image given, so loading default image, ../data/lena.jpg \n")
print("Correct Usage: python grabcut.py <filename> \n")
filename = '../data/lena.jpg'
img = cv2.imread(filename)
img2 = img.copy() # a copy of original image
mask = np.zeros(img.shape[:2],dtype = np.uint8) # mask initialized to PR_BG
output = np.zeros(img.shape,np.uint8) # output image to be shown
# input and output windows
cv2.namedWindow('output')
cv2.namedWindow('input')
cv2.setMouseCallback('input',onmouse)
cv2.moveWindow('input',img.shape[1]+10,90)
print(" Instructions: \n")
print(" Draw a rectangle around the object using right mouse button \n")
while(1):
cv2.imshow('output',output)
cv2.imshow('input',img)
k = cv2.waitKey(1)
# key bindings
if k == 27: # esc to exit
break
elif k == ord('0'): # BG drawing
print(" mark background regions with left mouse button \n")
value = DRAW_BG
elif k == ord('1'): # FG drawing
print(" mark foreground regions with left mouse button \n")
value = DRAW_FG
elif k == ord('2'): # PR_BG drawing
value = DRAW_PR_BG
elif k == ord('3'): # PR_FG drawing
value = DRAW_PR_FG
elif k == ord('s'): # save image
bar = np.zeros((img.shape[0],5,3),np.uint8)
res = np.hstack((img2,bar,img,bar,output))
cv2.imwrite('grabcut_output.png',res)
print(" Result saved as image \n")
elif k == ord('r'): # reset everything
print("resetting \n")
rect = (0,0,1,1)
drawing = False
rectangle = False
rect_or_mask = 100
rect_over = False
value = DRAW_FG
img = img2.copy()
mask = np.zeros(img.shape[:2],dtype = np.uint8) # mask initialized to PR_BG
output = np.zeros(img.shape,np.uint8) # output image to be shown
elif k == ord('n'): # segment the image
print(""" For finer touchups, mark foreground and background after pressing keys 0-3
and again press 'n' \n""")
if (rect_or_mask == 0): # grabcut with rect
bgdmodel = np.zeros((1,65),np.float64)
fgdmodel = np.zeros((1,65),np.float64)
cv2.grabCut(img2,mask,rect,bgdmodel,fgdmodel,1,cv2.GC_INIT_WITH_RECT)
rect_or_mask = 1
elif rect_or_mask == 1: # grabcut with mask
bgdmodel = np.zeros((1,65),np.float64)
fgdmodel = np.zeros((1,65),np.float64)
cv2.grabCut(img2,mask,rect,bgdmodel,fgdmodel,1,cv2.GC_INIT_WITH_MASK)
mask2 = np.where((mask==1) + (mask==3),255,0).astype('uint8')
output = cv2.bitwise_and(img2,img2,mask=mask2)
cv2.destroyAllWindows()
|
py
|
1a5c2dc7bd45754faaf80ea608568adb582aa437
|
# Copyright 2020 Graphcore Ltd.
import os
import numpy as np
from tensorflow.python import ipu
from tensorflow.python.ipu.scopes import ipu_scope
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
cfg = ipu.utils.create_ipu_config(profiling=True, use_poplar_text_report=True)
cfg = ipu.utils.auto_select_ipus(cfg, 1)
ipu.utils.configure_ipu_system(cfg)
size = 5
with tf.device("cpu"):
x_data = tf.placeholder(np.float32, [size])
y_data = tf.placeholder(np.float32, [size])
def add_op(x, y):
outputs = {
"output_types": [tf.float32],
"output_shapes": [tf.TensorShape([size])],
}
base_path = os.getcwd()
lib_path = os.path.join(base_path, "libcustom_op.so")
gp_path = os.path.join(base_path, "custom_codelet.gp")
return ipu.custom_ops.precompiled_user_op([x, y],
lib_path,
gp_path,
outs=outputs)
with ipu_scope("/device:IPU:0"):
xla_result = ipu.ipu_compiler.compile(add_op, [x_data, y_data])
with tf.Session() as sess:
a = np.random.rand(size)
b = np.random.rand(size)
result = sess.run(xla_result, feed_dict = {x_data: a, y_data: b})
# Show result from the IPU:
print("IPU:", result[0])
# Same calculation on host for comparison:
print("numpy:", a + b)
|
py
|
1a5c2dedeb8eb0f658ff956085a6162f27d6ed7b
|
import copy
import datetime
from django.contrib.auth.models import User
from django.db import models
class RevisionableModel(models.Model):
base = models.ForeignKey('self', models.SET_NULL, null=True)
title = models.CharField(blank=True, max_length=255)
when = models.DateTimeField(default=datetime.datetime.now)
def __str__(self):
return "%s (%s, %s)" % (self.title, self.id, self.base.id)
def save(self, *args, **kwargs):
super(RevisionableModel, self).save(*args, **kwargs)
if not self.base:
self.base = self
kwargs.pop('force_insert', None)
kwargs.pop('force_update', None)
super(RevisionableModel, self).save(*args, **kwargs)
def new_revision(self):
new_revision = copy.copy(self)
new_revision.pk = None
return new_revision
class Order(models.Model):
created_by = models.ForeignKey(User, models.CASCADE)
text = models.TextField()
class TestObject(models.Model):
first = models.CharField(max_length=20)
second = models.CharField(max_length=20)
third = models.CharField(max_length=20)
def __str__(self):
return 'TestObject: %s,%s,%s' % (self.first, self.second, self.third)
|
py
|
1a5c2e08b08a1a8bd2d4327e911d2943e3ee8786
|
# coding: utf-8
# In[1]:
# Load dependencies
import numpy as np
import pandas as pd
import sys
sys.path.insert(0,'../../../statistics_helper/')
from excel_utils import *
# # Estimating the total biomass of humans
# To estimate the total biomass of humans, we rely on estimates of the total human population from the [UN World Population Prospects of 2017](https://esa.un.org/unpd/wpp/Download/Standard/Population/) (File - 'Total Population - Both Sexes'). We use the estimate for the total human population in 2015
# In[2]:
#Load data from the UN
data = pd.read_excel('humans_data.xlsx',index_col=0,skiprows=16)
# Use data from 2015, multiply by 1000 because data is given in thousands
tot_human_pop = data.loc[1,'2015']*1e3
print('The UN estimate for the total human population is ≈%.1e' %tot_human_pop)
# We use an estimate for the average body mass of humans of ≈50 kg from [Hern](http://link.springer.com/article/10.1023/A:1022153110536). We convert the average body weight to carbon mass assuming 70% water content and 50% carbon out of the dry weight:
# In[3]:
wet_to_c = 0.15
human_cc = 5e4*wet_to_c
# We estimate the total biomass of humans by multiplying the total number of humans by the average carbon content of a single human:
# In[4]:
best_estimate = tot_human_pop*human_cc
print('Our best estimate for the total biomass of humans is ≈%.2f Gt C' %(best_estimate/1e15))
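# Rough arithmetic check (added note, not part of the original analysis): with
# wet_to_c = (1 - 0.70) * 0.50 = 0.15, each person carries 5e4 g * 0.15 = 7.5e3 g C;
# multiplied by the 2015 population of ≈7.3e9 this gives ≈5.5e13 g ≈ 0.06 Gt C,
# consistent with the printed best estimate.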
# In[5]:
# Feed results to the chordate biomass data
old_results = pd.read_excel('../../animal_biomass_estimate.xlsx',index_col=0)
result = old_results.copy()
result.loc['Humans',(['Biomass [Gt C]','Uncertainty'])] = (best_estimate/1e15,None)
result.to_excel('../../animal_biomass_estimate.xlsx')
# Feed results to Table 1 & Fig. 1
update_results(sheet='Table1 & Fig1',
row=('Animals','Humans'),
col='Biomass [Gt C]',
values=best_estimate/1e15,
path='../../../results.xlsx')
# Feed results to Table S1
update_results(sheet='Table S1',
row=('Animals','Humans'),
col='Number of individuals',
values=tot_human_pop,
path='../../../results.xlsx')
|
py
|
1a5c2fc09ed3daedf40d973e580ba8f08d7085bc
|
import abc
class PostprocessingABC(metaclass=abc.ABCMeta):
@classmethod
def __subclasshook__(cls, subclass):
return (hasattr(subclass, 'process_data') and
callable(subclass.process_data) and
hasattr(subclass, '_re') or
NotImplemented)
@abc.abstractmethod
def __init__(self, regex_parser):
self._re = regex_parser
@abc.abstractmethod
def process_data(self, chemical_structure, text_sentences=[]):
"""
postprocess the chemical structure and find additional information in the surrounding text (if provided)
:param chemical_structure:
:param text_sentences:
:return:
"""
raise NotImplementedError
|
py
|
1a5c30c153ac494953e545952a2f96fb9c007722
|
def from_file_h5(item, molecular_system=None, atom_indices='all', frame_indices='all'):
from molsysmt.forms.api_file_h5 import to_mdtraj_Topology as file_h5_to_mdtraj_Topology
from molsysmt.native.io.topology import from_mdtraj_Topology as mdtraj_Topology_to_molsysmt_Topology
tmp_item, _ = file_h5_to_mdtraj_Topology(item)
tmp_item, _ = mdtraj_Topology_to_molsysmt_Topology(tmp_item, atom_indices=atom_indices)
if molecular_system is not None:
tmp_molecular_system = molecular_system.combine_with_items(tmp_item, atom_indices=atom_indices)
else:
tmp_molecular_system = None
return tmp_item, tmp_molecular_system
|
py
|
1a5c31e3a3a94712f2cae8f6b8521eaa088dcc02
|
from __future__ import unicode_literals
from django.test import SimpleTestCase
from localflavor.cn.forms import (CNProvinceSelect, CNPostCodeField,
CNIDCardField, CNPhoneNumberField,
CNCellNumberField)
class CNLocalFlavorTests(SimpleTestCase):
def test_CNProvinceSelect(self):
f = CNProvinceSelect()
correct_output = '''<select name="provinces">
<option value="anhui">\u5b89\u5fbd</option>
<option value="beijing">\u5317\u4eac</option>
<option value="chongqing">\u91cd\u5e86</option>
<option value="fujian">\u798f\u5efa</option>
<option value="gansu">\u7518\u8083</option>
<option value="guangdong">\u5e7f\u4e1c</option>
<option value="guangxi">\u5e7f\u897f\u58ee\u65cf\u81ea\u6cbb\u533a</option>
<option value="guizhou">\u8d35\u5dde</option>
<option value="hainan">\u6d77\u5357</option>
<option value="hebei">\u6cb3\u5317</option>
<option value="heilongjiang">\u9ed1\u9f99\u6c5f</option>
<option value="henan">\u6cb3\u5357</option>
<option value="hongkong">\u9999\u6e2f</option>
<option value="hubei" selected="selected">\u6e56\u5317</option>
<option value="hunan">\u6e56\u5357</option>
<option value="jiangsu">\u6c5f\u82cf</option>
<option value="jiangxi">\u6c5f\u897f</option>
<option value="jilin">\u5409\u6797</option>
<option value="liaoning">\u8fbd\u5b81</option>
<option value="macao">\u6fb3\u95e8</option>
<option value="neimongol">\u5185\u8499\u53e4\u81ea\u6cbb\u533a</option>
<option value="ningxia">\u5b81\u590f\u56de\u65cf\u81ea\u6cbb\u533a</option>
<option value="qinghai">\u9752\u6d77</option>
<option value="shaanxi">\u9655\u897f</option>
<option value="shandong">\u5c71\u4e1c</option>
<option value="shanghai">\u4e0a\u6d77</option>
<option value="shanxi">\u5c71\u897f</option>
<option value="sichuan">\u56db\u5ddd</option>
<option value="taiwan">\u53f0\u6e7e</option>
<option value="tianjin">\u5929\u6d25</option>
<option value="xinjiang">\u65b0\u7586\u7ef4\u543e\u5c14\u81ea\u6cbb\u533a</option>
<option value="xizang">\u897f\u85cf\u81ea\u6cbb\u533a</option>
<option value="yunnan">\u4e91\u5357</option>
<option value="zhejiang">\u6d59\u6c5f</option>
</select>'''
self.assertHTMLEqual(f.render('provinces', 'hubei'), correct_output)
def test_CNPostCodeField(self):
error_format = ['Enter a post code in the format XXXXXX.']
valid = {
'091209': '091209'
}
invalid = {
'09120': error_format,
'09120916': error_format
}
self.assertFieldOutput(CNPostCodeField, valid, invalid)
def test_CNIDCardField(self):
valid = {
# A valid 1st generation ID Card Number.
'110101491001001': '110101491001001',
# A valid 2nd generation ID Card number.
'11010119491001001X': '11010119491001001X',
# Another valid 2nd gen ID Number with a case change
'11010119491001001x': '11010119491001001X'
}
wrong_format = ['ID Card Number consists of 15 or 18 digits.']
wrong_location = ['Invalid ID Card Number: Wrong location code']
wrong_bday = ['Invalid ID Card Number: Wrong birthdate']
wrong_checksum = ['Invalid ID Card Number: Wrong checksum']
invalid = {
'abcdefghijklmnop': wrong_format,
'1010101010101010': wrong_format,
'010101491001001': wrong_location, # 1st gen, 01 is invalid
'110101491041001': wrong_bday, # 1st gen. There wasn't day 41
'92010119491001001X': wrong_location, # 2nd gen, 92 is invalid
'91010119491301001X': wrong_bday,
# 2nd gen, 19491301 is invalid date
'910101194910010014': wrong_checksum # 2nd gen
}
self.assertFieldOutput(CNIDCardField, valid, invalid)
def test_CNPhoneNumberField(self):
error_format = ['Enter a valid phone number.']
valid = {
'010-12345678': '010-12345678',
'010-1234567': '010-1234567',
'0101-12345678': '0101-12345678',
'0101-1234567': '0101-1234567',
'010-12345678-020': '010-12345678-020'
}
invalid = {
'01x-12345678': error_format,
'12345678': error_format,
'01123-12345678': error_format,
'010-123456789': error_format,
'010-12345678-': error_format
}
self.assertFieldOutput(CNPhoneNumberField, valid, invalid)
def test_CNCellNumberField(self):
error_format = ['Enter a valid cell number.']
valid = {
'13012345678': '13012345678',
}
invalid = {
'130123456789': error_format,
'14012345678': error_format
}
self.assertFieldOutput(CNCellNumberField, valid, invalid)
|
py
|
1a5c350747f2515376e529d8d23305b153814bc0
|
import json, os
from threading import Thread
from igdb.wrapper import IGDBWrapper
global wrapper #Global variable to reference wrapper so it can be used across all methods and getters
"""
Use formatted strings in getters
def example(str):
return f'my {str} is formatted'
def example2(str):
return 'my {0} is formatted'.format(str)
"""
"""
GETTERS FOR API INFORMATION
Main Idea:
-> Given a certain reference ID, make a query extracting the name, url, etc. specified and return it in a string
-> Said string will contain the information to be replaced in the main hashtable that stores our game information
-> We have to iterate through the transformed byte array given from the query, looking for the key that is not the associated id
-> Hence the double for loop in each getter
-> Every 'for key in endpoint[0]:' has an index ([0]) because the api gives us an array of one element,
the hashtable containing the requested url, name or array of items
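Illustrative example of the response shape these getters rely on (added note, assumed
from the code rather than verified against the live IGDB API): a call such as
wrapper.api_request('covers', 'fields url; where id = 123;') returns bytes that
json.loads turns into a one-element list like
[{"id": 123, "url": "//images.igdb.com/igdb/image/upload/t_thumb/abc.jpg"}],
which is why every getter indexes element [0] and then looks for the key that is not the id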
"""
def getCoverUrl(id):
covers = json.loads(wrapper.api_request('covers', f'fields url; where id = {id};'))
for key in covers[0]:
if key == "url":
return ("https:" + covers[0]['url']).replace('thumb',"1080p")
def getPlatforms(id):
platforms = json.loads(wrapper.api_request('platforms', f'fields name; where id = {id};'))
for key in platforms[0]:
if key == "name":
if platforms[0][key] == "Xbox Series":
return platforms[0][key] + " X"
return platforms[0][key]
def getGenres(id):
genres = json.loads(wrapper.api_request('genres', f'fields name; where id = {id};'))
for key in genres[0]:
if key == "name":
return genres[0][key]
def getInvolvedCompanies(id):
# We do this internal method to avoid over complicating the code and adding an external method to only call it here
def getCompany(id):
company = json.loads(wrapper.api_request('companies', f'fields name; where id = {id};'))
for key in company[0]:
if key == "name":
return company[0][key]
involved_companies = json.loads(wrapper.api_request('involved_companies', f'fields company; where id = {id};'))
for key in involved_companies[0]:
if key == "company":
# Internal method is called and it's value is returned in the main method
return getCompany(involved_companies[0][key])
def getSummary(id, wrapper):
# This method is intended to be used externally, import it where needed
# summary is a list of dictionaries that follows a json format
summary = json.loads(wrapper.api_request('games', f'fields storyline, summary; where id = {id};'))
# Since some games do not have a storyline description, we can use the summary of the game
# or just simply put that it has no summary yet
# summary[0] is the first dictionary that is in the list of json formatted dictionaries
if "storyline" in summary[0]:
return summary[0]['storyline'] # summary[0][key], since summary[0] is a dictionary
elif "summary" in summary[0]:
return summary[0]['summary']
else:
return "This game has no summary yet"
"""""""""""""""""""""""""""""""""""""""""MAIN METHOD FOR EXTRACTING GAMES"""""""""""""""""""""""""""""""""""""""""""""""
def extractAPIGames(endpoint: str, query: str, fileNumber:int):
    byte_array = wrapper.api_request(endpoint, query) # Byte array holding the information returned by the API for the given endpoint & query
    games = json.loads(byte_array) # Convert the byte array into a list of JSON-style dictionaries for easy extraction and iteration
print(f"Started Game Extraction for file {fileNumber}")
gamesExtracted = 0
"""
MAIN FOR LOOP TO EXTRACT DATA FROM API
Main Idea:
-> games is a list of JSON-style dictionaries, one per game returned by the API
-> Every value in each dictionary is an id reference to the actual information in the API
-> We iterate through each key and extract the information related to that key using a getter
-> Some keys have values that are arrays of ID's, so we have to call the getter for each individual ID in
the array for that key
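Illustrative (hypothetical) shape of one entry before the loop rewrites it, based on
the fields requested in the queries at the bottom of this file -- not actual API output:
{"id": 1, "name": "Some Game", "cover": 1234, "genres": [5, 12],
"platforms": [48, 6], "involved_companies": [789]}
After the loop, the numeric id references are replaced by the resolved names/urls.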
"""
for game in games:
gamesExtracted += 1
print(f"Games: {gamesExtracted} - File: {fileNumber}")
for key in game:
if key == "cover":
game[key] = getCoverUrl(game[key])
elif key == "platforms":
#game[key] is an array of platforms ids
for i in range(len(game[key])):
#game[key][i] is a platform ID in the array that must be extracted with the getter
game[key][i] = getPlatforms(game[key][i])
elif key == "genres":
for i in range(len(game[key])):
game[key][i] = getGenres(game[key][i])
elif key == "involved_companies":
for i in range(len(game[key])):
game[key][i] = getInvolvedCompanies(game[key][i])
    # Dump the extracted game information to the .json file we deliver as output using json.dump()
with open(f'../res/data_{fileNumber}.json', 'w') as outfile:
json.dump(games, outfile, indent=4)
print(f"Games Extracted: {gamesExtracted}")
print(f"Finished, check your data_{fileNumber}.json")
#Command to initialize game extraction every time this file is ran
if __name__ == "__main__":
"""
TO FIX UNAUTHORIZED URL FROM API:
SEND POST REQUEST TO THIS ENDPOINT:
https://id.twitch.tv/oauth2/token?client_id=yourClientID&client_secret=yourClientSecret&grant_type=client_credentials
"""
wrapper = IGDBWrapper("2zu4l0leu7rrc9i8ysagqlxuu5rh89", "9tvwz8wnwyjuqvn5h4nmq8k413wzwt")
# extractAPIGames('games', 'fields name,genres,platforms,cover,involved_companies; where platforms=48 & category=0; limit 200;',1) #PS4
# extractAPIGames('games', 'fields name,genres,platforms,cover,involved_companies; where platforms=49 & category=0; limit 200;', 2) #XB1
# extractAPIGames('games', 'fields name,genres,platforms,cover,involved_companies; where platforms=130 & category=0; limit 200;',3) #Switch
# extractAPIGames('games', 'fields name,genres,platforms,cover,involved_companies; where platforms=6 & category=0; limit 200;', 4) #PC
# extractAPIGames('games', 'fields name,genres,platforms,cover,involved_companies; where platforms=167; limit 200;', 5) #PS5
# extractAPIGames('games', 'fields name,genres,platforms,cover,involved_companies; where platforms=169; limit 200;',6) #XB Series X
|
py
|
1a5c355b1a810370206a68e1243946133aa85e84
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluates a TFGAN trained compression model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import data_provider
import networks
import summaries
import tensorflow as tf
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_string('master', '', 'Name of the TensorFlow master to use.')
flags.DEFINE_string('checkpoint_dir', '/tmp/compression/',
'Directory where the model was written to.')
flags.DEFINE_string('eval_dir', '/tmp/compression/',
'Directory where the results are saved to.')
flags.DEFINE_integer('max_number_of_evaluations', None,
'Number of times to run evaluation. If `None`, run '
'forever.')
flags.DEFINE_string('dataset_dir', None, 'Location of data.')
# Compression-specific flags.
flags.DEFINE_integer('batch_size', 32, 'The number of images in each batch.')
flags.DEFINE_integer('patch_size', 32, 'The size of the patches to train on.')
flags.DEFINE_integer('bits_per_patch', 1230,
'The number of bits to produce per patch.')
flags.DEFINE_integer('model_depth', 64,
'Number of filters for compression model')
def main(_, run_eval_loop=True):
with tf.name_scope('inputs'):
images = data_provider.provide_data(
'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir,
patch_size=FLAGS.patch_size)
# In order for variables to load, use the same variable scope as in the
# train job.
with tf.variable_scope('generator'):
reconstructions, _, prebinary = networks.compression_model(
images,
num_bits=FLAGS.bits_per_patch,
depth=FLAGS.model_depth,
is_training=False)
summaries.add_reconstruction_summaries(images, reconstructions, prebinary)
# Visualize losses.
pixel_loss_per_example = tf.reduce_mean(
tf.abs(images - reconstructions), axis=[1, 2, 3])
pixel_loss = tf.reduce_mean(pixel_loss_per_example)
tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example)
tf.summary.scalar('pixel_l1_loss', pixel_loss)
# Create ops to write images to disk.
uint8_images = data_provider.float_image_to_uint8(images)
uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions)
uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions)
image_write_ops = tf.write_file(
'%s/%s'% (FLAGS.eval_dir, 'compression.png'),
tf.image.encode_png(uint8_reshaped[0]))
# For unit testing, use `run_eval_loop=False`.
if not run_eval_loop: return
tf.contrib.training.evaluate_repeatedly(
FLAGS.checkpoint_dir,
master=FLAGS.master,
hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir),
tf.contrib.training.StopAfterNEvalsHook(1)],
eval_ops=image_write_ops,
max_number_of_evaluations=FLAGS.max_number_of_evaluations)
if __name__ == '__main__':
app.run()
|
py
|
1a5c362e88891a26906da5eb383f5d39f2bb3619
|
from collections import deque
import numpy as np
import torch
import torch.nn as nn
import torchvision.transforms as T
import random
from LearningAgents.LearningAgent import LearningAgent
from Utils.LevelSelection import LevelSelectionSchema
from LearningAgents.Memory import ReplayMemory
from SBEnvironment.SBEnvironmentWrapper import SBEnvironmentWrapper
from torch.utils.tensorboard import SummaryWriter
from einops import rearrange, reduce
class MultiHeadRelationalModuleImage(nn.Module):
def __init__(self, h, w, outputs, device, ch_in):
super(MultiHeadRelationalModuleImage, self).__init__()
self.device = device
self.input_type = 'image'
self.output_type = 'discrete'
self.transform = T.Compose([T.ToPILImage(), T.Resize((h, w)),
T.ToTensor(), T.Normalize((.5, .5, .5), (.5, .5, .5))])
self.conv1_ch = 8
self.conv2_ch = 10 # dim `F` in paper
self.conv3_ch = 24
self.conv4_ch = 30
self.H = h
self.W = w
self.node_size = 64 # entity embedding size
self.lin_hid = 100
self.out_dim = outputs # actions
self.outputs = outputs
self.ch_in = ch_in
self.sp_coord_dim = 2
self.N = int(self.H * self.W)
self.n_heads = 1
self.conv1 = nn.Conv2d(self.ch_in, self.conv1_ch, kernel_size=(7, 7), padding=1)
self.conv2 = nn.Conv2d(self.conv1_ch, self.conv2_ch, kernel_size=(3, 3), padding=1)
self.feature_head = nn.Sequential(self.conv1, self.conv2)
self.proj_shape = (self.conv2_ch + self.sp_coord_dim, self.n_heads * self.node_size)
# Multihead attention
self.k_proj = nn.Linear(*self.proj_shape)
self.q_proj = nn.Linear(*self.proj_shape)
self.v_proj = nn.Linear(*self.proj_shape)
# Compute shape by doing one forward pass
with torch.no_grad():
self.N = int(self.feature_head(torch.rand(size=(1, self.ch_in, self.H, self.W))).flatten().size(0)/self.conv2_ch)
self.k_lin = nn.Linear(self.node_size, self.N)
self.q_lin = nn.Linear(self.node_size, self.N)
self.a_lin = nn.Linear(self.N, self.N)
self.node_shape = (self.n_heads, self.N, self.node_size)
self.k_norm = nn.LayerNorm(self.node_shape, elementwise_affine=True)
self.q_norm = nn.LayerNorm(self.node_shape, elementwise_affine=True)
self.v_norm = nn.LayerNorm(self.node_shape, elementwise_affine=True)
self.linear1 = nn.Linear(self.n_heads * self.node_size, self.node_size)
self.norm1 = nn.LayerNorm([self.N, self.node_size], elementwise_affine=False)
self.linear2 = nn.Linear(self.node_size, self.out_dim)
def forward(self, x):
N, Cin, H, W = x.shape
x = self.conv1(x)
x = torch.relu(x)
x = self.conv2(x)
x = torch.relu(x)
with torch.no_grad():
self.conv_map = x.clone()
_, _, cH, cW = x.shape
xcoords = torch.arange(cW).repeat(cH, 1).float() / cW
ycoords = torch.arange(cH).repeat(cW, 1).transpose(1, 0).float() / cH
spatial_coords = torch.stack([xcoords, ycoords], dim=0)
spatial_coords = spatial_coords.unsqueeze(dim=0)
spatial_coords = spatial_coords.repeat(N, 1, 1, 1).to(x.device)
x = torch.cat([x, spatial_coords], dim=1)
x = x.permute(0, 2, 3, 1) # batch_size, H, W, C
x = x.flatten(1, 2) # batch_size, HxW, C
# key, query, value separation
K = rearrange(self.k_proj(x), "b n (head d) -> b head n d", head=self.n_heads)
K = self.k_norm(K)
Q = rearrange(self.q_proj(x), "b n (head d) -> b head n d", head=self.n_heads)
Q = self.q_norm(Q)
V = rearrange(self.v_proj(x), "b n (head d) -> b head n d", head=self.n_heads)
V = self.v_norm(V)
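        # (Added note) The next three lines implement an additive-style attention score:
        # q_lin and k_lin each project a node embedding to N logits, their sum goes
        # through elu and a_lin, and the softmax over the last dim yields an N x N
        # attention map over the H*W feature-map nodes. This reading is inferred from
        # the code itself, not from an accompanying reference.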
A = torch.nn.functional.elu(self.q_lin(Q) + self.k_lin(K))
A = self.a_lin(A)
A = torch.nn.functional.softmax(A, dim=3)
with torch.no_grad():
self.att_map = A.cpu().clone()
E = torch.einsum('bhfc,bhcd->bhfd', A, V)
# collapse head dimension
E = rearrange(E, 'b head n d -> b n (head d)')
# B N D' . D' D -> B N D
E = self.linear1(E)
E = torch.relu(E)
E = self.norm1(E)
# B N D -> B D
E = E.max(dim=1)[0]
y = self.linear2(E)
y = torch.nn.functional.elu(y)
return y
def train_model_memory(self, target_net: torch.nn.Module, total_train_time: int, train_time: int, train_batch: int,
gamma: float, memory: ReplayMemory, optimizer: torch.optim, lr=0.001, sample_eps=1):
pass
def transform(self, state):
t = T.Compose([T.ToPILImage(), T.Resize((self.H, self.W)),
T.ToTensor(), T.Normalize((.5, .5, .5), (.5, .5, .5))])
return t(state)
|
py
|
1a5c364ae5d8f6ea0c03ae86c20d109bca44f5d8
|
import rospy
import tf2_ros as tf2
import math
from tf.transformations import quaternion_from_euler
from tf2_geometry_msgs import PoseStamped
from geometry_msgs.msg import Point, Quaternion
from tf.transformations import quaternion_from_euler
from visualization_msgs.msg import Marker
from geometry_msgs.msg import Vector3
from std_msgs.msg import ColorRGBA
from actionlib_msgs.msg import GoalStatus
from dynamic_stack_decider.abstract_action_element import AbstractActionElement
class GoToBall(AbstractActionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(GoToBall, self).__init__(blackboard, dsd, parameters)
if 'target' not in parameters.keys():
            rospy.logerr('The parameter "target" could not be used to decide whether map information is accessible')
else:
self.target = parameters['target']
self.blocking = parameters.get('blocking', True)
self.distance = parameters.get('distance', self.blackboard.config['ball_approach_dist'])
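        # (Added note) 'target' selects how the approach pose is computed in perform():
        # 'map_goal', 'detection_goal', 'none'/'current_orientation', or 'close'.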
def perform(self, reevaluate=False):
if 'map_goal' == self.target:
goal_angle = self.blackboard.world_model.get_map_based_opp_goal_angle_from_ball()
ball_x, ball_y = self.blackboard.world_model.get_ball_position_xy()
goal_x = ball_x - math.cos(goal_angle) * self.distance
goal_y = ball_y - math.sin(goal_angle) * self.distance
ball_point = (goal_x, goal_y, goal_angle, self.blackboard.map_frame)
elif 'detection_goal' == self.target:
x_dist = self.blackboard.world_model.get_detection_based_goal_position_uv()[0] - \
self.blackboard.world_model.get_ball_position_uv()[0]
y_dist = self.blackboard.world_model.get_detection_based_goal_position_uv()[1] - \
self.blackboard.world_model.get_ball_position_uv()[1]
goal_angle = math.atan2(y_dist, x_dist)
ball_u, ball_v = self.blackboard.world_model.get_ball_position_uv()
goal_u = ball_u + math.cos(goal_angle) * self.distance
goal_v = ball_v + math.sin(goal_angle) * self.distance
ball_point = (goal_u, goal_v, goal_angle, self.blackboard.world_model.base_footprint_frame)
elif 'none' == self.target or 'current_orientation' == self.target:
ball_u, ball_v = self.blackboard.world_model.get_ball_position_uv()
ball_point = (ball_u, ball_v, 0, self.blackboard.world_model.base_footprint_frame)
elif 'close' == self.target:
ball_u, ball_v = self.blackboard.world_model.get_ball_position_uv()
angle = math.atan2(ball_v, ball_u)
ball_point = (ball_u, ball_v, angle, self.blackboard.world_model.base_footprint_frame)
else:
rospy.logerr("Target %s for go_to_ball action not specified.", self.target)
return
pose_msg = PoseStamped()
pose_msg.header.stamp = rospy.Time.now()
pose_msg.header.frame_id = ball_point[3]
pose_msg.pose.position = Point(ball_point[0], ball_point[1], 0)
quaternion = quaternion_from_euler(0, 0, ball_point[2])
pose_msg.pose.orientation.x = quaternion[0]
pose_msg.pose.orientation.y = quaternion[1]
pose_msg.pose.orientation.z = quaternion[2]
pose_msg.pose.orientation.w = quaternion[3]
self.blackboard.pathfinding.publish(pose_msg)
approach_marker = Marker()
approach_marker.pose.position.x = self.distance
approach_marker.type = Marker.SPHERE
approach_marker.action = Marker.MODIFY
approach_marker.id = 1
color = ColorRGBA()
color.r = 1.0
color.g = 1.0
color.b = 1.0
color.a = 1.0
approach_marker.color = color
approach_marker.lifetime = rospy.Duration(nsecs=0.5)
scale = Vector3(0.2, 0.2, 0.2)
approach_marker.scale = scale
approach_marker.header.stamp = rospy.Time.now()
approach_marker.header.frame_id = self.blackboard.world_model.base_footprint_frame
self.blackboard.pathfinding.approach_marker_pub.publish(approach_marker)
if self.blackboard.pathfinding.status in [GoalStatus.SUCCEEDED, GoalStatus.ABORTED] or not self.blocking:
self.pop()
|
py
|
1a5c37ab8570dc8ea1c5a659da961f677ac4277c
|
#!/usr/bin/python
################################################################################
# 21630046-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# [email protected]
# [email protected]
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
def __init__(self):
self.output = []
self.is_compliant = False
self.uuid = "21630046-5cc5-11e4-af55-00155d01fe08"
def check(self, cli):
# Initialize Compliance
self.is_compliant = False
# Execute command and parse capture standard output
stdout = cli.system("cat /etc/issue")
# Split stdout
self.output = stdout.split('\n')
# Process standard output
if "attorneys, psychotherapists, or clergy" in stdout:
self.is_compliant = True
return self.is_compliant
def fix(self, cli):
cli.system("/dev/null > /etc/issue")
cli.system('echo "You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only. By using this IS (which includes any device attached to this IS), you consent to the following conditions:" >> /etc/issue')
cli.system('echo "-The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations." >> /etc/issue')
cli.system('echo "-At any time, the USG may inspect and seize data stored on this IS." >> /etc/issue')
cli.system('echo "-Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose." >> /etc/issue')
cli.system('echo "-This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy." >> /etc/issue')
cli.system('echo "-Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details." >> /etc/issue')
|
py
|
1a5c3819b3dc0fd8dc36c17cdc7f37873bd6d468
|
import json
import re
from urllib.parse import urlparse
import scrapy
from scrapy import Request
from config.DBInfo import SessionFactory
from db.transaction import lianjia_transaction
from tools import tools
class LianJiaSpider(scrapy.Spider):
name = "lianjia"
start_urls = [
"https://www.lianjia.com/city/"
]
def parse(self, response):
tools.writeLog("lianjia", "start")
hreflist = response.selector.css(".city_list_ul a::attr(href)").extract()
for url in hreflist:
yield Request(url + "chengjiao", callback=self.mainPage)
def mainPage(self, response):
title = response.xpath('//title/text()').extract()
if "|成交查询" in title[0]:
res=self.recursiveListUrl(response)
for url in res:
yield Request(url, callback=self.getList)
yield Request(response.url + "pg1/", callback=self.getList)
def getList(self, response):
res = self.recursiveListUrl(response)
for url in res:
yield Request(url, callback=self.getList)
infourl = response.selector.css('.listContent .title a::attr(href)').extract()
for url in infourl:
yield Request(url, callback=self.detail)
try:
strpageinfo = response.selector.css('.page-box .house-lst-page-box ::attr(page-data)').extract()[0]
pageinfo = json.loads(strpageinfo)
cur = pageinfo['curPage']
total = pageinfo['totalPage']
ourl=response.url
result = urlparse(ourl)
if "pg" not in result.path:
ourl+="pg1/"
result = urlparse(ourl)
if cur == 1:
while cur < total:
cur += 1
res = re.sub(r'pg\d+', "pg" + str(cur), result.path)
res = result.scheme + "://" + result.netloc + res
yield Request(res, callback=self.getList)
except:
pass
def detail(self, response):
        # Transaction date
date = response.selector.css('.house-title .wrapper span ::text').extract()[0][0:-2]
price = response.selector.css('.info.fr .price i ::text').extract()[0]
avgPrice = response.selector.css('.info.fr .price b ::text').extract()[0]
ljID = response.selector.css('.transaction .content li:first-child ::text').extract()[1]
address = ''
address1 = ''
address2 = ''
address3 = ''
address4 = ''
address5 = ''
address6 = ''
address7 = ''
address8 = ''
address9 = ''
address10 = ''
index = 1
for i in response.selector.css('.deal-bread ::text').extract()[1:-1]:
i = i.replace("二手房成交价格", "")
address += i
if i != '' and i != '>':
if index == 1:
address1 = i
if index == 2:
address2 = i
if index == 3:
address3 = i
if index == 4:
address4 = i
if index == 5:
address5 = i
if index == 6:
address6 = i
if index == 7:
address7 = i
if index == 8:
address8 = i
if index == 9:
address9 = i
if index == 10:
address10 = i
index += 1
data = lianjia_transaction({
'transactiondate': date,
'price': float(price) * 10000,
'avgPrice': avgPrice,
'ljID': ljID.strip(),
'address': address,
'address1': address1,
'address2': address2,
'address3': address3,
'address4': address4,
'address5': address5,
'address6': address6,
'address7': address7,
'address8': address8,
'address9': address9,
'address10': address10,
'url': response.url
})
session = SessionFactory()
        # Add to the session:
session.add(data)
        # Commit to persist to the database:
try:
session.commit()
except Exception as e:
if 'Duplicate' in repr(e):
session.close()
else:
session.close()
        # Close the session:
session.close()
def writelog(self, url):
with open("log.txt", 'a') as f:
f.write(url + "\n")
def recursiveListUrl(self, response):
host = urlparse(response.url)
host = host.scheme + "://" + host.netloc
areaList = response.selector.css('.position a::attr(href)').extract()
ret = []
for url in areaList:
if "https://" in url:
ret.append(url)
else:
ret.append(host + url)
return ret
|
py
|
1a5c38e22fe1d0064eeae772d7fc8e84db7c7947
|
__version_info__ = 2, 2, 2
__version__ = '2.2.2'
from discogs_client.client import Client
from discogs_client.models import Artist, Release, Master, Label, User, \
Listing, Track, Price, Video, List, ListItem
|
py
|
1a5c39e2833757ca9fd17cb4bece6a3003f0b8f7
|
from typing import Any, Dict, Type
from .awac import AWAC
from .awr import AWR, DiscreteAWR
from .base import AlgoBase
from .bc import BC, DiscreteBC
from .bcq import BCQ, DiscreteBCQ
from .bear import BEAR
from .combo import COMBO
from .cql import CQL, DiscreteCQL
from .crr import CRR
from .ddpg import DDPG
from .dqn import DQN, DoubleDQN
from .iql import IQL
from .mopo import MOPO
from .plas import PLAS, PLASWithPerturbation
from .random_policy import DiscreteRandomPolicy, RandomPolicy
from .sac import SAC, DiscreteSAC
from .td3 import TD3
from .td3_plus_bc import TD3PlusBC
__all__ = [
"AlgoBase",
"AWAC",
"AWR",
"DiscreteAWR",
"BC",
"DiscreteBC",
"BCQ",
"DiscreteBCQ",
"BEAR",
"COMBO",
"CQL",
"DiscreteCQL",
"CRR",
"DDPG",
"DQN",
"DoubleDQN",
"IQL",
"MOPO",
"PLAS",
"PLASWithPerturbation",
"SAC",
"DiscreteSAC",
"TD3",
"TD3PlusBC",
"RandomPolicy",
"DiscreteRandomPolicy",
"get_algo",
"create_algo",
]
DISCRETE_ALGORITHMS: Dict[str, Type[AlgoBase]] = {
"awr": DiscreteAWR,
"bc": DiscreteBC,
"bcq": DiscreteBCQ,
"cql": DiscreteCQL,
"dqn": DQN,
"double_dqn": DoubleDQN,
"sac": DiscreteSAC,
"random": DiscreteRandomPolicy,
}
CONTINUOUS_ALGORITHMS: Dict[str, Type[AlgoBase]] = {
"awac": AWAC,
"awr": AWR,
"bc": BC,
"bcq": BCQ,
"bear": BEAR,
"combo": COMBO,
"cql": CQL,
"crr": CRR,
"ddpg": DDPG,
"iql": IQL,
"mopo": MOPO,
"plas": PLASWithPerturbation,
"sac": SAC,
"td3": TD3,
"td3_plus_bc": TD3PlusBC,
"random": RandomPolicy,
}
def get_algo(name: str, discrete: bool) -> Type[AlgoBase]:
"""Returns algorithm class from its name.
Args:
name (str): algorithm name in snake_case.
discrete (bool): flag to use discrete action-space algorithm.
Returns:
type: algorithm class.
"""
if discrete:
if name in DISCRETE_ALGORITHMS:
return DISCRETE_ALGORITHMS[name]
raise ValueError(f"{name} does not support discrete action-space.")
if name in CONTINUOUS_ALGORITHMS:
return CONTINUOUS_ALGORITHMS[name]
raise ValueError(f"{name} does not support continuous action-space.")
def create_algo(name: str, discrete: bool, **params: Any) -> AlgoBase:
"""Returns algorithm object from its name.
Args:
name (str): algorithm name in snake_case.
discrete (bool): flag to use discrete action-space algorithm.
params (any): arguments for algorithm.
Returns:
d3rlpy.algos.base.AlgoBase: algorithm.
"""
return get_algo(name, discrete)(**params)
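# Minimal usage sketch (added comment; constructor keyword arguments are not shown
# because they are not defined in this module -- treat as illustrative):
#   get_algo("dqn", discrete=True)        # -> DQN class
#   create_algo("sac", discrete=False)    # -> SAC instance with default parameters
#   create_algo("foo", discrete=True)     # -> raises ValueError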
|
py
|
1a5c3ab337f560a4d74ef2c5d6ad101d2e4e1b4c
|
# $Id: 150_srtp_3_1.py 3334 2010-10-05 16:32:04Z nanang $
#
from inc_cfg import *
test_param = TestParam(
"Callee=optional (with duplicated offer) SRTP, caller=optional SRTP",
[
InstanceParam("callee", "--null-audio --use-srtp=3 --srtp-secure=0 --max-calls=1"),
InstanceParam("caller", "--null-audio --use-srtp=1 --srtp-secure=0 --max-calls=1")
]
)
|
py
|
1a5c3ba0c390d1a9bed53ad2d1d11d658fc46693
|
# -*- coding: utf-8 -*-
"""
sphinx.domains.rst
~~~~~~~~~~~~~~~~~~
The reStructuredText domain.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from six import iteritems
from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.locale import _
from sphinx.roles import XRefRole
from sphinx.util.nodes import make_refnode
if False:
# For type annotation
from typing import Any, Dict, Iterator, List, Tuple # NOQA
from docutils import nodes # NOQA
from sphinx.application import Sphinx # NOQA
from sphinx.builders import Builder # NOQA
from sphinx.environment import BuildEnvironment # NOQA
dir_sig_re = re.compile(r'\.\. (.+?)::(.*)$')
class ReSTMarkup(ObjectDescription):
"""
Description of generic reST markup.
"""
def add_target_and_index(self, name, sig, signode):
# type: (unicode, unicode, addnodes.desc_signature) -> None
targetname = self.objtype + '-' + name
if targetname not in self.state.document.ids:
signode['names'].append(targetname)
signode['ids'].append(targetname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
objects = self.env.domaindata['rst']['objects']
key = (self.objtype, name)
if key in objects:
self.state_machine.reporter.warning(
'duplicate description of %s %s, ' % (self.objtype, name) +
'other instance in ' + self.env.doc2path(objects[key]),
line=self.lineno)
objects[key] = self.env.docname
indextext = self.get_index_text(self.objtype, name)
if indextext:
self.indexnode['entries'].append(('single', indextext,
targetname, '', None))
def get_index_text(self, objectname, name):
# type: (unicode, unicode) -> unicode
if self.objtype == 'directive':
return _('%s (directive)') % name
elif self.objtype == 'role':
return _('%s (role)') % name
return ''
def parse_directive(d):
# type: (unicode) -> Tuple[unicode, unicode]
"""Parse a directive signature.
Returns (directive, arguments) string tuple. If no arguments are given,
returns (directive, '').
"""
dir = d.strip()
if not dir.startswith('.'):
# Assume it is a directive without syntax
return (dir, '')
m = dir_sig_re.match(dir)
if not m:
return (dir, '')
parsed_dir, parsed_args = m.groups()
return (parsed_dir.strip(), ' ' + parsed_args.strip())
class ReSTDirective(ReSTMarkup):
"""
Description of a reST directive.
"""
def handle_signature(self, sig, signode):
# type: (unicode, addnodes.desc_signature) -> unicode
name, args = parse_directive(sig)
desc_name = '.. %s::' % name
signode += addnodes.desc_name(desc_name, desc_name)
if len(args) > 0:
signode += addnodes.desc_addname(args, args)
return name
class ReSTRole(ReSTMarkup):
"""
Description of a reST role.
"""
def handle_signature(self, sig, signode):
# type: (unicode, addnodes.desc_signature) -> unicode
signode += addnodes.desc_name(':%s:' % sig, ':%s:' % sig)
return sig
class ReSTDomain(Domain):
"""ReStructuredText domain."""
name = 'rst'
label = 'reStructuredText'
object_types = {
'directive': ObjType(_('directive'), 'dir'),
'role': ObjType(_('role'), 'role'),
}
directives = {
'directive': ReSTDirective,
'role': ReSTRole,
}
roles = {
'dir': XRefRole(),
'role': XRefRole(),
}
initial_data = {
'objects': {}, # fullname -> docname, objtype
} # type: Dict[unicode, Dict[unicode, Tuple[unicode, ObjType]]]
def clear_doc(self, docname):
# type: (unicode) -> None
for (typ, name), doc in list(self.data['objects'].items()):
if doc == docname:
del self.data['objects'][typ, name]
def merge_domaindata(self, docnames, otherdata):
# type: (List[unicode], Dict) -> None
# XXX check duplicates
for (typ, name), doc in otherdata['objects'].items():
if doc in docnames:
self.data['objects'][typ, name] = doc
def resolve_xref(self, env, fromdocname, builder, typ, target, node,
contnode):
# type: (BuildEnvironment, unicode, Builder, unicode, unicode, nodes.Node, nodes.Node) -> nodes.Node # NOQA
objects = self.data['objects']
objtypes = self.objtypes_for_role(typ)
for objtype in objtypes:
if (objtype, target) in objects:
return make_refnode(builder, fromdocname,
objects[objtype, target],
objtype + '-' + target,
contnode, target + ' ' + objtype)
def resolve_any_xref(self, env, fromdocname, builder, target,
node, contnode):
# type: (BuildEnvironment, unicode, Builder, unicode, nodes.Node, nodes.Node) -> List[nodes.Node] # NOQA
objects = self.data['objects']
results = []
for objtype in self.object_types:
if (objtype, target) in self.data['objects']:
results.append(('rst:' + self.role_for_objtype(objtype),
make_refnode(builder, fromdocname,
objects[objtype, target],
objtype + '-' + target,
contnode, target + ' ' + objtype)))
return results
def get_objects(self):
# type: () -> Iterator[Tuple[unicode, unicode, unicode, unicode, unicode, int]]
for (typ, name), docname in iteritems(self.data['objects']):
yield name, name, typ, docname, typ + '-' + name, 1
def setup(app):
# type: (Sphinx) -> Dict[unicode, Any]
app.add_domain(ReSTDomain)
return {
'version': 'builtin',
'env_version': 1,
'parallel_read_safe': True,
'parallel_write_safe': True,
}
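# Example reST usage enabled by this domain (illustrative, not part of the module):
#
#   .. rst:directive:: .. toctree::
#
#      Documents the ``toctree`` directive.
#
#   .. rst:role:: ref
#
#      Documents the ``ref`` role.
#
#   Cross-reference them elsewhere with :rst:dir:`toctree` and :rst:role:`ref`.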
|
py
|
1a5c3c5636fe9ebef12c8da146f79bc5c52ea055
|
# -*- coding: utf-8 -*-
import io
import demjson
import pandas as pd
import requests
from zvdata.api import df_to_db, init_entities
from zvdata.recorder import Recorder
from zvt.api.common import china_stock_code_to_id
from zvt.domain import StockIndex
from zvdata.utils.time_utils import to_pd_timestamp
class ChinaIndexListSpider(Recorder):
data_schema = StockIndex
def __init__(self, batch_size=10, force_update=False, sleeping_time=2.0, provider='exchange') -> None:
self.provider = provider
super(ChinaIndexListSpider, self).__init__(batch_size, force_update, sleeping_time)
def run(self):
# 上证、中证
self.fetch_csi_index()
# 深证
self.fetch_szse_index()
# 国证
self.fetch_cni_index()
def fetch_csi_index(self) -> None:
"""
抓取上证、中证指数列表
"""
url = 'http://www.csindex.com.cn/zh-CN/indices/index' \
'?page={}&page_size={}&data_type=json&class_1=1&class_2=2&class_7=7&class_10=10'
index_list = []
page = 1
page_size = 50
while True:
query_url = url.format(page, page_size)
response = requests.get(query_url)
response_dict = demjson.decode(response.text)
response_index_list = response_dict.get('list', [])
if len(response_index_list) == 0:
break
index_list.extend(response_index_list)
self.logger.info(f'上证、中证指数第 {page} 页抓取完成...')
page += 1
self.sleep()
df = pd.DataFrame(index_list)
df = df[['base_date', 'base_point', 'index_code', 'indx_sname', 'online_date', 'class_eseries']]
df.columns = ['timestamp', 'base_point', 'code', 'name', 'list_date', 'class_eseries']
df['category'] = df['class_eseries'].apply(lambda x: x.split(' ')[0].lower())
df = df.drop('class_eseries', axis=1)
df = df.loc[df['code'].str.contains(r'^\d{6}$')]
self.persist_index(df)
self.logger.info('上证、中证指数列表抓取完成...')
# 抓取上证、中证指数成分股
self.fetch_csi_index_component(df)
self.logger.info('上证、中证指数成分股抓取完成...')
def fetch_csi_index_component(self, df: pd.DataFrame):
"""
抓取上证、中证指数成分股
"""
query_url = 'http://www.csindex.com.cn/uploads/file/autofile/cons/{}cons.xls'
for _, index in df.iterrows():
index_code = index['code']
url = query_url.format(index_code)
try:
response = requests.get(url)
response.raise_for_status()
except requests.HTTPError as error:
self.logger.error(f'{index["name"]} - {index_code} 成分股抓取错误 ({error})')
continue
response_df = pd.read_excel(io.BytesIO(response.content))
index_id = f'index_cn_{index_code}'
response_df = response_df[['成分券代码Constituent Code']].rename(columns={'成分券代码Constituent Code': 'stock_code'})
response_df['id'] = response_df['stock_code'].apply(
lambda x: f'{index_id}_{china_stock_code_to_id(str(x))}')
response_df['entity_id'] = response_df['id']
response_df['stock_id'] = response_df['stock_code'].apply(lambda x: china_stock_code_to_id(str(x)))
response_df['index_id'] = index_id
response_df.drop('stock_code', axis=1, inplace=True)
df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
self.logger.info(f'{index["name"]} - {index_code} 成分股抓取完成...')
self.sleep()
def fetch_szse_index(self) -> None:
"""
抓取深证指数列表
"""
url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1812_zs&TABKEY=tab1'
response = requests.get(url)
df = pd.read_excel(io.BytesIO(response.content), dtype='str')
df.columns = ['code', 'name', 'timestamp', 'base_point', 'list_date']
df['category'] = 'szse'
df = df.loc[df['code'].str.contains(r'^\d{6}$')]
self.persist_index(df)
self.logger.info('深证指数列表抓取完成...')
# 抓取深证指数成分股
self.fetch_szse_index_component(df)
self.logger.info('深证指数成分股抓取完成...')
def fetch_szse_index_component(self, df: pd.DataFrame):
"""
抓取深证指数成分股
"""
query_url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1747_zs&TABKEY=tab1&ZSDM={}'
for _, index in df.iterrows():
index_code = index['code']
url = query_url.format(index_code)
response = requests.get(url)
response_df = pd.read_excel(io.BytesIO(response.content), dtype='str')
index_id = f'index_cn_{index_code}'
response_df = response_df[['证券代码']]
response_df['id'] = response_df['证券代码'].apply(lambda x: f'{index_id}_{china_stock_code_to_id(str(x))}')
response_df['entity_id'] = response_df['id']
response_df['stock_id'] = response_df['证券代码'].apply(lambda x: china_stock_code_to_id(str(x)))
response_df['index_id'] = index_id
response_df.drop('证券代码', axis=1, inplace=True)
df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
self.logger.info(f'{index["name"]} - {index_code} 成分股抓取完成...')
self.sleep()
def fetch_cni_index(self) -> None:
"""
抓取国证指数列表
"""
url = 'http://www.cnindex.com.cn/zstx/jcxl/'
response = requests.get(url)
response.encoding = 'utf-8'
dfs = pd.read_html(response.text)
# 第 9 个 table 之后为非股票指数
dfs = dfs[1:9]
result_df = pd.DataFrame()
for df in dfs:
header = df.iloc[0]
df = df[1:]
df.columns = header
            df = df.astype('str')
result_df = pd.concat([result_df, df])
result_df = result_df.drop('样本股数量', axis=1)
result_df.columns = ['name', 'code', 'timestamp', 'base_point', 'list_date']
result_df['timestamp'] = result_df['timestamp'].apply(lambda x: x.replace('-', ''))
result_df['list_date'] = result_df['list_date'].apply(lambda x: x.replace('-', ''))
result_df['category'] = 'csi'
result_df = result_df.loc[result_df['code'].str.contains(r'^\d{6}$')]
self.persist_index(result_df)
self.logger.info('国证指数列表抓取完成...')
# 抓取国证指数成分股
self.fetch_cni_index_component(result_df)
self.logger.info('国证指数成分股抓取完成...')
def fetch_cni_index_component(self, df: pd.DataFrame):
"""
抓取国证指数成分股
"""
query_url = 'http://www.cnindex.com.cn/docs/yb_{}.xls'
for _, index in df.iterrows():
index_code = index['code']
url = query_url.format(index_code)
try:
response = requests.get(url)
response.raise_for_status()
except requests.HTTPError as error:
self.logger.error(f'{index["name"]} - {index_code} 成分股抓取错误 ({error})')
continue
response_df = pd.read_excel(io.BytesIO(response.content), dtype='str')
index_id = f'index_cn_{index_code}'
try:
response_df = response_df[['样本股代码']]
except KeyError:
response_df = response_df[['证券代码']]
response_df.columns = ['stock_code']
response_df['id'] = response_df['stock_code'].apply(
lambda x: f'{index_id}_{china_stock_code_to_id(str(x))}')
response_df['entity_id'] = response_df['id']
response_df['stock_id'] = response_df['stock_code'].apply(lambda x: china_stock_code_to_id(str(x)))
response_df['index_id'] = index_id
response_df.drop('stock_code', axis=1, inplace=True)
df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
self.logger.info(f'{index["name"]} - {index_code} 成分股抓取完成...')
self.sleep()
def persist_index(self, df) -> None:
df['timestamp'] = df['timestamp'].apply(lambda x: to_pd_timestamp(x))
df['list_date'] = df['list_date'].apply(lambda x: to_pd_timestamp(x))
df['id'] = df['code'].apply(lambda code: f'index_cn_{code}')
df['entity_id'] = df['id']
df['exchange'] = 'cn'
df['entity_type'] = 'index'
df['is_delisted'] = False
df = df.dropna(axis=0, how='any')
df = df.drop_duplicates(subset='id', keep='last')
init_entities(df, entity_type='index', provider=self.provider)
if __name__ == '__main__':
spider = ChinaIndexListSpider(provider='exchange')
spider.run()
|
py
|
1a5c3d7c31c2450a3709b49771d169ff46186e7e
|
__copyright__ = "Copyright (c) 2020-2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
from typing import Optional, List, Dict
import re
from jina import Executor, DocumentArray, requests, Document
from jina.logging.logger import JinaLogger
class Sentencizer(Executor):
"""
:class:`Sentencizer` split the text on the doc-level
into sentences on the chunk-level with a rule-base strategy.
The text is split by the punctuation characters listed in ``punct_chars``.
The sentences that are shorter than the ``min_sent_len``
or longer than the ``max_sent_len`` after stripping will be discarded.
:param min_sent_len: the minimal number of characters,
(including white spaces) of the sentence, by default 1.
:param max_sent_len: the maximal number of characters,
(including white spaces) of the sentence, by default 512.
:param punct_chars: the punctuation characters to split on,
whatever is in the list will be used,
for example ['!', '.', '?'] will use '!', '.' and '?'
:param uniform_weight: the definition of it should have
uniform weight or should be calculated
:param args: Additional positional arguments
:param kwargs: Additional keyword arguments
"""
def __init__(self,
min_sent_len: int = 1,
max_sent_len: int = 512,
punct_chars: Optional[List[str]] = None,
uniform_weight: bool = True,
default_traversal_path: Optional[List[str]] = None,
*args, **kwargs):
"""Set constructor."""
super().__init__(*args, **kwargs)
self.min_sent_len = min_sent_len
self.max_sent_len = max_sent_len
self.punct_chars = punct_chars
self.uniform_weight = uniform_weight
self.logger = JinaLogger(self.__class__.__name__)
self.default_traversal_path = default_traversal_path or ['r']
if not punct_chars:
self.punct_chars = ['!', '.', '?', '։', '؟', '۔', '܀', '܁', '܂', '‼', '‽', '⁇', '⁈', '⁉', '⸮', '﹖', '﹗',
'!', '.', '?', '。', '。', '\n']
if self.min_sent_len > self.max_sent_len:
self.logger.warning('the min_sent_len (={}) should be smaller or equal to the max_sent_len (={})'.format(
self.min_sent_len, self.max_sent_len))
        self._slit_pat = re.compile(r'\s*([^{0}]+)(?<!\s)[{0}]*'.format(''.join(set(self.punct_chars))))
@requests
def segment(self, docs: DocumentArray, parameters: Dict, **kwargs):
"""
Split the text into sentences.
:param docs: Documents that contain the text
:param parameters: Dictionary of parameters
:param kwargs: Additional keyword arguments
:return: a list of chunk dicts with the split sentences
"""
traversal_path = parameters.get('traversal_paths', self.default_traversal_path)
flat_docs = docs.traverse_flat(traversal_path)
for doc in flat_docs:
text = doc.text
ret = [(m.group(0), m.start(), m.end()) for m in
re.finditer(self._slit_pat, text)]
if not ret:
ret = [(text, 0, len(text))]
for ci, (r, s, e) in enumerate(ret):
f = re.sub('\n+', ' ', r).strip()
f = f[:self.max_sent_len]
if len(f) > self.min_sent_len:
doc.chunks.append(
Document(
text=f,
offset=ci,
weight=1.0 if self.uniform_weight else len(f) / len(text),
location=[s, e])
)
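# Usage sketch (illustrative, not part of the original executor): run the
# rule-based splitter directly on a DocumentArray; instantiating the Executor
# outside a Flow is assumed to work for this quick check.
if __name__ == '__main__':
    example_docs = DocumentArray([Document(text='First sentence. Second sentence!')])
    Sentencizer().segment(example_docs, parameters={})
    for chunk in example_docs[0].chunks:
        print(chunk.text)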
|
py
|
1a5c3f9c8bb2e2d5595a501e8bd17b0455dc4165
|
#!/usr/bin/env python3
import os
import numpy as np
import pandas as pd
import sys
def main():
numpert=int(sys.argv[1]) # number of perturbations to generate
numnodes=int(sys.argv[2]) # number of nodes to generate them for
outf = sys.argv[3] # output folder
prefix = sys.argv[4] # prefix for column labels
options=sys.argv[5].split(',')
df1=pd.DataFrame()
if numpert > len(options)**numnodes:
numpert = len(options)**numnodes
while len(df1.index)<numpert:
temp=np.random.choice(a=options,size=[numpert-len(df1.index),numnodes]) # set to 200,000 to ensure unique random combinations in a timely manner (or 500,000 to run faster)
df2=pd.DataFrame(temp)
df1 = pd.concat([df1, df2])
df1=df1.drop_duplicates()
if len(df1.index)>numpert:
df1=df1.iloc[0:numpert,]
df1=df1.transpose(copy=True)
l1=[]
for i in range(0,numpert):
l1.append(prefix+'_' + str(i+1).rjust(len(str(numpert)), '0'))
df1.columns=l1
df1.to_csv(os.path.join(outf),sep=' ', index=False,na_rep='NA')
if __name__ == "__main__":
    main()
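# Example invocation (illustrative; the script name and output path are placeholders):
#   python generate_perturbations.py 100 8 perturbations.txt pert 0,1
# Draws up to 100 unique random assignments of the values {0, 1} across 8 nodes and
# writes them as space-separated columns named pert_001 ... pert_100.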
|
py
|
1a5c3fdf64d606a4dc458f2145fa4fda27159d34
|
# -*- coding: utf-8 -*-
# © Toons
from binascii import unhexlify
import cSecp256k1 as secp256k1
from cSecp256k1 import _ecdsa
from cSecp256k1 import _schnorr
msg = secp256k1.hash_sha256(b"message to sign")
_msg = secp256k1.hash_sha256(b"bad message to check")
pr_key = secp256k1.hash_sha256(b"secret")
pu_key = secp256k1.PublicKey.from_secret(b"secret")
enc_pu_key = secp256k1.PublicKey.from_secret(b"secret").encode()
k = b"%064x" % secp256k1.rand_k()
rfc6979_k = b"%064x" % secp256k1.rfc6979_k(
unhexlify(msg), unhexlify(pr_key)
)[0]
class TestCSecp256k1Signatures:
def test_C_ecdsa_sign(self, benchmark):
signer = _ecdsa.sign
sig = benchmark(signer, msg, pr_key, k, 1).contents
assert not _ecdsa.verify(_msg, pu_key.x, pu_key.y, sig.r, sig.s)
def test_C_ecdsa_verify(self, benchmark):
sig = _ecdsa.sign(msg, pr_key, k, 1).contents
verifier = _ecdsa.verify
assert benchmark(verifier, msg, pu_key.x, pu_key.y, sig.r, sig.s)
def test_C_ecdsa_rfc6949_sign(self, benchmark):
signer = _ecdsa.sign
sig = benchmark(signer, msg, pr_key, rfc6979_k, 1).contents
assert not _ecdsa.verify(_msg, pu_key.x, pu_key.y, sig.r, sig.s)
def test_C_ecdsa_rfc6949_verify(self, benchmark):
sig = _ecdsa.sign(msg, pr_key, rfc6979_k, 1).contents
verifier = _ecdsa.verify
assert benchmark(verifier, msg, pu_key.x, pu_key.y, sig.r, sig.s)
def test_C_schnorr_bcrypto410_sign(self, benchmark):
signer = _schnorr.bcrypto410_sign
sig = benchmark(signer, msg, pr_key).contents
assert not _schnorr.bcrypto410_verify(
_msg, pu_key.x, pu_key.y, sig.r, sig.s
)
def test_C_schnorr_bcrypto410_verify(self, benchmark):
sig = _schnorr.bcrypto410_sign(msg, pr_key).contents
verifier = _schnorr.bcrypto410_verify
assert benchmark(verifier, msg, pu_key.x, pu_key.y, sig.r, sig.s)
def test_C_schnorr_sign(self, benchmark):
signer = _schnorr.sign
sig = benchmark(signer, msg, pr_key, k).contents
assert not _schnorr.verify(_msg, pu_key.x, sig.r, sig.s)
def test_C_schnorr_verify(self, benchmark):
sig = _schnorr.sign(msg, pr_key, k).contents
verifier = _schnorr.verify
assert benchmark(verifier, msg, pu_key.x, sig.r, sig.s)
try:
from pySecp256k1 import schnorr
import binascii
class TestCompare:
def test_schnorr(self):
signer = _schnorr.bcrypto410_sign
sig = signer(msg, pr_key).contents
assert sig.raw() == binascii.hexlify(
schnorr.bcrypto410_sign(
binascii.unhexlify(msg), binascii.unhexlify(pr_key)
)
)
except ImportError:
pass
|
py
|
1a5c3fec592aab2179ee94982591dc41588e29ac
|
"""ResNet50 Backbone.
- 640x640 crop
- 48 batch size (12 x 4)
- 4x schedule (48 epochs)
- 0.1 learning rate, step policy = (24, 36)
"""
_base_ = [
"../_base_/models/resnet50.py",
"../_base_/datasets/bdd100k_scene.py",
"../_base_/schedules/schedule_4x.py",
"../_base_/default_runtime.py",
]
load_from = "https://dl.cv.ethz.ch/bdd100k/tagging/scene/models/resnet50_4x_scene_tag_bdd100k.pth"
|
py
|
1a5c41ac44c2ad97fc704216757f76177a87e493
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djcat
------------
Tests for `djcat` catalog path.
"""
from django.test import TestCase
from django.apps import apps
from django.conf import settings
from djcat.path import Path
from djcat.register import CatalogItem
from djcat.exceptions import *
class TestPathCase(TestCase):
"""Path test"""
def setUp(self):
self.c = self.create_category(name="Realty", is_active=True)
self.c1 = self.create_category(name="Flat", parent=self.c, is_unique_in_path=True, is_active=True)
self.c2 = self.create_category(name="Flatbuy", parent=self.c1, is_active=True,
item_class='catalog_module_realty.models.FlatBuy')
self.item_class = CatalogItem.get_item_by_class(self.c2.item_class)
self.item = self.item_class.class_obj.objects.create(category=self.c2, price=11,
building_type=1, room=2)
def create_category(self, **kwargs):
self.CategoryModel = apps.get_model(settings.DJCAT_CATEGORY_MODEL)
c = self.CategoryModel.objects.create(**kwargs)
c.refresh_from_db()
return c
def test_bad_args(self):
"""Test path resolver with bad args"""
self.assertEqual(Path(path=None).category, None)
self.assertEqual(Path(path='').category, None)
self.assertEqual(Path(path='/').category, None)
self.assertEqual(Path(path='//').category, None)
self.assertEqual(Path(path='/sdgsdgf/').category, None)
def test_resolve_category(self):
"""Test path resolver with category path only"""
self.assertEqual(self.c2.get_url_paths(),
{'full': ['realty', 'flat', 'flatbuy'], 'unique': ['flat', 'flatbuy']})
path = Path(path='/realty/')
self.assertEqual(path.category, self.c)
path = Path(path='/realty/flat/flatbuy')
self.assertEqual(path.category, self.c2)
path = Path(path='flat/flatbuy')
self.assertEqual(path.category, self.c2)
path = Path(path='flatbuy')
self.assertEqual(path.category, self.c2)
def test_resolve_category_and_attrs(self):
"""Test path resolver with category and path attributes"""
path = Path(path='flat/flatbuy/brick/asdfasdfasdfasdf')
self.assertEqual(path.category, None)
path = Path(path='flat/flatbuy/brick')
self.assertEqual(path.category, self.c2)
self.assertEqual(path.attrs[0]['attribute'].attr_name, 'building_type')
self.assertEqual(path.attrs[0]['path_value'], 'brick')
path = Path(path='flat/flatbuy/brick/1roomed')
self.assertEqual(path.category, self.c2)
self.assertEqual(path.attrs[0]['attribute'].attr_name, 'building_type')
self.assertEqual(path.attrs[0]['path_value'], 'brick')
self.assertEqual(path.attrs[1]['attribute'].attr_name, 'rooms')
self.assertEqual(path.attrs[1]['path_value'], '1roomed')
def test_resolve_item_instance(self):
"""Test path resolver with item"""
path = Path(path='flat/flatbuy/'+self.item.slug)
self.assertEqual(path.category, self.c2)
self.assertEqual(path.item, self.item)
def test_parse_query(self):
"""Test resolve & parse query"""
path = Path(path='flat/flatbuy/', query='pr_f100-t500.pr_t15', query_allow_multiple=True)
self.assertEqual(path.attrs[0]['query_value'][0], {'from': 100, 'to': 500})
self.assertEqual(path.attrs[0]['query_value'][1], {'to': 15})
path = Path(path='flat/flatbuy/brick', query='rbt_22')
self.assertEqual(path.attrs[0]['path_value'], 'brick')
self.assertEqual(path.attrs[0]['query_value'], [])
path = Path(path='flat/flatbuy/brick', query='rbt_2')
self.assertEqual(path.attrs[0]['query_value'], [2])
path = Path(path='flat/flatbuy/brick', query='rbt_1,2,3')
self.assertEqual(path.attrs[0]['query_value'], [[1,2,3]])
path = Path(path='flat/flatbuy/brick', query='rbt_1,2,3,10')
self.assertEqual(path.attrs[0]['query_value'], [])
path = Path(path='flat/flatbuy/brick', query='rbt_1-5')
self.assertEqual(path.attrs[0]['query_value'], [])
def test_build_query_from_value(self):
"""Test attribute build query string"""
a = self.item_class.get_attr_by_key('pr').class_obj()
v = a.build_query({'from': 100, 'to': 567})
self.assertEqual(v, 'pr_f100-t567')
v = a.build_query({'from': 100})
self.assertEqual(v, 'pr_f100')
a = self.item_class.get_attr_by_key('rbt').class_obj()
v = a.build_query([1, 2, 3])
self.assertEqual(v, 'rbt_1,2,3')
|
py
|
1a5c43182682c2ac65bf18e3dc1a1fb37182dcea
|
import json
from pathlib import Path
from blspy import AugSchemeMPL, PublicKeyMPL, SignatureMPL
from pipscoin.util.byte_types import hexstr_to_bytes
from pipscoin.util.hash import std_hash
def validate_alert_file(file_path: Path, pubkey: str) -> bool:
text = file_path.read_text()
validated = validate_alert(text, pubkey)
return validated
def validate_alert(text: str, pubkey: str) -> bool:
json_obj = json.loads(text)
data = json_obj["data"]
message = bytes(data, "UTF-8")
signature = json_obj["signature"]
signature = SignatureMPL.from_bytes(hexstr_to_bytes(signature))
pubkey_bls = PublicKeyMPL.from_bytes(hexstr_to_bytes(pubkey))
sig_match_my = AugSchemeMPL.verify(pubkey_bls, message, signature)
return sig_match_my
def create_alert_file(alert_file_path: Path, key, genesis_challenge_preimage: str):
bytes_preimage = bytes(genesis_challenge_preimage, "UTF-8")
genesis_challenge = std_hash(bytes_preimage)
file_dict = {
"ready": True,
"genesis_challenge": genesis_challenge.hex(),
"genesis_challenge_preimage": genesis_challenge_preimage,
}
data: str = json.dumps(file_dict)
signature = AugSchemeMPL.sign(key, bytes(data, "utf-8"))
file_data = {"data": data, "signature": f"{signature}"}
file_data_json = json.dumps(file_data)
alert_file_path.write_text(file_data_json)
def create_not_ready_alert_file(alert_file_path: Path, key):
file_dict = {
"ready": False,
}
data: str = json.dumps(file_dict)
signature = AugSchemeMPL.sign(key, bytes(data, "utf-8"))
file_data = {"data": data, "signature": f"{signature}"}
file_data_json = json.dumps(file_data)
alert_file_path.write_text(file_data_json)
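# Usage sketch (illustrative, not part of the original module): generate an ad-hoc
# BLS key, write a signed alert file, then validate it with the matching public key.
if __name__ == "__main__":
    demo_key = AugSchemeMPL.key_gen(bytes([7] * 32))
    demo_pubkey = bytes(demo_key.get_g1()).hex()
    demo_path = Path("alert_demo.json")
    create_alert_file(demo_path, demo_key, "demo genesis preimage")
    print(validate_alert_file(demo_path, demo_pubkey))  # expected: True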
|
py
|
1a5c43f9fc360a0a162328b7eadfdecb51166ade
|
"""Control Action item definition."""
from gaphas.geometry import Rectangle
from gaphor.core.modeling import DrawContext
from gaphor.diagram.presentation import (
Classified,
ElementPresentation,
from_package_str,
)
from gaphor.diagram.shapes import Box, Text, draw_border
from gaphor.diagram.support import represents
from gaphor.diagram.text import FontStyle, FontWeight
from gaphor.i18n import gettext
from gaphor.RAAML import raaml
from gaphor.UML.recipes import stereotypes_str
@represents(raaml.ControlAction)
class ControlActionItem(Classified, ElementPresentation):
def __init__(self, diagram, id=None):
super().__init__(diagram, id)
self.watch("subject[NamedElement].name").watch(
"subject[NamedElement].namespace.name"
)
def update_shapes(self, event=None):
self.shape = Box(
Box(
Text(
text=lambda: stereotypes_str(
self.subject, [gettext("Control Action")]
),
),
Text(
text=lambda: self.subject.name or "",
width=lambda: self.width - 4,
style={
"font-weight": FontWeight.BOLD,
"font-style": FontStyle.NORMAL,
},
),
Text(
text=lambda: from_package_str(self),
style={"font-size": "x-small"},
),
style={"padding": (12, 4, 12, 4)},
),
draw=draw_control_action,
)
def draw_control_action(box, context: DrawContext, bounding_box: Rectangle):
draw_border(box, context, bounding_box)
|
py
|
1a5c44f290fd502f9b38b8f22995fa9b562ab2f9
|
from .conditional import Conditional
from .enumeration import Enum, EnumItem
from .factor import Factor, FactorEncryptMethod, FactorIndexGroup, FactorType
from .pipeline import Pipeline, PipelineStage, PipelineTriggerType, PipelineUnit
from .pipeline_action import AggregateArithmetic, AggregateArithmeticHolder, DeleteTopicActionType, FindBy, \
FromFactor, FromTopic, MemoryWriter, PipelineAction, PipelineActionType, ReadTopicActionType, SystemActionType, \
ToFactor, ToTopic, WriteTopicActionType
from .pipeline_action_delete import DeleteRowAction, DeleteRowsAction, DeleteTopicAction
from .pipeline_action_read import ExistsAction, ReadFactorAction, ReadFactorsAction, ReadRowAction, ReadRowsAction, \
ReadTopicAction
from .pipeline_action_system import AlarmAction, AlarmActionSeverity, CopyToMemoryAction, WriteToExternalAction
from .pipeline_action_write import InsertRowAction, MappingFactor, MappingRow, MergeRowAction, WriteFactorAction, \
WriteTopicAction
from .pipeline_graphic import PipelineGraphic, TopicGraphic, TopicRect
from .space import Space
from .topic import is_aggregation_topic, is_raw_topic, Topic, TopicKind, TopicType
from .user import User, UserRole
from .user_group import UserGroup
|
py
|
1a5c4541420b268e5fd9e54690013b64fb9cc84d
|
import os
import argparse
import tensorflow as tf
import numpy as np
from dataset import create_artifact_dataset
import model
#Define opt
def get_args():
my_parser = argparse.ArgumentParser()
my_parser.add_argument('-m','--model_save_path',type=str,help='Path to Saved_model',required=True)
my_parser.add_argument('-c','--checkpoint_path',type=str,help='Path to checkpoints',required=True)
my_parser.add_argument('-l','--log_path',type=str,help='Path to logdir',required=True)
my_parser.add_argument('-v','--version',type=int,help='ARCNN version to train 1: Original | 2: Fast ARCNN | 3: Dilated |4. Attention',required=True,choices=[1,2,3,4])
my_parser.add_argument('-e','--epochs',type=int,help='Number of epochs',default=50)
my_parser.add_argument('-d','--dataset',type=str,help='Path to folder of images for training',required=True)
#Optional Args
my_parser.add_argument('--batch_size',type=int,help='Batch size',default=16)
my_parser.add_argument('--patch_size',type=int,help='Patch size for training',default=100)
my_parser.add_argument('--stride_size',type=int,help='Stride of patches',default=35)
my_parser.add_argument('--jpq_upper',type=int,help='Highest JPEG quality for compression',default=20)
my_parser.add_argument('--jpq_lower',type=int,help='Lowest JPEG quality for compression',default=10)
return my_parser
#Define the metrics
def ssim(y_true,y_pred):
return tf.image.ssim(y_true,y_pred,max_val=1.0)
def psnr(y_true,y_pred):
return tf.image.psnr(y_true,y_pred,max_val=1.0)
@tf.function
def custom_loss(y_true, y_pred):
    # Weighted combination of (1 - multi-scale SSIM) and mean absolute error.
    alpha = tf.constant(0.5)
    msssim_loss = alpha*(1-tf.image.ssim_multiscale(y_true,y_pred,max_val=1.0,filter_size=3))
    mae = tf.metrics.mae(y_true, y_pred)
    loss = tf.reduce_mean(msssim_loss) + (1-alpha)*tf.reduce_mean(mae)
    return loss
#Create Dirs
def makedirs(opt):
    # Create the checkpoint and log directories if they do not already exist.
    os.makedirs(opt.checkpoint_path, exist_ok=True)
    os.makedirs(opt.log_path, exist_ok=True)
if __name__ == "__main__":
physical_devices = tf.config.experimental.list_physical_devices("GPU")
for i in physical_devices:
tf.config.experimental.set_memory_growth(i, True)
# Get args
opt = get_args().parse_args()
#make dirs
makedirs(opt)
#Create Model
if (opt.version == 1):
model = model.get_ARCNN((None,None,1))
elif (opt.version == 2):
model = model.get_Fast_ARCNN((None,None,1))
elif (opt.version == 3):
model = model.get_ARCNN_lite((None,None,1))
elif (opt.version == 4):
model = model.get_ARCNN_att((None,None,1))
#Load Dataset
data = create_artifact_dataset(fpath=opt.dataset,
batch_size=opt.batch_size,
p=opt.patch_size,
s=opt.stride_size,
    jpq=(opt.jpq_lower,opt.jpq_upper))
data = data.prefetch(tf.data.experimental.AUTOTUNE)
#Set callbacks
    tboard = tf.keras.callbacks.TensorBoard(log_dir=opt.log_path,write_images=True)
    filepath = os.path.join(opt.checkpoint_path,"weights-improvement-{epoch:02d}-{ssim:.2f}.hdf5")
cp = tf.keras.callbacks.ModelCheckpoint(filepath,monitor="ssim",verbose=1,save_weights_only=True)
lr_reduce = tf.keras.callbacks.ReduceLROnPlateau(monitor='ssim', factor=0.1, patience=5, verbose=1,mode='max',
min_delta=0.001,
cooldown=2,
min_lr=1e-6)
#Train Model
optim = tf.keras.optimizers.Adam(learning_rate=1e-3)
model.compile(optimizer=optim,loss=custom_loss,metrics=[ssim,psnr])
model.fit(data,epochs=opt.epochs,callbacks=[tboard,cp,lr_reduce])
#SaveModel
model.save(opt.model_save_path,save_format="tf")
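# Example invocation (illustrative; all paths are placeholders):
#   python train.py -m ./saved_model -c ./checkpoints -l ./logs -v 2 -e 50 -d ./train_images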
|
py
|
1a5c45ef62c624860dc5f238e8c4de4beb877ac5
|
"""
Read demography data from input database.
"""
from typing import List
from functools import lru_cache
import numpy as np
import pandas as pd
from autumn.inputs.database import get_input_db
INF = float("inf")
def _get_death_rates(country_iso_code: str):
input_db = get_input_db()
death_df = input_db.query("deaths", conditions=[f"iso3='{country_iso_code}'"],)
pop_df = input_db.query(
"population", conditions=[f"iso3='{country_iso_code}'", "region IS NULL",],
)
# Calculate mean year and time period
death_df["mean_year"] = (death_df["start_year"] + death_df["end_year"]) / 2
death_df["period"] = death_df["end_year"] - death_df["start_year"]
# Combine population and total death data so we can calulate death rate.
# Throws away data for population over 100 y.o.
rate_df = pd.merge(
death_df, pop_df, left_on=["start_year", "start_age"], right_on=["year", "start_age"]
)
# Calculate death rate.
rate_df["death_rate"] = rate_df["death_count"] / (rate_df["population"] * rate_df["period"])
cols = ["mean_year", "start_age", "death_rate"]
rate_df = rate_df.drop(columns=[c for c in rate_df.columns if c not in cols])
rate_df = rate_df.sort_values(["mean_year", "start_age"])
return rate_df
def _get_life_expectancy(country_iso_code: str):
input_db = get_input_db()
expectancy_df = input_db.query("life_expectancy", conditions=[f"iso3='{country_iso_code}'"],)
# Calculate mean year
expectancy_df["mean_year"] = (expectancy_df["start_year"] + expectancy_df["end_year"]) / 2
cols = ["mean_year", "start_age", "life_expectancy"]
expectancy_df = expectancy_df.drop(columns=[c for c in expectancy_df.columns if c not in cols])
expectancy_df = expectancy_df.sort_values(["mean_year", "start_age"])
return expectancy_df
def get_death_rates_by_agegroup(age_breakpoints: List[float], country_iso_code: str):
"""
Find death rates from UN data that are specific to the age groups provided.
Returns a list of death rates and a list of years.
"""
assert age_breakpoints == sorted(age_breakpoints)
assert age_breakpoints[0] == 0
input_db = get_input_db()
rate_df = _get_death_rates(country_iso_code)
years = rate_df["mean_year"].unique().tolist()
orig_ages = rate_df["start_age"].unique().tolist()
year_step = 5
year_rates = {}
for year in years:
orig_rates = rate_df[rate_df["mean_year"] == year]["death_rate"].tolist()
new_rates = downsample_rate(orig_rates, orig_ages, year_step, age_breakpoints)
year_rates[year] = new_rates
death_rates_by_agegroup = {}
for i, age in enumerate(age_breakpoints):
death_rates_by_agegroup[age] = [year_rates[y][i] for y in years]
return death_rates_by_agegroup, years
def get_life_expectancy_by_agegroup(age_breakpoints: List[float], country_iso_code: str):
"""
Find life expectancy from UN data that are specific to the age groups provided.
Returns a list of life expectancy and a list of years.
"""
assert age_breakpoints == sorted(age_breakpoints)
assert age_breakpoints[0] == 0
life_expectancy_df = _get_life_expectancy(country_iso_code)
years = life_expectancy_df["mean_year"].unique().tolist()
orig_ages = life_expectancy_df["start_age"].unique().tolist()
year_step = 5
year_expectancy = {}
for year in years:
orig_expectancy = life_expectancy_df[life_expectancy_df["mean_year"] == year]["life_expectancy"].tolist()
new_expectancy = downsample_rate(orig_expectancy, orig_ages, year_step, age_breakpoints)
year_expectancy[year] = new_expectancy
life_expectancy_by_agegroup = {}
for i, age in enumerate(age_breakpoints):
life_expectancy_by_agegroup[age] = [year_expectancy[y][i] for y in years]
return life_expectancy_by_agegroup, years
def get_iso3_from_country_name(country_name: str):
"""
Return the iso3 code matching with a given country name.
"""
input_db = get_input_db()
country_df = input_db.query("countries", conditions=[f"country='{country_name}'"])
results = country_df["iso3"].tolist()
if results:
return results[0]
else:
raise ValueError(f"Country name {country_name} not found")
def get_crude_birth_rate(country_iso_code: str):
"""
Gets crude birth rate over time for a given country.
Returns a list of birth rates and a list of years.
"""
input_db = get_input_db()
birth_df = input_db.query("birth_rates", conditions=[f"iso3='{country_iso_code}'"])
birth_df = birth_df.sort_values(["mean_year"])
return birth_df["birth_rate"].tolist(), birth_df["mean_year"].tolist()
def get_population_by_agegroup(
age_breakpoints: List[float], country_iso_code: str, region: str = None, year: int = 2020
):
"""
Find population for age bins.
Returns a list of ints, each item being the population for that age bracket.
"""
assert age_breakpoints == sorted(age_breakpoints)
assert age_breakpoints[0] == 0
input_db = get_input_db()
pop_df = input_db.query(
"population",
conditions=[
f"iso3='{country_iso_code}'",
f"year={year}",
f"region='{region}'" if region else "region IS NULL",
],
)
pop_df = pop_df.sort_values(["start_age"])
orig_ages = pop_df["start_age"].tolist()
orig_pop = pop_df["population"].tolist()
assert len(orig_ages) == len(orig_pop)
population = downsample_quantity(orig_pop, orig_ages, age_breakpoints)
return [int(p) for p in population]
def downsample_rate(
orig_rates: List[float], orig_bins: List[float], orig_step: float, new_bins: List[float]
):
"""
Downsample original rates from their current bins to new bins
Assume new bins are smaller than, or equal to, the original bins.
Requires that original values are equispaced by `orig_step` amount.
"""
num_orig_bins = len(orig_bins)
num_new_bins = len(new_bins)
weights = get_bin_weights(orig_bins, new_bins)
new_rates = [0 for _ in range(num_new_bins)]
orig_rates = np.array(orig_rates)
for i_n in range(num_new_bins):
time_chunks = np.zeros(num_orig_bins)
for i_o in range(num_orig_bins):
time_chunks[i_o] = weights[i_o, i_n] * orig_step
new_rates[i_n] = (orig_rates * time_chunks).sum() / time_chunks.sum()
return new_rates
def downsample_quantity(orig_vals: List[float], orig_bins: List[float], new_bins: List[float]):
"""
Downsample original values from their current bins to new bins
Assume new bins are smaller than, or equal to, the original bins
"""
num_orig_bins = len(orig_bins)
num_new_bins = len(new_bins)
weights = get_bin_weights(orig_bins, new_bins)
new_vals = [0 for _ in range(num_new_bins)]
for i_n in range(num_new_bins):
for i_o in range(num_orig_bins):
new_vals[i_n] += weights[i_o, i_n] * orig_vals[i_o]
assert sum(orig_vals) - sum(new_vals) < 1e-3
return new_vals
def get_bin_weights(orig_bins: List[float], new_bins: List[float]):
"""
Gets 2D weight matrix for moving from orig bins to new bins.
"""
num_orig_bins = len(orig_bins)
num_new_bins = len(new_bins)
weights = np.zeros([num_orig_bins, num_new_bins])
for i_n, new_start in enumerate(new_bins):
# Find the new bin end
if i_n == num_new_bins - 1:
new_end = INF
else:
new_end = new_bins[i_n + 1]
# Loop through all old bins, take matching proportion
for i_o, orig_start in enumerate(orig_bins):
# Find the orig bin end
if i_o == len(orig_bins) - 1:
orig_end = INF
else:
orig_end = orig_bins[i_o + 1]
is_new_bin_inside_old_one = new_start > orig_start and new_end < orig_end
assert not is_new_bin_inside_old_one, "New bin inside old bin"
if orig_end == INF and new_end == INF:
# Final bins, add everything
assert new_start <= orig_start, "Cannot slice up infinity"
weights[i_o, i_n] = 1
elif orig_start <= new_start < orig_end:
# New bin starts at start, or half way through an old bin
# We get a fraction of the end of the bin
weights[i_o, i_n] = (orig_end - new_start) / (orig_end - orig_start)
elif new_start < orig_start and new_end >= orig_end:
# New bin encompasses old bin, add the whole thing
weights[i_o, i_n] = 1
elif orig_start < new_end < orig_end:
# New bin ends inside an old bin, take a fraction of the start.
weights[i_o, i_n] = (new_end - orig_start) / (orig_end - orig_start)
return weights
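# Worked example (illustrative, not part of the original module): downsample a
# population given in 5-year bins onto the coarser breakpoints [0, 50].
if __name__ == "__main__":
    five_year_bins = list(range(0, 100, 5))          # 0, 5, ..., 95
    five_year_pop = [100.0 for _ in five_year_bins]  # 100 people per 5-year bin
    print(downsample_quantity(five_year_pop, five_year_bins, [0, 50]))  # -> [1000.0, 1000.0]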
|
py
|
1a5c469379a37cf12fa9defbc97119660042bc5a
|
import os
from pycorrector.utils.text_utils import lcs, get_all_unify_pinyins
from pypinyin import pinyin, lazy_pinyin, Style
import Levenshtein
from pycorrector.utils.langconv import Converter
pwd_path = os.path.abspath(os.path.dirname(__file__))
# same_pinyin_path0 = os.path.join(pwd_path, 'same_pinyin.txt')
same_pinyin_path2 = os.path.join(pwd_path, 'pycorrector/data/similar_pinyin.txt')
gb2312_simple_chinese_unicode = [0x4E00,0x4E01,0x4E03,0x4E07,0x4E08,0x4E09,0x4E0A,0x4E0B,0x4E0C,0x4E0D,
0x4E0E,0x4E10,0x4E11,0x4E13,0x4E14,0x4E15,0x4E16,0x4E18,0x4E19,0x4E1A,
0x4E1B,0x4E1C,0x4E1D,0x4E1E,0x4E22,0x4E24,0x4E25,0x4E27,0x4E28,0x4E2A,
0x4E2B,0x4E2C,0x4E2D,0x4E30,0x4E32,0x4E34,0x4E36,0x4E38,0x4E39,0x4E3A,
0x4E3B,0x4E3D,0x4E3E,0x4E3F,0x4E43,0x4E45,0x4E47,0x4E48,0x4E49,0x4E4B,
0x4E4C,0x4E4D,0x4E4E,0x4E4F,0x4E50,0x4E52,0x4E53,0x4E54,0x4E56,0x4E58,
0x4E59,0x4E5C,0x4E5D,0x4E5E,0x4E5F,0x4E60,0x4E61,0x4E66,0x4E69,0x4E70,
0x4E71,0x4E73,0x4E7E,0x4E86,0x4E88,0x4E89,0x4E8B,0x4E8C,0x4E8D,0x4E8E,
0x4E8F,0x4E91,0x4E92,0x4E93,0x4E94,0x4E95,0x4E98,0x4E9A,0x4E9B,0x4E9F,
0x4EA0,0x4EA1,0x4EA2,0x4EA4,0x4EA5,0x4EA6,0x4EA7,0x4EA8,0x4EA9,0x4EAB,
0x4EAC,0x4EAD,0x4EAE,0x4EB2,0x4EB3,0x4EB5,0x4EBA,0x4EBB,0x4EBF,0x4EC0,
0x4EC1,0x4EC2,0x4EC3,0x4EC4,0x4EC5,0x4EC6,0x4EC7,0x4EC9,0x4ECA,0x4ECB,
0x4ECD,0x4ECE,0x4ED1,0x4ED3,0x4ED4,0x4ED5,0x4ED6,0x4ED7,0x4ED8,0x4ED9,
0x4EDD,0x4EDE,0x4EDF,0x4EE1,0x4EE3,0x4EE4,0x4EE5,0x4EE8,0x4EEA,0x4EEB,
0x4EEC,0x4EF0,0x4EF2,0x4EF3,0x4EF5,0x4EF6,0x4EF7,0x4EFB,0x4EFD,0x4EFF,
0x4F01,0x4F09,0x4F0A,0x4F0D,0x4F0E,0x4F0F,0x4F10,0x4F11,0x4F17,0x4F18,
0x4F19,0x4F1A,0x4F1B,0x4F1E,0x4F1F,0x4F20,0x4F22,0x4F24,0x4F25,0x4F26,
0x4F27,0x4F2A,0x4F2B,0x4F2F,0x4F30,0x4F32,0x4F34,0x4F36,0x4F38,0x4F3A,
0x4F3C,0x4F3D,0x4F43,0x4F46,0x4F4D,0x4F4E,0x4F4F,0x4F50,0x4F51,0x4F53,
0x4F55,0x4F57,0x4F58,0x4F59,0x4F5A,0x4F5B,0x4F5C,0x4F5D,0x4F5E,0x4F5F,
0x4F60,0x4F63,0x4F64,0x4F65,0x4F67,0x4F69,0x4F6C,0x4F6F,0x4F70,0x4F73,
0x4F74,0x4F76,0x4F7B,0x4F7C,0x4F7E,0x4F7F,0x4F83,0x4F84,0x4F88,0x4F89,
0x4F8B,0x4F8D,0x4F8F,0x4F91,0x4F94,0x4F97,0x4F9B,0x4F9D,0x4FA0,0x4FA3,
0x4FA5,0x4FA6,0x4FA7,0x4FA8,0x4FA9,0x4FAA,0x4FAC,0x4FAE,0x4FAF,0x4FB5,
0x4FBF,0x4FC3,0x4FC4,0x4FC5,0x4FCA,0x4FCE,0x4FCF,0x4FD0,0x4FD1,0x4FD7,
0x4FD8,0x4FDA,0x4FDC,0x4FDD,0x4FDE,0x4FDF,0x4FE1,0x4FE3,0x4FE6,0x4FE8,
0x4FE9,0x4FEA,0x4FED,0x4FEE,0x4FEF,0x4FF1,0x4FF3,0x4FF8,0x4FFA,0x4FFE,
0x500C,0x500D,0x500F,0x5012,0x5014,0x5018,0x5019,0x501A,0x501C,0x501F,
0x5021,0x5025,0x5026,0x5028,0x5029,0x502A,0x502C,0x502D,0x502E,0x503A,
0x503C,0x503E,0x5043,0x5047,0x5048,0x504C,0x504E,0x504F,0x5055,0x505A,
0x505C,0x5065,0x506C,0x5076,0x5077,0x507B,0x507E,0x507F,0x5080,0x5085,
0x5088,0x508D,0x50A3,0x50A5,0x50A7,0x50A8,0x50A9,0x50AC,0x50B2,0x50BA,
0x50BB,0x50CF,0x50D6,0x50DA,0x50E6,0x50E7,0x50EC,0x50ED,0x50EE,0x50F3,
0x50F5,0x50FB,0x5106,0x5107,0x510B,0x5112,0x5121,0x513F,0x5140,0x5141,
0x5143,0x5144,0x5145,0x5146,0x5148,0x5149,0x514B,0x514D,0x5151,0x5154,
0x5155,0x5156,0x515A,0x515C,0x5162,0x5165,0x5168,0x516B,0x516C,0x516D,
0x516E,0x5170,0x5171,0x5173,0x5174,0x5175,0x5176,0x5177,0x5178,0x5179,
0x517B,0x517C,0x517D,0x5180,0x5181,0x5182,0x5185,0x5188,0x5189,0x518C,
0x518D,0x5192,0x5195,0x5196,0x5197,0x5199,0x519B,0x519C,0x51A0,0x51A2,
0x51A4,0x51A5,0x51AB,0x51AC,0x51AF,0x51B0,0x51B1,0x51B2,0x51B3,0x51B5,
0x51B6,0x51B7,0x51BB,0x51BC,0x51BD,0x51C0,0x51C4,0x51C6,0x51C7,0x51C9,
0x51CB,0x51CC,0x51CF,0x51D1,0x51DB,0x51DD,0x51E0,0x51E1,0x51E4,0x51EB,
0x51ED,0x51EF,0x51F0,0x51F3,0x51F5,0x51F6,0x51F8,0x51F9,0x51FA,0x51FB,
0x51FC,0x51FD,0x51FF,0x5200,0x5201,0x5202,0x5203,0x5206,0x5207,0x5208,
0x520A,0x520D,0x520E,0x5211,0x5212,0x5216,0x5217,0x5218,0x5219,0x521A,
0x521B,0x521D,0x5220,0x5224,0x5228,0x5229,0x522B,0x522D,0x522E,0x5230,
0x5233,0x5236,0x5237,0x5238,0x5239,0x523A,0x523B,0x523D,0x523F,0x5240,
0x5241,0x5242,0x5243,0x524A,0x524C,0x524D,0x5250,0x5251,0x5254,0x5256,
0x525C,0x525E,0x5261,0x5265,0x5267,0x5269,0x526A,0x526F,0x5272,0x527D,
0x527F,0x5281,0x5282,0x5288,0x5290,0x5293,0x529B,0x529D,0x529E,0x529F,
0x52A0,0x52A1,0x52A2,0x52A3,0x52A8,0x52A9,0x52AA,0x52AB,0x52AC,0x52AD,
0x52B1,0x52B2,0x52B3,0x52BE,0x52BF,0x52C3,0x52C7,0x52C9,0x52CB,0x52D0,
0x52D2,0x52D6,0x52D8,0x52DF,0x52E4,0x52F0,0x52F9,0x52FA,0x52FE,0x52FF,
0x5300,0x5305,0x5306,0x5308,0x530D,0x530F,0x5310,0x5315,0x5316,0x5317,
0x5319,0x531A,0x531D,0x5320,0x5321,0x5323,0x5326,0x532A,0x532E,0x5339,
0x533A,0x533B,0x533E,0x533F,0x5341,0x5343,0x5345,0x5347,0x5348,0x5349,
0x534A,0x534E,0x534F,0x5351,0x5352,0x5353,0x5355,0x5356,0x5357,0x535A,
0x535C,0x535E,0x535F,0x5360,0x5361,0x5362,0x5363,0x5364,0x5366,0x5367,
0x5369,0x536B,0x536E,0x536F,0x5370,0x5371,0x5373,0x5374,0x5375,0x5377,
0x5378,0x537A,0x537F,0x5382,0x5384,0x5385,0x5386,0x5389,0x538B,0x538C,
0x538D,0x5395,0x5398,0x539A,0x539D,0x539F,0x53A2,0x53A3,0x53A5,0x53A6,
0x53A8,0x53A9,0x53AE,0x53B6,0x53BB,0x53BF,0x53C1,0x53C2,0x53C8,0x53C9,
0x53CA,0x53CB,0x53CC,0x53CD,0x53D1,0x53D4,0x53D6,0x53D7,0x53D8,0x53D9,
0x53DB,0x53DF,0x53E0,0x53E3,0x53E4,0x53E5,0x53E6,0x53E8,0x53E9,0x53EA,
0x53EB,0x53EC,0x53ED,0x53EE,0x53EF,0x53F0,0x53F1,0x53F2,0x53F3,0x53F5,
0x53F6,0x53F7,0x53F8,0x53F9,0x53FB,0x53FC,0x53FD,0x5401,0x5403,0x5404,
0x5406,0x5408,0x5409,0x540A,0x540C,0x540D,0x540E,0x540F,0x5410,0x5411,
0x5412,0x5413,0x5415,0x5416,0x5417,0x541B,0x541D,0x541E,0x541F,0x5420,
0x5421,0x5423,0x5426,0x5427,0x5428,0x5429,0x542B,0x542C,0x542D,0x542E,
0x542F,0x5431,0x5432,0x5434,0x5435,0x5438,0x5439,0x543B,0x543C,0x543E,
0x5440,0x5443,0x5446,0x5448,0x544A,0x544B,0x5450,0x5452,0x5453,0x5454,
0x5455,0x5456,0x5457,0x5458,0x5459,0x545B,0x545C,0x5462,0x5464,0x5466,
0x5468,0x5471,0x5472,0x5473,0x5475,0x5476,0x5477,0x5478,0x547B,0x547C,
0x547D,0x5480,0x5482,0x5484,0x5486,0x548B,0x548C,0x548E,0x548F,0x5490,
0x5492,0x5494,0x5495,0x5496,0x5499,0x549A,0x549B,0x549D,0x54A3,0x54A4,
0x54A6,0x54A7,0x54A8,0x54A9,0x54AA,0x54AB,0x54AC,0x54AD,0x54AF,0x54B1,
0x54B3,0x54B4,0x54B8,0x54BB,0x54BD,0x54BF,0x54C0,0x54C1,0x54C2,0x54C4,
0x54C6,0x54C7,0x54C8,0x54C9,0x54CC,0x54CD,0x54CE,0x54CF,0x54D0,0x54D1,
0x54D2,0x54D3,0x54D4,0x54D5,0x54D7,0x54D9,0x54DA,0x54DC,0x54DD,0x54DE,
0x54DF,0x54E5,0x54E6,0x54E7,0x54E8,0x54E9,0x54EA,0x54ED,0x54EE,0x54F2,
0x54F3,0x54FA,0x54FC,0x54FD,0x54FF,0x5501,0x5506,0x5507,0x5509,0x550F,
0x5510,0x5511,0x5514,0x551B,0x5520,0x5522,0x5523,0x5524,0x5527,0x552A,
0x552C,0x552E,0x552F,0x5530,0x5531,0x5533,0x5537,0x553C,0x553E,0x553F,
0x5541,0x5543,0x5544,0x5546,0x5549,0x554A,0x5550,0x5555,0x5556,0x555C,
0x5561,0x5564,0x5565,0x5566,0x5567,0x556A,0x556C,0x556D,0x556E,0x5575,
0x5576,0x5577,0x5578,0x557B,0x557C,0x557E,0x5580,0x5581,0x5582,0x5583,
0x5584,0x5587,0x5588,0x5589,0x558A,0x558B,0x558F,0x5591,0x5594,0x5598,
0x5599,0x559C,0x559D,0x559F,0x55A7,0x55B1,0x55B3,0x55B5,0x55B7,0x55B9,
0x55BB,0x55BD,0x55BE,0x55C4,0x55C5,0x55C9,0x55CC,0x55CD,0x55D1,0x55D2,
0x55D3,0x55D4,0x55D6,0x55DC,0x55DD,0x55DF,0x55E1,0x55E3,0x55E4,0x55E5,
0x55E6,0x55E8,0x55EA,0x55EB,0x55EC,0x55EF,0x55F2,0x55F3,0x55F5,0x55F7,
0x55FD,0x55FE,0x5600,0x5601,0x5608,0x5609,0x560C,0x560E,0x560F,0x5618,
0x561B,0x561E,0x561F,0x5623,0x5624,0x5627,0x562C,0x562D,0x5631,0x5632,
0x5634,0x5636,0x5639,0x563B,0x563F,0x564C,0x564D,0x564E,0x5654,0x5657,
0x5658,0x5659,0x565C,0x5662,0x5664,0x5668,0x5669,0x566A,0x566B,0x566C,
0x5671,0x5676,0x567B,0x567C,0x5685,0x5686,0x568E,0x568F,0x5693,0x56A3,
0x56AF,0x56B7,0x56BC,0x56CA,0x56D4,0x56D7,0x56DA,0x56DB,0x56DD,0x56DE,
0x56DF,0x56E0,0x56E1,0x56E2,0x56E4,0x56EB,0x56ED,0x56F0,0x56F1,0x56F4,
0x56F5,0x56F9,0x56FA,0x56FD,0x56FE,0x56FF,0x5703,0x5704,0x5706,0x5708,
0x5709,0x570A,0x571C,0x571F,0x5723,0x5728,0x5729,0x572A,0x572C,0x572D,
0x572E,0x572F,0x5730,0x5733,0x5739,0x573A,0x573B,0x573E,0x5740,0x5742,
0x5747,0x574A,0x574C,0x574D,0x574E,0x574F,0x5750,0x5751,0x5757,0x575A,
0x575B,0x575C,0x575D,0x575E,0x575F,0x5760,0x5761,0x5764,0x5766,0x5768,
0x5769,0x576A,0x576B,0x576D,0x576F,0x5773,0x5776,0x5777,0x577B,0x577C,
0x5782,0x5783,0x5784,0x5785,0x5786,0x578B,0x578C,0x5792,0x5793,0x579B,
0x57A0,0x57A1,0x57A2,0x57A3,0x57A4,0x57A6,0x57A7,0x57A9,0x57AB,0x57AD,
0x57AE,0x57B2,0x57B4,0x57B8,0x57C2,0x57C3,0x57CB,0x57CE,0x57CF,0x57D2,
0x57D4,0x57D5,0x57D8,0x57D9,0x57DA,0x57DD,0x57DF,0x57E0,0x57E4,0x57ED,
0x57EF,0x57F4,0x57F8,0x57F9,0x57FA,0x57FD,0x5800,0x5802,0x5806,0x5807,
0x580B,0x580D,0x5811,0x5815,0x5819,0x581E,0x5820,0x5821,0x5824,0x582A,
0x5830,0x5835,0x5844,0x584C,0x584D,0x5851,0x5854,0x5858,0x585E,0x5865,
0x586B,0x586C,0x587E,0x5880,0x5881,0x5883,0x5885,0x5889,0x5892,0x5893,
0x5899,0x589A,0x589E,0x589F,0x58A8,0x58A9,0x58BC,0x58C1,0x58C5,0x58D1,
0x58D5,0x58E4,0x58EB,0x58EC,0x58EE,0x58F0,0x58F3,0x58F6,0x58F9,0x5902,
0x5904,0x5907,0x590D,0x590F,0x5914,0x5915,0x5916,0x5919,0x591A,0x591C,
0x591F,0x5924,0x5925,0x5927,0x5929,0x592A,0x592B,0x592D,0x592E,0x592F,
0x5931,0x5934,0x5937,0x5938,0x5939,0x593A,0x593C,0x5941,0x5942,0x5944,
0x5947,0x5948,0x5949,0x594B,0x594E,0x594F,0x5951,0x5954,0x5955,0x5956,
0x5957,0x5958,0x595A,0x5960,0x5962,0x5965,0x5973,0x5974,0x5976,0x5978,
0x5979,0x597D,0x5981,0x5982,0x5983,0x5984,0x5986,0x5987,0x5988,0x598A,
0x598D,0x5992,0x5993,0x5996,0x5997,0x5999,0x599E,0x59A3,0x59A4,0x59A5,
0x59A8,0x59A9,0x59AA,0x59AB,0x59AE,0x59AF,0x59B2,0x59B9,0x59BB,0x59BE,
0x59C6,0x59CA,0x59CB,0x59D0,0x59D1,0x59D2,0x59D3,0x59D4,0x59D7,0x59D8,
0x59DA,0x59DC,0x59DD,0x59E3,0x59E5,0x59E8,0x59EC,0x59F9,0x59FB,0x59FF,
0x5A01,0x5A03,0x5A04,0x5A05,0x5A06,0x5A07,0x5A08,0x5A09,0x5A0C,0x5A11,
0x5A13,0x5A18,0x5A1C,0x5A1F,0x5A20,0x5A23,0x5A25,0x5A29,0x5A31,0x5A32,
0x5A34,0x5A36,0x5A3C,0x5A40,0x5A46,0x5A49,0x5A4A,0x5A55,0x5A5A,0x5A62,
0x5A67,0x5A6A,0x5A74,0x5A75,0x5A76,0x5A77,0x5A7A,0x5A7F,0x5A92,0x5A9A,
0x5A9B,0x5AAA,0x5AB2,0x5AB3,0x5AB5,0x5AB8,0x5ABE,0x5AC1,0x5AC2,0x5AC9,
0x5ACC,0x5AD2,0x5AD4,0x5AD6,0x5AD8,0x5ADC,0x5AE0,0x5AE1,0x5AE3,0x5AE6,
0x5AE9,0x5AEB,0x5AF1,0x5B09,0x5B16,0x5B17,0x5B32,0x5B34,0x5B37,0x5B40,
0x5B50,0x5B51,0x5B53,0x5B54,0x5B55,0x5B57,0x5B58,0x5B59,0x5B5A,0x5B5B,
0x5B5C,0x5B5D,0x5B5F,0x5B62,0x5B63,0x5B64,0x5B65,0x5B66,0x5B69,0x5B6A,
0x5B6C,0x5B70,0x5B71,0x5B73,0x5B75,0x5B7A,0x5B7D,0x5B80,0x5B81,0x5B83,
0x5B84,0x5B85,0x5B87,0x5B88,0x5B89,0x5B8B,0x5B8C,0x5B8F,0x5B93,0x5B95,
0x5B97,0x5B98,0x5B99,0x5B9A,0x5B9B,0x5B9C,0x5B9D,0x5B9E,0x5BA0,0x5BA1,
0x5BA2,0x5BA3,0x5BA4,0x5BA5,0x5BA6,0x5BAA,0x5BAB,0x5BB0,0x5BB3,0x5BB4,
0x5BB5,0x5BB6,0x5BB8,0x5BB9,0x5BBD,0x5BBE,0x5BBF,0x5BC2,0x5BC4,0x5BC5,
0x5BC6,0x5BC7,0x5BCC,0x5BD0,0x5BD2,0x5BD3,0x5BDD,0x5BDE,0x5BDF,0x5BE1,
0x5BE4,0x5BE5,0x5BE8,0x5BEE,0x5BF0,0x5BF8,0x5BF9,0x5BFA,0x5BFB,0x5BFC,
0x5BFF,0x5C01,0x5C04,0x5C06,0x5C09,0x5C0A,0x5C0F,0x5C11,0x5C14,0x5C15,
0x5C16,0x5C18,0x5C1A,0x5C1C,0x5C1D,0x5C22,0x5C24,0x5C25,0x5C27,0x5C2C,
0x5C31,0x5C34,0x5C38,0x5C39,0x5C3A,0x5C3B,0x5C3C,0x5C3D,0x5C3E,0x5C3F,
0x5C40,0x5C41,0x5C42,0x5C45,0x5C48,0x5C49,0x5C4A,0x5C4B,0x5C4E,0x5C4F,
0x5C50,0x5C51,0x5C55,0x5C59,0x5C5E,0x5C60,0x5C61,0x5C63,0x5C65,0x5C66,
0x5C6E,0x5C6F,0x5C71,0x5C79,0x5C7A,0x5C7F,0x5C81,0x5C82,0x5C88,0x5C8C,
0x5C8D,0x5C90,0x5C91,0x5C94,0x5C96,0x5C97,0x5C98,0x5C99,0x5C9A,0x5C9B,
0x5C9C,0x5CA2,0x5CA3,0x5CA9,0x5CAB,0x5CAC,0x5CAD,0x5CB1,0x5CB3,0x5CB5,
0x5CB7,0x5CB8,0x5CBD,0x5CBF,0x5CC1,0x5CC4,0x5CCB,0x5CD2,0x5CD9,0x5CE1,
0x5CE4,0x5CE5,0x5CE6,0x5CE8,0x5CEA,0x5CED,0x5CF0,0x5CFB,0x5D02,0x5D03,
0x5D06,0x5D07,0x5D0E,0x5D14,0x5D16,0x5D1B,0x5D1E,0x5D24,0x5D26,0x5D27,
0x5D29,0x5D2D,0x5D2E,0x5D34,0x5D3D,0x5D3E,0x5D47,0x5D4A,0x5D4B,0x5D4C,
0x5D58,0x5D5B,0x5D5D,0x5D69,0x5D6B,0x5D6C,0x5D6F,0x5D74,0x5D82,0x5D99,
0x5D9D,0x5DB7,0x5DC5,0x5DCD,0x5DDB,0x5DDD,0x5DDE,0x5DE1,0x5DE2,0x5DE5,
0x5DE6,0x5DE7,0x5DE8,0x5DE9,0x5DEB,0x5DEE,0x5DEF,0x5DF1,0x5DF2,0x5DF3,
0x5DF4,0x5DF7,0x5DFD,0x5DFE,0x5E01,0x5E02,0x5E03,0x5E05,0x5E06,0x5E08,
0x5E0C,0x5E0F,0x5E10,0x5E11,0x5E14,0x5E15,0x5E16,0x5E18,0x5E19,0x5E1A,
0x5E1B,0x5E1C,0x5E1D,0x5E26,0x5E27,0x5E2D,0x5E2E,0x5E31,0x5E37,0x5E38,
0x5E3B,0x5E3C,0x5E3D,0x5E42,0x5E44,0x5E45,0x5E4C,0x5E54,0x5E55,0x5E5B,
0x5E5E,0x5E61,0x5E62,0x5E72,0x5E73,0x5E74,0x5E76,0x5E78,0x5E7A,0x5E7B,
0x5E7C,0x5E7D,0x5E7F,0x5E80,0x5E84,0x5E86,0x5E87,0x5E8A,0x5E8B,0x5E8F,
0x5E90,0x5E91,0x5E93,0x5E94,0x5E95,0x5E96,0x5E97,0x5E99,0x5E9A,0x5E9C,
0x5E9E,0x5E9F,0x5EA0,0x5EA5,0x5EA6,0x5EA7,0x5EAD,0x5EB3,0x5EB5,0x5EB6,
0x5EB7,0x5EB8,0x5EB9,0x5EBE,0x5EC9,0x5ECA,0x5ED1,0x5ED2,0x5ED3,0x5ED6,
0x5EDB,0x5EE8,0x5EEA,0x5EF4,0x5EF6,0x5EF7,0x5EFA,0x5EFE,0x5EFF,0x5F00,
0x5F01,0x5F02,0x5F03,0x5F04,0x5F08,0x5F0A,0x5F0B,0x5F0F,0x5F11,0x5F13,
0x5F15,0x5F17,0x5F18,0x5F1B,0x5F1F,0x5F20,0x5F25,0x5F26,0x5F27,0x5F29,
0x5F2A,0x5F2D,0x5F2F,0x5F31,0x5F39,0x5F3A,0x5F3C,0x5F40,0x5F50,0x5F52,
0x5F53,0x5F55,0x5F56,0x5F57,0x5F58,0x5F5D,0x5F61,0x5F62,0x5F64,0x5F66,
0x5F69,0x5F6A,0x5F6C,0x5F6D,0x5F70,0x5F71,0x5F73,0x5F77,0x5F79,0x5F7B,
0x5F7C,0x5F80,0x5F81,0x5F82,0x5F84,0x5F85,0x5F87,0x5F88,0x5F89,0x5F8A,
0x5F8B,0x5F8C,0x5F90,0x5F92,0x5F95,0x5F97,0x5F98,0x5F99,0x5F9C,0x5FA1,
0x5FA8,0x5FAA,0x5FAD,0x5FAE,0x5FB5,0x5FB7,0x5FBC,0x5FBD,0x5FC3,0x5FC4,
0x5FC5,0x5FC6,0x5FC9,0x5FCC,0x5FCD,0x5FCF,0x5FD0,0x5FD1,0x5FD2,0x5FD6,
0x5FD7,0x5FD8,0x5FD9,0x5FDD,0x5FE0,0x5FE1,0x5FE4,0x5FE7,0x5FEA,0x5FEB,
0x5FED,0x5FEE,0x5FF1,0x5FF5,0x5FF8,0x5FFB,0x5FFD,0x5FFE,0x5FFF,0x6000,
0x6001,0x6002,0x6003,0x6004,0x6005,0x6006,0x600A,0x600D,0x600E,0x600F,
0x6012,0x6014,0x6015,0x6016,0x6019,0x601B,0x601C,0x601D,0x6020,0x6021,
0x6025,0x6026,0x6027,0x6028,0x6029,0x602A,0x602B,0x602F,0x6035,0x603B,
0x603C,0x603F,0x6041,0x6042,0x6043,0x604B,0x604D,0x6050,0x6052,0x6055,
0x6059,0x605A,0x605D,0x6062,0x6063,0x6064,0x6067,0x6068,0x6069,0x606A,
0x606B,0x606C,0x606D,0x606F,0x6070,0x6073,0x6076,0x6078,0x6079,0x607A,
0x607B,0x607C,0x607D,0x607F,0x6083,0x6084,0x6089,0x608C,0x608D,0x6092,
0x6094,0x6096,0x609A,0x609B,0x609D,0x609F,0x60A0,0x60A3,0x60A6,0x60A8,
0x60AB,0x60AC,0x60AD,0x60AF,0x60B1,0x60B2,0x60B4,0x60B8,0x60BB,0x60BC,
0x60C5,0x60C6,0x60CA,0x60CB,0x60D1,0x60D5,0x60D8,0x60DA,0x60DC,0x60DD,
0x60DF,0x60E0,0x60E6,0x60E7,0x60E8,0x60E9,0x60EB,0x60EC,0x60ED,0x60EE,
0x60EF,0x60F0,0x60F3,0x60F4,0x60F6,0x60F9,0x60FA,0x6100,0x6101,0x6106,
0x6108,0x6109,0x610D,0x610E,0x610F,0x6115,0x611A,0x611F,0x6120,0x6123,
0x6124,0x6126,0x6127,0x612B,0x613F,0x6148,0x614A,0x614C,0x614E,0x6151,
0x6155,0x615D,0x6162,0x6167,0x6168,0x6170,0x6175,0x6177,0x618B,0x618E,
0x6194,0x619D,0x61A7,0x61A8,0x61A9,0x61AC,0x61B7,0x61BE,0x61C2,0x61C8,
0x61CA,0x61CB,0x61D1,0x61D2,0x61D4,0x61E6,0x61F5,0x61FF,0x6206,0x6208,
0x620A,0x620B,0x620C,0x620D,0x620E,0x620F,0x6210,0x6211,0x6212,0x6215,
0x6216,0x6217,0x6218,0x621A,0x621B,0x621F,0x6221,0x6222,0x6224,0x6225,
0x622A,0x622C,0x622E,0x6233,0x6234,0x6237,0x623D,0x623E,0x623F,0x6240,
0x6241,0x6243,0x6247,0x6248,0x6249,0x624B,0x624C,0x624D,0x624E,0x6251,
0x6252,0x6253,0x6254,0x6258,0x625B,0x6263,0x6266,0x6267,0x6269,0x626A,
0x626B,0x626C,0x626D,0x626E,0x626F,0x6270,0x6273,0x6276,0x6279,0x627C,
0x627E,0x627F,0x6280,0x6284,0x6289,0x628A,0x6291,0x6292,0x6293,0x6295,
0x6296,0x6297,0x6298,0x629A,0x629B,0x629F,0x62A0,0x62A1,0x62A2,0x62A4,
0x62A5,0x62A8,0x62AB,0x62AC,0x62B1,0x62B5,0x62B9,0x62BB,0x62BC,0x62BD,
0x62BF,0x62C2,0x62C4,0x62C5,0x62C6,0x62C7,0x62C8,0x62C9,0x62CA,0x62CC,
0x62CD,0x62CE,0x62D0,0x62D2,0x62D3,0x62D4,0x62D6,0x62D7,0x62D8,0x62D9,
0x62DA,0x62DB,0x62DC,0x62DF,0x62E2,0x62E3,0x62E5,0x62E6,0x62E7,0x62E8,
0x62E9,0x62EC,0x62ED,0x62EE,0x62EF,0x62F1,0x62F3,0x62F4,0x62F6,0x62F7,
0x62FC,0x62FD,0x62FE,0x62FF,0x6301,0x6302,0x6307,0x6308,0x6309,0x630E,
0x6311,0x6316,0x631A,0x631B,0x631D,0x631E,0x631F,0x6320,0x6321,0x6322,
0x6323,0x6324,0x6325,0x6328,0x632A,0x632B,0x632F,0x6332,0x6339,0x633A,
0x633D,0x6342,0x6343,0x6345,0x6346,0x6349,0x634B,0x634C,0x634D,0x634E,
0x634F,0x6350,0x6355,0x635E,0x635F,0x6361,0x6362,0x6363,0x6367,0x6369,
0x636D,0x636E,0x6371,0x6376,0x6377,0x637A,0x637B,0x6380,0x6382,0x6387,
0x6388,0x6389,0x638A,0x638C,0x638E,0x638F,0x6390,0x6392,0x6396,0x6398,
0x63A0,0x63A2,0x63A3,0x63A5,0x63A7,0x63A8,0x63A9,0x63AA,0x63AC,0x63AD,
0x63AE,0x63B0,0x63B3,0x63B4,0x63B7,0x63B8,0x63BA,0x63BC,0x63BE,0x63C4,
0x63C6,0x63C9,0x63CD,0x63CE,0x63CF,0x63D0,0x63D2,0x63D6,0x63DE,0x63E0,
0x63E1,0x63E3,0x63E9,0x63EA,0x63ED,0x63F2,0x63F4,0x63F6,0x63F8,0x63FD,
0x63FF,0x6400,0x6401,0x6402,0x6405,0x640B,0x640C,0x640F,0x6410,0x6413,
0x6414,0x641B,0x641C,0x641E,0x6420,0x6421,0x6426,0x642A,0x642C,0x642D,
0x6434,0x643A,0x643D,0x643F,0x6441,0x6444,0x6445,0x6446,0x6447,0x6448,
0x644A,0x6452,0x6454,0x6458,0x645E,0x6467,0x6469,0x646D,0x6478,0x6479,
0x647A,0x6482,0x6484,0x6485,0x6487,0x6491,0x6492,0x6495,0x6496,0x6499,
0x649E,0x64A4,0x64A9,0x64AC,0x64AD,0x64AE,0x64B0,0x64B5,0x64B7,0x64B8,
0x64BA,0x64BC,0x64C0,0x64C2,0x64C5,0x64CD,0x64CE,0x64D0,0x64D2,0x64D7,
0x64D8,0x64DE,0x64E2,0x64E4,0x64E6,0x6500,0x6509,0x6512,0x6518,0x6525,
0x652B,0x652E,0x652F,0x6534,0x6535,0x6536,0x6538,0x6539,0x653B,0x653E,
0x653F,0x6545,0x6548,0x6549,0x654C,0x654F,0x6551,0x6555,0x6556,0x6559,
0x655B,0x655D,0x655E,0x6562,0x6563,0x6566,0x656B,0x656C,0x6570,0x6572,
0x6574,0x6577,0x6587,0x658B,0x658C,0x6590,0x6591,0x6593,0x6597,0x6599,
0x659B,0x659C,0x659F,0x65A1,0x65A4,0x65A5,0x65A7,0x65A9,0x65AB,0x65AD,
0x65AF,0x65B0,0x65B9,0x65BC,0x65BD,0x65C1,0x65C3,0x65C4,0x65C5,0x65C6,
0x65CB,0x65CC,0x65CE,0x65CF,0x65D2,0x65D6,0x65D7,0x65E0,0x65E2,0x65E5,
0x65E6,0x65E7,0x65E8,0x65E9,0x65EC,0x65ED,0x65EE,0x65EF,0x65F0,0x65F1,
0x65F6,0x65F7,0x65FA,0x6600,0x6602,0x6603,0x6606,0x660A,0x660C,0x660E,
0x660F,0x6613,0x6614,0x6615,0x6619,0x661D,0x661F,0x6620,0x6625,0x6627,
0x6628,0x662D,0x662F,0x6631,0x6634,0x6635,0x6636,0x663C,0x663E,0x6641,
0x6643,0x664B,0x664C,0x664F,0x6652,0x6653,0x6654,0x6655,0x6656,0x6657,
0x665A,0x665F,0x6661,0x6664,0x6666,0x6668,0x666E,0x666F,0x6670,0x6674,
0x6676,0x6677,0x667A,0x667E,0x6682,0x6684,0x6687,0x668C,0x6691,0x6696,
0x6697,0x669D,0x66A7,0x66A8,0x66AE,0x66B4,0x66B9,0x66BE,0x66D9,0x66DB,
0x66DC,0x66DD,0x66E6,0x66E9,0x66F0,0x66F2,0x66F3,0x66F4,0x66F7,0x66F9,
0x66FC,0x66FE,0x66FF,0x6700,0x6708,0x6709,0x670A,0x670B,0x670D,0x6710,
0x6714,0x6715,0x6717,0x671B,0x671D,0x671F,0x6726,0x6728,0x672A,0x672B,
0x672C,0x672D,0x672F,0x6731,0x6734,0x6735,0x673A,0x673D,0x6740,0x6742,
0x6743,0x6746,0x6748,0x6749,0x674C,0x674E,0x674F,0x6750,0x6751,0x6753,
0x6756,0x675C,0x675E,0x675F,0x6760,0x6761,0x6765,0x6768,0x6769,0x676A,
0x676D,0x676F,0x6770,0x6772,0x6773,0x6775,0x6777,0x677C,0x677E,0x677F,
0x6781,0x6784,0x6787,0x6789,0x678B,0x6790,0x6795,0x6797,0x6798,0x679A,
0x679C,0x679D,0x679E,0x67A2,0x67A3,0x67A5,0x67A7,0x67A8,0x67AA,0x67AB,
0x67AD,0x67AF,0x67B0,0x67B3,0x67B5,0x67B6,0x67B7,0x67B8,0x67C1,0x67C3,
0x67C4,0x67CF,0x67D0,0x67D1,0x67D2,0x67D3,0x67D4,0x67D8,0x67D9,0x67DA,
0x67DC,0x67DD,0x67DE,0x67E0,0x67E2,0x67E5,0x67E9,0x67EC,0x67EF,0x67F0,
0x67F1,0x67F3,0x67F4,0x67FD,0x67FF,0x6800,0x6805,0x6807,0x6808,0x6809,
0x680A,0x680B,0x680C,0x680E,0x680F,0x6811,0x6813,0x6816,0x6817,0x681D,
0x6821,0x6829,0x682A,0x6832,0x6833,0x6837,0x6838,0x6839,0x683C,0x683D,
0x683E,0x6840,0x6841,0x6842,0x6843,0x6844,0x6845,0x6846,0x6848,0x6849,
0x684A,0x684C,0x684E,0x6850,0x6851,0x6853,0x6854,0x6855,0x6860,0x6861,
0x6862,0x6863,0x6864,0x6865,0x6866,0x6867,0x6868,0x6869,0x686B,0x6874,
0x6876,0x6877,0x6881,0x6883,0x6885,0x6886,0x688F,0x6893,0x6897,0x68A2,
0x68A6,0x68A7,0x68A8,0x68AD,0x68AF,0x68B0,0x68B3,0x68B5,0x68C0,0x68C2,
0x68C9,0x68CB,0x68CD,0x68D2,0x68D5,0x68D8,0x68DA,0x68E0,0x68E3,0x68EE,
0x68F0,0x68F1,0x68F5,0x68F9,0x68FA,0x68FC,0x6901,0x6905,0x690B,0x690D,
0x690E,0x6910,0x6912,0x691F,0x6920,0x6924,0x692D,0x6930,0x6934,0x6939,
0x693D,0x693F,0x6942,0x6954,0x6957,0x695A,0x695D,0x695E,0x6960,0x6963,
0x6966,0x696B,0x696E,0x6971,0x6977,0x6978,0x6979,0x697C,0x6980,0x6982,
0x6984,0x6986,0x6987,0x6988,0x6989,0x698D,0x6994,0x6995,0x6998,0x699B,
0x699C,0x69A7,0x69A8,0x69AB,0x69AD,0x69B1,0x69B4,0x69B7,0x69BB,0x69C1,
0x69CA,0x69CC,0x69CE,0x69D0,0x69D4,0x69DB,0x69DF,0x69E0,0x69ED,0x69F2,
0x69FD,0x69FF,0x6A0A,0x6A17,0x6A18,0x6A1F,0x6A21,0x6A28,0x6A2A,0x6A2F,
0x6A31,0x6A35,0x6A3D,0x6A3E,0x6A44,0x6A47,0x6A50,0x6A58,0x6A59,0x6A5B,
0x6A61,0x6A65,0x6A71,0x6A79,0x6A7C,0x6A80,0x6A84,0x6A8E,0x6A90,0x6A91,
0x6A97,0x6AA0,0x6AA9,0x6AAB,0x6AAC,0x6B20,0x6B21,0x6B22,0x6B23,0x6B24,
0x6B27,0x6B32,0x6B37,0x6B39,0x6B3A,0x6B3E,0x6B43,0x6B46,0x6B47,0x6B49,
0x6B4C,0x6B59,0x6B62,0x6B63,0x6B64,0x6B65,0x6B66,0x6B67,0x6B6A,0x6B79,
0x6B7B,0x6B7C,0x6B81,0x6B82,0x6B83,0x6B84,0x6B86,0x6B87,0x6B89,0x6B8A,
0x6B8B,0x6B8D,0x6B92,0x6B93,0x6B96,0x6B9A,0x6B9B,0x6BA1,0x6BAA,0x6BB3,
0x6BB4,0x6BB5,0x6BB7,0x6BBF,0x6BC1,0x6BC2,0x6BC5,0x6BCB,0x6BCD,0x6BCF,
0x6BD2,0x6BD3,0x6BD4,0x6BD5,0x6BD6,0x6BD7,0x6BD9,0x6BDB,0x6BE1,0x6BEA,
0x6BEB,0x6BEF,0x6BF3,0x6BF5,0x6BF9,0x6BFD,0x6C05,0x6C06,0x6C07,0x6C0D,
0x6C0F,0x6C10,0x6C11,0x6C13,0x6C14,0x6C15,0x6C16,0x6C18,0x6C19,0x6C1A,
0x6C1B,0x6C1F,0x6C21,0x6C22,0x6C24,0x6C26,0x6C27,0x6C28,0x6C29,0x6C2A,
0x6C2E,0x6C2F,0x6C30,0x6C32,0x6C34,0x6C35,0x6C38,0x6C3D,0x6C40,0x6C41,
0x6C42,0x6C46,0x6C47,0x6C49,0x6C4A,0x6C50,0x6C54,0x6C55,0x6C57,0x6C5B,
0x6C5C,0x6C5D,0x6C5E,0x6C5F,0x6C60,0x6C61,0x6C64,0x6C68,0x6C69,0x6C6A,
0x6C70,0x6C72,0x6C74,0x6C76,0x6C79,0x6C7D,0x6C7E,0x6C81,0x6C82,0x6C83,
0x6C85,0x6C86,0x6C88,0x6C89,0x6C8C,0x6C8F,0x6C90,0x6C93,0x6C94,0x6C99,
0x6C9B,0x6C9F,0x6CA1,0x6CA3,0x6CA4,0x6CA5,0x6CA6,0x6CA7,0x6CA9,0x6CAA,
0x6CAB,0x6CAD,0x6CAE,0x6CB1,0x6CB2,0x6CB3,0x6CB8,0x6CB9,0x6CBB,0x6CBC,
0x6CBD,0x6CBE,0x6CBF,0x6CC4,0x6CC5,0x6CC9,0x6CCA,0x6CCC,0x6CD0,0x6CD3,
0x6CD4,0x6CD5,0x6CD6,0x6CD7,0x6CDB,0x6CDE,0x6CE0,0x6CE1,0x6CE2,0x6CE3,
0x6CE5,0x6CE8,0x6CEA,0x6CEB,0x6CEE,0x6CEF,0x6CF0,0x6CF1,0x6CF3,0x6CF5,
0x6CF6,0x6CF7,0x6CF8,0x6CFA,0x6CFB,0x6CFC,0x6CFD,0x6CFE,0x6D01,0x6D04,
0x6D07,0x6D0B,0x6D0C,0x6D0E,0x6D12,0x6D17,0x6D19,0x6D1A,0x6D1B,0x6D1E,
0x6D25,0x6D27,0x6D2A,0x6D2B,0x6D2E,0x6D31,0x6D32,0x6D33,0x6D35,0x6D39,
0x6D3B,0x6D3C,0x6D3D,0x6D3E,0x6D41,0x6D43,0x6D45,0x6D46,0x6D47,0x6D48,
0x6D4A,0x6D4B,0x6D4D,0x6D4E,0x6D4F,0x6D51,0x6D52,0x6D53,0x6D54,0x6D59,
0x6D5A,0x6D5C,0x6D5E,0x6D60,0x6D63,0x6D66,0x6D69,0x6D6A,0x6D6E,0x6D6F,
0x6D74,0x6D77,0x6D78,0x6D7C,0x6D82,0x6D85,0x6D88,0x6D89,0x6D8C,0x6D8E,
0x6D91,0x6D93,0x6D94,0x6D95,0x6D9B,0x6D9D,0x6D9E,0x6D9F,0x6DA0,0x6DA1,
0x6DA3,0x6DA4,0x6DA6,0x6DA7,0x6DA8,0x6DA9,0x6DAA,0x6DAB,0x6DAE,0x6DAF,
0x6DB2,0x6DB5,0x6DB8,0x6DBF,0x6DC0,0x6DC4,0x6DC5,0x6DC6,0x6DC7,0x6DCB,
0x6DCC,0x6DD1,0x6DD6,0x6DD8,0x6DD9,0x6DDD,0x6DDE,0x6DE0,0x6DE1,0x6DE4,
0x6DE6,0x6DEB,0x6DEC,0x6DEE,0x6DF1,0x6DF3,0x6DF7,0x6DF9,0x6DFB,0x6DFC,
0x6E05,0x6E0A,0x6E0C,0x6E0D,0x6E0E,0x6E10,0x6E11,0x6E14,0x6E16,0x6E17,
0x6E1A,0x6E1D,0x6E20,0x6E21,0x6E23,0x6E24,0x6E25,0x6E29,0x6E2B,0x6E2D,
0x6E2F,0x6E32,0x6E34,0x6E38,0x6E3A,0x6E43,0x6E44,0x6E4D,0x6E4E,0x6E53,
0x6E54,0x6E56,0x6E58,0x6E5B,0x6E5F,0x6E6B,0x6E6E,0x6E7E,0x6E7F,0x6E83,
0x6E85,0x6E86,0x6E89,0x6E8F,0x6E90,0x6E98,0x6E9C,0x6E9F,0x6EA2,0x6EA5,
0x6EA7,0x6EAA,0x6EAF,0x6EB1,0x6EB2,0x6EB4,0x6EB6,0x6EB7,0x6EBA,0x6EBB,
0x6EBD,0x6EC1,0x6EC2,0x6EC7,0x6ECB,0x6ECF,0x6ED1,0x6ED3,0x6ED4,0x6ED5,
0x6ED7,0x6EDA,0x6EDE,0x6EDF,0x6EE0,0x6EE1,0x6EE2,0x6EE4,0x6EE5,0x6EE6,
0x6EE8,0x6EE9,0x6EF4,0x6EF9,0x6F02,0x6F06,0x6F09,0x6F0F,0x6F13,0x6F14,
0x6F15,0x6F20,0x6F24,0x6F29,0x6F2A,0x6F2B,0x6F2D,0x6F2F,0x6F31,0x6F33,
0x6F36,0x6F3E,0x6F46,0x6F47,0x6F4B,0x6F4D,0x6F58,0x6F5C,0x6F5E,0x6F62,
0x6F66,0x6F6D,0x6F6E,0x6F72,0x6F74,0x6F78,0x6F7A,0x6F7C,0x6F84,0x6F88,
0x6F89,0x6F8C,0x6F8D,0x6F8E,0x6F9C,0x6FA1,0x6FA7,0x6FB3,0x6FB6,0x6FB9,
0x6FC0,0x6FC2,0x6FC9,0x6FD1,0x6FD2,0x6FDE,0x6FE0,0x6FE1,0x6FEE,0x6FEF,
0x7011,0x701A,0x701B,0x7023,0x7035,0x7039,0x704C,0x704F,0x705E,0x706B,
0x706C,0x706D,0x706F,0x7070,0x7075,0x7076,0x7078,0x707C,0x707E,0x707F,
0x7080,0x7085,0x7089,0x708A,0x708E,0x7092,0x7094,0x7095,0x7096,0x7099,
0x709C,0x709D,0x70AB,0x70AC,0x70AD,0x70AE,0x70AF,0x70B1,0x70B3,0x70B7,
0x70B8,0x70B9,0x70BB,0x70BC,0x70BD,0x70C0,0x70C1,0x70C2,0x70C3,0x70C8,
0x70CA,0x70D8,0x70D9,0x70DB,0x70DF,0x70E4,0x70E6,0x70E7,0x70E8,0x70E9,
0x70EB,0x70EC,0x70ED,0x70EF,0x70F7,0x70F9,0x70FD,0x7109,0x710A,0x7110,
0x7113,0x7115,0x7116,0x7118,0x7119,0x711A,0x7126,0x712F,0x7130,0x7131,
0x7136,0x7145,0x714A,0x714C,0x714E,0x715C,0x715E,0x7164,0x7166,0x7167,
0x7168,0x716E,0x7172,0x7173,0x7178,0x717A,0x717D,0x7184,0x718A,0x718F,
0x7194,0x7198,0x7199,0x719F,0x71A0,0x71A8,0x71AC,0x71B3,0x71B5,0x71B9,
0x71C3,0x71CE,0x71D4,0x71D5,0x71E0,0x71E5,0x71E7,0x71EE,0x71F9,0x7206,
0x721D,0x7228,0x722A,0x722C,0x7230,0x7231,0x7235,0x7236,0x7237,0x7238,
0x7239,0x723B,0x723D,0x723F,0x7247,0x7248,0x724C,0x724D,0x7252,0x7256,
0x7259,0x725B,0x725D,0x725F,0x7261,0x7262,0x7266,0x7267,0x7269,0x726E,
0x726F,0x7272,0x7275,0x7279,0x727A,0x727E,0x727F,0x7280,0x7281,0x7284,
0x728A,0x728B,0x728D,0x728F,0x7292,0x729F,0x72AC,0x72AD,0x72AF,0x72B0,
0x72B4,0x72B6,0x72B7,0x72B8,0x72B9,0x72C1,0x72C2,0x72C3,0x72C4,0x72C8,
0x72CD,0x72CE,0x72D0,0x72D2,0x72D7,0x72D9,0x72DE,0x72E0,0x72E1,0x72E8,
0x72E9,0x72EC,0x72ED,0x72EE,0x72EF,0x72F0,0x72F1,0x72F2,0x72F3,0x72F4,
0x72F7,0x72F8,0x72FA,0x72FB,0x72FC,0x7301,0x7303,0x730A,0x730E,0x7313,
0x7315,0x7316,0x7317,0x731B,0x731C,0x731D,0x731E,0x7321,0x7322,0x7325,
0x7329,0x732A,0x732B,0x732C,0x732E,0x7331,0x7334,0x7337,0x7338,0x7339,
0x733E,0x733F,0x734D,0x7350,0x7352,0x7357,0x7360,0x736C,0x736D,0x736F,
0x737E,0x7384,0x7387,0x7389,0x738B,0x738E,0x7391,0x7396,0x739B,0x739F,
0x73A2,0x73A9,0x73AB,0x73AE,0x73AF,0x73B0,0x73B2,0x73B3,0x73B7,0x73BA,
0x73BB,0x73C0,0x73C2,0x73C8,0x73C9,0x73CA,0x73CD,0x73CF,0x73D0,0x73D1,
0x73D9,0x73DE,0x73E0,0x73E5,0x73E7,0x73E9,0x73ED,0x73F2,0x7403,0x7405,
0x7406,0x7409,0x740A,0x740F,0x7410,0x741A,0x741B,0x7422,0x7425,0x7426,
0x7428,0x742A,0x742C,0x742E,0x7430,0x7433,0x7434,0x7435,0x7436,0x743C,
0x7441,0x7455,0x7457,0x7459,0x745A,0x745B,0x745C,0x745E,0x745F,0x746D,
0x7470,0x7476,0x7477,0x747E,0x7480,0x7481,0x7483,0x7487,0x748B,0x748E,
0x7490,0x749C,0x749E,0x74A7,0x74A8,0x74A9,0x74BA,0x74D2,0x74DC,0x74DE,
0x74E0,0x74E2,0x74E3,0x74E4,0x74E6,0x74EE,0x74EF,0x74F4,0x74F6,0x74F7,
0x74FF,0x7504,0x750D,0x750F,0x7511,0x7513,0x7518,0x7519,0x751A,0x751C,
0x751F,0x7525,0x7528,0x7529,0x752B,0x752C,0x752D,0x752F,0x7530,0x7531,
0x7532,0x7533,0x7535,0x7537,0x7538,0x753A,0x753B,0x753E,0x7540,0x7545,
0x7548,0x754B,0x754C,0x754E,0x754F,0x7554,0x7559,0x755A,0x755B,0x755C,
0x7565,0x7566,0x756A,0x7572,0x7574,0x7578,0x7579,0x757F,0x7583,0x7586,
0x758B,0x758F,0x7591,0x7592,0x7594,0x7596,0x7597,0x7599,0x759A,0x759D,
0x759F,0x75A0,0x75A1,0x75A3,0x75A4,0x75A5,0x75AB,0x75AC,0x75AE,0x75AF,
0x75B0,0x75B1,0x75B2,0x75B3,0x75B4,0x75B5,0x75B8,0x75B9,0x75BC,0x75BD,
0x75BE,0x75C2,0x75C3,0x75C4,0x75C5,0x75C7,0x75C8,0x75C9,0x75CA,0x75CD,
0x75D2,0x75D4,0x75D5,0x75D6,0x75D8,0x75DB,0x75DE,0x75E2,0x75E3,0x75E4,
0x75E6,0x75E7,0x75E8,0x75EA,0x75EB,0x75F0,0x75F1,0x75F4,0x75F9,0x75FC,
0x75FF,0x7600,0x7601,0x7603,0x7605,0x760A,0x760C,0x7610,0x7615,0x7617,
0x7618,0x7619,0x761B,0x761F,0x7620,0x7622,0x7624,0x7625,0x7626,0x7629,
0x762A,0x762B,0x762D,0x7630,0x7633,0x7634,0x7635,0x7638,0x763C,0x763E,
0x763F,0x7640,0x7643,0x764C,0x764D,0x7654,0x7656,0x765C,0x765E,0x7663,
0x766B,0x766F,0x7678,0x767B,0x767D,0x767E,0x7682,0x7684,0x7686,0x7687,
0x7688,0x768B,0x768E,0x7691,0x7693,0x7696,0x7699,0x76A4,0x76AE,0x76B1,
0x76B2,0x76B4,0x76BF,0x76C2,0x76C5,0x76C6,0x76C8,0x76CA,0x76CD,0x76CE,
0x76CF,0x76D0,0x76D1,0x76D2,0x76D4,0x76D6,0x76D7,0x76D8,0x76DB,0x76DF,
0x76E5,0x76EE,0x76EF,0x76F1,0x76F2,0x76F4,0x76F8,0x76F9,0x76FC,0x76FE,
0x7701,0x7704,0x7707,0x7708,0x7709,0x770B,0x770D,0x7719,0x771A,0x771F,
0x7720,0x7722,0x7726,0x7728,0x7729,0x772D,0x772F,0x7735,0x7736,0x7737,
0x7738,0x773A,0x773C,0x7740,0x7741,0x7743,0x7747,0x7750,0x7751,0x775A,
0x775B,0x7761,0x7762,0x7763,0x7765,0x7766,0x7768,0x776B,0x776C,0x7779,
0x777D,0x777E,0x777F,0x7780,0x7784,0x7785,0x778C,0x778D,0x778E,0x7791,
0x7792,0x779F,0x77A0,0x77A2,0x77A5,0x77A7,0x77A9,0x77AA,0x77AC,0x77B0,
0x77B3,0x77B5,0x77BB,0x77BD,0x77BF,0x77CD,0x77D7,0x77DB,0x77DC,0x77E2,
0x77E3,0x77E5,0x77E7,0x77E9,0x77EB,0x77EC,0x77ED,0x77EE,0x77F3,0x77F6,
0x77F8,0x77FD,0x77FE,0x77FF,0x7800,0x7801,0x7802,0x7809,0x780C,0x780D,
0x7811,0x7812,0x7814,0x7816,0x7817,0x7818,0x781A,0x781C,0x781D,0x781F,
0x7823,0x7825,0x7826,0x7827,0x7829,0x782C,0x782D,0x7830,0x7834,0x7837,
0x7838,0x7839,0x783A,0x783B,0x783C,0x783E,0x7840,0x7845,0x7847,0x784C,
0x784E,0x7850,0x7852,0x7855,0x7856,0x7857,0x785D,0x786A,0x786B,0x786C,
0x786D,0x786E,0x7877,0x787C,0x7887,0x7889,0x788C,0x788D,0x788E,0x7891,
0x7893,0x7897,0x7898,0x789A,0x789B,0x789C,0x789F,0x78A1,0x78A3,0x78A5,
0x78A7,0x78B0,0x78B1,0x78B2,0x78B3,0x78B4,0x78B9,0x78BE,0x78C1,0x78C5,
0x78C9,0x78CA,0x78CB,0x78D0,0x78D4,0x78D5,0x78D9,0x78E8,0x78EC,0x78F2,
0x78F4,0x78F7,0x78FA,0x7901,0x7905,0x7913,0x791E,0x7924,0x7934,0x793A,
0x793B,0x793C,0x793E,0x7940,0x7941,0x7946,0x7948,0x7949,0x7953,0x7956,
0x7957,0x795A,0x795B,0x795C,0x795D,0x795E,0x795F,0x7960,0x7962,0x7965,
0x7967,0x7968,0x796D,0x796F,0x7977,0x7978,0x797A,0x7980,0x7981,0x7984,
0x7985,0x798A,0x798F,0x799A,0x79A7,0x79B3,0x79B9,0x79BA,0x79BB,0x79BD,
0x79BE,0x79C0,0x79C1,0x79C3,0x79C6,0x79C9,0x79CB,0x79CD,0x79D1,0x79D2,
0x79D5,0x79D8,0x79DF,0x79E3,0x79E4,0x79E6,0x79E7,0x79E9,0x79EB,0x79ED,
0x79EF,0x79F0,0x79F8,0x79FB,0x79FD,0x7A00,0x7A02,0x7A03,0x7A06,0x7A0B,
0x7A0D,0x7A0E,0x7A14,0x7A17,0x7A1A,0x7A1E,0x7A20,0x7A23,0x7A33,0x7A37,
0x7A39,0x7A3B,0x7A3C,0x7A3D,0x7A3F,0x7A46,0x7A51,0x7A57,0x7A70,0x7A74,
0x7A76,0x7A77,0x7A78,0x7A79,0x7A7A,0x7A7F,0x7A80,0x7A81,0x7A83,0x7A84,
0x7A86,0x7A88,0x7A8D,0x7A91,0x7A92,0x7A95,0x7A96,0x7A97,0x7A98,0x7A9C,
0x7A9D,0x7A9F,0x7AA0,0x7AA5,0x7AA6,0x7AA8,0x7AAC,0x7AAD,0x7AB3,0x7ABF,
0x7ACB,0x7AD6,0x7AD9,0x7ADE,0x7ADF,0x7AE0,0x7AE3,0x7AE5,0x7AE6,0x7AED,
0x7AEF,0x7AF9,0x7AFA,0x7AFD,0x7AFF,0x7B03,0x7B04,0x7B06,0x7B08,0x7B0A,
0x7B0B,0x7B0F,0x7B11,0x7B14,0x7B15,0x7B19,0x7B1B,0x7B1E,0x7B20,0x7B24,
0x7B25,0x7B26,0x7B28,0x7B2A,0x7B2B,0x7B2C,0x7B2E,0x7B31,0x7B33,0x7B38,
0x7B3A,0x7B3C,0x7B3E,0x7B45,0x7B47,0x7B49,0x7B4B,0x7B4C,0x7B4F,0x7B50,
0x7B51,0x7B52,0x7B54,0x7B56,0x7B58,0x7B5A,0x7B5B,0x7B5D,0x7B60,0x7B62,
0x7B6E,0x7B71,0x7B72,0x7B75,0x7B77,0x7B79,0x7B7B,0x7B7E,0x7B80,0x7B85,
0x7B8D,0x7B90,0x7B94,0x7B95,0x7B97,0x7B9C,0x7B9D,0x7BA1,0x7BA2,0x7BA6,
0x7BA7,0x7BA8,0x7BA9,0x7BAA,0x7BAB,0x7BAC,0x7BAD,0x7BB1,0x7BB4,0x7BB8,
0x7BC1,0x7BC6,0x7BC7,0x7BCC,0x7BD1,0x7BD3,0x7BD9,0x7BDA,0x7BDD,0x7BE1,
0x7BE5,0x7BE6,0x7BEA,0x7BEE,0x7BF1,0x7BF7,0x7BFC,0x7BFE,0x7C07,0x7C0B,
0x7C0C,0x7C0F,0x7C16,0x7C1F,0x7C26,0x7C27,0x7C2A,0x7C38,0x7C3F,0x7C40,
0x7C41,0x7C4D,0x7C73,0x7C74,0x7C7B,0x7C7C,0x7C7D,0x7C89,0x7C91,0x7C92,
0x7C95,0x7C97,0x7C98,0x7C9C,0x7C9D,0x7C9E,0x7C9F,0x7CA2,0x7CA4,0x7CA5,
0x7CAA,0x7CAE,0x7CB1,0x7CB2,0x7CB3,0x7CB9,0x7CBC,0x7CBD,0x7CBE,0x7CC1,
0x7CC5,0x7CC7,0x7CC8,0x7CCA,0x7CCC,0x7CCD,0x7CD5,0x7CD6,0x7CD7,0x7CD9,
0x7CDC,0x7CDF,0x7CE0,0x7CE8,0x7CEF,0x7CF8,0x7CFB,0x7D0A,0x7D20,0x7D22,
0x7D27,0x7D2B,0x7D2F,0x7D6E,0x7D77,0x7DA6,0x7DAE,0x7E3B,0x7E41,0x7E47,
0x7E82,0x7E9B,0x7E9F,0x7EA0,0x7EA1,0x7EA2,0x7EA3,0x7EA4,0x7EA5,0x7EA6,
0x7EA7,0x7EA8,0x7EA9,0x7EAA,0x7EAB,0x7EAC,0x7EAD,0x7EAF,0x7EB0,0x7EB1,
0x7EB2,0x7EB3,0x7EB5,0x7EB6,0x7EB7,0x7EB8,0x7EB9,0x7EBA,0x7EBD,0x7EBE,
0x7EBF,0x7EC0,0x7EC1,0x7EC2,0x7EC3,0x7EC4,0x7EC5,0x7EC6,0x7EC7,0x7EC8,
0x7EC9,0x7ECA,0x7ECB,0x7ECC,0x7ECD,0x7ECE,0x7ECF,0x7ED0,0x7ED1,0x7ED2,
0x7ED3,0x7ED4,0x7ED5,0x7ED7,0x7ED8,0x7ED9,0x7EDA,0x7EDB,0x7EDC,0x7EDD,
0x7EDE,0x7EDF,0x7EE0,0x7EE1,0x7EE2,0x7EE3,0x7EE5,0x7EE6,0x7EE7,0x7EE8,
0x7EE9,0x7EEA,0x7EEB,0x7EED,0x7EEE,0x7EEF,0x7EF0,0x7EF1,0x7EF2,0x7EF3,
0x7EF4,0x7EF5,0x7EF6,0x7EF7,0x7EF8,0x7EFA,0x7EFB,0x7EFC,0x7EFD,0x7EFE,
0x7EFF,0x7F00,0x7F01,0x7F02,0x7F03,0x7F04,0x7F05,0x7F06,0x7F07,0x7F08,
0x7F09,0x7F0B,0x7F0C,0x7F0D,0x7F0E,0x7F0F,0x7F11,0x7F12,0x7F13,0x7F14,
0x7F15,0x7F16,0x7F17,0x7F18,0x7F19,0x7F1A,0x7F1B,0x7F1C,0x7F1D,0x7F1F,
0x7F20,0x7F21,0x7F22,0x7F23,0x7F24,0x7F25,0x7F26,0x7F27,0x7F28,0x7F29,
0x7F2A,0x7F2B,0x7F2C,0x7F2D,0x7F2E,0x7F2F,0x7F30,0x7F31,0x7F32,0x7F33,
0x7F34,0x7F35,0x7F36,0x7F38,0x7F3A,0x7F42,0x7F44,0x7F45,0x7F50,0x7F51,
0x7F54,0x7F55,0x7F57,0x7F58,0x7F5A,0x7F5F,0x7F61,0x7F62,0x7F68,0x7F69,
0x7F6A,0x7F6E,0x7F71,0x7F72,0x7F74,0x7F79,0x7F7E,0x7F81,0x7F8A,0x7F8C,
0x7F8E,0x7F94,0x7F9A,0x7F9D,0x7F9E,0x7F9F,0x7FA1,0x7FA4,0x7FA7,0x7FAF,
0x7FB0,0x7FB2,0x7FB8,0x7FB9,0x7FBC,0x7FBD,0x7FBF,0x7FC1,0x7FC5,0x7FCA,
0x7FCC,0x7FCE,0x7FD4,0x7FD5,0x7FD8,0x7FDF,0x7FE0,0x7FE1,0x7FE5,0x7FE6,
0x7FE9,0x7FEE,0x7FF0,0x7FF1,0x7FF3,0x7FFB,0x7FFC,0x8000,0x8001,0x8003,
0x8004,0x8005,0x8006,0x800B,0x800C,0x800D,0x8010,0x8012,0x8014,0x8015,
0x8016,0x8017,0x8018,0x8019,0x801C,0x8020,0x8022,0x8025,0x8026,0x8027,
0x8028,0x8029,0x802A,0x8031,0x8033,0x8035,0x8036,0x8037,0x8038,0x803B,
0x803D,0x803F,0x8042,0x8043,0x8046,0x804A,0x804B,0x804C,0x804D,0x8052,
0x8054,0x8058,0x805A,0x8069,0x806A,0x8071,0x807F,0x8080,0x8083,0x8084,
0x8086,0x8087,0x8089,0x808B,0x808C,0x8093,0x8096,0x8098,0x809A,0x809B,
0x809C,0x809D,0x809F,0x80A0,0x80A1,0x80A2,0x80A4,0x80A5,0x80A9,0x80AA,
0x80AB,0x80AD,0x80AE,0x80AF,0x80B1,0x80B2,0x80B4,0x80B7,0x80BA,0x80BC,
0x80BD,0x80BE,0x80BF,0x80C0,0x80C1,0x80C2,0x80C3,0x80C4,0x80C6,0x80CC,
0x80CD,0x80CE,0x80D6,0x80D7,0x80D9,0x80DA,0x80DB,0x80DC,0x80DD,0x80DE,
0x80E1,0x80E4,0x80E5,0x80E7,0x80E8,0x80E9,0x80EA,0x80EB,0x80EC,0x80ED,
0x80EF,0x80F0,0x80F1,0x80F2,0x80F3,0x80F4,0x80F6,0x80F8,0x80FA,0x80FC,
0x80FD,0x8102,0x8106,0x8109,0x810A,0x810D,0x810E,0x810F,0x8110,0x8111,
0x8112,0x8113,0x8114,0x8116,0x8118,0x811A,0x811E,0x812C,0x812F,0x8131,
0x8132,0x8136,0x8138,0x813E,0x8146,0x8148,0x814A,0x814B,0x814C,0x8150,
0x8151,0x8153,0x8154,0x8155,0x8159,0x815A,0x8160,0x8165,0x8167,0x8169,
0x816D,0x816E,0x8170,0x8171,0x8174,0x8179,0x817A,0x817B,0x817C,0x817D,
0x817E,0x817F,0x8180,0x8182,0x8188,0x818A,0x818F,0x8191,0x8198,0x819B,
0x819C,0x819D,0x81A3,0x81A6,0x81A8,0x81AA,0x81B3,0x81BA,0x81BB,0x81C0,
0x81C1,0x81C2,0x81C3,0x81C6,0x81CA,0x81CC,0x81E3,0x81E7,0x81EA,0x81EC,
0x81ED,0x81F3,0x81F4,0x81FB,0x81FC,0x81FE,0x8200,0x8201,0x8202,0x8204,
0x8205,0x8206,0x820C,0x820D,0x8210,0x8212,0x8214,0x821B,0x821C,0x821E,
0x821F,0x8221,0x8222,0x8223,0x8228,0x822A,0x822B,0x822C,0x822D,0x822F,
0x8230,0x8231,0x8233,0x8234,0x8235,0x8236,0x8237,0x8238,0x8239,0x823B,
0x823E,0x8244,0x8247,0x8249,0x824B,0x824F,0x8258,0x825A,0x825F,0x8268,
0x826E,0x826F,0x8270,0x8272,0x8273,0x8274,0x8279,0x827A,0x827D,0x827E,
0x827F,0x8282,0x8284,0x8288,0x828A,0x828B,0x828D,0x828E,0x828F,0x8291,
0x8292,0x8297,0x8298,0x8299,0x829C,0x829D,0x829F,0x82A1,0x82A4,0x82A5,
0x82A6,0x82A8,0x82A9,0x82AA,0x82AB,0x82AC,0x82AD,0x82AE,0x82AF,0x82B0,
0x82B1,0x82B3,0x82B4,0x82B7,0x82B8,0x82B9,0x82BD,0x82BE,0x82C1,0x82C4,
0x82C7,0x82C8,0x82CA,0x82CB,0x82CC,0x82CD,0x82CE,0x82CF,0x82D1,0x82D2,
0x82D3,0x82D4,0x82D5,0x82D7,0x82D8,0x82DB,0x82DC,0x82DE,0x82DF,0x82E0,
0x82E1,0x82E3,0x82E4,0x82E5,0x82E6,0x82EB,0x82EF,0x82F1,0x82F4,0x82F7,
0x82F9,0x82FB,0x8301,0x8302,0x8303,0x8304,0x8305,0x8306,0x8307,0x8308,
0x8309,0x830C,0x830E,0x830F,0x8311,0x8314,0x8315,0x8317,0x831A,0x831B,
0x831C,0x8327,0x8328,0x832B,0x832C,0x832D,0x832F,0x8331,0x8333,0x8334,
0x8335,0x8336,0x8338,0x8339,0x833A,0x833C,0x8340,0x8343,0x8346,0x8347,
0x8349,0x834F,0x8350,0x8351,0x8352,0x8354,0x835A,0x835B,0x835C,0x835E,
0x835F,0x8360,0x8361,0x8363,0x8364,0x8365,0x8366,0x8367,0x8368,0x8369,
0x836A,0x836B,0x836C,0x836D,0x836E,0x836F,0x8377,0x8378,0x837B,0x837C,
0x837D,0x8385,0x8386,0x8389,0x838E,0x8392,0x8393,0x8398,0x839B,0x839C,
0x839E,0x83A0,0x83A8,0x83A9,0x83AA,0x83AB,0x83B0,0x83B1,0x83B2,0x83B3,
0x83B4,0x83B6,0x83B7,0x83B8,0x83B9,0x83BA,0x83BC,0x83BD,0x83C0,0x83C1,
0x83C5,0x83C7,0x83CA,0x83CC,0x83CF,0x83D4,0x83D6,0x83D8,0x83DC,0x83DD,
0x83DF,0x83E0,0x83E1,0x83E5,0x83E9,0x83EA,0x83F0,0x83F1,0x83F2,0x83F8,
0x83F9,0x83FD,0x8401,0x8403,0x8404,0x8406,0x840B,0x840C,0x840D,0x840E,
0x840F,0x8411,0x8418,0x841C,0x841D,0x8424,0x8425,0x8426,0x8427,0x8428,
0x8431,0x8438,0x843C,0x843D,0x8446,0x8451,0x8457,0x8459,0x845A,0x845B,
0x845C,0x8461,0x8463,0x8469,0x846B,0x846C,0x846D,0x8471,0x8473,0x8475,
0x8476,0x8478,0x847A,0x8482,0x8487,0x8488,0x8489,0x848B,0x848C,0x848E,
0x8497,0x8499,0x849C,0x84A1,0x84AF,0x84B2,0x84B4,0x84B8,0x84B9,0x84BA,
0x84BD,0x84BF,0x84C1,0x84C4,0x84C9,0x84CA,0x84CD,0x84D0,0x84D1,0x84D3,
0x84D6,0x84DD,0x84DF,0x84E0,0x84E3,0x84E5,0x84E6,0x84EC,0x84F0,0x84FC,
0x84FF,0x850C,0x8511,0x8513,0x8517,0x851A,0x851F,0x8521,0x852B,0x852C,
0x8537,0x8538,0x8539,0x853A,0x853B,0x853C,0x853D,0x8543,0x8548,0x8549,
0x854A,0x8556,0x8559,0x855E,0x8564,0x8568,0x8572,0x8574,0x8579,0x857A,
0x857B,0x857E,0x8584,0x8585,0x8587,0x858F,0x859B,0x859C,0x85A4,0x85A8,
0x85AA,0x85AE,0x85AF,0x85B0,0x85B7,0x85B9,0x85C1,0x85C9,0x85CF,0x85D0,
0x85D3,0x85D5,0x85DC,0x85E4,0x85E9,0x85FB,0x85FF,0x8605,0x8611,0x8616,
0x8627,0x8629,0x8638,0x863C,0x864D,0x864E,0x864F,0x8650,0x8651,0x8654,
0x865A,0x865E,0x8662,0x866B,0x866C,0x866E,0x8671,0x8679,0x867A,0x867B,
0x867C,0x867D,0x867E,0x867F,0x8680,0x8681,0x8682,0x868A,0x868B,0x868C,
0x868D,0x8693,0x8695,0x869C,0x869D,0x86A3,0x86A4,0x86A7,0x86A8,0x86A9,
0x86AA,0x86AC,0x86AF,0x86B0,0x86B1,0x86B4,0x86B5,0x86B6,0x86BA,0x86C0,
0x86C4,0x86C6,0x86C7,0x86C9,0x86CA,0x86CB,0x86CE,0x86CF,0x86D0,0x86D1,
0x86D4,0x86D8,0x86D9,0x86DB,0x86DE,0x86DF,0x86E4,0x86E9,0x86ED,0x86EE,
0x86F0,0x86F1,0x86F2,0x86F3,0x86F4,0x86F8,0x86F9,0x86FE,0x8700,0x8702,
0x8703,0x8707,0x8708,0x8709,0x870A,0x870D,0x8712,0x8713,0x8715,0x8717,
0x8718,0x871A,0x871C,0x871E,0x8721,0x8722,0x8723,0x8725,0x8729,0x872E,
0x8731,0x8734,0x8737,0x873B,0x873E,0x873F,0x8747,0x8748,0x8749,0x874C,
0x874E,0x8753,0x8757,0x8759,0x8760,0x8763,0x8764,0x8765,0x876E,0x8770,
0x8774,0x8776,0x877B,0x877C,0x877D,0x877E,0x8782,0x8783,0x8785,0x8788,
0x878B,0x878D,0x8793,0x8797,0x879F,0x87A8,0x87AB,0x87AC,0x87AD,0x87AF,
0x87B3,0x87B5,0x87BA,0x87BD,0x87C0,0x87C6,0x87CA,0x87CB,0x87D1,0x87D2,
0x87D3,0x87DB,0x87E0,0x87E5,0x87EA,0x87EE,0x87F9,0x87FE,0x8803,0x880A,
0x8813,0x8815,0x8816,0x881B,0x8821,0x8822,0x8832,0x8839,0x883C,0x8840,
0x8844,0x8845,0x884C,0x884D,0x8854,0x8857,0x8859,0x8861,0x8862,0x8863,
0x8864,0x8865,0x8868,0x8869,0x886B,0x886C,0x886E,0x8870,0x8872,0x8877,
0x887D,0x887E,0x887F,0x8881,0x8882,0x8884,0x8885,0x8888,0x888B,0x888D,
0x8892,0x8896,0x889C,0x88A2,0x88A4,0x88AB,0x88AD,0x88B1,0x88B7,0x88BC,
0x88C1,0x88C2,0x88C5,0x88C6,0x88C9,0x88CE,0x88D2,0x88D4,0x88D5,0x88D8,
0x88D9,0x88DF,0x88E2,0x88E3,0x88E4,0x88E5,0x88E8,0x88F0,0x88F1,0x88F3,
0x88F4,0x88F8,0x88F9,0x88FC,0x88FE,0x8902,0x890A,0x8910,0x8912,0x8913,
0x8919,0x891A,0x891B,0x8921,0x8925,0x892A,0x892B,0x8930,0x8934,0x8936,
0x8941,0x8944,0x895E,0x895F,0x8966,0x897B,0x897F,0x8981,0x8983,0x8986,
0x89C1,0x89C2,0x89C4,0x89C5,0x89C6,0x89C7,0x89C8,0x89C9,0x89CA,0x89CB,
0x89CC,0x89CE,0x89CF,0x89D0,0x89D1,0x89D2,0x89D6,0x89DA,0x89DC,0x89DE,
0x89E3,0x89E5,0x89E6,0x89EB,0x89EF,0x89F3,0x8A00,0x8A07,0x8A3E,0x8A48,
0x8A79,0x8A89,0x8A8A,0x8A93,0x8B07,0x8B26,0x8B66,0x8B6C,0x8BA0,0x8BA1,
0x8BA2,0x8BA3,0x8BA4,0x8BA5,0x8BA6,0x8BA7,0x8BA8,0x8BA9,0x8BAA,0x8BAB,
0x8BAD,0x8BAE,0x8BAF,0x8BB0,0x8BB2,0x8BB3,0x8BB4,0x8BB5,0x8BB6,0x8BB7,
0x8BB8,0x8BB9,0x8BBA,0x8BBC,0x8BBD,0x8BBE,0x8BBF,0x8BC0,0x8BC1,0x8BC2,
0x8BC3,0x8BC4,0x8BC5,0x8BC6,0x8BC8,0x8BC9,0x8BCA,0x8BCB,0x8BCC,0x8BCD,
0x8BCE,0x8BCF,0x8BD1,0x8BD2,0x8BD3,0x8BD4,0x8BD5,0x8BD6,0x8BD7,0x8BD8,
0x8BD9,0x8BDA,0x8BDB,0x8BDC,0x8BDD,0x8BDE,0x8BDF,0x8BE0,0x8BE1,0x8BE2,
0x8BE3,0x8BE4,0x8BE5,0x8BE6,0x8BE7,0x8BE8,0x8BE9,0x8BEB,0x8BEC,0x8BED,
0x8BEE,0x8BEF,0x8BF0,0x8BF1,0x8BF2,0x8BF3,0x8BF4,0x8BF5,0x8BF6,0x8BF7,
0x8BF8,0x8BF9,0x8BFA,0x8BFB,0x8BFC,0x8BFD,0x8BFE,0x8BFF,0x8C00,0x8C01,
0x8C02,0x8C03,0x8C04,0x8C05,0x8C06,0x8C07,0x8C08,0x8C0A,0x8C0B,0x8C0C,
0x8C0D,0x8C0E,0x8C0F,0x8C10,0x8C11,0x8C12,0x8C13,0x8C14,0x8C15,0x8C16,
0x8C17,0x8C18,0x8C19,0x8C1A,0x8C1B,0x8C1C,0x8C1D,0x8C1F,0x8C20,0x8C21,
0x8C22,0x8C23,0x8C24,0x8C25,0x8C26,0x8C27,0x8C28,0x8C29,0x8C2A,0x8C2B,
0x8C2C,0x8C2D,0x8C2E,0x8C2F,0x8C30,0x8C31,0x8C32,0x8C33,0x8C34,0x8C35,
0x8C36,0x8C37,0x8C41,0x8C46,0x8C47,0x8C49,0x8C4C,0x8C55,0x8C5A,0x8C61,
0x8C62,0x8C6A,0x8C6B,0x8C73,0x8C78,0x8C79,0x8C7A,0x8C82,0x8C85,0x8C89,
0x8C8A,0x8C8C,0x8C94,0x8C98,0x8D1D,0x8D1E,0x8D1F,0x8D21,0x8D22,0x8D23,
0x8D24,0x8D25,0x8D26,0x8D27,0x8D28,0x8D29,0x8D2A,0x8D2B,0x8D2C,0x8D2D,
0x8D2E,0x8D2F,0x8D30,0x8D31,0x8D32,0x8D33,0x8D34,0x8D35,0x8D36,0x8D37,
0x8D38,0x8D39,0x8D3A,0x8D3B,0x8D3C,0x8D3D,0x8D3E,0x8D3F,0x8D40,0x8D41,
0x8D42,0x8D43,0x8D44,0x8D45,0x8D46,0x8D47,0x8D48,0x8D49,0x8D4A,0x8D4B,
0x8D4C,0x8D4D,0x8D4E,0x8D4F,0x8D50,0x8D53,0x8D54,0x8D55,0x8D56,0x8D58,
0x8D59,0x8D5A,0x8D5B,0x8D5C,0x8D5D,0x8D5E,0x8D60,0x8D61,0x8D62,0x8D63,
0x8D64,0x8D66,0x8D67,0x8D6B,0x8D6D,0x8D70,0x8D73,0x8D74,0x8D75,0x8D76,
0x8D77,0x8D81,0x8D84,0x8D85,0x8D8A,0x8D8B,0x8D91,0x8D94,0x8D9F,0x8DA3,
0x8DB1,0x8DB3,0x8DB4,0x8DB5,0x8DB8,0x8DBA,0x8DBC,0x8DBE,0x8DBF,0x8DC3,
0x8DC4,0x8DC6,0x8DCB,0x8DCC,0x8DCE,0x8DCF,0x8DD1,0x8DD6,0x8DD7,0x8DDA,
0x8DDB,0x8DDD,0x8DDE,0x8DDF,0x8DE3,0x8DE4,0x8DE8,0x8DEA,0x8DEB,0x8DEC,
0x8DEF,0x8DF3,0x8DF5,0x8DF7,0x8DF8,0x8DF9,0x8DFA,0x8DFB,0x8DFD,0x8E05,
0x8E09,0x8E0A,0x8E0C,0x8E0F,0x8E14,0x8E1D,0x8E1E,0x8E1F,0x8E22,0x8E23,
0x8E29,0x8E2A,0x8E2C,0x8E2E,0x8E2F,0x8E31,0x8E35,0x8E39,0x8E3A,0x8E3D,
0x8E40,0x8E41,0x8E42,0x8E44,0x8E47,0x8E48,0x8E49,0x8E4A,0x8E4B,0x8E51,
0x8E52,0x8E59,0x8E66,0x8E69,0x8E6C,0x8E6D,0x8E6F,0x8E70,0x8E72,0x8E74,
0x8E76,0x8E7C,0x8E7F,0x8E81,0x8E85,0x8E87,0x8E8F,0x8E90,0x8E94,0x8E9C,
0x8E9E,0x8EAB,0x8EAC,0x8EAF,0x8EB2,0x8EBA,0x8ECE,0x8F66,0x8F67,0x8F68,
0x8F69,0x8F6B,0x8F6C,0x8F6D,0x8F6E,0x8F6F,0x8F70,0x8F71,0x8F72,0x8F73,
0x8F74,0x8F75,0x8F76,0x8F77,0x8F78,0x8F79,0x8F7A,0x8F7B,0x8F7C,0x8F7D,
0x8F7E,0x8F7F,0x8F81,0x8F82,0x8F83,0x8F84,0x8F85,0x8F86,0x8F87,0x8F88,
0x8F89,0x8F8A,0x8F8B,0x8F8D,0x8F8E,0x8F8F,0x8F90,0x8F91,0x8F93,0x8F94,
0x8F95,0x8F96,0x8F97,0x8F98,0x8F99,0x8F9A,0x8F9B,0x8F9C,0x8F9E,0x8F9F,
0x8FA3,0x8FA8,0x8FA9,0x8FAB,0x8FB0,0x8FB1,0x8FB6,0x8FB9,0x8FBD,0x8FBE,
0x8FC1,0x8FC2,0x8FC4,0x8FC5,0x8FC7,0x8FC8,0x8FCE,0x8FD0,0x8FD1,0x8FD3,
0x8FD4,0x8FD5,0x8FD8,0x8FD9,0x8FDB,0x8FDC,0x8FDD,0x8FDE,0x8FDF,0x8FE2,
0x8FE4,0x8FE5,0x8FE6,0x8FE8,0x8FE9,0x8FEA,0x8FEB,0x8FED,0x8FEE,0x8FF0,
0x8FF3,0x8FF7,0x8FF8,0x8FF9,0x8FFD,0x9000,0x9001,0x9002,0x9003,0x9004,
0x9005,0x9006,0x9009,0x900A,0x900B,0x900D,0x900F,0x9010,0x9011,0x9012,
0x9014,0x9016,0x9017,0x901A,0x901B,0x901D,0x901E,0x901F,0x9020,0x9021,
0x9022,0x9026,0x902D,0x902E,0x902F,0x9035,0x9036,0x9038,0x903B,0x903C,
0x903E,0x9041,0x9042,0x9044,0x9047,0x904D,0x904F,0x9050,0x9051,0x9052,
0x9053,0x9057,0x9058,0x905B,0x9062,0x9063,0x9065,0x9068,0x906D,0x906E,
0x9074,0x9075,0x907D,0x907F,0x9080,0x9082,0x9083,0x9088,0x908B,0x9091,
0x9093,0x9095,0x9097,0x9099,0x909B,0x909D,0x90A1,0x90A2,0x90A3,0x90A6,
0x90AA,0x90AC,0x90AE,0x90AF,0x90B0,0x90B1,0x90B3,0x90B4,0x90B5,0x90B6,
0x90B8,0x90B9,0x90BA,0x90BB,0x90BE,0x90C1,0x90C4,0x90C5,0x90C7,0x90CA,
0x90CE,0x90CF,0x90D0,0x90D1,0x90D3,0x90D7,0x90DB,0x90DC,0x90DD,0x90E1,
0x90E2,0x90E6,0x90E7,0x90E8,0x90EB,0x90ED,0x90EF,0x90F4,0x90F8,0x90FD,
0x90FE,0x9102,0x9104,0x9119,0x911E,0x9122,0x9123,0x912F,0x9131,0x9139,
0x9143,0x9146,0x9149,0x914A,0x914B,0x914C,0x914D,0x914E,0x914F,0x9150,
0x9152,0x9157,0x915A,0x915D,0x915E,0x9161,0x9162,0x9163,0x9164,0x9165,
0x9169,0x916A,0x916C,0x916E,0x916F,0x9170,0x9171,0x9172,0x9174,0x9175,
0x9176,0x9177,0x9178,0x9179,0x917D,0x917E,0x917F,0x9185,0x9187,0x9189,
0x918B,0x918C,0x918D,0x9190,0x9191,0x9192,0x919A,0x919B,0x91A2,0x91A3,
0x91AA,0x91AD,0x91AE,0x91AF,0x91B4,0x91B5,0x91BA,0x91C7,0x91C9,0x91CA,
0x91CC,0x91CD,0x91CE,0x91CF,0x91D1,0x91DC,0x9274,0x928E,0x92AE,0x92C8,
0x933E,0x936A,0x938F,0x93CA,0x93D6,0x943E,0x946B,0x9485,0x9486,0x9487,
0x9488,0x9489,0x948A,0x948B,0x948C,0x948D,0x948E,0x948F,0x9490,0x9492,
0x9493,0x9494,0x9495,0x9497,0x9499,0x949A,0x949B,0x949C,0x949D,0x949E,
0x949F,0x94A0,0x94A1,0x94A2,0x94A3,0x94A4,0x94A5,0x94A6,0x94A7,0x94A8,
0x94A9,0x94AA,0x94AB,0x94AC,0x94AD,0x94AE,0x94AF,0x94B0,0x94B1,0x94B2,
0x94B3,0x94B4,0x94B5,0x94B6,0x94B7,0x94B8,0x94B9,0x94BA,0x94BB,0x94BC,
0x94BD,0x94BE,0x94BF,0x94C0,0x94C1,0x94C2,0x94C3,0x94C4,0x94C5,0x94C6,
0x94C8,0x94C9,0x94CA,0x94CB,0x94CC,0x94CD,0x94CE,0x94D0,0x94D1,0x94D2,
0x94D5,0x94D6,0x94D7,0x94D8,0x94D9,0x94DB,0x94DC,0x94DD,0x94DE,0x94DF,
0x94E0,0x94E1,0x94E2,0x94E3,0x94E4,0x94E5,0x94E7,0x94E8,0x94E9,0x94EA,
0x94EB,0x94EC,0x94ED,0x94EE,0x94EF,0x94F0,0x94F1,0x94F2,0x94F3,0x94F4,
0x94F5,0x94F6,0x94F7,0x94F8,0x94F9,0x94FA,0x94FC,0x94FD,0x94FE,0x94FF,
0x9500,0x9501,0x9502,0x9503,0x9504,0x9505,0x9506,0x9507,0x9508,0x9509,
0x950A,0x950B,0x950C,0x950D,0x950E,0x950F,0x9510,0x9511,0x9512,0x9513,
0x9514,0x9515,0x9516,0x9517,0x9518,0x9519,0x951A,0x951B,0x951D,0x951E,
0x951F,0x9521,0x9522,0x9523,0x9524,0x9525,0x9526,0x9528,0x9529,0x952A,
0x952B,0x952C,0x952D,0x952E,0x952F,0x9530,0x9531,0x9532,0x9534,0x9535,
0x9536,0x9537,0x9538,0x9539,0x953A,0x953B,0x953C,0x953E,0x953F,0x9540,
0x9541,0x9542,0x9544,0x9545,0x9546,0x9547,0x9549,0x954A,0x954C,0x954D,
0x954E,0x954F,0x9550,0x9551,0x9552,0x9553,0x9554,0x9556,0x9557,0x9558,
0x9559,0x955B,0x955C,0x955D,0x955E,0x955F,0x9561,0x9562,0x9563,0x9564,
0x9565,0x9566,0x9567,0x9568,0x9569,0x956A,0x956B,0x956C,0x956D,0x956F,
0x9570,0x9571,0x9572,0x9573,0x9576,0x957F,0x95E8,0x95E9,0x95EA,0x95EB,
0x95ED,0x95EE,0x95EF,0x95F0,0x95F1,0x95F2,0x95F3,0x95F4,0x95F5,0x95F6,
0x95F7,0x95F8,0x95F9,0x95FA,0x95FB,0x95FC,0x95FD,0x95FE,0x9600,0x9601,
0x9602,0x9603,0x9604,0x9605,0x9606,0x9608,0x9609,0x960A,0x960B,0x960C,
0x960D,0x960E,0x960F,0x9610,0x9611,0x9612,0x9614,0x9615,0x9616,0x9617,
0x9619,0x961A,0x961C,0x961D,0x961F,0x9621,0x9622,0x962A,0x962E,0x9631,
0x9632,0x9633,0x9634,0x9635,0x9636,0x963B,0x963C,0x963D,0x963F,0x9640,
0x9642,0x9644,0x9645,0x9646,0x9647,0x9648,0x9649,0x964B,0x964C,0x964D,
0x9650,0x9654,0x9655,0x965B,0x965F,0x9661,0x9662,0x9664,0x9667,0x9668,
0x9669,0x966A,0x966C,0x9672,0x9674,0x9675,0x9676,0x9677,0x9685,0x9686,
0x9688,0x968B,0x968D,0x968F,0x9690,0x9694,0x9697,0x9698,0x9699,0x969C,
0x96A7,0x96B0,0x96B3,0x96B6,0x96B9,0x96BC,0x96BD,0x96BE,0x96C0,0x96C1,
0x96C4,0x96C5,0x96C6,0x96C7,0x96C9,0x96CC,0x96CD,0x96CE,0x96CF,0x96D2,
0x96D5,0x96E0,0x96E8,0x96E9,0x96EA,0x96EF,0x96F3,0x96F6,0x96F7,0x96F9,
0x96FE,0x9700,0x9701,0x9704,0x9706,0x9707,0x9708,0x9709,0x970D,0x970E,
0x970F,0x9713,0x9716,0x971C,0x971E,0x972A,0x972D,0x9730,0x9732,0x9738,
0x9739,0x973E,0x9752,0x9753,0x9756,0x9759,0x975B,0x975E,0x9760,0x9761,
0x9762,0x9765,0x9769,0x9773,0x9774,0x9776,0x977C,0x9785,0x978B,0x978D,
0x9791,0x9792,0x9794,0x9798,0x97A0,0x97A3,0x97AB,0x97AD,0x97AF,0x97B2,
0x97B4,0x97E6,0x97E7,0x97E9,0x97EA,0x97EB,0x97EC,0x97ED,0x97F3,0x97F5,
0x97F6,0x9875,0x9876,0x9877,0x9878,0x9879,0x987A,0x987B,0x987C,0x987D,
0x987E,0x987F,0x9880,0x9881,0x9882,0x9883,0x9884,0x9885,0x9886,0x9887,
0x9888,0x9889,0x988A,0x988C,0x988D,0x988F,0x9890,0x9891,0x9893,0x9894,
0x9896,0x9897,0x9898,0x989A,0x989B,0x989C,0x989D,0x989E,0x989F,0x98A0,
0x98A1,0x98A2,0x98A4,0x98A5,0x98A6,0x98A7,0x98CE,0x98D1,0x98D2,0x98D3,
0x98D5,0x98D8,0x98D9,0x98DA,0x98DE,0x98DF,0x98E7,0x98E8,0x990D,0x9910,
0x992E,0x9954,0x9955,0x9963,0x9965,0x9967,0x9968,0x9969,0x996A,0x996B,
0x996C,0x996D,0x996E,0x996F,0x9970,0x9971,0x9972,0x9974,0x9975,0x9976,
0x9977,0x997A,0x997C,0x997D,0x997F,0x9980,0x9981,0x9984,0x9985,0x9986,
0x9987,0x9988,0x998A,0x998B,0x998D,0x998F,0x9990,0x9991,0x9992,0x9993,
0x9994,0x9995,0x9996,0x9997,0x9998,0x9999,0x99A5,0x99A8,0x9A6C,0x9A6D,
0x9A6E,0x9A6F,0x9A70,0x9A71,0x9A73,0x9A74,0x9A75,0x9A76,0x9A77,0x9A78,
0x9A79,0x9A7A,0x9A7B,0x9A7C,0x9A7D,0x9A7E,0x9A7F,0x9A80,0x9A81,0x9A82,
0x9A84,0x9A85,0x9A86,0x9A87,0x9A88,0x9A8A,0x9A8B,0x9A8C,0x9A8F,0x9A90,
0x9A91,0x9A92,0x9A93,0x9A96,0x9A97,0x9A98,0x9A9A,0x9A9B,0x9A9C,0x9A9D,
0x9A9E,0x9A9F,0x9AA0,0x9AA1,0x9AA2,0x9AA3,0x9AA4,0x9AA5,0x9AA7,0x9AA8,
0x9AB0,0x9AB1,0x9AB6,0x9AB7,0x9AB8,0x9ABA,0x9ABC,0x9AC0,0x9AC1,0x9AC2,
0x9AC5,0x9ACB,0x9ACC,0x9AD1,0x9AD3,0x9AD8,0x9ADF,0x9AE1,0x9AE6,0x9AEB,
0x9AED,0x9AEF,0x9AF9,0x9AFB,0x9B03,0x9B08,0x9B0F,0x9B13,0x9B1F,0x9B23,
0x9B2F,0x9B32,0x9B3B,0x9B3C,0x9B41,0x9B42,0x9B43,0x9B44,0x9B45,0x9B47,
0x9B48,0x9B49,0x9B4D,0x9B4F,0x9B51,0x9B54,0x9C7C,0x9C7F,0x9C81,0x9C82,
0x9C85,0x9C86,0x9C87,0x9C88,0x9C8B,0x9C8D,0x9C8E,0x9C90,0x9C91,0x9C92,
0x9C94,0x9C95,0x9C9A,0x9C9B,0x9C9C,0x9C9E,0x9C9F,0x9CA0,0x9CA1,0x9CA2,
0x9CA3,0x9CA4,0x9CA5,0x9CA6,0x9CA7,0x9CA8,0x9CA9,0x9CAB,0x9CAD,0x9CAE,
0x9CB0,0x9CB1,0x9CB2,0x9CB3,0x9CB4,0x9CB5,0x9CB6,0x9CB7,0x9CB8,0x9CBA,
0x9CBB,0x9CBC,0x9CBD,0x9CC3,0x9CC4,0x9CC5,0x9CC6,0x9CC7,0x9CCA,0x9CCB,
0x9CCC,0x9CCD,0x9CCE,0x9CCF,0x9CD0,0x9CD3,0x9CD4,0x9CD5,0x9CD6,0x9CD7,
0x9CD8,0x9CD9,0x9CDC,0x9CDD,0x9CDE,0x9CDF,0x9CE2,0x9E1F,0x9E20,0x9E21,
0x9E22,0x9E23,0x9E25,0x9E26,0x9E28,0x9E29,0x9E2A,0x9E2B,0x9E2C,0x9E2D,
0x9E2F,0x9E31,0x9E32,0x9E33,0x9E35,0x9E36,0x9E37,0x9E38,0x9E39,0x9E3A,
0x9E3D,0x9E3E,0x9E3F,0x9E41,0x9E42,0x9E43,0x9E44,0x9E45,0x9E46,0x9E47,
0x9E48,0x9E49,0x9E4A,0x9E4B,0x9E4C,0x9E4E,0x9E4F,0x9E51,0x9E55,0x9E57,
0x9E58,0x9E5A,0x9E5B,0x9E5C,0x9E5E,0x9E63,0x9E64,0x9E66,0x9E67,0x9E68,
0x9E69,0x9E6A,0x9E6B,0x9E6C,0x9E6D,0x9E70,0x9E71,0x9E73,0x9E7E,0x9E7F,
0x9E82,0x9E87,0x9E88,0x9E8B,0x9E92,0x9E93,0x9E9D,0x9E9F,0x9EA6,0x9EB4,
0x9EB8,0x9EBB,0x9EBD,0x9EBE,0x9EC4,0x9EC9,0x9ECD,0x9ECE,0x9ECF,0x9ED1,
0x9ED4,0x9ED8,0x9EDB,0x9EDC,0x9EDD,0x9EDF,0x9EE0,0x9EE2,0x9EE5,0x9EE7,
0x9EE9,0x9EEA,0x9EEF,0x9EF9,0x9EFB,0x9EFC,0x9EFE,0x9F0B,0x9F0D,0x9F0E,
0x9F10,0x9F13,0x9F17,0x9F19,0x9F20,0x9F22,0x9F2C,0x9F2F,0x9F37,0x9F39,
0x9F3B,0x9F3D,0x9F3E,0x9F44,0x9F50,0x9F51,0x9F7F,0x9F80,0x9F83,0x9F84,
0x9F85,0x9F86,0x9F87,0x9F88,0x9F89,0x9F8A,0x9F8B,0x9F8C,0x9F99,0x9F9A,]
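# gb2312_simple_chinese_unicode (closed above) lists the Unicode code points of
# the simplified Chinese characters covered by GB2312; the pinyin-similarity
# pass below iterates over this table.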
def traditional2simplified(sentence):
"""
将sentence中的繁体字转为简体字
:param sentence: 待转换的句子
:return: 将句子中繁体字转换为简体字之后的句子
"""
return Converter('zh-hans').convert(sentence)
# s1 = get_unify_pinyins('佛')
# s2 = get_unify_pinyins('佛口蛇心')
# gb2312_simple_chinese_unicode = [0x524a, 0x56af]
unicode_pinyins_map = dict()
pinyin_set = set()
for unicode in gb2312_simple_chinese_unicode:
unicode_pinyins = get_all_unify_pinyins(chr(unicode))
unicode_pinyins_map[unicode] = unicode_pinyins
for unicode_pinyin in unicode_pinyins:
pinyin_set.add(unicode_pinyin)
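# Build a pinyin-to-pinyin similarity table: identical pinyins score 1, and
# everything else scores 1 minus the Levenshtein edit distance normalized by
# the longer pinyin's length.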
pinyin_similarity_map = {}
for pinyin1 in pinyin_set:
pinyin_similarity_map[pinyin1] = dict()
for pinyin2 in pinyin_set:
if pinyin2 == pinyin1:
pinyin_similarity_map[pinyin1][pinyin2] = 1
continue
edit_distance = Levenshtein.distance(pinyin1, pinyin2)
pinyin_similarity_map[pinyin1][pinyin2] = 1 - edit_distance / (max(len(pinyin1), len(pinyin2)))
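# For every character pair, mark the characters as similar when any of their
# pinyins match under lcs() at the 0.7 similarity threshold (the longer pinyin
# is always passed first); the resulting similar_pinyin_map is written out below.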
similar_pinyin_map = {}
count = 0
for unicode1 in gb2312_simple_chinese_unicode:
pinyins1 = unicode_pinyins_map[unicode1]
similar_pinyin_map[chr(unicode1)] = []
for unicode2 in gb2312_simple_chinese_unicode:
if unicode1 == unicode2:
continue
pinyins2 = unicode_pinyins_map[unicode2]
matched = False
for pinyin1 in pinyins1:
for pinyin2 in pinyins2:
if len(pinyin1) >= len(pinyin2):
lcs_info = lcs(pinyin_similarity_map, [pinyin1], [pinyin2],0.7)
else:
lcs_info = lcs(pinyin_similarity_map, [pinyin2], [pinyin1],0.7)
if len(lcs_info) > 0:
similar_pinyin_map[chr(unicode1)].append(chr(unicode2))
matched = True
break
if matched:
break
count+=1
if (count % 100) == 0:
print(str(count) + " chars processed.\n")
with open(same_pinyin_path2, 'w', encoding='utf-8') as f:
for char in similar_pinyin_map.keys():
chars = similar_pinyin_map[char]
line = char + '\t' + "".join(chars) + '\n'
f.write(line)
|
py
|
1a5c475122ae3bd3dfce5a6a1d0905f4e7e5ca8a
|
from .cache import clear_cache
from .cache import make_base_cache_dir as _make_base_cache_dir
from .dataloader import SmartDataLoader
from .dataset import SmartDataset
_make_base_cache_dir()
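# The base cache directory is created once when this package is imported, so
# SmartDataset and SmartDataLoader can rely on it existing.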
|
py
|
1a5c496633cc19614b9143fb939906890d996916
|
from typing import List
from financial_data.extensions.database import db
from .interface import EfficiencyIndicatorsInterface
from .model import EfficiencyIndicators
class EfficiencyIndicatorsService:
@staticmethod
def get_all() -> List[EfficiencyIndicators]:
return EfficiencyIndicators.query.all()
@staticmethod
def get_by_id(asset_id: int) -> EfficiencyIndicators:
return EfficiencyIndicators.query.filter(EfficiencyIndicators.asset_id == asset_id).first()
@staticmethod
def get_by_symbol(asset_symbol: str) -> EfficiencyIndicators:
return EfficiencyIndicators.query.filter(EfficiencyIndicators.asset_symbol == asset_symbol.upper()).first()
@staticmethod
def update(ei: EfficiencyIndicators, ei_changes: EfficiencyIndicatorsInterface) -> EfficiencyIndicators:
ei.update(ei_changes)
db.session.commit()
return ei
@staticmethod
def delete_by_symbol(asset_symbol: str) -> List[str]:
ei = EfficiencyIndicators.query.filter(EfficiencyIndicators.asset_symbol == asset_symbol.upper()).first()
if not ei:
return []
db.session.delete(ei)
db.session.commit()
return [asset_symbol.upper()]
@staticmethod
def create(new_attrs: EfficiencyIndicatorsInterface) -> EfficiencyIndicators:
new_ei = EfficiencyIndicators(
asset_symbol=new_attrs['asset_symbol'].upper(),
search_date=new_attrs['search_date'],
gross_margin=new_attrs['gross_margin'],
ebitda_margin=new_attrs['ebitda_margin'],
ebit_margin=new_attrs['ebit_margin'],
net_margin=new_attrs['net_margin']
)
db.session.add(new_ei)
db.session.commit()
return new_ei
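# Usage sketch (illustrative only; the controller/route layer is not part of
# this module and the symbol below is hypothetical):
#   ei = EfficiencyIndicatorsService.get_by_symbol('abcd4')
#   if ei:
#       EfficiencyIndicatorsService.update(ei, {'net_margin': 0.12})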
|
py
|
1a5c49c214de0a858d772a55379644114379cea2
|
#! /usr/bin/env python3
'''
FAVITES: FrAmework for VIral Transmission and Evolution Simulation
'''
import argparse
from json import loads
from os import makedirs
from os.path import abspath,expanduser,isdir,isfile
from sys import platform,stderr
from subprocess import call,check_output,CalledProcessError,STDOUT
from tempfile import NamedTemporaryFile
from warnings import warn
from urllib.error import URLError
from urllib.request import urlopen
DOCKER_IMAGE = "niemasd/favites"
MAIN_VERSION_SYMBOLS = {'0','1','2','3','4','5','6','7','8','9','.'}
# return True if the given tag (string) is a main version (e.g. '1.1.1') or False if not (e.g. '1.1.1a')
def is_main_version(tag):
for c in tag:
if c not in MAIN_VERSION_SYMBOLS:
return False
return True
# get the latest FAVITES Docker image main version
def get_latest_version():
try:
DOCKER_TAGS = list(); curr_url = "https://hub.docker.com/v2/repositories/%s/tags/?page=1" % DOCKER_IMAGE
while curr_url is not None:
tmp = loads(urlopen(curr_url).read().decode('utf-8'))
DOCKER_TAGS += [e['name'] for e in tmp['results']]
curr_url = tmp['next']
DOCKER_TAGS = [tag for tag in DOCKER_TAGS if is_main_version(tag)] # remove non-main-version
DOCKER_TAGS = [tuple(int(i) for i in tag.split('.')) for tag in DOCKER_TAGS] # convert to tuple of ints
DOCKER_TAGS.sort() # sort in ascending order
return '.'.join(str(i) for i in DOCKER_TAGS[-1])
except Exception as e:
raise RuntimeError("Failed to use Python 3 urllib to connect to FAVITES Docker repository webpage\n%s" % str(e))
# if Mac OS X, use portable TMPDIR
if platform == 'darwin':
from os import environ
environ['TMPDIR'] = '/tmp/docker_tmp'
# parse user args
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-c', '--config', required=True, type=str, help="Configuration file")
parser.add_argument('-o', '--out_dir', required=False, type=str, help="Output directory")
parser.add_argument('-s', '--random_number_seed', required=False, type=int, help="Random number seed")
parser.add_argument('-v', '--verbose', action="store_true", help="Print verbose messages to stderr")
parser.add_argument('-u', '--update', nargs='*', help="Update Docker image (-u to pull newest version, -u <VERSION> to pull <VERSION>)")
args = parser.parse_args()
# check user args
CONFIG = abspath(expanduser(args.config))
assert isfile(CONFIG), "ERROR: Cannot open configuration file: %s" % CONFIG
try:
CONFIG_DICT = eval(open(CONFIG).read())
except:
raise SyntaxError("Malformed FAVITES configuration file. Must be valid JSON")
if args.out_dir is not None:
if 'out_dir' in CONFIG_DICT:
warn("Output directory specified in command line (%s) and config file (%s). Command line will take precedence" % (args.out_dir, CONFIG_DICT['out_dir']))
CONFIG_DICT['out_dir'] = args.out_dir
assert 'out_dir' in CONFIG_DICT, "Parameter 'out_dir' is not in the configuration file!"
OUTPUT_DIR = abspath(expanduser(CONFIG_DICT['out_dir']))
if args.random_number_seed is not None:
if "random_number_seed" in CONFIG_DICT:
warn("Random number seed specified in command line (%d) and config file (%s). Command line will take precedence" % (args.random_number_seed, CONFIG_DICT['random_number_seed']))
CONFIG_DICT["random_number_seed"] = args.random_number_seed
if "random_number_seed" not in CONFIG_DICT:
CONFIG_DICT["random_number_seed"] = ""
CN_FILE = None
if 'contact_network_file' in CONFIG_DICT:
CN_FILE = abspath(expanduser(CONFIG_DICT['contact_network_file']))
assert isfile(CN_FILE), "File not found: %s" % CONFIG_DICT['contact_network_file']
CONFIG_DICT['contact_network_file'] = '/FAVITES_MOUNT/%s' % CN_FILE.split('/')[-1]
TN_FILE = None
if 'transmission_network_file' in CONFIG_DICT:
TN_FILE = abspath(expanduser(CONFIG_DICT['transmission_network_file']))
assert isfile(TN_FILE), "File not found: %s" % CONFIG_DICT['transmission_network_file']
CONFIG_DICT['transmission_network_file'] = '/FAVITES_MOUNT/%s' % TN_FILE.split('/')[-1]
SAMPLE_TIME_FILE = None
if 'sample_time_file' in CONFIG_DICT:
SAMPLE_TIME_FILE = abspath(expanduser(CONFIG_DICT['sample_time_file']))
assert isfile(SAMPLE_TIME_FILE), "File not found: %s" % CONFIG_DICT['sample_time_file']
CONFIG_DICT['sample_time_file'] = '/FAVITES_MOUNT/%s' % SAMPLE_TIME_FILE.split('/')[-1]
TREE_FILE = None
if 'tree_file' in CONFIG_DICT:
TREE_FILE = abspath(expanduser(CONFIG_DICT['tree_file']))
assert isfile(TREE_FILE), "File not found: %s" % CONFIG_DICT['tree_file']
CONFIG_DICT['tree_file'] = '/FAVITES_MOUNT/%s' % TREE_FILE.split('/')[-1]
ERRORFREE_SEQ_FILE = None
if 'errorfree_sequence_file' in CONFIG_DICT:
ERRORFREE_SEQ_FILE = abspath(expanduser(CONFIG_DICT['errorfree_sequence_file']))
assert isfile(ERRORFREE_SEQ_FILE), "File not found: %s" % CONFIG_DICT['errorfree_sequence_file']
CONFIG_DICT['errorfree_sequence_file'] = '/FAVITES_MOUNT/%s' % ERRORFREE_SEQ_FILE.split('/')[-1]
HMMBUILD_MSA_FILE = None
if 'hmmbuild_msafile' in CONFIG_DICT:
HMMBUILD_MSA_FILE = abspath(expanduser(CONFIG_DICT['hmmbuild_msafile']))
assert isfile(HMMBUILD_MSA_FILE), "File not found: %s" % CONFIG_DICT['hmmbuild_msafile']
CONFIG_DICT['hmmbuild_msafile'] = '/FAVITES_MOUNT/%s' % HMMBUILD_MSA_FILE.split('/')[-1]
TMP_CONFIG = NamedTemporaryFile('w')
TMP_CONFIG.write(str(CONFIG_DICT).replace(": inf",": float('inf')"))
TMP_CONFIG.flush()
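# TMP_CONFIG now holds the rewritten configuration (host file paths replaced
# with /FAVITES_MOUNT container paths); it is bind-mounted into the Docker
# container further below.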
# pull the newest versioned Docker image (if applicable)
if args.update is None:
version = None
try:
o = check_output(['docker','images']).decode().splitlines()
for l in o:
if l.startswith(DOCKER_IMAGE):
version = '%s:%s' % (DOCKER_IMAGE,l.split()[1]); break
except CalledProcessError as e:
raise RuntimeError("docker images command failed\n%s"%e.output)
if version is None:
args.update = []
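# No local FAVITES image was found, so fall back to pulling the latest main version below.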
if args.update is not None:
assert len(args.update) < 2, "More than one Docker image version specified. Must either specify just -u or -u <VERSION>"
if len(args.update) == 0:
tag = get_latest_version()
else:
tag = args.update[0]
version = '%s:%s'%(DOCKER_IMAGE,tag)
try:
need_to_pull = True
if tag != 'latest':
o = check_output(['docker','images']).decode().splitlines()
for l in o:
if l.startswith(DOCKER_IMAGE) and l.split()[1] == version.split(':')[1]:
need_to_pull = False; break
except CalledProcessError as e:
raise RuntimeError("docker images command failed\n%s"%e.output)
if need_to_pull:
print("Pulling Docker image (%s)..." % tag, end=' ', file=stderr); stderr.flush()
try:
o = check_output(['docker','pull',version], stderr=STDOUT)
print("done", file=stderr); stderr.flush()
except Exception as e:
if "manifest for %s not found"%version in e.output.decode():
raise ValueError("Invalid FAVITES version specified: %s"%tag)
else:
raise RuntimeError("docker pull command failed\n%s"%e.output)
try:
print("Removing old Docker images...", end=' ', file=stderr); stderr.flush()
o = check_output(['docker','images']).decode().splitlines()
for l in o:
if l.startswith(DOCKER_IMAGE):
p = l.split()
if tag != p[1]:
check_output(['docker','image','rm','--force',p[2]])
print("done", file=stderr)
except:
print("Failed to remove old Docker images", file=stderr); stderr.flush()
# create output directory
try:
makedirs(OUTPUT_DIR)
except:
if isdir(OUTPUT_DIR):
response = 'x'
while len(response) == 0 or response[0] not in {'y','n'}:
response = input("ERROR: Output directory exists. Overwrite? All contents will be deleted. (y/n) ").strip().lower()
if response[0] == 'y':
from shutil import rmtree
rmtree(OUTPUT_DIR); makedirs(OUTPUT_DIR)
else:
exit(-1)
# call Docker image for user
COMMAND = ['docker','run',] # Docker command
COMMAND += ['-v',TMP_CONFIG.name+':/FAVITES_MOUNT/USER_CONFIG.JSON'] # mount config file
COMMAND += ['-v',OUTPUT_DIR+':/FAVITES_MOUNT/OUTPUT_DIR'] # mount output directory
COMMAND += ['-v',TMP_CONFIG.name+':/USER_CONFIG.JSON'] # compatibility for older Docker images
COMMAND += ['-v',OUTPUT_DIR+':/OUTPUT_DIR']
###################
# hack to access ccm config and supporting file
COMMAND += ['-v','/Users/b37v456/GIT/social_sampling_in_epidemics/simulations/20220610/contact_config.json:/FAVITES_MOUNT/contact_config.json']
COMMAND += ['-v','/Users/b37v456/GIT/social_sampling_in_epidemics/simulations/20220610/initial_contact_graph.csv:/FAVITES_MOUNT/initial_contact_graph.csv']
###################
if CN_FILE is not None: # mount contact network file (if need be)
COMMAND += ['-v',CN_FILE+':'+CONFIG_DICT['contact_network_file']]
if TN_FILE is not None: # mount transmission network file (if need be)
COMMAND += ['-v',TN_FILE+':'+CONFIG_DICT['transmission_network_file']]
if SAMPLE_TIME_FILE is not None:
COMMAND += ['-v',SAMPLE_TIME_FILE+':'+CONFIG_DICT['sample_time_file']]
if TREE_FILE is not None:
COMMAND += ['-v',TREE_FILE+':'+CONFIG_DICT['tree_file']]
if ERRORFREE_SEQ_FILE is not None:
COMMAND += ['-v',ERRORFREE_SEQ_FILE+':'+CONFIG_DICT['errorfree_sequence_file']]
if HMMBUILD_MSA_FILE is not None:
COMMAND += ['-v',HMMBUILD_MSA_FILE+':'+CONFIG_DICT['hmmbuild_msafile']]
if not platform.startswith('win'): # if not Windows,
from os import geteuid,getegid
COMMAND += ['-u',str(geteuid())+':'+str(getegid())] # make output files owned by user instead of root
COMMAND += [version] # Docker image
try:
if args.verbose:
print("\n\nRunning FAVITES Docker command:\n%s\n\n" % ' '.join(COMMAND))
call(COMMAND)
except:
exit(-1)
TMP_CONFIG.close()
|
py
|
1a5c4a7def13fb86cf4f06dcec07e520f6399a0f
|
# Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Defines base session class."""
from __future__ import annotations
import weakref
from typing import TYPE_CHECKING
import _ba
if TYPE_CHECKING:
from weakref import ReferenceType
from typing import Sequence, List, Dict, Any, Optional, Set
import ba
class Session:
"""Defines a high level series of activities with a common purpose.
category: Gameplay Classes
Examples of sessions are ba.FreeForAllSession, ba.DualTeamSession, and
ba.CoopSession.
A Session is responsible for wrangling and transitioning between various
ba.Activity instances such as mini-games and score-screens, and for
maintaining state between them (players, teams, score tallies, etc).
Attributes:
teams
All the ba.Teams in the Session. Most things should use the team
list in ba.Activity; not this.
players
All ba.Players in the Session. Most things should use the player
list in ba.Activity; not this. Some players, such as those who have
not yet selected a character, will only appear on this list.
min_players
The minimum number of Players who must be present for the Session
to proceed past the initial joining screen.
max_players
The maximum number of Players allowed in the Session.
lobby
The ba.Lobby instance where new ba.Players go to select a
Profile/Team/etc. before being added to games.
Be aware this value may be None if a Session does not allow
any such selection.
campaign
The ba.Campaign instance this Session represents, or None if
there is no associated Campaign.
"""
# Note: even though these are instance vars, we annotate them at the
# class level so that docs generation can access their types.
campaign: Optional[ba.Campaign]
lobby: ba.Lobby
max_players: int
min_players: int
players: List[ba.Player]
teams: List[ba.Team]
def __init__(self,
depsets: Sequence[ba.DependencySet],
team_names: Sequence[str] = None,
team_colors: Sequence[Sequence[float]] = None,
use_team_colors: bool = True,
min_players: int = 1,
max_players: int = 8,
allow_mid_activity_joins: bool = True):
"""Instantiate a session.
depsets should be a sequence of successfully resolved ba.DependencySet
instances; one for each ba.Activity the session may potentially run.
"""
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals
# pylint: disable=cyclic-import
from ba._lobby import Lobby
from ba._stats import Stats
from ba._gameutils import sharedobj
from ba._gameactivity import GameActivity
from ba._team import Team
from ba._error import DependencyError
from ba._dependency import Dependency, AssetPackage
# First off, resolve all dependency-sets we were passed.
# If things are missing, we'll try to gather them into a single
# missing-deps exception if possible to give the caller a clean
# path to download missing stuff and try again.
missing_asset_packages: Set[str] = set()
for depset in depsets:
try:
depset.resolve()
except DependencyError as exc:
# Gather/report missing assets only; barf on anything else.
if all(issubclass(d.cls, AssetPackage) for d in exc.deps):
for dep in exc.deps:
assert isinstance(dep.config, str)
missing_asset_packages.add(dep.config)
else:
missing_info = [(d.cls, d.config) for d in exc.deps]
raise RuntimeError(
f'Missing non-asset dependencies: {missing_info}')
# Throw a combined exception if we found anything missing.
if missing_asset_packages:
raise DependencyError([
Dependency(AssetPackage, set_id)
for set_id in missing_asset_packages
])
# Ok; looks like our dependencies check out.
# Now give the engine a list of asset-set-ids to pass along to clients.
required_asset_packages: Set[str] = set()
for depset in depsets:
required_asset_packages.update(depset.get_asset_package_ids())
# print('Would set host-session asset-reqs to:',
# required_asset_packages)
# First thing, wire up our internal engine data.
self._sessiondata = _ba.register_session(self)
self.tournament_id: Optional[str] = None
# FIXME: This stuff shouldn't be here.
self.sharedobjs: Dict[str, Any] = {}
# TeamGameActivity uses this to display a help overlay on the first
# activity only.
self.have_shown_controls_help_overlay = False
self.campaign = None
# FIXME: Should be able to kill this I think.
self.campaign_state: Dict[str, str] = {}
self._use_teams = (team_names is not None)
self._use_team_colors = use_team_colors
self._in_set_activity = False
self._allow_mid_activity_joins = allow_mid_activity_joins
self.teams = []
self.players = []
self._next_team_id = 0
self._activity_retained: Optional[ba.Activity] = None
self.launch_end_session_activity_time: Optional[float] = None
self._activity_end_timer: Optional[ba.Timer] = None
# Hacky way to create empty weak ref; must be a better way.
class _EmptyObj:
pass
self._activity_weak: ReferenceType[ba.Activity]
self._activity_weak = weakref.ref(_EmptyObj()) # type: ignore
if self._activity_weak() is not None:
raise Exception('Error creating empty activity weak ref.')
self._next_activity: Optional[ba.Activity] = None
self.wants_to_end = False
self._ending = False
self.min_players = min_players
self.max_players = max_players
# Create Teams.
if self._use_teams:
assert team_names is not None
assert team_colors is not None
for i, color in enumerate(team_colors):
team = Team(team_id=self._next_team_id,
name=GameActivity.get_team_display_string(
team_names[i]),
color=color)
self.teams.append(team)
self._next_team_id += 1
try:
with _ba.Context(self):
self.on_team_join(team)
except Exception:
from ba import _error
_error.print_exception(
f'Error in on_team_join for {self}.')
self.lobby = Lobby()
self.stats = Stats()
# Instantiate our session globals node
# (so it can apply default settings).
sharedobj('globals')
@property
def use_teams(self) -> bool:
"""(internal)"""
return self._use_teams
@property
def use_team_colors(self) -> bool:
"""(internal)"""
return self._use_team_colors
def on_player_request(self, player: ba.Player) -> bool:
"""Called when a new ba.Player wants to join the Session.
This should return True or False to accept/reject.
"""
from ba._lang import Lstr
# Limit player counts *unless* we're in a stress test.
if _ba.app.stress_test_reset_timer is None:
if len(self.players) >= self.max_players:
# Print a rejection message *only* to the client trying to
# join (prevents spamming everyone else in the game).
_ba.playsound(_ba.getsound('error'))
_ba.screenmessage(
Lstr(resource='playerLimitReachedText',
subs=[('${COUNT}', str(self.max_players))]),
color=(0.8, 0.0, 0.0),
clients=[player.get_input_device().client_id],
transient=True)
return False
_ba.playsound(_ba.getsound('dripity'))
return True
def on_player_leave(self, player: ba.Player) -> None:
"""Called when a previously-accepted ba.Player leaves the session."""
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
# pylint: disable=cyclic-import
from ba._freeforallsession import FreeForAllSession
from ba._lang import Lstr
from ba import _error
# Remove them from the game rosters.
if player in self.players:
_ba.playsound(_ba.getsound('playerLeft'))
team: Optional[ba.Team]
# The player will have no team if they are still in the lobby.
try:
team = player.team
except _error.TeamNotFoundError:
team = None
activity = self._activity_weak()
# If he had no team, he's in the lobby.
# If we have a current activity with a lobby, ask it to
# remove him.
if team is None:
with _ba.Context(self):
try:
self.lobby.remove_chooser(player)
except Exception:
_error.print_exception(
'Error in Lobby.remove_chooser()')
# *If* they were actually in the game, announce their departure.
if team is not None:
_ba.screenmessage(
Lstr(resource='playerLeftText',
subs=[('${PLAYER}', player.get_name(full=True))]))
# Remove him from his team and session lists.
# (he may not be on the team list since players are re-added to
# team lists every activity)
if team is not None and player in team.players:
# Testing; can remove this eventually.
if isinstance(self, FreeForAllSession):
if len(team.players) != 1:
_error.print_error('expected 1 player in FFA team')
team.players.remove(player)
# Remove player from any current activity.
if activity is not None and player in activity.players:
activity.players.remove(player)
# Run the activity callback unless it's been expired.
if not activity.is_expired():
try:
with _ba.Context(activity):
activity.on_player_leave(player)
except Exception:
_error.print_exception(
'exception in on_player_leave for activity',
activity)
else:
_error.print_error('expired activity in on_player_leave;'
" shouldn't happen")
player.set_activity(None)
player.set_node(None)
# Reset the player; this will remove its actor-ref and clear
# its calls/etc
try:
with _ba.Context(activity):
player.reset()
except Exception:
_error.print_exception(
'exception in player.reset in'
' on_player_leave for player', player)
# If we're a non-team session, remove the player's team completely.
if not self._use_teams and team is not None:
# If the team's in an activity, call its on_team_leave
# callback.
if activity is not None and team in activity.teams:
activity.teams.remove(team)
if not activity.is_expired():
try:
with _ba.Context(activity):
activity.on_team_leave(team)
except Exception:
_error.print_exception(
'exception in on_team_leave for activity',
activity)
else:
_error.print_error(
'expired activity in on_player_leave p2'
"; shouldn't happen")
# Clear the team's game-data (so dying stuff will
# have proper context).
try:
with _ba.Context(activity):
team.reset_gamedata()
except Exception:
_error.print_exception(
'exception clearing gamedata for team:', team,
'for player:', player, 'in activity:', activity)
# Remove the team from the session.
self.teams.remove(team)
try:
with _ba.Context(self):
self.on_team_leave(team)
except Exception:
_error.print_exception(
'exception in on_team_leave for session', self)
# Clear the team's session-data (so dying stuff will
# have proper context).
try:
with _ba.Context(self):
team.reset_sessiondata()
except Exception:
_error.print_exception(
'exception clearing sessiondata for team:', team,
'in session:', self)
# Now remove them from the session list.
self.players.remove(player)
else:
print('ERROR: Session.on_player_leave called'
' for player not in our list.')
def end(self) -> None:
"""Initiates an end to the session and a return to the main menu.
Note that this happens asynchronously, allowing the
session and its activities to shut down gracefully.
"""
self.wants_to_end = True
if self._next_activity is None:
self.launch_end_session_activity()
def launch_end_session_activity(self) -> None:
"""(internal)"""
from ba import _error
from ba._activitytypes import EndSessionActivity
from ba._enums import TimeType
with _ba.Context(self):
curtime = _ba.time(TimeType.REAL)
if self._ending:
# Ignore repeats unless it's been a while.
assert self.launch_end_session_activity_time is not None
since_last = (curtime - self.launch_end_session_activity_time)
if since_last < 30.0:
return
_error.print_error(
'launch_end_session_activity called twice (since_last=' +
str(since_last) + ')')
self.launch_end_session_activity_time = curtime
self.set_activity(_ba.new_activity(EndSessionActivity))
self.wants_to_end = False
self._ending = True # Prevent further activity-mucking.
def on_team_join(self, team: ba.Team) -> None:
"""Called when a new ba.Team joins the session."""
def on_team_leave(self, team: ba.Team) -> None:
"""Called when a ba.Team is leaving the session."""
def _complete_end_activity(self, activity: ba.Activity,
results: Any) -> None:
# Run the subclass callback in the session context.
try:
with _ba.Context(self):
self.on_activity_end(activity, results)
except Exception:
from ba import _error
_error.print_exception(
'exception in on_activity_end() for session', self, 'activity',
activity, 'with results', results)
def end_activity(self, activity: ba.Activity, results: Any, delay: float,
force: bool) -> None:
"""Commence shutdown of a ba.Activity (if not already occurring).
'delay' is the time delay before the Activity actually ends
(in seconds). Further calls to end() will be ignored up until
this time, unless 'force' is True, in which case the new results
will replace the old.
"""
from ba._general import Call
from ba._enums import TimeType
# Only pay attention if this is coming from our current activity.
if activity is not self._activity_retained:
return
# If this activity hasn't begun yet, just set it up to end immediately
# once it does.
if not activity.has_begun():
activity.set_immediate_end(results, delay, force)
# The activity has already begun; get ready to end it.
else:
if (not activity.has_ended()) or force:
activity.set_has_ended(True)
# Set a timer to set in motion this activity's demise.
self._activity_end_timer = _ba.Timer(
delay,
Call(self._complete_end_activity, activity, results),
timetype=TimeType.BASE)
def handlemessage(self, msg: Any) -> Any:
"""General message handling; can be passed any message object."""
from ba._lobby import PlayerReadyMessage
from ba._error import UNHANDLED
from ba._messages import PlayerProfilesChangedMessage
if isinstance(msg, PlayerReadyMessage):
self._on_player_ready(msg.chooser)
return None
if isinstance(msg, PlayerProfilesChangedMessage):
# If we have a current activity with a lobby, ask it to reload
# profiles.
with _ba.Context(self):
self.lobby.reload_profiles()
return None
return UNHANDLED
def set_activity(self, activity: ba.Activity) -> None:
"""Assign a new current ba.Activity for the session.
Note that this will not change the current context to the new
Activity's. Code must be run in the new activity's methods
(on_transition_in, etc) to get it. (so you can't do
session.set_activity(foo) and then ba.newnode() to add a node to foo)
"""
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
from ba import _error
from ba._gameutils import sharedobj
from ba._enums import TimeType
# Sanity test: make sure this doesn't get called recursively.
if self._in_set_activity:
raise Exception(
'Session.set_activity() cannot be called recursively.')
if activity.session is not _ba.getsession():
raise Exception("Provided Activity's Session is not current.")
# Quietly ignore this if the whole session is going down.
if self._ending:
return
if activity is self._activity_retained:
_error.print_error('activity set to already-current activity')
return
if self._next_activity is not None:
raise Exception('Activity switch already in progress (to ' +
str(self._next_activity) + ')')
self._in_set_activity = True
prev_activity = self._activity_retained
if prev_activity is not None:
with _ba.Context(prev_activity):
gprev = sharedobj('globals')
else:
gprev = None
with _ba.Context(activity):
# Now that it's going to be front and center,
# set some global values based on what the activity wants.
glb = sharedobj('globals')
glb.use_fixed_vr_overlay = activity.use_fixed_vr_overlay
glb.allow_kick_idle_players = activity.allow_kick_idle_players
if activity.inherits_slow_motion and gprev is not None:
glb.slow_motion = gprev.slow_motion
else:
glb.slow_motion = activity.slow_motion
if activity.inherits_music and gprev is not None:
glb.music_continuous = True # Prevent restarting same music.
glb.music = gprev.music
glb.music_count += 1
if activity.inherits_camera_vr_offset and gprev is not None:
glb.vr_camera_offset = gprev.vr_camera_offset
if activity.inherits_vr_overlay_center and gprev is not None:
glb.vr_overlay_center = gprev.vr_overlay_center
glb.vr_overlay_center_enabled = gprev.vr_overlay_center_enabled
# If they want to inherit tint from the previous activity.
if activity.inherits_tint and gprev is not None:
glb.tint = gprev.tint
glb.vignette_outer = gprev.vignette_outer
glb.vignette_inner = gprev.vignette_inner
# Let the activity do its thing.
activity.start_transition_in()
self._next_activity = activity
# If we have a current activity, tell it it's transitioning out;
# the next one will become current once this one dies.
if prev_activity is not None:
# pylint: disable=protected-access
prev_activity._transitioning_out = True
# pylint: enable=protected-access
# Activity will be None until the next one begins.
with _ba.Context(prev_activity):
prev_activity.on_transition_out()
# Setting this to None should free up the old activity to die,
# which will call begin_next_activity.
# We can still access our old activity through
# self._activity_weak() to keep it up to date on player
# joins/departures/etc until it dies.
self._activity_retained = None
# There's no existing activity; lets just go ahead with the begin call.
else:
self.begin_next_activity()
# Tell the C layer that this new activity is now 'foregrounded'.
# This means that its globals node controls global stuff and stuff
# like console operations, keyboard shortcuts, etc will run in it.
# pylint: disable=protected-access
# noinspection PyProtectedMember
activity._activity_data.make_foreground()
# pylint: enable=protected-access
# We want to call _destroy() for the previous activity once it should
# tear itself down, clear out any self-refs, etc. If the new activity
# has a transition-time, set it up to be called after that passes;
# otherwise call it immediately. After this call the activity should
# have no refs left to it and should die (which will trigger the next
# activity to run).
if prev_activity is not None:
if activity.transition_time > 0.0:
# FIXME: We should tweak the activity to not allow
# node-creation/etc when we call _destroy (or after).
with _ba.Context('ui'):
# pylint: disable=protected-access
# noinspection PyProtectedMember
_ba.timer(activity.transition_time,
prev_activity._destroy,
timetype=TimeType.REAL)
# Just run immediately.
else:
# noinspection PyProtectedMember
prev_activity._destroy() # pylint: disable=protected-access
self._in_set_activity = False
def getactivity(self) -> Optional[ba.Activity]:
"""Return the current foreground activity for this session."""
return self._activity_weak()
def get_custom_menu_entries(self) -> List[Dict[str, Any]]:
"""Subclasses can override this to provide custom menu entries.
The returned value should be a list of dicts, each containing
a 'label' and 'call' entry, with 'label' being the text for
the entry and 'call' being the callable to trigger if the entry
is pressed.
"""
return []
def _request_player(self, player: ba.Player) -> bool:
# If we're ending, allow no new players.
if self._ending:
return False
# Ask the user.
try:
with _ba.Context(self):
result = self.on_player_request(player)
except Exception:
from ba import _error
_error.print_exception('error in on_player_request call for', self)
result = False
# If the user said yes, add the player to the session list.
if result:
self.players.append(player)
# If we have a current activity with a lobby,
# ask it to bring up a chooser for this player.
# otherwise they'll have to wait around for the next activity.
with _ba.Context(self):
try:
self.lobby.add_chooser(player)
except Exception:
from ba import _error
_error.print_exception('exception in lobby.add_chooser()')
return result
def on_activity_end(self, activity: ba.Activity, results: Any) -> None:
"""Called when the current ba.Activity has ended.
The ba.Session should look at the results and start
another ba.Activity.
"""
def begin_next_activity(self) -> None:
"""Called once the previous activity has been totally torn down.
This means we're ready to begin the next one
"""
if self._next_activity is not None:
# We store both a weak and a strong ref to the new activity;
# the strong is to keep it alive and the weak is so we can access
# it even after we've released the strong-ref to allow it to die.
self._activity_retained = self._next_activity
self._activity_weak = weakref.ref(self._next_activity)
self._next_activity = None
# Lets kick out any players sitting in the lobby since
# new activities such as score screens could cover them up;
# better to have them rejoin.
self.lobby.remove_all_choosers_and_kick_players()
activity = self._activity_weak()
assert activity is not None
activity.begin(self)
def _on_player_ready(self, chooser: ba.Chooser) -> None:
"""Called when a ba.Player has checked themself ready."""
from ba._lang import Lstr
lobby = chooser.lobby
activity = self._activity_weak()
# In joining activities, we wait till all choosers are ready
# and then create all players at once.
if activity is not None and activity.is_joining_activity:
if lobby.check_all_ready():
choosers = lobby.get_choosers()
min_players = self.min_players
if len(choosers) >= min_players:
for lch in lobby.get_choosers():
self._add_chosen_player(lch)
lobby.remove_all_choosers()
# Get our next activity going.
self._complete_end_activity(activity, {})
else:
_ba.screenmessage(Lstr(resource='notEnoughPlayersText',
subs=[('${COUNT}', str(min_players))
]),
color=(1, 1, 0))
_ba.playsound(_ba.getsound('error'))
else:
return
# Otherwise just add players on the fly.
else:
self._add_chosen_player(chooser)
lobby.remove_chooser(chooser.getplayer())
def _add_chosen_player(self, chooser: ba.Chooser) -> ba.Player:
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
from ba import _error
from ba._lang import Lstr
from ba._team import Team
from ba import _freeforallsession
player = chooser.getplayer()
if player not in self.players:
_error.print_error('player not found in session '
'player-list after chooser selection')
activity = self._activity_weak()
assert activity is not None
# We need to reset the player's input here, as it is currently
# referencing the chooser which could inadvertently keep it alive.
player.reset_input()
# Pass it to the current activity if it has already begun
# (otherwise it'll get passed once begin is called).
pass_to_activity = (activity.has_begun()
and not activity.is_joining_activity)
# If we're not allowing mid-game joins, don't pass; just announce
# the arrival.
if pass_to_activity:
if not self._allow_mid_activity_joins:
pass_to_activity = False
with _ba.Context(self):
_ba.screenmessage(Lstr(resource='playerDelayedJoinText',
subs=[('${PLAYER}',
player.get_name(full=True))
]),
color=(0, 1, 0))
# If we're a non-team game, each player gets their own team
# (keeps mini-game coding simpler if we can always deal with teams).
if self._use_teams:
team = chooser.get_team()
else:
our_team_id = self._next_team_id
team = Team(team_id=our_team_id,
name=chooser.getplayer().get_name(full=True,
icon=False),
color=chooser.get_color())
self.teams.append(team)
self._next_team_id += 1
try:
with _ba.Context(self):
self.on_team_join(team)
except Exception:
_error.print_exception(f'exception in on_team_join for {self}')
if pass_to_activity:
if team in activity.teams:
_error.print_error(
'Duplicate team ID in ba.Session._add_chosen_player')
activity.teams.append(team)
try:
with _ba.Context(activity):
activity.on_team_join(team)
except Exception:
_error.print_exception(
f'ERROR: exception in on_team_join for {activity}')
player.set_data(team=team,
character=chooser.get_character_name(),
color=chooser.get_color(),
highlight=chooser.get_highlight())
self.stats.register_player(player)
if pass_to_activity:
if isinstance(self, _freeforallsession.FreeForAllSession):
if player.team.players:
_error.print_error('expected 0 players in FFA team')
# Don't actually add the player to their team list if we're not
# in an activity. (players get (re)added to their team lists
# when the activity begins).
player.team.players.append(player)
if player in activity.players:
_error.print_exception(
f'Dup player in ba.Session._add_chosen_player: {player}')
else:
activity.players.append(player)
player.set_activity(activity)
pnode = activity.create_player_node(player)
player.set_node(pnode)
try:
with _ba.Context(activity):
activity.on_player_join(player)
except Exception:
_error.print_exception(
f'Error on on_player_join for {activity}')
return player
|
py
|
1a5c4ad1515d742608ce0bb50f90b0e4155ae7b7
|
# !/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/3/31 21:43
# @Author : Yunhao Cao
# @File : __init__.py
__author__ = 'Yunhao Cao'
__all__ = [
]
def _test():
pass
def _main():
pass
if __name__ == '__main__':
_main()
|
py
|
1a5c4adb18f5e135c5c4e1c74a628dd377920897
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import codecs
import numpy as np
import re
import itertools
from collections import Counter
import os
# from gensim.models import word2vec
def clean_str(string):
"""
Tokenization/string cleaning for all datasets except for SST.
Original taken from https://github.com/yoonkim/CNN_sentence/blob/master/process_data.py
"""
string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub(r"\'s", " \'s", string)
string = re.sub(r"\'ve", " \'ve", string)
string = re.sub(r"n\'t", " n\'t", string)
string = re.sub(r"\'re", " \'re", string)
string = re.sub(r"\'d", " \'d", string)
string = re.sub(r"\'ll", " \'ll", string)
string = re.sub(r",", " , ", string)
string = re.sub(r"!", " ! ", string)
string = re.sub(r"\(", " \( ", string)
string = re.sub(r"\)", " \) ", string)
string = re.sub(r"\?", " \? ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.strip().lower()
def get_chinese_text():
if not os.path.isdir("data/"):
os.system("mkdir data/")
    if (not os.path.exists('data/pos.txt')) or \
            (not os.path.exists('data/neg.txt')):
os.system("wget -q https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/example/chinese_text.zip -P data/")
os.chdir("./data")
os.system("unzip -u chinese_text.zip")
os.chdir("..")
def load_data_and_labels():
"""
Loads MR polarity data from files, splits the data into words and generates labels.
Returns split sentences and labels.
"""
# download dataset
get_chinese_text()
# Load data from files
positive_examples = list(codecs.open("./data/pos.txt", "r", "utf-8").readlines())
positive_examples = [s.strip() for s in positive_examples]
positive_examples = [pe for pe in positive_examples if len(pe) < 100]
negative_examples = list(codecs.open("./data/neg.txt", "r", "utf-8").readlines())
negative_examples = [s.strip() for s in negative_examples]
negative_examples = [ne for ne in negative_examples if len(ne) < 100]
# Split by words
x_text = positive_examples + negative_examples
# x_text = [clean_str(sent) for sent in x_text]
x_text = [list(s) for s in x_text]
# Generate labels
positive_labels = [[0, 1] for _ in positive_examples]
negative_labels = [[1, 0] for _ in negative_examples]
y = np.concatenate([positive_labels, negative_labels], 0)
return [x_text, y]
def pad_sentences(sentences, padding_word="</s>"):
"""
Pads all sentences to the same length. The length is defined by the longest sentence.
Returns padded sentences.
"""
sequence_length = max(len(x) for x in sentences)
padded_sentences = []
for i in range(len(sentences)):
sentence = sentences[i]
num_padding = sequence_length - len(sentence)
new_sentence = sentence + [padding_word] * num_padding
padded_sentences.append(new_sentence)
return padded_sentences
def build_vocab(sentences):
"""
Builds a vocabulary mapping from word to index based on the sentences.
Returns vocabulary mapping and inverse vocabulary mapping.
"""
# Build vocabulary
word_counts = Counter(itertools.chain(*sentences))
# Mapping from index to word
vocabulary_inv = [x[0] for x in word_counts.most_common()]
# Mapping from word to index
vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
return [vocabulary, vocabulary_inv]
def build_input_data(sentences, labels, vocabulary):
"""
    Maps sentences and labels to vectors based on a vocabulary.
"""
x = np.array([[vocabulary[word] for word in sentence] for sentence in sentences])
y = np.array(labels)
return [x, y]
def build_input_data_with_word2vec(sentences, labels, word2vec):
"""Map sentences and labels to vectors based on a pretrained word2vec"""
x_vec = []
for sent in sentences:
vec = []
for word in sent:
if word in word2vec:
vec.append(word2vec[word])
else:
vec.append(word2vec['</s>'])
x_vec.append(vec)
x_vec = np.array(x_vec)
y_vec = np.array(labels)
return [x_vec, y_vec]
def load_data_with_word2vec(word2vec):
"""
    Loads and preprocesses data for the MR dataset.
Returns input vectors, labels, vocabulary, and inverse vocabulary.
"""
# Load and preprocess data
sentences, labels = load_data_and_labels()
sentences_padded = pad_sentences(sentences)
# vocabulary, vocabulary_inv = build_vocab(sentences_padded)
return build_input_data_with_word2vec(sentences_padded, labels, word2vec)
def load_data():
"""
    Loads and preprocesses data for the MR dataset.
Returns input vectors, labels, vocabulary, and inverse vocabulary.
"""
# Load and preprocess data
sentences, labels = load_data_and_labels()
sentences_padded = pad_sentences(sentences)
vocabulary, vocabulary_inv = build_vocab(sentences_padded)
x, y = build_input_data(sentences_padded, labels, vocabulary)
return [x, y, vocabulary, vocabulary_inv]
def batch_iter(data, batch_size, num_epochs):
"""
Generates a batch iterator for a dataset.
"""
data = np.array(data)
data_size = len(data)
    num_batches_per_epoch = (data_size + batch_size - 1) // batch_size  # ceil division avoids an empty final batch
for epoch in range(num_epochs):
# Shuffle the data at each epoch
shuffle_indices = np.random.permutation(np.arange(data_size))
shuffled_data = data[shuffle_indices]
for batch_num in range(num_batches_per_epoch):
start_index = batch_num * batch_size
end_index = min((batch_num + 1) * batch_size, data_size)
yield shuffled_data[start_index:end_index]
def load_pretrained_word2vec(infile):
if isinstance(infile, str):
infile = open(infile)
word2vec = {}
for idx, line in enumerate(infile):
if idx == 0:
vocab_size, dim = line.strip().split()
else:
tks = line.strip().split()
            word2vec[tks[0]] = list(map(float, tks[1:]))
return word2vec
def load_google_word2vec(path):
    # imported here so gensim is only required when this helper is actually used
    from gensim.models import word2vec
    model = word2vec.Word2Vec.load_word2vec_format(path, binary=True)
    return model
|
py
|
1a5c4b1e685958aad660605cacbc77a194e28472
|
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
import random
def random_attack():
def attack(family, train, valid, x, y):
kwargs = {}
kwargs['family'] = family
gaussian_links = ["inverse", "log", "identity"]
binomial_links = ["logit"]
poisson_links = ["log", "identity"]
gamma_links = ["inverse", "log", "identity"]
# randomly select parameters and their corresponding values
if random.randint(0,1): kwargs['max_iterations'] = random.randint(1,50)
if random.random() > 0.8: kwargs['beta_epsilon'] = random.random()
if random.randint(0,1): kwargs['solver'] = ["AUTO", "IRLSM", "L_BFGS", "COORDINATE_DESCENT_NAIVE",
"COORDINATE_DESCENT"][random.randint(0,1)]
if random.randint(0,1): kwargs['standardize'] = [True, False][random.randint(0,1)]
if random.randint(0,1):
if family == "gaussian": kwargs['link'] = gaussian_links[random.randint(0,2)]
elif family == "binomial": kwargs['link'] = binomial_links[random.randint(0,0)]
elif family == "poisson" : kwargs['link'] = poisson_links[random.randint(0,1)]
elif family == "gamma" : kwargs['link'] = gamma_links[random.randint(0,2)]
if random.randint(0,1): kwargs['alpha'] = [random.random()]
if family == "binomial":
if random.randint(0,1): kwargs['prior'] = random.random()
if random.randint(0,1): kwargs['lambda_search'] = [True, False][random.randint(0,1)]
if 'lambda_search' in kwargs.keys():
if random.randint(0,1): kwargs['nlambdas'] = random.randint(2,10)
do_validation = [True, False][random.randint(0,1)]
# beta constraints
if random.randint(0,1):
bc = []
for n in x:
if train[n].isnumeric():
name = train.names[n]
lower_bound = random.uniform(-1,1)
upper_bound = lower_bound + random.random()
bc.append([name, lower_bound, upper_bound])
if len(bc) > 0:
beta_constraints = h2o.H2OFrame(python_obj=zip(*bc))
beta_constraints.set_names(['names', 'lower_bounds', 'upper_bounds'])
kwargs['beta_constraints'] = beta_constraints._id
# display the parameters and their corresponding values
print "-----------------------"
print "x: {0}".format(x)
print "y: {0}".format(y)
print "validation: {0}".format(do_validation)
for k, v in zip(kwargs.keys(), kwargs.values()):
if k == 'beta_constraints':
print k + ": "
beta_constraints.show()
else:
print k + ": {0}".format(v)
if do_validation: h2o.glm(x=train[x], y=train[y], validation_x=valid[x], validation_y=valid[y], **kwargs)
else: h2o.glm(x=train[x], y=train[y], **kwargs)
print "-----------------------"
print "Import and data munging..."
seed = random.randint(1,10000)
print "SEED: {0}".format(seed)
pros = h2o.upload_file(pyunit_utils.locate("smalldata/prostate/prostate.csv.zip"))
pros[1] = pros[1].asfactor()
r = pros[0].runif(seed=seed) # a column of length pros.nrow with values between 0 and 1
# ~80/20 train/validation split
pros_train = pros[r > .2]
pros_valid = pros[r <= .2]
cars = h2o.upload_file(pyunit_utils.locate("smalldata/junit/cars.csv"))
r = cars[0].runif(seed=seed)
cars_train = cars[r > .2]
cars_valid = cars[r <= .2]
print
print "======================================================================"
print "============================== Binomial =============================="
print "======================================================================"
for i in range(10):
attack("binomial", pros_train, pros_valid, random.sample([2,3,4,5,6,7,8],random.randint(1,7)), 1)
print
print "======================================================================"
print "============================== Gaussian =============================="
print "======================================================================"
for i in range(10):
attack("gaussian", cars_train, cars_valid, random.sample([2,3,4,5,6,7],random.randint(1,6)), 1)
print
print "======================================================================"
print "============================== Poisson =============================="
print "======================================================================"
for i in range(10):
attack("poisson", cars_train, cars_valid, random.sample([1,3,4,5,6,7],random.randint(1,6)), 2)
print
print "======================================================================"
print "============================== Gamma =============================="
print "======================================================================"
for i in range(10):
attack("gamma", pros_train, pros_valid, random.sample([1,2,3,5,6,7,8],random.randint(1,7)), 4)
if __name__ == "__main__":
pyunit_utils.standalone_test(random_attack)
else:
random_attack()
|
py
|
1a5c4ddba69dc794454575e9e51eefd1890f30e0
|
def horario(update, context):
update.message.reply_text(
""" **Horario**
¡Hola! Tienes que hacer tu propio horario, la universidad solo provee los horarios y aulas de cada ramo.
No es tan complicado, solo necesitas seguir la siguiente guía.
**[Como hacer tu propio horario](https://wiki.inf.udec.cl/doku.php?id=horario)**
*¿Tienes alguna duda o encontraste un error? Avisa al administrador.* """, parse_mode='Markdown')
|
py
|
1a5c4ebd7289708202a0cb0fd01b7c07e5d81539
|
#!/usr/bin/env python3
""" Parent class for color Adjustments for faceswap.py converter """
import logging
import numpy as np
from plugins.convert._config import Config
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def get_config(plugin_name, configfile=None):
""" Return the config for the requested model """
return Config(plugin_name, configfile=configfile).config_dict
class Adjustment():
""" Parent class for adjustments """
def __init__(self, configfile=None, config=None):
logger.debug("Initializing %s: (configfile: %s, config: %s)",
self.__class__.__name__, configfile, config)
self.config = self.set_config(configfile, config)
logger.debug("config: %s", self.config)
logger.debug("Initialized %s", self.__class__.__name__)
def set_config(self, configfile, config):
""" Set the config to either global config or passed in config """
section = ".".join(self.__module__.split(".")[-2:])
if config is None:
retval = get_config(section, configfile)
else:
config.section = section
retval = config.config_dict
config.section = None
logger.debug("Config: %s", retval)
return retval
def process(self, old_face, new_face, raw_mask):
""" Override for specific color adjustment process """
raise NotImplementedError
def run(self, old_face, new_face, raw_mask):
""" Perform selected adjustment on face """
logger.trace("Performing color adjustment")
# Remove Mask for processing
reinsert_mask = False
if new_face.shape[2] == 4:
reinsert_mask = True
final_mask = new_face[:, :, -1]
new_face = new_face[:, :, :3]
new_face = self.process(old_face, new_face, raw_mask)
new_face = np.clip(new_face, 0.0, 1.0)
if reinsert_mask and new_face.shape[2] != 4:
# Reinsert Mask
new_face = np.concatenate((new_face, np.expand_dims(final_mask, axis=-1)), -1)
logger.trace("Performed color adjustment")
return new_face
|
py
|
1a5c4f6c9aa1555ca5b2f969c457274ff923413d
|
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import random
import time
from typing import Optional, Tuple, Union
import attr
from netaddr import IPAddress # type: ignore
from twisted.internet import defer
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.internet.interfaces import IStreamClientEndpoint
from twisted.web.client import URI, Agent, HTTPConnectionPool, RedirectAgent
from twisted.web.http import stringToDatetime
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IResponse
from zope.interface import implementer
from sydent.http.httpcommon import read_body_with_max_size
from sydent.http.srvresolver import SrvResolver, pick_server_from_list
from sydent.util import json_decoder
from sydent.util.ttlcache import TTLCache
# period to cache .well-known results for by default
WELL_KNOWN_DEFAULT_CACHE_PERIOD = 24 * 3600
# jitter to add to the .well-known default cache ttl
WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER = 10 * 60
# period to cache failure to fetch .well-known for
WELL_KNOWN_INVALID_CACHE_PERIOD = 1 * 3600
# cap for .well-known cache period
WELL_KNOWN_MAX_CACHE_PERIOD = 48 * 3600
# The maximum size (in bytes) to allow a well-known file to be.
WELL_KNOWN_MAX_SIZE = 50 * 1024 # 50 KiB
logger = logging.getLogger(__name__)
well_known_cache = TTLCache("well-known")
@implementer(IAgent)
class MatrixFederationAgent:
"""An Agent-like thing which provides a `request` method which will look up a matrix
server and send an HTTP request to it.
Doesn't implement any retries. (Those are done in MatrixFederationHttpClient.)
:param reactor: twisted reactor to use for underlying requests
:type reactor: IReactor
:param tls_client_options_factory: Factory to use for fetching client tls
options, or none to disable TLS.
:type tls_client_options_factory: ClientTLSOptionsFactory, None
:param _well_known_tls_policy: TLS policy to use for fetching .well-known
files. None to use a default (browser-like) implementation.
:type _well_known_tls_policy: IPolicyForHTTPS, None
:param _srv_resolver: SRVResolver impl to use for looking up SRV records.
None to use a default implementation.
:type _srv_resolver: SrvResolver, None
:param _well_known_cache: TTLCache impl for storing cached well-known
lookups. Omit to use a default implementation.
:type _well_known_cache: TTLCache
"""
def __init__(
self,
reactor,
tls_client_options_factory,
_well_known_tls_policy=None,
_srv_resolver: Optional["SrvResolver"] = None,
_well_known_cache: "TTLCache" = well_known_cache,
) -> None:
self._reactor = reactor
self._tls_client_options_factory = tls_client_options_factory
if _srv_resolver is None:
_srv_resolver = SrvResolver()
self._srv_resolver = _srv_resolver
self._pool = HTTPConnectionPool(reactor)
self._pool.retryAutomatically = False
self._pool.maxPersistentPerHost = 5
self._pool.cachedConnectionTimeout = 2 * 60
agent_args = {}
if _well_known_tls_policy is not None:
# the param is called 'contextFactory', but actually passing a
# contextfactory is deprecated, and it expects an IPolicyForHTTPS.
agent_args["contextFactory"] = _well_known_tls_policy
_well_known_agent = RedirectAgent(
Agent(self._reactor, pool=self._pool, **agent_args),
)
self._well_known_agent = _well_known_agent
# our cache of .well-known lookup results, mapping from server name
# to delegated name. The values can be:
# `bytes`: a valid server-name
# `None`: there is no (valid) .well-known here
self._well_known_cache = _well_known_cache
@defer.inlineCallbacks
def request(
self,
method: bytes,
uri: bytes,
headers: Optional["Headers"] = None,
bodyProducer: Optional["IBodyProducer"] = None,
) -> IResponse:
"""
:param method: HTTP method (GET/POST/etc).
:param uri: Absolute URI to be retrieved.
:param headers: HTTP headers to send with the request, or None to
send no extra headers.
:param bodyProducer: An object which can generate bytes to make up the
body of this request (for example, the properly encoded contents of
a file for a file upload). Or None if the request is to have
no body.
:returns a deferred that fires when the header of the response has
been received (regardless of the response status code). Fails if
there is any problem which prevents that response from being received
(including problems that prevent the request from being sent).
"""
parsed_uri = URI.fromBytes(uri, defaultPort=-1)
res = yield defer.ensureDeferred(self._route_matrix_uri(parsed_uri))
# set up the TLS connection params
#
# XXX disabling TLS is really only supported here for the benefit of the
# unit tests. We should make the UTs cope with TLS rather than having to make
# the code support the unit tests.
if self._tls_client_options_factory is None:
tls_options = None
else:
tls_options = self._tls_client_options_factory.get_options(
res.tls_server_name.decode("ascii")
)
# make sure that the Host header is set correctly
if headers is None:
headers = Headers()
else:
headers = headers.copy()
assert headers is not None
if not headers.hasHeader(b"host"):
headers.addRawHeader(b"host", res.host_header)
class EndpointFactory:
@staticmethod
def endpointForURI(_uri):
ep = LoggingHostnameEndpoint(
self._reactor,
res.target_host,
res.target_port,
)
if tls_options is not None:
ep = wrapClientTLS(tls_options, ep)
return ep
agent = Agent.usingEndpointFactory(self._reactor, EndpointFactory(), self._pool)
res = yield agent.request(method, uri, headers, bodyProducer)
return res
async def _route_matrix_uri(
self, parsed_uri: "URI", lookup_well_known: bool = True
) -> "_RoutingResult":
"""Helper for `request`: determine the routing for a Matrix URI
:param parsed_uri: uri to route. Note that it should be parsed with
URI.fromBytes(uri, defaultPort=-1) to set the `port` to -1 if there
is no explicit port given.
:param lookup_well_known: True if we should look up the .well-known
file if there is no SRV record.
:returns a routing result.
"""
# check for an IP literal
try:
ip_address = IPAddress(parsed_uri.host.decode("ascii"))
except Exception:
# not an IP address
ip_address = None
if ip_address:
port = parsed_uri.port
if port == -1:
port = 8448
return _RoutingResult(
host_header=parsed_uri.netloc,
tls_server_name=parsed_uri.host,
target_host=parsed_uri.host,
target_port=port,
)
if parsed_uri.port != -1:
# there is an explicit port
return _RoutingResult(
host_header=parsed_uri.netloc,
tls_server_name=parsed_uri.host,
target_host=parsed_uri.host,
target_port=parsed_uri.port,
)
if lookup_well_known:
# try a .well-known lookup
well_known_server = await self._get_well_known(parsed_uri.host)
if well_known_server:
# if we found a .well-known, start again, but don't do another
# .well-known lookup.
# parse the server name in the .well-known response into host/port.
# (This code is lifted from twisted.web.client.URI.fromBytes).
if b":" in well_known_server:
well_known_host, well_known_port = well_known_server.rsplit(b":", 1)
try:
well_known_port = int(well_known_port)
except ValueError:
# the part after the colon could not be parsed as an int
# - we assume it is an IPv6 literal with no port (the closing
# ']' stops it being parsed as an int)
well_known_host, well_known_port = well_known_server, -1
else:
well_known_host, well_known_port = well_known_server, -1
new_uri = URI(
scheme=parsed_uri.scheme,
netloc=well_known_server,
host=well_known_host,
port=well_known_port,
path=parsed_uri.path,
params=parsed_uri.params,
query=parsed_uri.query,
fragment=parsed_uri.fragment,
)
res = await self._route_matrix_uri(new_uri, lookup_well_known=False)
return res
# try a SRV lookup
service_name = b"_matrix._tcp.%s" % (parsed_uri.host,)
server_list = await self._srv_resolver.resolve_service(service_name)
if not server_list:
target_host = parsed_uri.host
port = 8448
logger.debug(
"No SRV record for %s, using %s:%i",
parsed_uri.host.decode("ascii"),
target_host.decode("ascii"),
port,
)
else:
target_host, port = pick_server_from_list(server_list)
logger.debug(
"Picked %s:%i from SRV records for %s",
target_host.decode("ascii"),
port,
parsed_uri.host.decode("ascii"),
)
return _RoutingResult(
host_header=parsed_uri.netloc,
tls_server_name=parsed_uri.host,
target_host=target_host,
target_port=port,
)
async def _get_well_known(self, server_name: bytes) -> Optional[bytes]:
"""Attempt to fetch and parse a .well-known file for the given server
:param server_name: Name of the server, from the requested url.
:returns either the new server name, from the .well-known, or None if
there was no .well-known file.
"""
try:
result = self._well_known_cache[server_name]
except KeyError:
# TODO: should we linearise so that we don't end up doing two .well-known
# requests for the same server in parallel?
result, cache_period = await self._do_get_well_known(server_name)
if cache_period > 0:
self._well_known_cache.set(server_name, result, cache_period)
return result
async def _do_get_well_known(
self, server_name: bytes
) -> Tuple[Union[bytes, None, object], int]:
"""Actually fetch and parse a .well-known, without checking the cache
:param server_name: Name of the server, from the requested url
:returns a tuple of (result, cache period), where result is one of:
- the new server name from the .well-known (as a `bytes`)
- None if there was no .well-known file.
- INVALID_WELL_KNOWN if the .well-known was invalid
"""
uri = b"https://%s/.well-known/matrix/server" % (server_name,)
uri_str = uri.decode("ascii")
logger.info("Fetching %s", uri_str)
try:
response = await self._well_known_agent.request(b"GET", uri)
body = await read_body_with_max_size(response, WELL_KNOWN_MAX_SIZE)
if response.code != 200:
raise Exception("Non-200 response %s" % (response.code,))
parsed_body = json_decoder.decode(body.decode("utf-8"))
logger.info("Response from .well-known: %s", parsed_body)
if not isinstance(parsed_body, dict):
raise Exception("not a dict")
if "m.server" not in parsed_body:
raise Exception("Missing key 'm.server'")
except Exception as e:
logger.info("Error fetching %s: %s", uri_str, e)
# add some randomness to the TTL to avoid a stampeding herd every hour
# after startup
cache_period: float = WELL_KNOWN_INVALID_CACHE_PERIOD
cache_period += random.uniform(0, WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER)
return (None, cache_period)
result = parsed_body["m.server"].encode("ascii")
cache_period = _cache_period_from_headers(
response.headers,
time_now=self._reactor.seconds,
)
if cache_period is None:
cache_period = WELL_KNOWN_DEFAULT_CACHE_PERIOD
# add some randomness to the TTL to avoid a stampeding herd every 24 hours
# after startup
cache_period += random.uniform(0, WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER)
else:
cache_period = min(cache_period, WELL_KNOWN_MAX_CACHE_PERIOD)
return (result, cache_period)
@implementer(IStreamClientEndpoint)
class LoggingHostnameEndpoint:
"""A wrapper for HostnameEndpint which logs when it connects"""
def __init__(self, reactor, host, port, *args, **kwargs):
self.host = host
self.port = port
self.ep = HostnameEndpoint(reactor, host, port, *args, **kwargs)
logger.info("Endpoint created with %s:%d", host, port)
def connect(self, protocol_factory):
logger.info("Connecting to %s:%i", self.host.decode("ascii"), self.port)
return self.ep.connect(protocol_factory)
def _cache_period_from_headers(headers, time_now=time.time):
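    # Derive a cache TTL in seconds from the response headers: "no-store" -> 0,
    # else "max-age", else the "Expires" header, else None (the caller picks a default).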
cache_controls = _parse_cache_control(headers)
if b"no-store" in cache_controls:
return 0
if b"max-age" in cache_controls:
try:
max_age = int(cache_controls[b"max-age"])
return max_age
except ValueError:
pass
expires = headers.getRawHeaders(b"expires")
if expires is not None:
try:
expires_date = stringToDatetime(expires[-1])
return expires_date - time_now()
except ValueError:
# RFC7234 says 'A cache recipient MUST interpret invalid date formats,
# especially the value "0", as representing a time in the past (i.e.,
# "already expired").
return 0
return None
def _parse_cache_control(headers):
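    # e.g. a header of b"max-age=3600, no-store" parses to
    # {b"max-age": b"3600", b"no-store": None}.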
cache_controls = {}
for hdr in headers.getRawHeaders(b"cache-control", []):
for directive in hdr.split(b","):
splits = [x.strip() for x in directive.split(b"=", 1)]
k = splits[0].lower()
v = splits[1] if len(splits) > 1 else None
cache_controls[k] = v
return cache_controls
@attr.s
class _RoutingResult:
"""The result returned by `_route_matrix_uri`.
Contains the parameters needed to direct a federation connection to a particular
server.
Where a SRV record points to several servers, this object contains a single server
chosen from the list.
"""
host_header = attr.ib()
"""
The value we should assign to the Host header (host:port from the matrix
URI, or .well-known).
:type: bytes
"""
tls_server_name = attr.ib()
"""
The server name we should set in the SNI (typically host, without port, from the
matrix URI or .well-known)
:type: bytes
"""
target_host = attr.ib()
"""
The hostname (or IP literal) we should route the TCP connection to (the target of the
SRV record, or the hostname from the URL/.well-known)
:type: bytes
"""
target_port = attr.ib()
"""
The port we should route the TCP connection to (the target of the SRV record, or
the port from the URL/.well-known, or 8448)
:type: int
"""
|
py
|
1a5c50be65d9845d3ce9a2901ecb25e4665f2322
|
from copy import deepcopy
import torch
import torch.nn as nn
import torch.nn.functional as func
from torchsupport.interacting.off_policy_training import OffPolicyTraining
class AWACTraining(OffPolicyTraining):
def __init__(self, policy, value, agent, environment,
beta=1.0, clip=None, tau=5e-3, **kwargs):
self.value = ...
super().__init__(
policy, agent, environment,
{"value": value}, **kwargs
)
self.beta = beta
self.clip = clip
self.tau = tau
self.target = deepcopy(value)
def update_target(self):
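        # Soft (Polyak) target update: target <- (1 - tau) * target + tau * online,
        # applied in-place to each target parameter under no_grad.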
with torch.no_grad():
tp = self.target.parameters()
ap = self.value.parameters()
for t, a in zip(tp, ap):
t *= (1 - self.tau)
t += self.tau * a
def action_nll(self, policy, action):
return func.cross_entropy(policy, action, reduction='none')
def policy_loss(self, policy, action, advantage):
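        # Advantage-weighted regression (the AWAC policy objective): weight each
        # action's negative log-likelihood by exp(advantage / beta), optionally clipped.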
weight = torch.exp(advantage / self.beta)
if self.clip is not None:
weight = weight.clamp(0, self.clip)
negative_log_likelihood = self.action_nll(policy, action)
weighted_loss = negative_log_likelihood * weight
return weighted_loss.mean()
def state_value(self, state, value=None):
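        # Expected value of the discrete policy: V(s) = sum_a softmax(policy)(a|s) * Q(s, a).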
value = value or self.value
action_value = value(state)
policy = self.policy(state)
expected = action_value * policy.softmax(dim=1)
expected = expected.sum(dim=1)
return expected
def run_policy(self, sample):
initial_state = sample.initial_state
action = sample.action
with torch.no_grad():
action_value = self.value(initial_state)
inds = torch.arange(action.size(0), device=action.device)
action_value = action_value[inds, action]
value = self.state_value(initial_state)
advantage = action_value - value
self.current_losses["mean advantage"] = float(advantage.mean())
policy = self.policy(initial_state)
return policy, action, advantage
def auxiliary_loss(self, value, target):
return func.mse_loss(value.view(-1), target.view(-1))
def run_auxiliary(self, sample):
self.update_target()
initial_state = sample.initial_state
final_state = sample.final_state
action = sample.action
rewards = sample.rewards
action_value = self.value(initial_state)
inds = torch.arange(action.size(0), device=action.device)
action_value = action_value[inds, action]
with torch.no_grad():
state_value = self.state_value(
final_state, value=self.target
)
done_mask = 1.0 - sample.done.float()
target = rewards + self.discount * done_mask * state_value
self.current_losses["mean state value"] = float(state_value.mean())
self.current_losses["mean target value"] = float(target.mean())
return action_value, target
|
py
|
1a5c50d0261b2e6a1cded4e225a886ce600a8c9b
|
# modified from https://github.com/raoyongming/DynamicViT and https://github.com/facebookresearch/deit
import argparse
import numpy as np
import torch.backends.cudnn as cudnn
from pathlib import Path
from timm.data import Mixup
from timm.models import create_model
from timm.scheduler import create_scheduler
from timm.optim import create_optimizer
from timm.utils import NativeScaler, get_state_dict, ModelEma
from deit.datasets import build_dataset2, get_post_process,build_dataset
import utils
from timm.utils import accuracy, ModelEma
from torchvision import utils as vutils
import torch
from torchvision import transforms
import models
from PIL import Image
import os
import pdb
import torch.nn.functional as F
import torch.nn as nn
def get_transform(input_size):
t = []
resize_im = (input_size != 224)
if resize_im:
        size = int((256 / 224) * input_size)  # use the input_size argument rather than the global args
t.append(
transforms.Resize(size, interpolation=3), # to maintain same ratio w.r.t. 224 images
)
        t.append(transforms.CenterCrop(input_size))
t.append(transforms.ToTensor())
else:
t.append(transforms.ToTensor())
return transforms.Compose(t)
def get_keep_indices(decisions):
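    # Compose per-stage keep decisions into absolute token indices: each stage's
    # decisions index into the tokens that survived the previous stage.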
keep_indices = []
for i in range(3):
if i == 0:
keep_indices.append(decisions[i])
else:
keep_indices.append(keep_indices[-1][decisions[i]])
return keep_indices
def gen_masked_tokens(tokens, indices, alpha=0.3):
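    # Fade every patch token NOT listed in `indices` toward white (alpha-blend with 255).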
indices = [i for i in range(196) if i not in indices]
tokens = tokens.copy()
tokens[indices] = alpha * tokens[indices] + (1 - alpha) * 255
return tokens
def recover_image(tokens):
# image: (C, 196, 16, 16)
image = tokens.reshape(14, 14, 16, 16, 3).swapaxes(1, 2).reshape(224, 224, 3)
return image
def gen_visualization(image, keep_indices):
# keep_indices = get_keep_indices(decisions)
image_tokens = image.reshape(14, 16, 14, 16, 3).swapaxes(1, 2).reshape(196, 16, 16, 3)
viz = recover_image(gen_masked_tokens(image_tokens, keep_indices))
return viz
def get_args_parser():
parser = argparse.ArgumentParser('DeiT training and evaluation script', add_help=False)
parser.add_argument('--batch-size', default=64, type=int)
parser.add_argument('--epochs', default=300, type=int)
# Model parameters
parser.add_argument('--model', default='deit_base_patch16_224', type=str, metavar='MODEL',
help='Name of model to train')
parser.add_argument('--input-size', default=224, type=int, help='images input size')
parser.add_argument('--drop', type=float, default=0.0, metavar='PCT',
help='Dropout rate (default: 0.)')
parser.add_argument('--drop-path', type=float, default=0.1, metavar='PCT',
help='Drop path rate (default: 0.1)')
parser.add_argument('--model-ema', action='store_true')
parser.add_argument('--no-model-ema', action='store_false', dest='model_ema')
parser.set_defaults(model_ema=True)
parser.add_argument('--model-ema-decay', type=float, default=0.99996, help='')
parser.add_argument('--model-ema-force-cpu', action='store_true', default=False, help='')
# Optimizer parameters
parser.add_argument('--opt', default='adamw', type=str, metavar='OPTIMIZER',
help='Optimizer (default: "adamw"')
parser.add_argument('--opt-eps', default=1e-8, type=float, metavar='EPSILON',
help='Optimizer Epsilon (default: 1e-8)')
parser.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA',
help='Optimizer Betas (default: None, use opt default)')
parser.add_argument('--clip-grad', type=float, default=None, metavar='NORM',
help='Clip gradient norm (default: None, no clipping)')
parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
help='SGD momentum (default: 0.9)')
parser.add_argument('--weight-decay', type=float, default=0.05,
help='weight decay (default: 0.05)')
# Learning rate schedule parameters
parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER',
help='LR scheduler (default: "cosine"')
parser.add_argument('--lr', type=float, default=5e-4, metavar='LR',
help='learning rate (default: 5e-4)')
parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct',
help='learning rate noise on/off epoch percentages')
parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT',
help='learning rate noise limit percent (default: 0.67)')
parser.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV',
help='learning rate noise std-dev (default: 1.0)')
parser.add_argument('--warmup-lr', type=float, default=1e-6, metavar='LR',
help='warmup learning rate (default: 1e-6)')
parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR',
help='lower lr bound for cyclic schedulers that hit 0 (1e-5)')
parser.add_argument('--decay-epochs', type=float, default=30, metavar='N',
help='epoch interval to decay LR')
parser.add_argument('--warmup-epochs', type=int, default=5, metavar='N',
help='epochs to warmup LR, if scheduler supports')
parser.add_argument('--cooldown-epochs', type=int, default=10, metavar='N',
help='epochs to cooldown LR at min_lr, after cyclic schedule ends')
parser.add_argument('--patience-epochs', type=int, default=10, metavar='N',
help='patience epochs for Plateau LR scheduler (default: 10')
parser.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE',
help='LR decay rate (default: 0.1)')
# Augmentation parameters
parser.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT',
help='Color jitter factor (default: 0.4)')
parser.add_argument('--aa', type=str, default='rand-m9-mstd0.5-inc1', metavar='NAME',
                        help='Use AutoAugment policy. "v0" or "original" (default: rand-m9-mstd0.5-inc1)')
parser.add_argument('--smoothing', type=float, default=0.1, help='Label smoothing (default: 0.1)')
parser.add_argument('--train-interpolation', type=str, default='bicubic',
help='Training interpolation (random, bilinear, bicubic default: "bicubic")')
parser.add_argument('--repeated-aug', action='store_true')
parser.add_argument('--no-repeated-aug', action='store_false', dest='repeated_aug')
parser.set_defaults(repeated_aug=True)
# * Random Erase params
parser.add_argument('--reprob', type=float, default=0.25, metavar='PCT',
help='Random erase prob (default: 0.25)')
parser.add_argument('--remode', type=str, default='pixel',
help='Random erase mode (default: "pixel")')
parser.add_argument('--recount', type=int, default=1,
help='Random erase count (default: 1)')
parser.add_argument('--resplit', action='store_true', default=False,
help='Do not random erase first (clean) augmentation split')
# * Mixup params
parser.add_argument('--mixup', type=float, default=0.8,
help='mixup alpha, mixup enabled if > 0. (default: 0.8)')
parser.add_argument('--cutmix', type=float, default=1.0,
help='cutmix alpha, cutmix enabled if > 0. (default: 1.0)')
parser.add_argument('--cutmix-minmax', type=float, nargs='+', default=None,
help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)')
parser.add_argument('--mixup-prob', type=float, default=1.0,
help='Probability of performing mixup or cutmix when either/both is enabled')
parser.add_argument('--mixup-switch-prob', type=float, default=0.5,
help='Probability of switching to cutmix when both mixup and cutmix enabled')
parser.add_argument('--mixup-mode', type=str, default='batch',
help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"')
# Distillation parameters
parser.add_argument('--teacher-model', default='regnety_160', type=str, metavar='MODEL',
help='Name of teacher model to train (default: "regnety_160"')
parser.add_argument('--teacher-path', type=str, default='')
parser.add_argument('--distillation-type', default='none', choices=['none', 'soft', 'hard'], type=str, help="")
parser.add_argument('--distillation-alpha', default=0.5, type=float, help="")
parser.add_argument('--distillation-tau', default=1.0, type=float, help="")
# * Finetuning params
parser.add_argument('--finetune', default='', help='finetune from checkpoint')
# Dataset parameters
parser.add_argument('--data-path', default='/datasets01/imagenet_full_size/061417/', type=str,
help='dataset path')
parser.add_argument('--data-set', default='IMNET', choices=['CIFAR', 'IMNET', 'INAT', 'INAT19'],
type=str, help='Image Net dataset path')
parser.add_argument('--inat-category', default='name',
choices=['kingdom', 'phylum', 'class', 'order', 'supercategory', 'family', 'genus', 'name'],
type=str, help='semantic granularity')
parser.add_argument('--output_dir', default='./test_img/', help='path where to save')
parser.add_argument('--device', default='cuda',
help='device to use for training / testing')
parser.add_argument('--seed', default=0, type=int)
parser.add_argument('--resume', default='', help='resume from checkpoint')
parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
help='start epoch')
parser.add_argument('--eval', action='store_false', help='Perform evaluation only')
parser.add_argument('--dist-eval', action='store_true', default=False, help='Enabling distributed evaluation')
parser.add_argument('--num_workers', default=10, type=int)
parser.add_argument('--pin-mem', action='store_true',
help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
parser.add_argument('--no-pin-mem', action='store_false', dest='pin_mem',
help='')
parser.set_defaults(pin_mem=True)
# distributed training parameters
parser.add_argument('--world_size', default=1, type=int,
help='number of distributed processes')
parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
parser.add_argument('--excel_filename', type=str, default='attention_matrix_cls', help='filename of saving excel')
# visualization
parser.add_argument('--img-path', default='', type=str,
help='path to images to be visualized. Set '' to visualize batch images in imagenet val.')
parser.add_argument('--save-name', default='', type=str,
help='name to save when visualizing a single image. Set '' to save name as the original image.')
parser.add_argument('--layer-wise-prune', action='store_true',
help='set true when visualize a model trained without layer to stage training strategy')
return parser
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
def unnormalize(input_tensor):
return (input_tensor*IMAGENET_DEFAULT_STD)+IMAGENET_DEFAULT_MEAN
def save_image_tensor(input_tensor: torch.Tensor, filename):
"""
"""
assert ((len(input_tensor.shape) == 4 and input_tensor.shape[0] == 1) or len(input_tensor.shape) == 3)
input_tensor = input_tensor.clone().detach()
input_tensor = input_tensor.to(torch.device('cpu'))
vutils.save_image(input_tensor, filename)
@torch.no_grad()
def visualize_single_img(img_input, model, device, transform, post_process, save_name):
model.eval()
# img: 1, 3, H, W
image_raw = transform(img_input)
save_image_tensor(image_raw, Path(args.output_dir, '{}.jpg'.format(save_name)))
images = post_process(image_raw)
images = images.unsqueeze(0)
images = images.to(device, non_blocking=True)
print(images.shape)
# compute output
with torch.cuda.amp.autocast():
output = model(images)
vis_dict = model.get_vis_dict()
image_raw = image_raw * 255
image_raw = image_raw.squeeze(0).permute(1, 2, 0).cpu().numpy()
for k in vis_dict:
keep_indices = vis_dict[k]
viz = gen_visualization(image_raw, keep_indices)
viz = torch.from_numpy(viz).permute(2, 0, 1)
viz = viz / 255
save_image_tensor(viz,
Path(args.output_dir, '{}_{}.jpg'.format(save_name, k)))
print("Visualization finished")
@torch.no_grad()
def visualize(data_loader, model, device, post_process):
metric_logger = utils.MetricLogger(delimiter=" ")
header = 'Test:'
# switch to evaluation mode
model.eval()
# set stage_wise_prune = True if the trained model is under layer-to-stage training strategy
model.stage_wise_prune = not args.layer_wise_prune
threshold = 0.7 # the exit threshold of coarse stage
all_index = 0
for images_raw_full, target_full in metric_logger.log_every(data_loader, 10, header):
B = images_raw_full.shape[0]
for index in range(B):
all_index += 1
images_raw = images_raw_full[index:index + 1]
target = target_full[index:index + 1]
assert images_raw.shape[0] == 1
images = post_process(images_raw)
images = images.to(device, non_blocking=True)
target = target.to(device, non_blocking=True)
# input
images_list = []
resized_img = F.interpolate(images, (112, 112), mode='bilinear', align_corners=True)
images_list.append(resized_img)
images_list.append(images)
# compute output
with torch.cuda.amp.autocast():
output = model(images_list)
            if nn.functional.softmax(output[0], dim=-1).max() > threshold:
output = output[0]
exit_stage = "coarse"
else:
output = output[1]
exit_stage = "fine"
acc1, acc5 = accuracy(output, target, topk=(1, 5))
if acc1 == 0:
judger = 'wrong'
elif acc1 == 100:
judger = 'right'
else:
                raise ValueError('per-sample top-1 accuracy must be 0 or 100, got {}'.format(acc1))
if exit_stage == "coarse":
name = 'label{}_{}_coarse_index{}.jpg'.format(str(target.item()),judger,all_index)
save_image_tensor(images_raw, Path(args.output_dir, name))
continue
informative_index = model.get_vis_data()
images_raw = images_raw * 255
images_raw = images_raw.squeeze(0).permute(1, 2, 0).cpu().numpy()
keep_indices = informative_index.tolist()[0]
viz = gen_visualization(images_raw, keep_indices)
viz = torch.from_numpy(viz).permute(2, 0, 1)
viz = viz / 255
name = 'label{}_{}_{}_index{}.jpg'.format(
str(target.item()), judger,exit_stage, all_index)
save_image_tensor(viz, Path(args.output_dir, name))
batch_size = images.shape[0]
metric_logger.meters['acc1'].update(acc1.item(), n=batch_size)
metric_logger.meters['acc5'].update(acc5.item(), n=batch_size)
print("Visualization finished")
# gather the stats from all processes
metric_logger.synchronize_between_processes()
return {k: meter.global_avg for k, meter in metric_logger.meters.items()}
def vis_single(args):
device = torch.device(args.device)
# fix the seed for reproducibility
seed = args.seed
torch.manual_seed(seed)
np.random.seed(seed)
# random.seed(seed)
cudnn.benchmark = True
transform = get_transform(input_size=224) # set input_size to other value if the test image is not 224*224
post_process = get_post_process()
print("Creating model: {args.model}")
model = create_model(
args.model,
pretrained=False,
num_classes=1000,
drop_rate=args.drop,
drop_path_rate=args.drop_path,
drop_block_rate=None,
)
model.to(device)
n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
print('number of params:', n_parameters)
if args.resume:
if args.resume.startswith('https'):
checkpoint = torch.hub.load_state_dict_from_url(
args.resume, map_location='cpu', check_hash=True)
else:
checkpoint = torch.load(args.resume, map_location='cpu')
model.load_state_dict(checkpoint['model'])
img_input = Image.open(args.img_path)
if args.save_name == '':
save_name = os.path.basename(args.img_path).split('.')[0]
else:
save_name = args.save_name
if args.eval:
test_stats = visualize_single_img(img_input, model, device, transform, post_process, save_name=save_name)
return
def vis_batch(args):
utils.init_distributed_mode(args)
print(args)
if args.distillation_type != 'none' and args.finetune and not args.eval:
raise NotImplementedError("Finetuning with distillation not yet supported")
device = torch.device(args.device)
# fix the seed for reproducibility
seed = args.seed
torch.manual_seed(seed)
np.random.seed(seed)
# random.seed(seed)
cudnn.benchmark = True
dataset_val, args.nb_classes = build_dataset2(is_train=False, args=args)
post_process = get_post_process()
if True: # args.distributed:
num_tasks = utils.get_world_size()
global_rank = utils.get_rank()
if args.dist_eval:
if len(dataset_val) % num_tasks != 0:
print('Warning: Enabling distributed evaluation with an eval dataset not divisible by process number. '
'This will slightly alter validation results as extra duplicate entries are added to achieve '
'equal num of samples per-process.')
sampler_val = torch.utils.data.DistributedSampler(
dataset_val, num_replicas=num_tasks, rank=global_rank, shuffle=True)
else:
sampler_val = torch.utils.data.RandomSampler(dataset_val)
else:
sampler_val = torch.utils.data.RandomSampler(dataset_val)
data_loader_val = torch.utils.data.DataLoader(
dataset_val, sampler=sampler_val,
batch_size=args.batch_size,
num_workers=args.num_workers,
pin_memory=args.pin_mem,
drop_last=False,
)
print("Creating model: {args.model}")
model = create_model(
args.model,
pretrained=False,
num_classes=args.nb_classes,
drop_rate=args.drop,
drop_path_rate=args.drop_path,
drop_block_rate=None,
)
model.to(device)
model_ema = None
if args.model_ema:
# Important to create EMA model after cuda(), DP wrapper, and AMP but before SyncBN and DDP wrapper
model_ema = ModelEma(
model,
decay=args.model_ema_decay,
device='cpu' if args.model_ema_force_cpu else '',
resume='')
model_without_ddp = model
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
model_without_ddp = model.module
n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
print('number of params:', n_parameters)
linear_scaled_lr = args.lr * args.batch_size * utils.get_world_size() / 512.0
args.lr = linear_scaled_lr
optimizer = create_optimizer(args, model_without_ddp)
loss_scaler = NativeScaler()
lr_scheduler, _ = create_scheduler(args, optimizer)
if args.distillation_type != 'none':
assert args.teacher_path, 'need to specify teacher-path when using distillation'
print("Creating teacher model: {args.teacher_model}")
teacher_model = create_model(
args.teacher_model,
pretrained=False,
num_classes=args.nb_classes,
global_pool='avg',
)
if args.teacher_path.startswith('https'):
checkpoint = torch.hub.load_state_dict_from_url(
args.teacher_path, map_location='cpu', check_hash=True)
else:
checkpoint = torch.load(args.teacher_path, map_location='cpu')
teacher_model.load_state_dict(checkpoint['model'])
teacher_model.to(device)
teacher_model.eval()
# wrap the criterion in our custom DistillationLoss, which
# just dispatches to the original criterion if args.distillation_type is 'none'
if args.resume:
if args.resume.startswith('https'):
checkpoint = torch.hub.load_state_dict_from_url(
args.resume, map_location='cpu', check_hash=True)
else:
checkpoint = torch.load(args.resume, map_location='cpu')
model_without_ddp.load_state_dict(checkpoint['model'])
if not args.eval and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint:
optimizer.load_state_dict(checkpoint['optimizer'])
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
args.start_epoch = checkpoint['epoch'] + 1
if args.model_ema:
utils._load_checkpoint_for_ema(model_ema, checkpoint['model_ema'])
if 'scaler' in checkpoint:
loss_scaler.load_state_dict(checkpoint['scaler'])
visualize(data_loader_val, model, device, post_process=post_process)
if __name__ == '__main__':
parser = argparse.ArgumentParser('DeiT training and evaluation script', parents=[get_args_parser()])
args = parser.parse_args()
if args.output_dir:
Path(args.output_dir).mkdir(parents=True, exist_ok=True)
args.eval = True
if args.img_path == '':
# To visualize batch images of imagenet val, please run this:
vis_batch(args)
else:
# To visualize a single image, please run this:
vis_single(args)
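# Usage sketch of the dispatch above (hypothetical path value):
#   args.img_path = './sample.jpg'  ->  vis_single(args)   # visualize one image
#   args.img_path = ''              ->  vis_batch(args)    # visualize a batch from the val set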
|
py
|
1a5c50f97a254e58929c628aae8abb7724603ec1
|
"""为diagnosis应用定义路由"""
from django.urls import path
from django.conf.urls import url
from . import views
app_name = 'diagnosis'
urlpatterns = [
path('pic_upload/', views.PicUploadView.as_view(), name='pic_upload'),
path('pic_process/', views.PicProcessView.as_view(), name='pic_process'),
path('result/', views.ResultView.as_view(), name='result'),
path('main_layout/', views.MainLayoutView.as_view(), name='main_layout'),
path('result_list/', views.ResultListView.as_view(), name='result_list'),
path('result_detail/<num>/', views.ResultDetailView.as_view(), name='detail'),
path('append_result/<img_url>/', views.PicProcessView.getPicture),
path('update_result/<checked>/<result>/<idx>/', views.UpdateResultView.as_view())
]
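# Reverse-lookup sketch (hypothetical value; the final prefix depends on where
# this urlconf is included in the project's root URLs):
#   from django.urls import reverse
#   reverse('diagnosis:detail', kwargs={'num': 3})   # e.g. '/diagnosis/result_detail/3/'
#   reverse('diagnosis:pic_upload')                   # e.g. '/diagnosis/pic_upload/'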
|
py
|
1a5c510660aab957c03b34f1ef3780e9ec17efd4
|
from __future__ import print_function
import os
import pytest
import six
from six.moves import queue
import threading
import time
import shutil
import sys
import wandb
from wandb.util import mkdir_exists_ok
from .utils import first_filestream
def test_send_status_request_stopped(mock_server, backend_interface):
mock_server.ctx["stopped"] = True
with backend_interface() as interface:
status_resp = interface.communicate_stop_status()
assert status_resp is not None
assert status_resp.run_should_stop
def test_parallel_requests(mock_server, backend_interface):
mock_server.ctx["stopped"] = True
work_queue = queue.Queue()
with backend_interface() as interface:
def send_sync_request(i):
work_queue.get()
if i % 3 == 0:
status_resp = interface.communicate_stop_status()
assert status_resp is not None
assert status_resp.run_should_stop
elif i % 3 == 2:
summary_resp = interface.communicate_get_summary()
assert summary_resp is not None
assert hasattr(summary_resp, "item")
work_queue.task_done()
for i in range(10):
work_queue.put(None)
t = threading.Thread(target=send_sync_request, args=(i,))
t.daemon = True
t.start()
work_queue.join()
def test_send_status_request_network(mock_server, backend_interface):
mock_server.ctx["rate_limited_times"] = 3
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "live")]})
status_resp = interface.communicate_network_status()
assert status_resp is not None
assert len(status_resp.network_responses) > 0
assert status_resp.network_responses[0].http_status_code == 429
def test_resume_success(mocked_run, test_settings, mock_server, backend_interface):
test_settings.update(resume="allow", source=wandb.sdk.wandb_settings.Source.INIT)
mock_server.ctx["resume"] = True
with backend_interface(initial_run=False) as interface:
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error") is False
assert run_result.run.starting_step == 16
def test_resume_error_never(mocked_run, test_settings, mock_server, backend_interface):
test_settings.update(resume="never", source=wandb.sdk.wandb_settings.Source.INIT)
mock_server.ctx["resume"] = True
with backend_interface(initial_run=False) as interface:
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error")
assert (
run_result.error.message
== "resume='never' but run (%s) exists" % mocked_run.id
)
def test_resume_error_must(mocked_run, test_settings, mock_server, backend_interface):
test_settings.update(resume="must", source=wandb.sdk.wandb_settings.Source.INIT)
mock_server.ctx["resume"] = False
with backend_interface(initial_run=False) as interface:
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error")
assert (
run_result.error.message
== "resume='must' but run (%s) doesn't exist" % mocked_run.id
)
def test_save_live_existing_file(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
with open(os.path.join(mocked_run.dir, "test.txt"), "w") as f:
f.write("TEST TEST")
interface.publish_files({"files": [("test.txt", "live")]})
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
assert any(
[
"test.txt" in request_dict.get("uploaded", [])
for request_dict in mock_server.ctx["file_stream"]
]
)
def test_save_live_write_after_policy(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "live")]})
with open(os.path.join(mocked_run.dir, "test.txt"), "w") as f:
f.write("TEST TEST")
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_preempting_sent_to_server(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_preempting()
assert any(
[
"preempting" in request_dict
for request_dict in mock_server.ctx["file_stream"]
]
)
def test_save_live_multi_write(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "live")]})
test_file = os.path.join(mocked_run.dir, "test.txt")
with open(test_file, "w") as f:
f.write("TEST TEST")
# File system polling happens every second
time.sleep(1.5)
with open(test_file, "w") as f:
f.write("TEST TEST TEST TEST")
assert len(mock_server.ctx["storage?file=test.txt"]) == 2
def test_save_live_glob_multi_write(mocked_run, mock_server, mocker, backend_interface):
def mock_min_size(self, size):
return 1
mocker.patch("wandb.filesync.dir_watcher.PolicyLive.RATE_LIMIT_SECONDS", 1)
mocker.patch(
"wandb.filesync.dir_watcher.PolicyLive.min_wait_for_size", mock_min_size
)
with backend_interface() as interface:
interface.publish_files({"files": [("checkpoints/*", "live")]})
mkdir_exists_ok(os.path.join(mocked_run.dir, "checkpoints"))
test_file_1 = os.path.join(mocked_run.dir, "checkpoints", "test_1.txt")
test_file_2 = os.path.join(mocked_run.dir, "checkpoints", "test_2.txt")
# To debug this test adds some prints to the dir_watcher.py _on_file_* handlers
print("Wrote file 1")
with open(test_file_1, "w") as f:
f.write("TEST TEST")
time.sleep(2)
print("Wrote file 1 2nd time")
with open(test_file_1, "w") as f:
f.write("TEST TEST TEST TEST")
# File system polling happens every second
time.sleep(1.5)
print("Wrote file 2")
with open(test_file_2, "w") as f:
f.write("TEST TEST TEST TEST")
print("Wrote file 1 3rd time")
with open(test_file_1, "w") as f:
f.write("TEST TEST TEST TEST TEST TEST")
print("Stopping backend")
print("Backend stopped")
print(
"CTX:", [(k, v) for k, v in mock_server.ctx.items() if k.startswith("storage")]
)
assert len(mock_server.ctx["storage?file=checkpoints/test_1.txt"]) == 3
assert len(mock_server.ctx["storage?file=checkpoints/test_2.txt"]) == 1
def test_save_rename_file(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "live")]})
test_file = os.path.join(mocked_run.dir, "test.txt")
with open(test_file, "w") as f:
f.write("TEST TEST")
# File system polling happens every second
time.sleep(1.5)
shutil.copy(test_file, test_file.replace("test.txt", "test-copy.txt"))
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
assert len(mock_server.ctx["storage?file=test-copy.txt"]) == 1
def test_save_end_write_after_policy(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "end")]})
with open(os.path.join(mocked_run.dir, "test.txt"), "w") as f:
f.write("TEST TEST")
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_save_end_existing_file(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
with open(os.path.join(mocked_run.dir, "test.txt"), "w") as f:
f.write("TEST TEST")
interface.publish_files({"files": [("test.txt", "end")]})
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_save_end_multi_write(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "end")]})
test_file = os.path.join(mocked_run.dir, "test.txt")
with open(test_file, "w") as f:
f.write("TEST TEST")
# File system polling happens every second
time.sleep(1.5)
with open(test_file, "w") as f:
f.write("TEST TEST TEST TEST")
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_save_now_write_after_policy(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "now")]})
with open(os.path.join(mocked_run.dir, "test.txt"), "w") as f:
f.write("TEST TEST")
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_save_now_existing_file(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
with open(os.path.join(mocked_run.dir, "test.txt"), "w") as f:
f.write("TEST TEST")
interface.publish_files({"files": [("test.txt", "now")]})
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_save_now_multi_write(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("test.txt", "now")]})
test_file = os.path.join(mocked_run.dir, "test.txt")
with open(test_file, "w") as f:
f.write("TEST TEST")
# File system polling happens every second
time.sleep(1.5)
with open(test_file, "w") as f:
f.write("TEST TEST TEST TEST")
assert len(mock_server.ctx["storage?file=test.txt"]) == 1
def test_save_glob_multi_write(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("checkpoints/*", "now")]})
mkdir_exists_ok(os.path.join(mocked_run.dir, "checkpoints"))
test_file_1 = os.path.join(mocked_run.dir, "checkpoints", "test_1.txt")
test_file_2 = os.path.join(mocked_run.dir, "checkpoints", "test_2.txt")
print("Wrote file 1")
with open(test_file_1, "w") as f:
f.write("TEST TEST")
# File system polling happens every second
time.sleep(1.5)
print("Wrote file 2")
with open(test_file_2, "w") as f:
f.write("TEST TEST TEST TEST")
time.sleep(1.5)
print("Stopping backend")
print("Backend stopped")
print(
"CTX", [(k, v) for k, v in mock_server.ctx.items() if k.startswith("storage")]
)
assert len(mock_server.ctx["storage?file=checkpoints/test_1.txt"]) == 1
assert len(mock_server.ctx["storage?file=checkpoints/test_2.txt"]) == 1
def test_save_now_relative_path(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
interface.publish_files({"files": [("foo/test.txt", "now")]})
test_file = os.path.join(mocked_run.dir, "foo", "test.txt")
mkdir_exists_ok(os.path.dirname(test_file))
with open(test_file, "w") as f:
f.write("TEST TEST")
print("DAMN DUDE", mock_server.ctx)
assert len(mock_server.ctx["storage?file=foo/test.txt"]) == 1
def test_save_now_twice(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
file_path = os.path.join("foo", "test.txt")
interface.publish_files({"files": [(file_path, "now")]})
test_file = os.path.join(mocked_run.dir, file_path)
mkdir_exists_ok(os.path.dirname(test_file))
with open(test_file, "w") as f:
f.write("TEST TEST")
time.sleep(1.5)
with open(test_file, "w") as f:
f.write("TEST TEST TEST TEST")
interface.publish_files({"files": [(file_path, "now")]})
print("DAMN DUDE", mock_server.ctx)
assert len(mock_server.ctx["storage?file=foo/test.txt"]) == 2
def test_output(mocked_run, mock_server, backend_interface):
with backend_interface() as interface:
for i in range(100):
interface.publish_output("stdout", "\rSome recurring line")
interface.publish_output("stdout", "\rFinal line baby\n")
print("DUDE!", mock_server.ctx)
stream = first_filestream(mock_server.ctx)
assert "Final line baby" in stream["files"]["output.log"]["content"][0]
def test_sync_spell_run(mocked_run, mock_server, backend_interface, parse_ctx):
try:
os.environ["SPELL_RUN_URL"] = "https://spell.run/foo"
with backend_interface() as interface:
pass
print("CTX", mock_server.ctx)
ctx = parse_ctx(mock_server.ctx)
assert ctx.config["_wandb"]["value"]["spell_url"] == "https://spell.run/foo"
# Check that we pinged spells API
assert mock_server.ctx["spell_data"] == {
"access_token": None,
"url": "{}/mock_server_entity/test/runs/{}".format(
mocked_run._settings.base_url, mocked_run.id
),
}
finally:
del os.environ["SPELL_RUN_URL"]
def test_upgrade_upgraded(
mocked_run, mock_server, backend_interface, restore_version,
):
wandb.__version__ = "0.0.6"
wandb.__hack_pypi_latest_version__ = "0.0.8"
with backend_interface(initial_run=False) as interface:
ret = interface.communicate_check_version()
assert ret
assert (
ret.upgrade_message
== "wandb version 0.0.8 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
)
assert not ret.delete_message
assert not ret.yank_message
# We need a run to cleanly shutdown backend
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error") is False
def test_upgrade_yanked(
mocked_run, mock_server, backend_interface, restore_version,
):
wandb.__version__ = "0.0.2"
wandb.__hack_pypi_latest_version__ = "0.0.8"
with backend_interface(initial_run=False) as interface:
ret = interface.communicate_check_version()
assert ret
assert (
ret.upgrade_message
== "wandb version 0.0.8 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
)
assert not ret.delete_message
assert (
ret.yank_message
== "wandb version 0.0.2 has been recalled! Please upgrade."
)
# We need a run to cleanly shutdown backend
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error") is False
def test_upgrade_yanked_message(
mocked_run, mock_server, backend_interface, restore_version,
):
wandb.__version__ = "0.0.3"
wandb.__hack_pypi_latest_version__ = "0.0.8"
with backend_interface(initial_run=False) as interface:
ret = interface.communicate_check_version()
assert ret
assert (
ret.upgrade_message
== "wandb version 0.0.8 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
)
assert not ret.delete_message
assert (
ret.yank_message
== "wandb version 0.0.3 has been recalled! (just cuz) Please upgrade."
)
# We need a run to cleanly shutdown backend
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error") is False
def test_upgrade_removed(
mocked_run, mock_server, backend_interface, restore_version,
):
wandb.__version__ = "0.0.4"
wandb.__hack_pypi_latest_version__ = "0.0.8"
with backend_interface(initial_run=False) as interface:
ret = interface.communicate_check_version()
assert ret
assert (
ret.upgrade_message
== "wandb version 0.0.8 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
)
assert (
ret.delete_message
== "wandb version 0.0.4 has been retired! Please upgrade."
)
assert not ret.yank_message
# We need a run to cleanly shutdown backend
run_result = interface.communicate_run(mocked_run)
assert run_result.HasField("error") is False
# TODO: test other sender methods
@pytest.mark.parametrize("empty_query", [True, False])
@pytest.mark.parametrize("local_none", [True, False])
@pytest.mark.parametrize("outdated", [True, False])
def test_exit_poll_local(
publish_util, mock_server, collect_responses, empty_query, local_none, outdated
):
mock_server.ctx["out_of_date"] = outdated
mock_server.ctx["empty_query"] = empty_query
mock_server.ctx["local_none"] = local_none
publish_util()
out_of_date = collect_responses.local_info.out_of_date
if empty_query:
assert out_of_date
elif local_none:
assert not out_of_date
else:
assert out_of_date == outdated
|
py
|
1a5c5145d7ec8e457d2a649be08f7d2c156f4075
|
from abc import ABCMeta, abstractmethod
class AbstractStats():
__metaclass__ = ABCMeta
@abstractmethod
def incr(self, namespace, amount, rate=None):
pass
@abstractmethod
def decr(self, namespace, amount, rate=None):
pass
@abstractmethod
def timing(self, namespace, timems):
pass
@abstractmethod
def time(self, namespace):
"""
        Should return a context manager (see the NullStats sketch after this class)
:param namespace:
:return:
"""
pass
@abstractmethod
def gauge(self, namespace, amount, rate=False, delta=False):
pass
@abstractmethod
def set(self, namespace, arr):
pass
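# A minimal concrete sketch (hypothetical, not part of the original interface)
# showing one way to satisfy it; time() returns a context manager that measures
# elapsed milliseconds and forwards them to timing(), as the docstring requires.
from contextlib import contextmanager
import time as _time
class NullStats(AbstractStats):
    """Implementation whose counters are no-ops; time() still measures."""
    def incr(self, namespace, amount, rate=None):
        pass
    def decr(self, namespace, amount, rate=None):
        pass
    def timing(self, namespace, timems):
        pass
    @contextmanager
    def time(self, namespace):
        start = _time.time()
        try:
            yield
        finally:
            self.timing(namespace, (_time.time() - start) * 1000.0)
    def gauge(self, namespace, amount, rate=False, delta=False):
        pass
    def set(self, namespace, arr):
        pass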
|
py
|
1a5c51841e8e9904a3bfb254725eb4a86129ab8f
|
from matplotlib import pyplot as plt
import numpy as np
def configure_ax_asia(ax, extent=None, tight_layout=True):
ax.coastlines(resolution='50m')
xticks = range(60, 160, 20)
ax.set_xticks(xticks)
ax.set_xticklabels([f'${t}\\degree$ E' for t in xticks])
ax.set_xticks(np.linspace(58, 150, 47), minor=True)
yticks = range(20, 60, 20)
ax.set_yticks(yticks)
ax.set_yticklabels([f'${t}\\degree$ N' for t in yticks])
ax.set_yticks(np.linspace(2, 56, 28), minor=True)
ax.tick_params(labelbottom=True, labeltop=False, labelleft=True, labelright=False,
bottom=True, top=True, left=True, right=True, which='both')
if extent is not None:
ax.set_xlim((extent[0], extent[1]))
ax.set_ylim((extent[2], extent[3]))
else:
ax.set_xlim((58, 150))
ax.set_ylim((2, 56))
if tight_layout:
plt.tight_layout()
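# A minimal usage sketch (assumes cartopy is installed; configure_ax_asia
# expects a cartopy GeoAxes because it calls ax.coastlines()):
if __name__ == '__main__':
    import cartopy.crs as ccrs
    fig = plt.figure(figsize=(8, 5))
    ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
    configure_ax_asia(ax)
    plt.show()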
|
py
|
1a5c525c6aae8b6a7a05f1854848b865253ab037
|
"""
Configuration for django-hosts. Sets urlconf on the request to a module
under hipikat.urls matching the requested CNAME, if it's one of 'www' or
'blog'. (Outside of testing and development it should be; the web server
should be configured to redirect requests on hipikat.org/foo/bar
to www.hipikat.org/foo/bar.)
"""
from django_hosts import patterns, host
host_patterns = patterns(
'hipikat.urls',
host(r'www', 'www', name='main_site'),
host(r'blog', 'blog', name='blog'),
host(r'broken', 'broken', name='not_configured'),
)
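# Effect sketch (assuming the django-hosts middleware is enabled and
# ROOT_HOSTCONF points at this module): a request to blog.hipikat.org is served
# with request.urlconf = 'hipikat.urls.blog', while www.hipikat.org uses
# 'hipikat.urls.www'.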
|
py
|
1a5c5281d17991f36eec322d2fcb6383b570e202
|
#!/usr/bin/env python
"""Tests for `rocketPy.Quaternion`"""
import pytest
import rocketPy as rp
from rocketPy.quaternion import Quaternion
import numpy as np
import numpy.testing as nptest
# test if the creation is good:
##
def test_generate_array():
nptest.assert_allclose(Quaternion(), Quaternion([0, 1, 0, 0]))
# check the normalization
nptest.assert_allclose(Quaternion(
[3, -4, 0, 0]), Quaternion([0.6, -0.8, 0, 0]))
nptest.assert_allclose(Quaternion(
[3, 4, 0., 0]), Quaternion([0.6, 0.8, 0, 0]))
nptest.assert_allclose(Quaternion(
[3, -4, 0, 0]), np.array([0.6, -0.8, 0, 0]))
with pytest.raises(ValueError):
q1 = Quaternion([0, 0, 0, 0])
def test_from_angle():
q1 = Quaternion.from_angle(np.pi / 2, [1, 0, 0])
r2o2 = (1 / 2)**0.5 # root 2/2
nptest.assert_allclose(q1, np.array([r2o2, r2o2, 0, 0]))
q2 = Quaternion.from_angle(np.pi, [1, 1, 1])
n = np.array([1, 1, 1])
r1o3 = (1 / 3)**0.5
nptest.assert_allclose(q2, np.array([0, r1o3, r1o3, r1o3]), 1e-7, 1e-7)
def test_rotation_matrix():
q1 = Quaternion.from_angle(np.pi / 4, [1, 0, 0])
q2 = Quaternion.from_angle(np.pi, [0, 1, 0])
q3 = Quaternion.from_angle(30 * np.pi / 180, [0, 1, 1])
q4 = Quaternion.from_angle(30 * np.pi / 180, [0, 2, 1])
r1 = q1.rot_matrix()
r2 = q2.rot_matrix()
r3 = q3.rot_matrix()
r4 = q4.rot_matrix()
R1 = np.array([[1.0000000, 0.0000000, 0.0000000],
[0.0000000, 0.7071068, -0.7071068],
[0.0000000, 0.7071068, 0.7071068]])
R2 = np.array([[-1.0000000, 0.0000000, 0.0000000],
[0.0000000, 1.0000000, 0.0000000],
[-0.0000000, 0.0000000, -1.0000000]])
R3 = np.array([[0.8660254, -0.3535534, 0.3535534],
[0.3535534, 0.9330127, 0.0669873],
[-0.3535534, 0.0669873, 0.9330127]])
R4 = np.array([[0.8660254, -0.2236068, 0.4472136],
[0.2236068, 0.9732051, 0.0535898],
[-0.4472136, 0.0535898, 0.8928203]])
nptest.assert_allclose(r1, R1, 1e-7, 1e-7)
nptest.assert_allclose(r2, R2, 1e-7, 1e-7)
nptest.assert_allclose(r3, R3, 1e-7, 1e-7)
nptest.assert_allclose(r4, R4, 1e-7, 1e-7)
def test_axis_angle():
q1 = Quaternion.from_angle(np.pi / 4, [1, 0, 0])
q2 = Quaternion.from_angle(np.pi, [0, 1, 0])
q3 = Quaternion.from_angle(30 * np.pi / 180, [0, 1, 1])
q4 = Quaternion.from_angle(30 * np.pi / 180, [0, 2, 1])
nptest.assert_allclose(q1.axis_angle()[0], [1, 0, 0], 1e-7, 1e-7)
nptest.assert_allclose(q1.axis_angle()[1], np.pi / 4, 1e-7, 1e-7)
nptest.assert_allclose(q2.axis_angle()[0], [0, 1, 0], 1e-7, 1e-7)
nptest.assert_allclose(q2.axis_angle()[1], np.pi, 1e-7, 1e-7)
nptest.assert_allclose(q3.axis_angle()[0], [
0, 1 / 2**0.5, 1 / 2**0.5], 1e-7, 1e-7)
nptest.assert_allclose(q3.axis_angle()[1], np.pi / 6, 1e-7, 1e-7)
nptest.assert_allclose(q4.axis_angle()[0], [
0, 2 / 5**0.5, 1 / 5**0.5], 1e-7, 1e-7)
nptest.assert_allclose(q4.axis_angle()[1], np.pi / 6, 1e-7, 1e-7)
#nptest.assert_allclose(r2,R2, 1e-7,1e-7)
#nptest.assert_allclose(r3,R3, 1e-7,1e-7)
#nptest.assert_allclose(r4,R4, 1e-7,1e-7)
def test_derivative():
pass
##
|
py
|
1a5c5668641eb170a2de3e835242ccf175b1cb28
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Base class for RPC testing
import logging
import optparse
import os
import sys
import shutil
import tempfile
import traceback
from .util import (
initialize_chain,
start_nodes,
connect_nodes_bi,
sync_blocks,
sync_mempools,
stop_nodes,
stop_node,
enable_coverage,
check_json_precision,
initialize_chain_clean,
PortSeed,
)
from .authproxy import JSONRPCException
class BitcoinTestFramework(object):
def __init__(self):
self.num_nodes = 4
self.setup_clean_chain = False
self.nodes = None
def run_test(self):
raise NotImplementedError
def add_options(self, parser):
pass
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
if self.setup_clean_chain:
initialize_chain_clean(self.options.tmpdir, self.num_nodes)
else:
initialize_chain(self.options.tmpdir, self.num_nodes, self.options.cachedir)
def stop_node(self, num_node):
stop_node(self.nodes[num_node], num_node)
def setup_nodes(self):
return start_nodes(self.num_nodes, self.options.tmpdir)
def setup_network(self, split = False):
self.nodes = self.setup_nodes()
# Connect the nodes as a "chain". This allows us
# to split the network between nodes 1 and 2 to get
# two halves that can work on competing chains.
# If we joined network halves, connect the nodes from the joint
# on outward. This ensures that chains are properly reorganised.
if not split:
connect_nodes_bi(self.nodes, 1, 2)
sync_blocks(self.nodes[1:3])
sync_mempools(self.nodes[1:3])
connect_nodes_bi(self.nodes, 0, 1)
connect_nodes_bi(self.nodes, 2, 3)
self.is_network_split = split
self.sync_all()
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
assert not self.is_network_split
stop_nodes(self.nodes)
self.setup_network(True)
def sync_all(self):
if self.is_network_split:
sync_blocks(self.nodes[:2])
sync_blocks(self.nodes[2:])
sync_mempools(self.nodes[:2])
sync_mempools(self.nodes[2:])
else:
sync_blocks(self.nodes)
sync_mempools(self.nodes)
def join_network(self):
"""
Join the (previously split) network halves together.
"""
assert self.is_network_split
stop_nodes(self.nodes)
self.setup_network(False)
def main(self):
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave elicoinds and test.* datadir on exit or error")
parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
help="Don't stop elicoinds after the test execution")
parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
help="Source directory containing elicoind/elicoin-cli (default: %default)")
parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"),
help="Directory for caching pregenerated datadirs")
parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
help="Root directory for datadirs")
parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
help="Print out all RPC calls as they are made")
parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int',
help="The seed to use for assigning port numbers (default: current process id)")
parser.add_option("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
self.add_options(parser)
(self.options, self.args) = parser.parse_args()
# backup dir variable for removal at cleanup
self.options.root, self.options.tmpdir = self.options.tmpdir, self.options.tmpdir + '/' + str(self.options.port_seed)
if self.options.trace_rpc:
logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
if self.options.coveragedir:
enable_coverage(self.options.coveragedir)
PortSeed.n = self.options.port_seed
os.environ['PATH'] = self.options.srcdir+":"+self.options.srcdir+"/qt:"+os.environ['PATH']
check_json_precision()
success = False
try:
os.makedirs(self.options.tmpdir, exist_ok=False)
self.setup_chain()
self.setup_network()
self.run_test()
success = True
except JSONRPCException as e:
print("JSONRPC error: "+e.error['message'])
traceback.print_tb(sys.exc_info()[2])
except AssertionError as e:
print("Assertion failed: " + str(e))
traceback.print_tb(sys.exc_info()[2])
except KeyError as e:
print("key not found: "+ str(e))
traceback.print_tb(sys.exc_info()[2])
except Exception as e:
print("Unexpected exception caught during testing: " + repr(e))
traceback.print_tb(sys.exc_info()[2])
except KeyboardInterrupt as e:
print("Exiting after " + repr(e))
if not self.options.noshutdown:
print("Stopping nodes")
stop_nodes(self.nodes)
else:
print("Note: elicoinds were not stopped and may still be running")
if not self.options.nocleanup and not self.options.noshutdown and success:
print("Cleaning up")
shutil.rmtree(self.options.tmpdir)
if not os.listdir(self.options.root):
os.rmdir(self.options.root)
else:
print("Not cleaning up dir %s" % self.options.tmpdir)
if os.getenv("PYTHON_DEBUG", ""):
# Dump the end of the debug logs, to aid in debugging rare
# travis failures.
import glob
filenames = glob.glob(self.options.tmpdir + "/node*/regtest/debug.log")
MAX_LINES_TO_PRINT = 1000
for f in filenames:
print("From" , f, ":")
from collections import deque
print("".join(deque(open(f), MAX_LINES_TO_PRINT)))
if success:
print("Tests successful")
sys.exit(0)
else:
print("Failed")
sys.exit(1)
# Test framework for doing p2p comparison testing, which sets up some bitcoind
# binaries:
# 1 binary: test binary
# 2 binaries: 1 test binary, 1 ref binary
# n>2 binaries: 1 test binary, n-1 ref binaries
class ComparisonTestFramework(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 2
self.setup_clean_chain = True
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("ELICOIND", "elicoind"),
help="elicoind binary to test")
parser.add_option("--refbinary", dest="refbinary",
default=os.getenv("ELICOIND", "elicoind"),
help="elicoind binary to use for reference nodes (if any)")
def setup_network(self):
self.nodes = start_nodes(
self.num_nodes, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']] * self.num_nodes,
binary=[self.options.testbinary] +
[self.options.refbinary]*(self.num_nodes-1))
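# A typical concrete test (hypothetical sketch, not part of this module)
# subclasses BitcoinTestFramework, keeps the default four-node network,
# implements run_test(), and calls main() when executed as a script:
#   class ExampleTest(BitcoinTestFramework):
#       def run_test(self):
#           height = self.nodes[0].getblockcount()
#           self.nodes[0].generate(1)
#           self.sync_all()
#           assert self.nodes[1].getblockcount() == height + 1
#   if __name__ == '__main__':
#       ExampleTest().main()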
|
py
|
1a5c575fa01ff6dcb5ca96957b52e7c4438da597
|
import torch
from torch.cuda.amp import GradScaler, autocast
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.nn.utils import clip_grad_norm_
import torch.utils.data.distributed as ddist
import torch.utils.data as td
from torch import nn, optim
from pytorch_quik import arg, ddp, io, metrics
from contextlib import nullcontext
from argparse import ArgumentParser, Namespace
from typing import Any, Callable, Dict, Optional
from dataclasses import dataclass, field, asdict
from tqdm import tqdm
from .mlflow import QuikMlflow
@dataclass
class World:
"""World related data."""
device: torch.device = field(init=False)
node_id: int = 0
total_nodes: int = 1
gpu_id: int = None
total_gpus: int = None
rank_id: int = field(init=False)
world_size: int = field(init=False)
use_ray_tune: bool = False
use_init_group: bool = False
is_ddp: bool = field(init=False)
is_logger: bool = field(init=False)
def __post_init__(self):
if self.gpu_id is None:
self.device = torch.device("cpu")
self.master_port = None
self.rank_id = None
self.world_size = None
self.is_ddp = False
else:
if self.use_ray_tune:
self.device = torch.device("cuda")
else:
self.device = torch.device("cuda", self.gpu_id)
self.rank_id = self.node_id * self.total_gpus + self.gpu_id
self.world_size = self.total_gpus * self.total_nodes
self.is_ddp = True
self.is_logger = not self.is_ddp
if self.gpu_id == 0:
self.is_logger = True
@dataclass
class DlKwargs:
"""Data loader keyword arguments."""
batch_size: int = 24
shuffle: bool = False
pin_memory: bool = True
num_workers: int = 0
@dataclass
class OptKwargs:
"""Optimizer keyword arguments"""
lr: int
weight_decay: int
eps: int
betas: tuple
class QuikTrek:
"""A class for maintaining the general data for the full trek to
be shared between travelers.
"""
def __init__(
self,
gpu: Optional[int] = None,
args: Optional[Namespace] = None,
):
"""Constructor, primarily adding learning arguments
and creating dataclasses."""
if args is None:
parser = arg.add_ddp_args(ArgumentParser())
parser = arg.add_learn_args(parser)
parser = arg.add_mlflow_args(parser)
parser = arg.add_ray_tune_args(parser)
args = parser.parse_args()
self.args = args
self.epochs = args.epochs
self.create_dataclasses(gpu, args)
self.trek_prep(args)
def create_dataclasses(self, gpu: int, args: Namespace):
"""Create a World, DlKwargs, and OptKwargs dataclass"""
self.world = World(
args.nr,
args.nodes,
gpu,
args.gpus,
getattr(args, "use_ray_tune", False),
getattr(args, "use_init_group", False),
)
self.dlkwargs = DlKwargs(
batch_size=args.bs,
num_workers=args.num_workers,
)
self.optkwargs = OptKwargs(
lr=args.lr,
weight_decay=args.weight_decay,
eps=args.eps,
betas=args.betas,
)
self.args.device = self.world.device
def trek_prep(self, args: Namespace):
"""Create the MLFlow run, clear the cuda cache,
and setup ddp (if ddp and not ray tune)."""
if args.use_mlflow and self.world.is_logger:
self.mlflow = QuikMlflow(self.args)
self.mlflow.create_run(
[
self.dlkwargs,
self.optkwargs,
self.world,
]
)
if self.world.device.type == "cuda":
torch.cuda.empty_cache()
if self.world.gpu_id is not None and not getattr(
self.args, "use_ray_tune", False
):
torch.cuda.set_device(self.world.device)
ddp.setup(self.world.gpu_id, self.world)
class QuikTraveler:
"""A class for traversing a model either in training, validation, or
testing. Is always a singular run - but can be part of a multi-GPU run.
"""
metrics = metrics.LossMetrics(0.99)
def __init__(self, trek, type: Optional[str] = None):
"""Constructor, primarily adding learning arguments
and creating the QuikAmp."""
self.type = type
self.world = trek.world
self.args = trek.args
self.trek = trek
self.find_unused_parameters = trek.args.find_unused_parameters
self.amp = QuikAmp(trek.args.mixed_precision)
def set_criterion(
self,
criterion_fcn: Callable[..., nn.Module],
kwargs: Optional[Dict[str, Any]] = {},
):
"""Add the criterion to the traveler"""
if callable(criterion_fcn):
self.criterion = criterion_fcn(**kwargs)
self.criterion.to(self.world.device)
def set_optimizer(
self,
optimizer_fcn: Callable[..., optim.Optimizer],
kwargs: Optional[Dict[str, Any]] = {},
):
"""Add the optimizer to the traveler"""
if hasattr(self.model, "module"):
self.optimizer = optimizer_fcn(
self.model.module.parameters(),
**asdict(self.trek.optkwargs),
**kwargs,
)
else:
self.optimizer = optimizer_fcn(
self.model.parameters(),
**asdict(self.trek.optkwargs),
**kwargs,
)
def set_scheduler(
self,
scheduler_fcn: Callable[..., optim.Optimizer],
kwargs: Optional[Dict[str, Any]] = {},
):
"""Add the scheduler to the traveler"""
self.scheduler = scheduler_fcn(
self.optimizer,
**kwargs,
)
def add_model(
self,
model: nn.Module,
state_dict: Optional[Dict[str, torch.Tensor]] = None,
):
"""Add the model to the traveler"""
self.model = model
if state_dict is not None:
self.model.load_state_dict(state_dict)
self.model.to(self.world.device)
if self.world.is_ddp:
self.model = DDP(
self.model,
device_ids=[self.world.device],
output_device=self.world.device,
find_unused_parameters=self.find_unused_parameters,
)
def add_data(self, tensorDataset: td.TensorDataset):
"""Add the dataloader (via QuikData) to the traveler"""
self.data = QuikData(
tensorDataset, self.world, self.trek.dlkwargs, self.trek.epochs
)
if self.type == "train":
self.metrics.steps = self.data.steps
def backward(self, loss: torch.Tensor, clip: Optional[bool] = True):
"""Run the model.backward (plus consider a scaler)."""
if hasattr(self.amp, "scaler"):
self.amp.backward(self, loss, clip)
        else:
            loss.backward()
            # without a GradScaler the optimizer is stepped here;
            # the amp path steps it inside self.amp.backward()
            if hasattr(self, "optimizer"):
                self.optimizer.step()
def add_loss(self, loss: torch.Tensor, pbar: tqdm, epoch: int):
"""Add the training loss to the model metrics."""
self.metrics.add_loss(loss)
if self.args.use_mlflow and pbar is not None:
loss = self.metrics.metric_dict["train_loss"]
step = pbar.n + (self.metrics.steps * epoch)
self.trek.mlflow.log_metric("train_loss", loss, step)
def add_vloss(self, vlosses, nums, epoch):
"""Add the valid loss to the model metrics."""
self.metrics.add_vloss(vlosses, nums)
if self.args.use_mlflow and self.world.is_logger:
vloss = self.metrics.metric_dict["valid_loss"]
step = self.metrics.steps * (epoch + 1)
self.trek.mlflow.log_metric("valid_loss", vloss, step)
def save_state_dict(self, epoch):
""" "Run save_state_dict within the traveler"""
sd_id = io.save_state_dict(self.model, self.args, epoch)
if self.args.use_mlflow and self.world.is_logger:
self.trek.mlflow.log_artifact(str(sd_id))
def record_results(
self,
label_names,
accuracy: Optional[bool] = True,
f1: Optional[bool] = True,
confusion: Optional[bool] = True,
):
"""Record the confusion matrix and classification report both in
MLFlow and in the traveler."""
cm_id = io.id_str("confusion", self.args)
self.cm = metrics.build_confusion_matrix(
self.data.predictions,
self.data.labels,
label_names,
cm_id,
)
self.cr = metrics.build_class_dict(
self.data.predictions,
self.data.labels,
label_names,
)
if self.args.use_mlflow and self.world.is_logger:
self.trek.mlflow.log_artifact(cm_id)
            for metric, value in self.cr.items():
                self.trek.mlflow.log_metric(metric, value, 0)
class QuikData:
"""A class for providing data to a traveler."""
def __init__(
self,
tensorDataset: td.TensorDataset,
world: World,
dlkwargs: Dict[str, Any],
epochs: int,
):
"""Constructor, primarily adding the dataset and dataloader."""
self.dataset = tensorDataset
self.labels = self.dataset.tensors[2].cpu().numpy()
self.dlkwargs = dlkwargs
self.add_data_loader(world)
self.steps = len(self.data_loader)
self.total_steps = self.steps * epochs
def add_sampler(self, world, sampler_fcn=None, kwargs={}):
"""Adds the data sampler to the data"""
if world.is_ddp:
self.sampler = ddist.DistributedSampler(
self.dataset,
num_replicas=world.world_size,
rank=world.rank_id,
)
elif callable(sampler_fcn):
self.sampler = sampler_fcn(**kwargs)
else:
self.sampler = sampler_fcn
def add_data_loader(self, world):
"""Adds the data loader to the data"""
if not hasattr(self, "sampler"):
self.add_sampler(world)
self.data_loader = td.DataLoader(
dataset=self.dataset,
sampler=self.sampler,
**asdict(self.dlkwargs),
)
def add_results(self, predictions: torch.Tensor, labels: torch.Tensor):
"""Adds the predictions df and labels df to the data"""
self.predictions = predictions
self.labels = labels
class QuikAmp:
"""A class to manage automatic mixed precision. Provides
a nullcontext to your forward function if it's not being
used.
"""
def __init__(self, mixed_precision: bool):
"""Constructor, add the automatic mixed precision values of scaler and
autocast. If there's no amp, it adds a nullcontext as a callable for
the with statement."""
if mixed_precision:
self.scaler = GradScaler()
self.caster = autocast()
else:
self.caster = nullcontext()
def backward(
self,
trvlr: QuikTraveler,
loss: torch.Tensor,
clip: Optional[bool] = True,
):
"""Backward propogation with automatic mixed precision."""
self.scaler.scale(loss).backward()
# https://pytorch.org/docs/stable/notes/amp_examples.html#working-with-unscaled-gradients
if clip:
self.scaler.unscale_(trvlr.optimizer)
# https://discuss.pytorch.org/t/about-torch-nn-utils-clip-grad-norm/13873
if hasattr(trvlr.model, "module"):
clip_grad_norm_(trvlr.model.module.parameters(), 1.0)
else:
clip_grad_norm_(trvlr.model.parameters(), 1.0)
self.scaler.step(trvlr.optimizer)
self.scaler.update()
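# End-to-end usage sketch (hypothetical; MyModel and train_dataset are
# placeholders, everything else refers to the classes defined above):
#   trek = QuikTrek(gpu=None)                    # CPU run, args parsed internally
#   traveler = QuikTraveler(trek, type="train")
#   traveler.add_model(MyModel())
#   traveler.add_data(train_dataset)             # a torch TensorDataset
#   traveler.set_criterion(nn.CrossEntropyLoss)
#   traveler.set_optimizer(optim.AdamW)
#   for epoch in range(trek.epochs):
#       for batch in traveler.data.data_loader:
#           with traveler.amp.caster:
#               loss = traveler.criterion(traveler.model(batch[0]), batch[-1])
#           traveler.backward(loss)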
|
py
|
1a5c5826af97c8f90e948c8e02370150550df129
|
# coding=utf-8
"""Author: Konrad Zemek
Copyright (C) 2015 ACK CYFRONET AGH
This software is released under the MIT license cited in 'LICENSE.txt'
Functions wrapping capabilities of docker binary.
"""
import json
import os
import subprocess
import sys
# noinspection PyDefaultArgument
def run(image, docker_host=None, detach=False, dns_list=[], add_host={},
envs={}, hostname=None, interactive=False, link={}, tty=False, rm=False,
reflect=[], volumes=[], name=None, workdir=None, user=None, group=None,
group_add=[], cpuset_cpus=None, privileged=False, run_params=[], command=None,
output=False, stdin=None, stdout=None, stderr=None):
cmd = ['docker']
if docker_host:
cmd.extend(['-H', docker_host])
cmd.append('run')
if detach:
cmd.append('-d')
for addr in dns_list:
cmd.extend(['--dns', addr])
for key, value in add_host.iteritems():
cmd.extend(['--add-host', '{0}:{1}'.format(key, value)])
for key in envs:
cmd.extend(['-e', '{0}={1}'.format(key, envs[key])])
if hostname:
cmd.extend(['-h', hostname])
if detach or sys.__stdin__.isatty():
if interactive:
cmd.append('-i')
if tty:
cmd.append('-t')
for container, alias in link.items():
cmd.extend(['--link', '{0}:{1}'.format(container, alias)])
if name:
cmd.extend(['--name', name])
if rm:
cmd.append('--rm')
for path, read in reflect:
vol = '{0}:{0}:{1}'.format(os.path.abspath(path), read)
cmd.extend(['-v', vol])
    # Volume can be in one of three forms (see the usage sketch after this function)
# 1. 'path_on_docker'
# 2. ('path_on_host', 'path_on_docker', 'ro'/'rw')
# 3. {'volumes_from': 'volume name'}
for entry in volumes:
if isinstance(entry, tuple):
path, bind, readable = entry
vol = '{0}:{1}:{2}'.format(os.path.abspath(path), bind, readable)
cmd.extend(['-v', vol])
elif isinstance(entry, dict):
volume_name = entry['volumes_from']
cmd.extend(['--volumes-from', volume_name])
else:
cmd.extend(['-v', entry])
if workdir:
cmd.extend(['-w', os.path.abspath(workdir)])
if user:
user_group = '{0}:{1}'.format(user, group) if group else user
cmd.extend(['-u', user_group])
for g in group_add:
cmd.extend(['--group-add', g])
if privileged:
cmd.append('--privileged')
if cpuset_cpus:
cmd.extend(['--cpuset-cpus', cpuset_cpus])
cmd.extend(run_params)
cmd.append(image)
if isinstance(command, basestring):
cmd.extend(['sh', '-c', command])
elif isinstance(command, list):
cmd.extend(command)
elif command is not None:
raise ValueError('{0} is not a string nor list'.format(command))
if detach or output:
return subprocess.check_output(cmd, stdin=stdin, stderr=stderr).decode(
'utf-8').strip()
return subprocess.call(cmd, stdin=stdin, stderr=stderr, stdout=stdout)
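# Usage sketch for run() (hypothetical image and paths), illustrating the three
# volume forms described above:
#   container = run(
#       'ubuntu:16.04',
#       detach=True,
#       volumes=[
#           '/data',                              # 1. path on the container only
#           ('/host/config', '/etc/app', 'ro'),   # 2. host bind-mount, read-only
#           {'volumes_from': 'data-container'},   # 3. volumes from another container
#       ],
#       command='sleep 60')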
def exec_(container, command, docker_host=None, user=None, group=None,
detach=False, interactive=False, tty=False, privileged=False,
output=False, stdin=None, stdout=None, stderr=None):
cmd = ['docker']
if docker_host:
cmd.extend(['-H', docker_host])
cmd.append('exec')
if user:
user_group = '{0}:{1}'.format(user, group) if group else user
cmd.extend(['-u', user_group])
if detach:
cmd.append('-d')
if detach or sys.__stdin__.isatty():
if interactive:
cmd.append('-i')
if tty:
cmd.append('-t')
if privileged:
cmd.append('--privileged')
cmd.append(container)
if isinstance(command, basestring):
cmd.extend(['sh', '-c', command])
elif isinstance(command, list):
cmd.extend(command)
else:
raise ValueError('{0} is not a string nor list'.format(command))
if detach or output:
return subprocess.check_output(cmd, stdin=stdin, stderr=stderr).decode(
'utf-8').strip()
return subprocess.call(cmd, stdin=stdin, stderr=stderr, stdout=stdout)
def inspect(container, docker_host=None):
cmd = ['docker']
if docker_host:
cmd.extend(['-H', docker_host])
cmd.extend(['inspect', container])
out = subprocess.check_output(cmd, universal_newlines=True)
return json.loads(out)[0]
def logs(container, docker_host=None):
cmd = ['docker']
if docker_host:
cmd.extend(['-H', docker_host])
cmd.extend(['logs', container])
return subprocess.check_output(cmd, universal_newlines=True,
stderr=subprocess.STDOUT)
def remove(containers, docker_host=None, force=False,
link=False, volumes=False):
cmd = ['docker']
if docker_host:
cmd.extend(['-H', docker_host])
cmd.append('rm')
if force:
cmd.append('-f')
if link:
cmd.append('-l')
if volumes:
cmd.append('-v')
cmd.extend(containers)
subprocess.check_call(cmd)
def cp(container, src_path, dest_path, to_container=False):
"""Copying file between docker container and host
:param container: str, docker id or name
:param src_path: str
:param dest_path: str
:param to_container: bool, if True file will be copied from host to
container, otherwise from docker container to host
"""
cmd = ["docker", "cp"]
if to_container:
cmd.extend([src_path, "{0}:{1}".format(container, dest_path)])
else:
cmd.extend(["{0}:{1}".format(container, src_path), dest_path])
subprocess.check_call(cmd)
def login(user, password, repository='hub.docker.com'):
"""Logs into docker repository."""
subprocess.check_call(['docker', 'login', '-u', user, '-p', password,
repository])
def build_image(image, build_args):
"""Builds and tags docker image."""
subprocess.check_call(['docker', 'build', '--no-cache', '--force-rm', '-t',
image] + build_args)
def tag_image(image, tag):
"""Tags docker image."""
subprocess.check_call(['docker', 'tag', image, tag])
def push_image(image):
"""Pushes docker image to the repository."""
subprocess.check_call(['docker', 'push', image])
def pull_image(image):
"""Pulls docker image from the repository."""
subprocess.check_call(['docker', 'pull', image])
def remove_image(image):
"""Removes docker image."""
subprocess.check_call(['docker', 'rmi', '-f', image])
def create_volume(path, name, image, command):
cmd = ['docker']
cmd.append('create')
cmd.append('-v')
cmd.append(path)
cmd.append('--name')
cmd.append(name)
cmd.append(image)
cmd.append(command)
return subprocess.check_output(cmd, universal_newlines=True,
stderr=subprocess.STDOUT)
|
py
|
1a5c5847baf36a1c8c9ade9e59a92f3690c6ebce
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables
from openstack_dashboard.api import cinder
from openstack_dashboard.api import keystone
from openstack_dashboard.dashboards.project.volumes.snapshots \
import tables as snapshots_tables
from openstack_dashboard.dashboards.project.volumes.volumes \
import tables as volumes_tables
class UpdateVolumeSnapshotStatus(tables.LinkAction):
name = "update_status"
verbose_name = _("Update Status")
url = "horizon:admin:volumes:snapshots:update_status"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("volume",
"snapshot_extension:snapshot_actions:"
"update_snapshot_status"),)
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, snapshot_id):
snapshot = cinder.volume_snapshot_get(request, snapshot_id)
snapshot._volume = cinder.volume_get(request, snapshot.volume_id)
snapshot.host_name = getattr(snapshot._volume,
'os-vol-host-attr:host')
tenant_id = getattr(snapshot._volume,
'os-vol-tenant-attr:tenant_id')
try:
tenant = keystone.tenant_get(request, tenant_id)
snapshot.tenant_name = getattr(tenant, "name")
except Exception:
msg = _('Unable to retrieve volume project information.')
exceptions.handle(request, msg)
return snapshot
class VolumeSnapshotsTable(volumes_tables.VolumesTableBase):
name = tables.WrappingColumn("name", verbose_name=_("Name"),
link="horizon:admin:volumes:snapshots:detail")
volume_name = snapshots_tables.SnapshotVolumeNameColumn(
"name", verbose_name=_("Volume Name"),
link="horizon:admin:volumes:volumes:detail")
host = tables.Column("host_name", verbose_name=_("Host"))
tenant = tables.Column("tenant_name", verbose_name=_("Project"))
class Meta(object):
name = "volume_snapshots"
verbose_name = _("Volume Snapshots")
pagination_param = 'snapshot_marker'
prev_pagination_param = 'prev_snapshot_marker'
table_actions = (snapshots_tables.VolumeSnapshotsFilterAction,
snapshots_tables.DeleteVolumeSnapshot,)
row_actions = (snapshots_tables.DeleteVolumeSnapshot,
UpdateVolumeSnapshotStatus,
snapshots_tables.UpdateMetadata)
row_class = UpdateRow
status_columns = ("status",)
columns = ('tenant', 'host', 'name', 'description', 'size', 'status',
'volume_name',)
|
py
|
1a5c58a66b109a11bb3fd405adc10373976343f9
|
from pathlib import Path
class Asset:
"""Git リポジトリへの配置対象リソースを管理する。
Args:
src (Path): 元となるテンプレートを格納するディレクトリ
destination (Path): Git のルートディレクトリから見たリソースの配置先
"""
def __init__(self,
src: Path,
destination: str):
self._src = src
self._destination = destination
@property
def source(self) -> Path:
"""Path: テンプレート格納先"""
return self._src
@property
def destination(self) -> str:
"""str: Git リポジトリへのリソース格納先"""
return self._destination
|
py
|
1a5c58ac4507dd86b04fe0e34cbb80062c0ef2b1
|
import sys
import vlc
import json
from PyQt5.QtWidgets import QMainWindow
from PyQt5.QtGui import QPixmap
from PyQt5 import uic
from internationalization import LANGUAGE
class Menu(QMainWindow):
def __init__(self, lang, username):
QMainWindow.__init__(self)
uic.loadUi("windows/Menu.ui", self)
self.lang = lang
self.reload_text()
self.username = username
death_star_image = QPixmap("resources/death_star.png")
self.image.setPixmap(death_star_image)
self.cantina_song = vlc.MediaPlayer("resources/cantina.mp3")
self.cantina_song.play()
self.play_button.clicked.connect(self.go_to_play)
self.leaderboards_button.clicked.connect(self.open_leaderboards)
self.exit_button.clicked.connect(self.close_game)
def showEvent(self, event):
"""Play the game song when the window appears
This is an override method"""
self.cantina_song.play()
def closeEvent(self, event):
"""Stop the game song when the window close
This is an override method"""
self.cantina_song.stop()
def go_to_play(self):
"""Go to create lobby window"""
from chooseSideWindow import ChooseSide
self.choose = ChooseSide(self.lang, self.username)
self.choose.show()
self.close()
def open_leaderboards(self):
"""Show the leaderboards window"""
from leaderboardsWindow import Leaderboards
self.leader = Leaderboards(self.lang)
self.leader.show()
def reload_text(self):
"""Change the language of the window according to the chosen previously"""
self.language = LANGUAGE.get(self.lang)
self.leaderboards_button.setText(self.language["leaderboards"])
self.exit_button.setText(self.language["exit"])
self.setWindowTitle(self.language["menu"])
self.play_button.setText(self.language["play"])
def close_game(self):
"""Close the game window"""
self.close()
|
py
|
1a5c59ded7184e2c5b016b23d356a5318f1cdf17
|
# Generated by Django 2.1.5 on 2019-02-23 03:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lemlit', '0022_auto_20190123_1052'),
]
operations = [
migrations.AlterField(
model_name='suratizinpenelitianmahasiswa',
name='tujuan_surat',
field=models.CharField(max_length=150),
),
]
|
py
|
1a5c5ab8988f0e4dd2a27433c2733727d184764d
|
#!/usr/bin/env python
from typing import Tuple
from redbot.message import headers
from redbot.speak import Note, categories, levels
from redbot.syntax import rfc7231
from redbot.type import AddNoteMethodType, ParamDictType
class x_xss_protection(headers.HttpHeader):
canonical_name = "X-XSS-Protection"
description = """\
The `X-XSS-Protection` response header field can be sent by servers to control how
older versions of Internet Explorer configure their Cross Site Scripting protection."""
reference = \
"https://blogs.msdn.microsoft.com/ieinternals/2011/01/31/controlling-the-xss-filter/"
syntax = r'(?:[10](?:\s*;\s*%s)*)' % rfc7231.parameter
list_header = False
deprecated = False
valid_in_requests = False
valid_in_responses = True
def parse(self, field_value: str, add_note: AddNoteMethodType) -> Tuple[int, ParamDictType]:
try:
protect, param_str = field_value.split(';', 1)
except ValueError:
protect, param_str = field_value, ""
try:
protect_int = int(protect)
except ValueError:
raise
params = headers.parse_params(param_str, add_note, True)
if protect_int == 0:
add_note(XSS_PROTECTION_OFF)
else: # 1
if params.get('mode', None) == "block":
add_note(XSS_PROTECTION_BLOCK)
else:
add_note(XSS_PROTECTION_ON)
return protect_int, params
class XSS_PROTECTION_ON(Note):
category = categories.SECURITY
level = levels.INFO
summary = "%(response)s enables XSS filtering in IE8+."
text = """\
Recent versions of Internet Explorer have built-in Cross-Site Scripting (XSS) attack protection;
they try to automatically filter requests that fit a particular profile.
%(response)s has explicitly enabled this protection. If IE detects a Cross-site scripting attack,
it will "sanitise" the page to prevent the attack. In other words, the page will still render.
This header probably won't have any effect on other clients.
See [this blog entry](http://bit.ly/tJbICH) for more information."""
class XSS_PROTECTION_OFF(Note):
category = categories.SECURITY
level = levels.INFO
summary = "%(response)s disables XSS filtering in IE8+."
text = """\
Recent versions of Internet Explorer have built-in Cross-Site Scripting (XSS) attack protection;
they try to automatically filter requests that fit a particular profile.
%(response)s has explicitly disabled this protection. In some scenarios, this is useful to do, if
the protection interferes with the application.
This header probably won't have any effect on other clients.
See [this blog entry](http://bit.ly/tJbICH) for more information."""
class XSS_PROTECTION_BLOCK(Note):
category = categories.SECURITY
level = levels.INFO
summary = "%(response)s blocks XSS attacks in IE8+."
text = """\
Recent versions of Internet Explorer have built-in Cross-Site Scripting (XSS) attack protection;
they try to automatically filter requests that fit a particular profile.
Usually, IE will rewrite the attacking HTML, so that the attack is neutralised, but the content can
still be seen. %(response)s instructs IE to not show such pages at all, but rather to display an
error.
This header probably won't have any effect on other clients.
See [this blog entry](http://bit.ly/tJbICH) for more information."""
class OneXXSSTest(headers.HeaderTest):
name = 'X-XSS-Protection'
inputs = [b'1']
expected_out = (1, {}) # type: ignore
expected_err = [XSS_PROTECTION_ON]
class ZeroXXSSTest(headers.HeaderTest):
name = 'X-XSS-Protection'
inputs = [b'0']
expected_out = (0, {}) # type: ignore
expected_err = [XSS_PROTECTION_OFF]
class OneBlockXXSSTest(headers.HeaderTest):
name = 'X-XSS-Protection'
inputs = [b'1; mode=block']
expected_out = (1, {'mode': 'block'})
expected_err = [XSS_PROTECTION_BLOCK]
class BadXXSSTest(headers.HeaderTest):
name = 'X-XSS-Protection'
inputs = [b'foo']
expected_out = None # type: ignore
expected_err = [headers.BAD_SYNTAX]
|
py
|
1a5c5ba3abebddf92fc01226be5bfd590474cf6b
|
#!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
for k in range(5):
dataFile = "../setup/data/data_set_" + str(k).zfill(3) + ".dat"
x = np.loadtxt(dataFile)
plt.plot(x[:, 0], x[:, 1], '-o')
plt.autoscale(enable=True, axis='x', tight=True)
plt.xlabel('time')
plt.ylabel('noisy logistic data')
plt.title('All data sets')
plt.grid(True)
plt.show()
|
py
|
1a5c5bdb317595a6b963e9a340939757b26a457b
|
from functools import partial
from inspect import isclass
from graphql_relay import from_global_id, to_global_id
from ..types import ID, Field, Interface, ObjectType
from ..types.interface import InterfaceOptions
from ..types.utils import get_type
def is_node(objecttype):
"""
Check if the given objecttype has Node as an interface
"""
if not isclass(objecttype):
return False
if not issubclass(objecttype, ObjectType):
return False
for i in objecttype._meta.interfaces:
if issubclass(i, Node):
return True
return False
class GlobalID(Field):
def __init__(self, node=None, parent_type=None, required=True, *args, **kwargs):
super(GlobalID, self).__init__(ID, required=required, *args, **kwargs)
self.node = node or Node
self.parent_type_name = parent_type._meta.name if parent_type else None
@staticmethod
def id_resolver(parent_resolver, node, root, info, parent_type_name=None, **args):
type_id = parent_resolver(root, info, **args)
parent_type_name = parent_type_name or info.parent_type.name
return node.to_global_id(parent_type_name, type_id) # root._meta.name
def get_resolver(self, parent_resolver):
return partial(
self.id_resolver,
parent_resolver,
self.node,
parent_type_name=self.parent_type_name,
)
class NodeField(Field):
def __init__(self, node, type=False, **kwargs):
assert issubclass(node, Node), "NodeField can only operate in Nodes"
self.node_type = node
self.field_type = type
super(NodeField, self).__init__(
            # If we don't specify a type, the field type will be the node
# interface
type or node,
id=ID(required=True, description="The ID of the object"),
**kwargs
)
def get_resolver(self, parent_resolver):
return partial(self.node_type.node_resolver, get_type(self.field_type))
class AbstractNode(Interface):
class Meta:
abstract = True
@classmethod
def __init_subclass_with_meta__(cls, **options):
_meta = InterfaceOptions(cls)
_meta.fields = {"id": GlobalID(cls, description="The ID of the object")}
super(AbstractNode, cls).__init_subclass_with_meta__(_meta=_meta, **options)
class Node(AbstractNode):
"""An object with an ID"""
@classmethod
def Field(cls, *args, **kwargs): # noqa: N802
return NodeField(cls, *args, **kwargs)
@classmethod
def node_resolver(cls, only_type, root, info, id):
return cls.get_node_from_global_id(info, id, only_type=only_type)
@classmethod
def get_node_from_global_id(cls, info, global_id, only_type=None):
try:
_type, _id = cls.from_global_id(global_id)
graphene_type = info.schema.get_type(_type).graphene_type
except Exception:
return None
if only_type:
assert graphene_type == only_type, ("Must receive a {} id.").format(
only_type._meta.name
)
# We make sure the ObjectType implements the "Node" interface
if cls not in graphene_type._meta.interfaces:
return None
get_node = getattr(graphene_type, "get_node", None)
if get_node:
return get_node(info, _id)
@classmethod
def from_global_id(cls, global_id):
return from_global_id(global_id)
@classmethod
def to_global_id(cls, type, id):
return to_global_id(type, id)
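# Usage sketch (hypothetical types; String and get_ship are placeholders that
# are not imported in this module): an ObjectType implements Node and provides
# get_node(), and a root field resolves it from a global ID.
#   class Ship(ObjectType):
#       class Meta:
#           interfaces = (Node,)
#       name = String()
#       @classmethod
#       def get_node(cls, info, id):
#           return get_ship(id)
#   class Query(ObjectType):
#       node = Node.Field()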
|
py
|
1a5c5c851c731b44ffee620a29b3e3bdfcd0e096
|
# Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A collection of dictionary-based wrappers around the "vanilla" transforms for model output tensors
defined in :py:class:`monai.transforms.post.array`.
Class names are ended with 'd' to denote dictionary-based transforms.
"""
from typing import Optional
from monai.config.type_definitions import KeysCollection
from monai.utils.misc import ensure_tuple_rep
from monai.transforms.compose import MapTransform
from monai.transforms.post.array import SplitChannel, Activations, AsDiscrete, KeepLargestConnectedComponent
class SplitChanneld(MapTransform):
"""
Dictionary-based wrapper of :py:class:`monai.transforms.SplitChannel`.
    All the inputs specified by `keys` should be split into the same number of outputs.
"""
def __init__(self, keys: KeysCollection, output_postfixes, to_onehot=False, num_classes=None):
"""
Args:
keys: keys of the corresponding items to be transformed.
See also: :py:class:`monai.transforms.compose.MapTransform`
output_postfixes (list, tuple): the postfixes to construct keys to store splitted data.
for example: if the key of input data is `pred` and split 2 classes, the output
data keys will be: pred_(output_postfixes[0]), pred_(output_postfixes[1])
to_onehot (bool or list of bool): whether to convert the data to One-Hot format, default is False.
num_classes (int or list of int): the class number used to convert to One-Hot format
if `to_onehot` is True.
"""
super().__init__(keys)
if not isinstance(output_postfixes, (list, tuple)):
raise ValueError("must specify key postfixes to store splitted data.")
self.output_postfixes = output_postfixes
self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys))
self.num_classes = ensure_tuple_rep(num_classes, len(self.keys))
self.splitter = SplitChannel()
def __call__(self, data):
d = dict(data)
for idx, key in enumerate(self.keys):
rets = self.splitter(d[key], self.to_onehot[idx], self.num_classes[idx])
            assert len(self.output_postfixes) == len(rets), "count of split results must match output_postfixes."
for i, r in enumerate(rets):
d[f"{key}_{self.output_postfixes[i]}"] = r
return d
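# --- Illustrative sketch (not part of the original module) ---
# Hypothetical usage: splitting a 2-channel prediction into per-class keys.
# With keys="pred" and output_postfixes=("background", "tumour"), the call
# below would add "pred_background" and "pred_tumour" to the dictionary,
# each holding one channel of the original array:
#
#     splitter = SplitChanneld(keys="pred", output_postfixes=("background", "tumour"))
#     data = splitter({"pred": prediction})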
class Activationsd(MapTransform):
"""
    Dictionary-based wrapper of :py:class:`monai.transforms.Activations`.
Add activation layers to the input data specified by `keys`.
"""
def __init__(self, keys: KeysCollection, output_postfix: str = "act", sigmoid=False, softmax=False, other=None):
"""
Args:
keys: keys of the corresponding items to model output and label.
See also: :py:class:`monai.transforms.compose.MapTransform`
output_postfix: the postfix string to construct keys to store converted data.
for example: if the keys of input data is `pred` and `label`, output_postfix is `act`,
the output data keys will be: `pred_act`, `label_act`.
if set to None, will replace the original data with the same key.
sigmoid (bool, tuple or list of bool): whether to execute sigmoid function on model
output before transform.
softmax (bool, tuple or list of bool): whether to execute softmax function on model
output before transform.
other (Callable, tuple or list of Callables): callable function to execute other activation layers,
for example: `other = lambda x: torch.tanh(x)`
"""
super().__init__(keys)
if output_postfix is not None and not isinstance(output_postfix, str):
raise ValueError("output_postfix must be a string.")
self.output_postfix = output_postfix
self.sigmoid = ensure_tuple_rep(sigmoid, len(self.keys))
self.softmax = ensure_tuple_rep(softmax, len(self.keys))
self.other = ensure_tuple_rep(other, len(self.keys))
self.converter = Activations()
def __call__(self, data):
d = dict(data)
for idx, key in enumerate(self.keys):
ret = self.converter(d[key], self.sigmoid[idx], self.softmax[idx], self.other[idx])
output_key = key if self.output_postfix is None else f"{key}_{self.output_postfix}"
d[output_key] = ret
return d
class AsDiscreted(MapTransform):
"""
Dictionary-based wrapper of :py:class:`monai.transforms.AsDiscrete`.
"""
def __init__(
self,
keys: KeysCollection,
output_postfix: str = "discreted",
argmax: bool = False,
to_onehot: bool = False,
n_classes: Optional[int] = None,
threshold_values: bool = False,
logit_thresh: float = 0.5,
):
"""
Args:
keys: keys of the corresponding items to model output and label.
See also: :py:class:`monai.transforms.compose.MapTransform`
output_postfix: the postfix string to construct keys to store converted data.
for example: if the keys of input data is `pred` and `label`, output_postfix is `discreted`,
the output data keys will be: `pred_discreted`, `label_discreted`.
if set to None, will replace the original data with the same key.
argmax: whether to execute argmax function on input data before transform.
to_onehot: whether to convert input data into the one-hot format. Defaults to False.
n_classes: the number of classes to convert to One-Hot format.
threshold_values: whether threshold the float value to int number 0 or 1, default is False.
logit_thresh: the threshold value for thresholding operation, default is 0.5.
"""
super().__init__(keys)
if output_postfix is not None and not isinstance(output_postfix, str):
raise ValueError("output_postfix must be a string.")
self.output_postfix = output_postfix
self.argmax = ensure_tuple_rep(argmax, len(self.keys))
self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys))
self.n_classes = ensure_tuple_rep(n_classes, len(self.keys))
self.threshold_values = ensure_tuple_rep(threshold_values, len(self.keys))
self.logit_thresh = ensure_tuple_rep(logit_thresh, len(self.keys))
self.converter = AsDiscrete()
def __call__(self, data):
d = dict(data)
for idx, key in enumerate(self.keys):
output_key = key if self.output_postfix is None else f"{key}_{self.output_postfix}"
d[output_key] = self.converter(
d[key],
self.argmax[idx],
self.to_onehot[idx],
self.n_classes[idx],
self.threshold_values[idx],
self.logit_thresh[idx],
)
return d
class KeepLargestConnectedComponentd(MapTransform):
"""
    Dictionary-based wrapper of :py:class:`monai.transforms.KeepLargestConnectedComponent`.
"""
def __init__(
self,
keys: KeysCollection,
applied_labels,
independent: bool = True,
connectivity: Optional[int] = None,
output_postfix: str = "largestcc",
):
"""
Args:
keys: keys of the corresponding items to be transformed.
See also: :py:class:`monai.transforms.compose.MapTransform`
            applied_labels (int, list or tuple of int): Labels for applying the connected component analysis on.
                If the data has only one channel, pixels whose values are not in this list will remain unchanged.
                If the data is in one-hot format, this determines which channels the analysis is applied to.
            independent (bool): whether to consider the labels as a whole or independently, default is `True`.
                An example use case: label 1 is liver and label 2 is liver tumor; in that case
                `independent` should be set to False.
connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor.
Accepted values are ranging from 1 to input.ndim. If ``None``, a full
connectivity of ``input.ndim`` is used.
output_postfix: the postfix string to construct keys to store converted data.
for example: if the keys of input data is `label`, output_postfix is `largestcc`,
the output data keys will be: `label_largestcc`.
if set to None, will replace the original data with the same key.
"""
super().__init__(keys)
if output_postfix is not None and not isinstance(output_postfix, str):
raise ValueError("output_postfix must be a string.")
self.output_postfix = output_postfix
self.converter = KeepLargestConnectedComponent(applied_labels, independent, connectivity)
def __call__(self, data):
d = dict(data)
for idx, key in enumerate(self.keys):
output_key = key if self.output_postfix is None else f"{key}_{self.output_postfix}"
d[output_key] = self.converter(d[key])
return d
SplitChannelD = SplitChannelDict = SplitChanneld
ActivationsD = ActivationsDict = Activationsd
AsDiscreteD = AsDiscreteDict = AsDiscreted
KeepLargestConnectedComponentD = KeepLargestConnectedComponentDict = KeepLargestConnectedComponentd
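if __name__ == "__main__":
    # Illustrative sketch (not part of the original module): a minimal
    # post-processing chain for hypothetical batch-first logits stored under
    # the key "pred"; the key name and the 2-class setting are assumptions.
    import torch
    from monai.transforms.compose import Compose

    post_transforms = Compose([
        Activationsd(keys="pred", output_postfix=None, softmax=True),
        AsDiscreted(keys="pred", output_postfix=None, argmax=True, to_onehot=True, n_classes=2),
    ])
    result = post_transforms({"pred": torch.randn(1, 2, 16, 16)})
    print(result["pred"].shape)  # expected: one-hot prediction of shape (1, 2, 16, 16)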
|
py
|
1a5c5c98057c01b03a18d8c12f19886cb7e84e3d
|
'''
Web browser GUI-launched tasks run in a separate process. This module provides
mechanisms for interacting with the task running in another process, e.g.,
calling functions to start/stop the task, enabling/disabling decoder adaptation, etc.
'''
import os
import sys
import time
import xmlrpc.client
import multiprocessing as mp
import collections
from riglib import experiment
from riglib.mp_proxy import FuncProxy
from . import websocket
from config import config
from .json_param import Parameters
import io
import traceback
log_filename = os.path.join(config.log_path, "tasktrack_log")
def log_error(err, mode='a'):
traceback.print_exc(None, err)
with open(log_filename, mode) as fp:
err.seek(0)
fp.write(err.read())
def log_str(s, mode="a", newline=True):
if newline and not s.endswith("\n"):
s += "\n"
with open(log_filename, mode) as fp:
fp.write(s)
class Track(object):
'''
Tracker for task instantiation running in a separate process. This is a singleton.
'''
def __init__(self, use_websock=True):
# shared memory to store the status of the task in a char array
self.status = mp.Array('c', 256)
self.reset()
self.proc = None
self.init_pipe()
if use_websock:
self.websock = websocket.Server(self.notify)
else:
self.websock = None
def init_pipe(self):
self.tracker_end_of_pipe, self.task_end_of_pipe = mp.Pipe()
def notify(self, msg):
if msg['status'] == "error" or msg['State'] == "stopped":
self.status.value = b""
def runtask(self, **kwargs):
'''
Begin running of task
'''
log_str("Running new task: \n", mode="w")
self.init_pipe()
# initialize task status
# self.status.value = b"testing" if 'saveid' in kwargs else b"running"
self.status.value = b"running" if 'saveid' in kwargs else b"testing"
# create a proxy for interacting with attributes/functions of the task.
# The task runs in a separate process and we cannot directly access python
# attributes of objects in other processes
self.task_proxy = TaskObjProxy(self.tracker_end_of_pipe)
# Spawn the process
args = (self.tracker_end_of_pipe, self.task_end_of_pipe, self.websock)
print("Track.runtask")
print(kwargs)
if 'seq' in kwargs:
kwargs['seq_params'] = kwargs['seq'].params
            kwargs['seq'] = kwargs['seq'].get()  ## retrieve the database data on this end of the pipe
print(kwargs['seq'])
self.task_args = args
self.task_kwargs = kwargs
self.proc = mp.Process(target=remote_runtask, args=args, kwargs=kwargs)
log_str("Spawning process...")
log_str(str(kwargs))
self.proc.start()
def __del__(self):
'''
Destructor for Track object. Not sure if this function ever gets called
since Track is a singleton created upon import of the db.tracker.ajax module...
'''
if not self.websock is None:
self.websock.stop()
# def pausetask(self):
# self.status.value = bytes(self.task_proxy.pause())
def stoptask(self):
'''
Terminate the task gracefully by running riglib.experiment.Experiment.end_task
'''
assert self.status.value in [b"testing", b"running"]
try:
self.task_proxy.end_task()
except Exception as e:
traceback.print_exc()
err = io.StringIO()
traceback.print_exc(None, err)
err.seek(0)
return dict(status="error", msg=err.read())
status = self.status.value.decode("utf-8")
self.status.value = b""
self.reset()
return status
def reset(self):
self.task_proxy = None
self.task_kwargs = {}
self.task_args = ()
def get_status(self):
return self.status.value.decode("utf-8")
def update_alive(self):
""" Check if the remote process is still alive, and if dead, reset the task_proxy object """
if (not self.proc is None) and (not self.proc.is_alive()):
print("process died in error, destroying proxy object")
self.reset()
def task_running(self):
print(self.get_status())
return self.get_status() in ["running", "testing"]
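# --- Illustrative sketch (not part of the original module) ---
# Typical lifecycle of the Track singleton as driven by the web interface,
# with hypothetical task/feature/parameter objects standing in for the real
# database records:
#
#     tracker = Track()
#     tracker.runtask(subj=subject, base_class=TaskClass, feats=[], params={})
#     ...                              # task runs in the spawned process
#     tracker.task_proxy.report()      # attribute/function access is proxied
#     tracker.stoptask()               # graceful shutdown via end_task()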
def remote_runtask(tracker_end_of_pipe, task_end_of_pipe, websock, **kwargs):
'''
Target function to execute in the spawned process to start the task
'''
log_str("remote_runtask")
print("*************************** STARTING TASK *****************************")
use_websock = not (websock is None)
    # Reroute stdout prints to the websocket
if use_websock:
sys.stdout = websock
# os.nice sets the 'niceness' of the task, i.e. how willing the process is
# to share resources with other OS processes. Zero is neutral
if not sys.platform == "win32":
os.nice(0)
status = "running" if 'saveid' in kwargs else "testing"
# Force all tasks to use the Notify feature defined above.
if use_websock:
kwargs['params']['websock'] = websock
kwargs['feats'].insert(0, websocket.NotifyFeat)
kwargs['params']['tracker_status'] = status
kwargs['params']['tracker_end_of_pipe'] = tracker_end_of_pipe
try:
# Instantiate the task
task_wrapper = TaskWrapper(**kwargs)
print("Created task wrapper..")
cmd = task_end_of_pipe.recv()
log_str("Initial command: " + str(cmd))
        # Reroute stdout prints to the websocket
if use_websock: sys.stdout = websock
while (cmd is not None) and (task_wrapper.task.state is not None):
log_str('remote command received: %s, %s, %s\n' % cmd)
try:
fn_name = cmd[0]
cmd_args = cmd[1]
cmd_kwargs = cmd[2]
# look up the function by name
fn = getattr(task_wrapper, fn_name)
# run the function and save the return value as a single object
# if an exception is thrown, the code will jump to the last 'except' case
ret = fn(*cmd_args, **cmd_kwargs)
log_str("return value: %s\n" % str(ret))
# send the return value back to the remote process
task_end_of_pipe.send(ret)
# hang and wait for the next command to come in
log_str("task state = %s, stop status=%s, waiting for next command...\n" % (task_wrapper.task.state, str(task_wrapper.task.stop)))
cmd = task_end_of_pipe.recv()
except KeyboardInterrupt:
# Handle the KeyboardInterrupt separately. How the hell would
# a keyboard interrupt even get here?
cmd = None
except Exception as e:
err = io.StringIO()
log_error(err, mode='a')
task_end_of_pipe.send(e)
if task_end_of_pipe.poll(60.):
cmd = task_end_of_pipe.recv()
else:
cmd = None
log_str('Done with command: %s\n\n' % fn_name)
except:
task_wrapper = None
err = io.StringIO()
log_error(err, mode='a')
err.seek(0)
if use_websock:
websock.send(dict(status="error", msg=err.read()))
err.seek(0)
print(err.read())
log_str('End of task while loop\n')
# Redirect printing from the websocket back to the shell
if use_websock:
websock.write("Running task cleanup functions....\n")
sys.stdout = sys.__stdout__
print("Running task cleanup functions....\n")
# Initiate task cleanup
if task_wrapper is None:
print("\nERROR: Task was never initialized, cannot run cleanup function!")
print("see %s for error messages" % log_filename)
if 'saveid' in kwargs:
from . import dbq
dbq.hide_task_entry(kwargs['saveid'])
print('hiding task entry!')
cleanup_successful = False
else:
log_str("Starting cleanup...")
cleanup_successful = task_wrapper.cleanup()
# inform the user in the browser that the task is done!
if cleanup_successful == True or cleanup_successful is None:
if use_websock: websock.write("\n\n...done!\n")
else:
if use_websock: websock.write("\n\nError! Check for errors in the terminal!\n")
print("*************************** EXITING TASK *****************************")
class TaskWrapper(object):
'''
Wrapper for Experiment classes launched from the web interface
'''
def __init__(self, subj, base_class, feats, params, seq=None, seq_params=None, saveid=None):
'''
Parameters
----------
subj : tracker.models.Subject instance
Database record for subject performing the task
base_class : a child class of riglib.experiment.Experiment
The base class for the task, without the feature mixins
feats : list
List of features to enable for the task
params : json_param.Parameters, or string representation of JSON object
user input on configurable task parameters
seq : models.Sequence instance, or tuple
Database record of Sequence parameters/static target sequence
If passed in as a tuple, then it's the result of calling 'seq.get' on the models.Sequence instance
seq_params: params from seq (see above)
saveid : int, optional
ID number of db.tracker.models.TaskEntry associated with this task
if None specified, then the data saved will not be linked to the
database entry and will be lost after the program exits
'''
log_str("TaskWrapper constructor")
self.saveid = saveid
self.subj = subj
if isinstance(params, Parameters):
self.params = params
elif isinstance(params, str):
self.params = Parameters(params)
elif isinstance(params, dict):
self.params = Parameters.from_dict(params)
if None in feats:
raise Exception("Features not found properly in database!")
else:
Task = experiment.make(base_class, feats=feats)
# Run commands which must be executed before the experiment class can be instantiated (e.g., starting neural recording)
Task.pre_init(saveid=saveid)
self.params.trait_norm(Task.class_traits())
if issubclass(Task, experiment.Sequence):
# from . import models
                # retrieve the sequence data from the db, or from the input argument if the input arg was a tuple
if isinstance(seq, tuple):
gen_constructor, gen_params = seq
elif hasattr(seq, 'get'): #isinstance(seq, models.Sequence):
gen_constructor, gen_params = seq.get()
# Typically, 'gen_constructor' is the experiment.generate.runseq function (not an element of namelist.generators)
else:
raise ValueError("Unrecognized type for seq")
gen = gen_constructor(Task, **gen_params)
self.params.params['seq_params'] = seq_params
# 'gen' is now a true python generator usable by experiment.Sequence
self.task = Task(gen, **self.params.params)
log_str("instantiating task with a generator\n")
else:
self.task = Task(**self.params.params)
self.task.start()
def report(self):
return experiment.report(self.task)
def pause(self):
self.task.pause = not self.task.pause
return "pause" if self.task.pause else "running"
def end_task(self):
return self.task.end_task()
def enable_clda(self):
self.task.enable_clda()
def disable_clda(self):
self.task.disable_clda()
def get_state(self):
return self.task.state
def __getattr__(self, attr):
""" Redirect attribute access to the task object if the attribute can't be found in the wrapper """
try:
return self.task.__getattribute__(attr)
except:
raise AttributeError("Could not get task attribute: %s" % attr)
def set_task_attr(self, attr, value):
setattr(self.task, attr, value)
def cleanup(self):
self.task.join()
print("Calling saveout/task cleanup code")
if self.saveid is not None:
# get object representing function calls to the remote database
# returns the result of tracker.dbq.rpc_handler
database = xmlrpc.client.ServerProxy("http://localhost:8000/RPC2/", allow_none=True)
# from tracker import dbq as database
cleanup_successful = self.task.cleanup(database, self.saveid, subject=self.subj)
# if not self.task._task_init_complete:
# from tracker import dbq
# dbq.hide_task_entry(self.saveid)
# print 'hiding task entry!'
# else:
# print 'not hiding task entry!'
else:
cleanup_successful = True
self.task.terminate()
return cleanup_successful
class TaskObjProxy(object):
def __init__(self, tracker_end_of_pipe):
self.tracker_end_of_pipe = tracker_end_of_pipe
def __getattr__(self, attr):
log_str("remotely getting attribute: %s\n" % attr)
self.tracker_end_of_pipe.send(("__getattr__", [attr], {}))
ret = self.tracker_end_of_pipe.recv()
if isinstance(ret, Exception):
            # Assume that the attribute can't be retrieved because the name
            # refers to a function
ret = FuncProxy(attr, self.tracker_end_of_pipe)
return ret
def end_task(self):
end_task_fn = FuncProxy("end_task", self.tracker_end_of_pipe)
end_task_fn()
self.tracker_end_of_pipe.send(None)
def remote_set_attr(self, attr, value):
log_str('trying to remotely set attribute %s to %s\n' % (attr, value))
ret = FuncProxy('set_task_attr', self.tracker_end_of_pipe)
ret(attr, value)
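# --- Illustrative sketch (not part of the original module) ---
# The proxy protocol above ships (name, args, kwargs) tuples over the pipe;
# remote_runtask receives each tuple, looks the name up on TaskWrapper, calls
# it, and sends the return value back. A hypothetical round trip:
#
#     proxy = TaskObjProxy(tracker_end_of_pipe)
#     state = proxy.get_state()   # "__getattr__" fails over to a FuncProxy,
#                                 # which presumably sends ("get_state", (), {})
#     proxy.remote_set_attr("pause", True)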
|
py
|
1a5c5ca55574272903edf274dbfea30dba0e2c05
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
python lr_generator.py
"""
import numpy as np
from mindspore import Tensor
from src.config import cfg
def lr_generator(lr_init, total_epochs, steps_per_epoch):
lr_each_step = []
for i in range(total_epochs):
if i in cfg.schedule:
lr_init *= cfg.gamma
for _ in range(steps_per_epoch):
lr_each_step.append(lr_init)
lr_each_step = np.array(lr_each_step).astype(np.float32)
return Tensor(lr_each_step)
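if __name__ == "__main__":
    # Illustrative sketch (not part of the original script): inspect the
    # step-wise learning rates. The numbers are hypothetical; the real decay
    # epochs and factor come from cfg.schedule and cfg.gamma.
    lr = lr_generator(lr_init=0.1, total_epochs=2, steps_per_epoch=3)
    print(lr.asnumpy())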
|
py
|
1a5c5cb5a175e53d3f0a4d095e76ab2d8408cdec
|
import json
from query.selection import AndClause, Compound, OrClause, ScalarSelector
class TempoIQEncoder(json.JSONEncoder):
def encode_point(self, point):
return {
't': self.encode_datetime(point.timestamp),
'v': point.value}
def encode_datetime(self, dt):
return dt.isoformat()
class WriteEncoder(TempoIQEncoder):
encoders = {
'Device': 'encode_device',
'Sensor': 'encode_sensor',
'Point': 'encode_point',
'datetime': 'encode_datetime',
'Rule': 'encode_rule',
'Trigger': 'encode_trigger',
'Webhook': 'encode_webhook'
}
def default(self, o):
encoder_name = self.encoders.get(o.__class__.__name__)
if encoder_name is None:
super(TempoIQEncoder, self).default(o)
encoder = getattr(self, encoder_name)
return encoder(o)
def encode_condition(self, condition):
return {
'trigger': self.encode_trigger(condition.trigger),
'filter': {
'and': map(self.encode_filter, condition.filters)
}
}
def encode_device(self, device):
return device.key
def encode_filter(self, _filter):
return {
'operation': _filter.inclusion,
'type': _filter.filter_type,
'arguments': _filter.args
}
def encode_rule(self, rule):
read_encoder = ReadEncoder()
j = {
'conditions': map(self.encode_condition, rule.conditions),
'name': rule.name,
'alerts': rule.alert_by,
'actions': [self.default(rule.action)],
'selection': {
'search': {
'filters': {
'devices': read_encoder.default(
rule.selection['devices']),
'sensors': read_encoder.default(
rule.selection['sensors'])
}
}
}
}
if rule.key is not None:
j['key'] = rule.key
return j
def encode_sensor(self, sensor):
return sensor.key
def encode_trigger(self, trigger):
return {
'name': trigger.trigger_type,
'arguments': trigger.args
}
def encode_webhook(self, webhook):
return {
'url': webhook.url
}
class CreateEncoder(TempoIQEncoder):
encoders = {
'Device': 'encode_device',
'Sensor': 'encode_sensor'
}
def default(self, o):
encoder_name = self.encoders.get(o.__class__.__name__)
if encoder_name is None:
super(TempoIQEncoder, self).default(o)
encoder = getattr(self, encoder_name)
return encoder(o)
def encode_device(self, device):
return {
'key': device.key,
'name': device.name,
'attributes': device.attributes,
'sensors': map(self.encode_sensor, device.sensors)
}
def encode_sensor(self, sensor):
return {
'key': sensor.key,
'name': sensor.name,
'attributes': sensor.attributes
}
class ReadEncoder(TempoIQEncoder):
encoders = {
'Point': 'encode_point',
'datetime': 'encode_datetime',
'ScalarSelector': 'encode_scalar_selector',
'AndClause': 'encode_compound_clause',
'OrClause': 'encode_compound_clause',
'QueryBuilder': 'encode_query_builder',
'Selection': 'encode_selection',
'Find': 'encode_function',
'Interpolation': 'encode_function',
'MultiRollup': 'encode_function',
'Rollup': 'encode_function',
'Aggregation': 'encode_function',
'ConvertTZ': 'encode_function'
}
def default(self, o):
encoder_name = self.encoders.get(o.__class__.__name__)
if encoder_name is None:
super(TempoIQEncoder, self).default(o)
encoder = getattr(self, encoder_name)
return encoder(o)
def encode_compound_clause(self, clause):
name = None
if isinstance(clause, AndClause):
name = 'and'
else:
name = 'or'
result = []
for selector in clause.selectors:
if isinstance(selector, (AndClause, OrClause)):
result.append(self.encode_compound_clause(selector))
elif isinstance(selector, ScalarSelector):
result.append(self.encode_scalar_selector(selector))
else:
raise ValueError("invalid selector type")
return {
name: result
}
def encode_function(self, function):
return {
'name': function.name,
'arguments': function.args
}
def encode_query_builder(self, builder):
j = {
'search': {
'select': builder.object_type,
'filters': {
'devices': self.encode_selection(
builder.selection['devices']),
'sensors': self.encode_selection(
builder.selection['sensors'])
}
},
builder.operation.name: builder.operation.args
}
if not j['search']['filters']['devices']:
if not j['search']['filters']['sensors']:
j['search']['filters']['devices'] = 'all'
j['search']['filters']['sensors'] = 'all'
else:
j['search']['filters']['devices'] = 'all'
else:
if not j['search']['filters']['sensors']:
j['search']['filters']['sensors'] = 'all'
if len(builder.pipeline) > 0:
j['fold'] = {
'functions': map(self.encode_function, builder.pipeline)
}
return j
def encode_scalar_selector(self, selector):
return {
selector.key: selector.value
}
def encode_selection(self, selection):
if selection.selection is None:
return {}
if isinstance(selection.selection, Compound):
if len(selection.selection.selectors) == 0:
return {}
else:
return self.default(selection.selection)
return self.encode_scalar_selector(selection.selection)
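# --- Illustrative sketch (not part of the original module) ---
# These encoders are intended to be handed to json.dumps via ``cls``. A
# hypothetical write payload for one device/sensor pair might look like:
#
#     import json
#     body = json.dumps({device.key: {sensor.key: [point]}}, cls=WriteEncoder)
#
# where ``device``, ``sensor`` and ``point`` are client model objects; the
# Device and Sensor branches collapse to their keys and each Point becomes
# {"t": <ISO-8601 timestamp>, "v": <value>}.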
|
py
|
1a5c5dae4c042d3ff9e3ceaf736e735e37197529
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Boot Interface for iLO drivers and its supporting methods.
"""
import os
import tempfile
from ironic_lib import utils as ironic_utils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six.moves.urllib.parse as urlparse
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LW
from ironic.common import image_service
from ironic.common import images
from ironic.common import states
from ironic.common import swift
from ironic.conductor import utils as manager_utils
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.ilo import common as ilo_common
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
REQUIRED_PROPERTIES = {
'ilo_deploy_iso': _("UUID (from Glance) of the deployment ISO. "
"Required.")
}
COMMON_PROPERTIES = REQUIRED_PROPERTIES
def parse_driver_info(node):
"""Gets the driver specific Node deployment info.
This method validates whether the 'driver_info' property of the
supplied node contains the required information for this driver to
deploy images to the node.
:param node: a single Node.
:returns: A dict with the driver_info values.
:raises: MissingParameterValue, if any of the required parameters are
missing.
"""
info = node.driver_info
d_info = {}
d_info['ilo_deploy_iso'] = info.get('ilo_deploy_iso')
error_msg = _("Error validating iLO virtual media deploy. Some parameters"
" were missing in node's driver_info")
deploy_utils.check_for_missing_params(d_info, error_msg)
return d_info
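# Illustrative sketch (not part of the original module): for a node whose
# driver_info contains {'ilo_deploy_iso': '<glance-uuid>'} this returns
# {'ilo_deploy_iso': '<glance-uuid>'}; if the value is missing,
# deploy_utils.check_for_missing_params raises MissingParameterValue.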
def _get_boot_iso_object_name(node):
"""Returns the boot iso object name for a given node.
:param node: the node for which object name is to be provided.
"""
return "boot-%s" % node.uuid
def _get_boot_iso(task, root_uuid):
"""This method returns a boot ISO to boot the node.
    It chooses one of the three options in the order below:
1. Does nothing if 'ilo_boot_iso' is present in node's instance_info and
'boot_iso_created_in_web_server' is not set in 'driver_internal_info'.
2. Image deployed has a meta-property 'boot_iso' in Glance. This should
refer to the UUID of the boot_iso which exists in Glance.
3. Generates a boot ISO on the fly using kernel and ramdisk mentioned in
the image deployed. It uploads the generated boot ISO to Swift.
:param task: a TaskManager instance containing the node to act on.
:param root_uuid: the uuid of the root partition.
:returns: boot ISO URL. Should be either of below:
* A Swift object - It should be of format 'swift:<object-name>'. It is
assumed that the image object is present in
CONF.ilo.swift_ilo_container;
        * A Glance image - It should be of format 'glance://<glance-image-uuid>'
or just <glance-image-uuid>;
* An HTTP URL.
On error finding the boot iso, it returns None.
:raises: MissingParameterValue, if any of the required parameters are
missing in the node's driver_info or instance_info.
:raises: InvalidParameterValue, if any of the parameters have invalid
value in the node's driver_info or instance_info.
:raises: SwiftOperationError, if operation with Swift fails.
:raises: ImageCreationFailed, if creation of boot ISO failed.
:raises: exception.ImageRefValidationFailed if ilo_boot_iso is not
HTTP(S) URL.
"""
LOG.debug("Trying to get a boot ISO to boot the baremetal node")
# Option 1 - Check if user has provided ilo_boot_iso in node's
# instance_info
driver_internal_info = task.node.driver_internal_info
boot_iso_created_in_web_server = (
driver_internal_info.get('boot_iso_created_in_web_server'))
if (task.node.instance_info.get('ilo_boot_iso')
and not boot_iso_created_in_web_server):
LOG.debug("Using ilo_boot_iso provided in node's instance_info")
boot_iso = task.node.instance_info['ilo_boot_iso']
if not service_utils.is_glance_image(boot_iso):
try:
image_service.HttpImageService().validate_href(boot_iso)
except exception.ImageRefValidationFailed:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Virtual media deploy accepts only Glance "
"images or HTTP(S) URLs as "
"instance_info['ilo_boot_iso']. Either %s "
"is not a valid HTTP(S) URL or is "
"not reachable."), boot_iso)
return task.node.instance_info['ilo_boot_iso']
# Option 2 - Check if user has provided a boot_iso in Glance. If boot_iso
# is a supported non-glance href execution will proceed to option 3.
deploy_info = _parse_deploy_info(task.node)
image_href = deploy_info['image_source']
image_properties = (
images.get_image_properties(
task.context, image_href, ['boot_iso', 'kernel_id', 'ramdisk_id']))
boot_iso_uuid = image_properties.get('boot_iso')
kernel_href = (task.node.instance_info.get('kernel') or
image_properties.get('kernel_id'))
ramdisk_href = (task.node.instance_info.get('ramdisk') or
image_properties.get('ramdisk_id'))
if boot_iso_uuid:
LOG.debug("Found boot_iso %s in Glance", boot_iso_uuid)
return boot_iso_uuid
if not kernel_href or not ramdisk_href:
LOG.error(_LE("Unable to find kernel or ramdisk for "
"image %(image)s to generate boot ISO for %(node)s"),
{'image': image_href, 'node': task.node.uuid})
return
# NOTE(rameshg87): Functionality to share the boot ISOs created for
# similar instances (instances with same deployed image) is
# not implemented as of now. Creation/Deletion of such a shared boot ISO
# will require synchronisation across conductor nodes for the shared boot
# ISO. Such a synchronisation mechanism doesn't exist in ironic as of now.
# Option 3 - Create boot_iso from kernel/ramdisk, upload to Swift
# or web server and provide its name.
deploy_iso_uuid = deploy_info['ilo_deploy_iso']
boot_mode = deploy_utils.get_boot_mode_for_deploy(task.node)
boot_iso_object_name = _get_boot_iso_object_name(task.node)
kernel_params = CONF.pxe.pxe_append_params
with tempfile.NamedTemporaryFile(dir=CONF.tempdir) as fileobj:
boot_iso_tmp_file = fileobj.name
images.create_boot_iso(task.context, boot_iso_tmp_file,
kernel_href, ramdisk_href,
deploy_iso_uuid, root_uuid,
kernel_params, boot_mode)
if CONF.ilo.use_web_server_for_images:
boot_iso_url = (
ilo_common.copy_image_to_web_server(boot_iso_tmp_file,
boot_iso_object_name))
driver_internal_info = task.node.driver_internal_info
driver_internal_info['boot_iso_created_in_web_server'] = True
task.node.driver_internal_info = driver_internal_info
task.node.save()
LOG.debug("Created boot_iso %(boot_iso)s for node %(node)s",
{'boot_iso': boot_iso_url, 'node': task.node.uuid})
return boot_iso_url
else:
container = CONF.ilo.swift_ilo_container
swift_api = swift.SwiftAPI()
swift_api.create_object(container, boot_iso_object_name,
boot_iso_tmp_file)
LOG.debug("Created boot_iso %s in Swift", boot_iso_object_name)
return 'swift:%s' % boot_iso_object_name
def _clean_up_boot_iso_for_instance(node):
"""Deletes the boot ISO if it was created for the instance.
:param node: an ironic node object.
"""
ilo_boot_iso = node.instance_info.get('ilo_boot_iso')
if not ilo_boot_iso:
return
if ilo_boot_iso.startswith('swift'):
swift_api = swift.SwiftAPI()
container = CONF.ilo.swift_ilo_container
boot_iso_object_name = _get_boot_iso_object_name(node)
try:
swift_api.delete_object(container, boot_iso_object_name)
except exception.SwiftOperationError as e:
LOG.exception(_LE("Failed to clean up boot ISO for node "
"%(node)s. Error: %(error)s."),
{'node': node.uuid, 'error': e})
elif CONF.ilo.use_web_server_for_images:
result = urlparse.urlparse(ilo_boot_iso)
ilo_boot_iso_name = os.path.basename(result.path)
boot_iso_path = os.path.join(
CONF.deploy.http_root, ilo_boot_iso_name)
ironic_utils.unlink_without_raise(boot_iso_path)
def _parse_deploy_info(node):
"""Gets the instance and driver specific Node deployment info.
This method validates whether the 'instance_info' and 'driver_info'
property of the supplied node contains the required information for
this driver to deploy images to the node.
:param node: a single Node.
:returns: A dict with the instance_info and driver_info values.
:raises: MissingParameterValue, if any of the required parameters are
missing.
:raises: InvalidParameterValue, if any of the parameters have invalid
value.
"""
info = {}
info.update(deploy_utils.get_image_instance_info(node))
info.update(parse_driver_info(node))
return info
class IloVirtualMediaBoot(base.BootInterface):
def get_properties(self):
return COMMON_PROPERTIES
def validate(self, task):
"""Validate the deployment information for the task's node.
:param task: a TaskManager instance containing the node to act on.
:raises: InvalidParameterValue, if some information is invalid.
:raises: MissingParameterValue if 'kernel_id' and 'ramdisk_id' are
missing in the Glance image or 'kernel' and 'ramdisk' not provided
in instance_info for non-Glance image.
"""
node = task.node
d_info = _parse_deploy_info(node)
if node.driver_internal_info.get('is_whole_disk_image'):
props = []
elif service_utils.is_glance_image(d_info['image_source']):
props = ['kernel_id', 'ramdisk_id']
else:
props = ['kernel', 'ramdisk']
deploy_utils.validate_image_properties(task.context, d_info, props)
def prepare_ramdisk(self, task, ramdisk_params):
"""Prepares the boot of deploy ramdisk using virtual media.
This method prepares the boot of the deploy ramdisk after
reading relevant information from the node's driver_info and
instance_info.
:param task: a task from TaskManager.
:param ramdisk_params: the parameters to be passed to the ramdisk.
:returns: None
:raises: MissingParameterValue, if some information is missing in
node's driver_info or instance_info.
:raises: InvalidParameterValue, if some information provided is
invalid.
        :raises: IronicException, if some power or set-boot-device
operation failed on the node.
:raises: IloOperationError, if some operation on iLO failed.
"""
node = task.node
# NOTE(TheJulia): If this method is being called by something
# aside from deployment and clean, such as conductor takeover, we
# should treat this as a no-op and move on otherwise we would modify
# the state of the node due to virtual media operations.
if (node.provision_state != states.DEPLOYING and
node.provision_state != states.CLEANING):
return
# Clear ilo_boot_iso if it's a glance image to force recreate
# another one again (or use existing one in glance).
# This is mainly for rebuild scenario.
if service_utils.is_glance_image(
node.instance_info.get('image_source')):
instance_info = node.instance_info
instance_info.pop('ilo_boot_iso', None)
node.instance_info = instance_info
node.save()
# Eject all virtual media devices, as we are going to use them
# during deploy.
ilo_common.eject_vmedia_devices(task)
deploy_nic_mac = deploy_utils.get_single_nic_with_vif_port_id(task)
ramdisk_params['BOOTIF'] = deploy_nic_mac
deploy_iso = node.driver_info['ilo_deploy_iso']
ilo_common.setup_vmedia(task, deploy_iso, ramdisk_params)
def prepare_instance(self, task):
"""Prepares the boot of instance.
This method prepares the boot of the instance after reading
relevant information from the node's instance_info.
It does the following depending on boot_option for deploy:
- If the boot_option requested for this deploy is 'local' or image
is a whole disk image, then it sets the node to boot from disk.
- Otherwise it finds/creates the boot ISO to boot the instance
image, attaches the boot ISO to the bare metal and then sets
the node to boot from CDROM.
:param task: a task from TaskManager.
:returns: None
:raises: IloOperationError, if some operation on iLO failed.
"""
ilo_common.cleanup_vmedia_boot(task)
# For iscsi_ilo driver, we boot from disk every time if the image
# deployed is a whole disk image.
node = task.node
iwdi = node.driver_internal_info.get('is_whole_disk_image')
if deploy_utils.get_boot_option(node) == "local" or iwdi:
manager_utils.node_set_boot_device(task, boot_devices.DISK,
persistent=True)
else:
drv_int_info = node.driver_internal_info
root_uuid_or_disk_id = drv_int_info.get('root_uuid_or_disk_id')
if root_uuid_or_disk_id:
self._configure_vmedia_boot(task, root_uuid_or_disk_id)
else:
LOG.warning(_LW("The UUID for the root partition could not "
"be found for node %s"), node.uuid)
def clean_up_instance(self, task):
"""Cleans up the boot of instance.
This method cleans up the environment that was setup for booting
the instance. It ejects virtual media
:param task: a task from TaskManager.
:returns: None
:raises: IloOperationError, if some operation on iLO failed.
"""
_clean_up_boot_iso_for_instance(task.node)
driver_internal_info = task.node.driver_internal_info
driver_internal_info.pop('boot_iso_created_in_web_server', None)
driver_internal_info.pop('root_uuid_or_disk_id', None)
task.node.driver_internal_info = driver_internal_info
task.node.save()
ilo_common.cleanup_vmedia_boot(task)
def clean_up_ramdisk(self, task):
"""Cleans up the boot of ironic ramdisk.
This method cleans up virtual media devices setup for the deploy
ramdisk.
:param task: a task from TaskManager.
:returns: None
:raises: IloOperationError, if some operation on iLO failed.
"""
ilo_common.cleanup_vmedia_boot(task)
def _configure_vmedia_boot(self, task, root_uuid):
"""Configure vmedia boot for the node.
:param task: a task from TaskManager.
:param root_uuid: uuid of the root partition
:returns: None
:raises: IloOperationError, if some operation on iLO failed.
"""
node = task.node
boot_iso = _get_boot_iso(task, root_uuid)
if not boot_iso:
LOG.error(_LE("Cannot get boot ISO for node %s"), node.uuid)
return
        # Upon deploy completion, some distro cloud images reboot the system as
        # part of their configuration. Hence the boot device should be persistent,
        # not one-time.
ilo_common.setup_vmedia_for_boot(task, boot_iso)
manager_utils.node_set_boot_device(task,
boot_devices.CDROM,
persistent=True)
i_info = node.instance_info
i_info['ilo_boot_iso'] = boot_iso
node.instance_info = i_info
node.save()
|
py
|
1a5c5e4ae485bf0c20a8c367a5bec2bd99714c17
|
import copy
import json
import logging
import math
import os
import shutil
import tarfile
import tempfile
import sys
from io import open
import torch.nn.functional as F
import torch
from torch import nn
from torch.nn import CrossEntropyLoss, SmoothL1Loss
import numpy as np
def resize_pos_embed(posemb, posemb_new):
# Rescale the grid of position embeddings when loading from state_dict. Adapted from
# https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224
ntok_new = posemb_new.shape[1]
if True:
posemb_tok, posemb_grid = posemb[:, :1], posemb[0, 1:]
ntok_new -= 1
else:
posemb_tok, posemb_grid = posemb[:, :0], posemb[0]
gs_old = int(math.sqrt(len(posemb_grid)))
gs_new = int(math.sqrt(ntok_new))
#_logger.info('Position embedding grid-size from %s to %s', gs_old, gs_new)
posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2)
posemb_grid = F.interpolate(posemb_grid, size=(gs_new, gs_new), mode='bilinear')
posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new * gs_new, -1)
posemb = torch.cat([posemb_tok, posemb_grid], dim=1)
return posemb
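# --- Illustrative sketch (not part of the original module) ---
# Resizing a hypothetical 7x7 grid (+1 class token) embedding to a 4x4 grid:
#
#     old = torch.zeros(1, 7 * 7 + 1, 768)
#     new = torch.zeros(1, 4 * 4 + 1, 768)
#     resize_pos_embed(old, new).shape   # -> torch.Size([1, 17, 768])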
def initialize_clip(VISUAL_CONFIG, num_patches = 240, adapter_config=None):
import clip
clip_model, preprocess = clip.load(VISUAL_CONFIG.clip_model_name, jit=False, adapter_config=adapter_config)
if VISUAL_CONFIG.clip_model_name == "ViT-B/32" and VISUAL_CONFIG.reset_pos_embedding:
#from timm.models.vision_transformer import resize_pos_embed
pos_embed = nn.Parameter(torch.zeros(num_patches + 1, 768).float())
pos_embed.weight = resize_pos_embed(clip_model.visual.positional_embedding.unsqueeze(0), pos_embed.unsqueeze(0))
clip_model.visual.positional_embedding = pos_embed
# model.visual.positional_embedding = model.visual.positional_embedding.to("cuda")
#print(model.visual.positional_embedding.device)
# pass
if VISUAL_CONFIG.freeze_clip:
for parameter in clip_model.parameters():
parameter.requires_grad = False
return clip_model
def initialize_vit(VISUAL_CONFIG, model_type = "ViT-B_32", pretrained_dir = "data/ViT-B_32.npz", img_size = (384, 640), num_patches = 240):
from vit.models.modeling import VisionTransformer, CONFIGS
config = CONFIGS[model_type]
model = VisionTransformer(config, img_size = 224, zero_head=True, num_classes=1)
model.load_from(np.load(pretrained_dir))
pos_embed = nn.Parameter(torch.zeros(num_patches + 1, 768).float())
pos_embed.weight = resize_pos_embed(model.transformer.embeddings.position_embeddings, pos_embed.unsqueeze(0))
model.transformer.embeddings.position_embeddings = pos_embed
if VISUAL_CONFIG.freeze_clip:
for parameter in model.parameters():
parameter.requires_grad = False
return model
def initialize_optimizer(visual_model, lr, momentum, weight_decay):
optimizer = torch.optim.SGD(visual_model.parameters(), lr,
momentum=momentum,
weight_decay=weight_decay)
return optimizer
def adjust_learning_rate(optimizer, epoch, args):
"""Decay the learning rate based on schedule"""
lr = args.sgd_lr
for milestone in args.schedule:
lr *= 0.1 if epoch >= milestone else 1.
for param_group in optimizer.param_groups:
param_group['lr'] = lr
from torch.optim import Optimizer
class FusedOptimizer(Optimizer):
def __init__(self, optimizers):
self.optimizers = optimizers
param_groups = []
for optimizer in self.optimizers:
param_groups += optimizer.param_groups
#super(FusedOptimizer, self).__init__([], {})
self.param_groups = param_groups
def step(self):
for optimizer in self.optimizers:
optimizer.step()
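if __name__ == "__main__":
    # Illustrative sketch (not part of the original module): fuse two
    # optimizers over disjoint parameter groups so one .step() advances both.
    # The toy modules and hyper-parameters below are hypothetical.
    backbone = nn.Linear(4, 4)
    head = nn.Linear(4, 2)
    fused = FusedOptimizer([
        torch.optim.SGD(backbone.parameters(), lr=1e-3, momentum=0.9),
        torch.optim.Adam(head.parameters(), lr=1e-4),
    ])
    head(backbone(torch.randn(2, 4))).sum().backward()
    fused.step()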
|
py
|
1a5c5f1e97ef438f3b3f4d96c4bf71c72843c3d6
|
# Copyright (c) 2013 NTT DOCOMO, INC.
# Copyright 2014 IBM Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The bare-metal admin extension."""
from oslo_utils import importutils
import webob
from jacket.api.compute.openstack import common
from jacket.api.compute.openstack import extensions
from jacket.api.compute.openstack import wsgi
import jacket.compute.conf
from jacket.i18n import _
ironic_client = importutils.try_import('ironicclient.client')
ironic_exc = importutils.try_import('ironicclient.exc')
ALIAS = "os-baremetal-nodes"
authorize = extensions.os_compute_authorizer(ALIAS)
node_fields = ['id', 'cpus', 'local_gb', 'memory_mb', 'pm_address',
'pm_user', 'service_host', 'terminal_port', 'instance_uuid']
node_ext_fields = ['uuid', 'task_state', 'updated_at', 'pxe_config_path']
interface_fields = ['id', 'address', 'datapath_id', 'port_no']
CONF = jacket.compute.conf.CONF
CONF.import_opt('api_version',
'jacket.compute.virt.ironic.driver',
group='ironic')
CONF.import_opt('api_endpoint',
'jacket.compute.virt.ironic.driver',
group='ironic')
CONF.import_opt('admin_username',
'jacket.compute.virt.ironic.driver',
group='ironic')
CONF.import_opt('admin_password',
'jacket.compute.virt.ironic.driver',
group='ironic')
CONF.import_opt('admin_tenant_name',
'jacket.compute.virt.ironic.driver',
group='ironic')
def _check_ironic_client_enabled():
"""Check whether Ironic is installed or not."""
if ironic_client is None:
common.raise_feature_not_supported()
def _get_ironic_client():
"""return an Ironic client."""
# TODO(NobodyCam): Fix insecure setting
kwargs = {'os_username': CONF.ironic.admin_username,
'os_password': CONF.ironic.admin_password,
'os_auth_url': CONF.ironic.admin_url,
'os_tenant_name': CONF.ironic.admin_tenant_name,
'os_service_type': 'baremetal',
'os_endpoint_type': 'public',
'insecure': 'true',
'ironic_url': CONF.ironic.api_endpoint}
icli = ironic_client.get_client(CONF.ironic.api_version, **kwargs)
return icli
def _no_ironic_proxy(cmd):
raise webob.exc.HTTPBadRequest(
explanation=_("Command Not supported. Please use Ironic "
"command %(cmd)s to perform this "
"action.") % {'cmd': cmd})
class BareMetalNodeController(wsgi.Controller):
"""The Bare-Metal Node API controller for the OpenStack API."""
def _node_dict(self, node_ref):
d = {}
for f in node_fields:
d[f] = node_ref.get(f)
for f in node_ext_fields:
d[f] = node_ref.get(f)
return d
@extensions.expected_errors((404, 501))
def index(self, req):
context = req.environ['compute.context']
authorize(context)
nodes = []
# proxy command to Ironic
_check_ironic_client_enabled()
icli = _get_ironic_client()
ironic_nodes = icli.node.list(detail=True)
for inode in ironic_nodes:
node = {'id': inode.uuid,
'interfaces': [],
'host': 'IRONIC MANAGED',
'task_state': inode.provision_state,
'cpus': inode.properties.get('cpus', 0),
'memory_mb': inode.properties.get('memory_mb', 0),
'disk_gb': inode.properties.get('local_gb', 0)}
nodes.append(node)
return {'nodes': nodes}
@extensions.expected_errors((404, 501))
def show(self, req, id):
context = req.environ['compute.context']
authorize(context)
# proxy command to Ironic
_check_ironic_client_enabled()
icli = _get_ironic_client()
try:
inode = icli.node.get(id)
except ironic_exc.NotFound:
msg = _("Node %s could not be found.") % id
raise webob.exc.HTTPNotFound(explanation=msg)
iports = icli.node.list_ports(id)
node = {'id': inode.uuid,
'interfaces': [],
'host': 'IRONIC MANAGED',
'task_state': inode.provision_state,
'cpus': inode.properties.get('cpus', 0),
'memory_mb': inode.properties.get('memory_mb', 0),
'disk_gb': inode.properties.get('local_gb', 0),
'instance_uuid': inode.instance_uuid}
for port in iports:
node['interfaces'].append({'address': port.address})
return {'node': node}
@extensions.expected_errors(400)
def create(self, req, body):
_no_ironic_proxy("port-create")
@extensions.expected_errors(400)
def delete(self, req, id):
_no_ironic_proxy("port-create")
@wsgi.action('add_interface')
@extensions.expected_errors(400)
def _add_interface(self, req, id, body):
_no_ironic_proxy("port-create")
@wsgi.action('remove_interface')
@extensions.expected_errors(400)
def _remove_interface(self, req, id, body):
_no_ironic_proxy("port-delete")
class BareMetalNodes(extensions.V21APIExtensionBase):
"""Admin-only bare-metal node administration."""
name = "BareMetalNodes"
alias = ALIAS
version = 1
def get_resources(self):
resource = [extensions.ResourceExtension(ALIAS,
BareMetalNodeController(),
member_actions={"action": "POST"})]
return resource
def get_controller_extensions(self):
"""It's an abstract function V21APIExtensionBase and the extension
will not be loaded without it.
"""
return []
|
py
|
1a5c606ec44a19b00c5d729807b5275b390e1c39
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao ([email protected])
# Modified by Dequan Wang and Xingyi Zhou
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import math
import logging
import torch
import torch.nn as nn
from .DCNv2.dcn_v2 import DCN
import torch.utils.model_zoo as model_zoo
BN_MOMENTUM = 0.1
logger = logging.getLogger(__name__)
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1,
bias=False)
self.bn3 = nn.BatchNorm2d(planes * self.expansion,
momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
def fill_up_weights(up):
w = up.weight.data
f = math.ceil(w.size(2) / 2)
c = (2 * f - 1 - f % 2) / (2. * f)
for i in range(w.size(2)):
for j in range(w.size(3)):
w[0, 0, i, j] = \
(1 - math.fabs(i / f - c)) * (1 - math.fabs(j / f - c))
for c in range(1, w.size(0)):
w[c, 0, :, :] = w[0, 0, :, :]
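# --- Illustrative sketch (not part of the original module) ---
# fill_up_weights initialises a transposed convolution with a bilinear
# upsampling kernel; for kernel_size=4 each filled 4x4 kernel is the outer
# product of [0.25, 0.75, 0.75, 0.25] with itself, e.g.:
#
#     up = nn.ConvTranspose2d(2, 2, kernel_size=4, stride=2, padding=1,
#                             groups=2, bias=False)
#     fill_up_weights(up)
#     up.weight.data[0, 0]   # the 4x4 bilinear kernel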
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
nn.init.normal_(m.weight, std=0.001)
# torch.nn.init.kaiming_normal_(m.weight.data, nonlinearity='relu')
# torch.nn.init.xavier_normal_(m.weight.data)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
class PoseResNet(nn.Module):
def __init__(self, block, layers, heads, head_conv):
self.inplanes = 64
self.heads = heads
self.deconv_with_bias = False
super(PoseResNet, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
# used for deconv layers
self.deconv_layers = self._make_deconv_layer(
3,
[256, 128, 64],
[4, 4, 4],
)
for head in self.heads:
classes = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(64, head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, classes,
kernel_size=1, stride=1,
padding=0, bias=True))
if 'hm' in head:
fc[-1].bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
else:
fc = nn.Conv2d(64, classes,
kernel_size=1, stride=1,
padding=0, bias=True)
if 'hm' in head:
fc.bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
self.__setattr__(head, fc)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def _get_deconv_cfg(self, deconv_kernel, index):
if deconv_kernel == 4:
padding = 1
output_padding = 0
elif deconv_kernel == 3:
padding = 1
output_padding = 1
elif deconv_kernel == 2:
padding = 0
output_padding = 0
return deconv_kernel, padding, output_padding
def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different from len(num_deconv_filters)'
assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different from len(num_deconv_kernels)'
layers = []
for i in range(num_layers):
kernel, padding, output_padding = \
self._get_deconv_cfg(num_kernels[i], i)
planes = num_filters[i]
fc = DCN(self.inplanes, planes,
kernel_size=(3, 3), stride=1,
padding=1, dilation=1, deformable_groups=1)
# fc = nn.Conv2d(self.inplanes, planes,
# kernel_size=3, stride=1,
# padding=1, dilation=1, bias=False)
# fill_fc_weights(fc)
up = nn.ConvTranspose2d(
in_channels=planes,
out_channels=planes,
kernel_size=kernel,
stride=2,
padding=padding,
output_padding=output_padding,
bias=self.deconv_with_bias)
fill_up_weights(up)
layers.append(fc)
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
layers.append(up)
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
self.inplanes = planes
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.deconv_layers(x)
ret = {}
for head in self.heads:
ret[head] = self.__getattr__(head)(x)
return [ret]
def init_weights(self, num_layers):
if 1:
url = model_urls['resnet{}'.format(num_layers)]
pretrained_state_dict = model_zoo.load_url(url)
print('=> loading pretrained model {}'.format(url))
self.load_state_dict(pretrained_state_dict, strict=False)
print('=> init deconv weights from normal distribution')
for name, m in self.deconv_layers.named_modules():
if isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
resnet_spec = {18: (BasicBlock, [2, 2, 2, 2]),
34: (BasicBlock, [3, 4, 6, 3]),
50: (Bottleneck, [3, 4, 6, 3]),
101: (Bottleneck, [3, 4, 23, 3]),
152: (Bottleneck, [3, 8, 36, 3])}
def get_pose_net(num_layers, heads, head_conv=256, opt=None):
block_class, layers = resnet_spec[num_layers]
model = PoseResNet(block_class, layers, heads, head_conv=head_conv)
model.init_weights(num_layers)
return model
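if __name__ == "__main__":
    # Illustrative sketch (not part of the original module): build a ResNet-18
    # pose network with hypothetical CenterNet-style heads and run a dummy
    # forward pass. Requires the compiled DCNv2 extension and downloads the
    # ImageNet-pretrained backbone weights.
    net = get_pose_net(18, heads={'hm': 80, 'wh': 2, 'reg': 2})
    out = net(torch.randn(1, 3, 512, 512))[0]
    print({k: v.shape for k, v in out.items()})  # each head at 1/4 resolution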
|
py
|
1a5c608da10c54e7412b2b14131b9bd212d29c9a
|
import numpy as np
import scipy as sp
import logging
from beatmap import io as io
from beatmap import utils as util
from beatmap import vis as figs
from collections import namedtuple
def bet(iso_df, a_o, info, *args):
"""
Performs BET analysis on isotherm data for all relative pressure ranges.
This function performs BET analysis of any relative pressure range where
the starting relative pressure is less than the ending relative pressure.
Results of the analysis are written to arrays, the indexes of the arrays
correspond to the starting and ending relative pressure.
eg the specific surface area value with the indicies [3,9] is the specific
surface area for the relative pressure range that begins with the 4th data
point and ends with the 10th.
Arrays of results are stored in the bet_results named tuple.
Indexing of named tuple elements is in order of priority, data used by
other function are given priority.
Rather than pass individual parameters, this function can accept
*isotherm_data (where isotherm_data is a named tuple output by
a data import function).
Parameters
----------
iso_df: dataframe
Isotherm data, output by a data import function.
a_o : float
Cross sectional area of adsorbate, in square Angstrom, output by a
data import function.
info : string
Adsorbate-adsorbent information, output by a data import
function.
Returns
-------
bet_results : namedtuple
Contains the results of BET analysis.
Tuple elements are, in order of index:
        - ``bet_results.intercept`` (array) : 2D array of intercept values
          for the BET plot trendline. Indices correspond to first and last
          datapoint used in the analysis.
        - ``bet_results.iso_df`` (dataframe) : Experimental isotherm data.
        - ``bet_results.nm`` (array) : 2D array of monolayer adsorbed
          amounts, in mol/g, indices correspond to first and last datapoint
          used in the analysis.
        - ``bet_results.slope`` (array) : 2D array of slope values for the
          BET plot trendline. Indices correspond to first and last datapoint
          used in the analysis.
        - ``bet_results.ssa`` (array) : 2D array of specific surface area
          values, in m^2/g, indices correspond to first and last datapoint
          used in the analysis.
        - ``bet_results.c`` (array) : 2D array of BET constant values,
          indices correspond to first and last datapoint used in the
          analysis.
        - ``bet_results.err`` (array) : 2D array of average error between
          a datapoint and the theoretical BET isotherm. Indices correspond
          to first and last datapoint used in the analysis.
        - ``bet_results.r`` (array) : 2D array of r values for the BET plot
          trendline. Indices correspond to first and last datapoint used in
          the analysis.
        - ``bet_results.num_pts`` (array) : 2D array of the number of
          experimental data points per relative pressure range.
        - ``bet_results.info`` (string) : string of adsorbate-adsorbent info,
          used by other functions to name files.
"""
ssa_array = np.zeros((len(iso_df), len(iso_df)))
c_array = np.zeros((len(iso_df), len(iso_df)))
nm_array = np.zeros((len(iso_df), len(iso_df)))
err_array = np.zeros((len(iso_df), len(iso_df)))
slope = np.zeros((len(iso_df), len(iso_df)))
intercept = np.zeros((len(iso_df), len(iso_df)))
r = np.zeros((len(iso_df), len(iso_df)))
bet_c = np.zeros(len(iso_df))
number_pts = np.zeros((len(iso_df), len(iso_df)))
for i in range(len(iso_df)):
for j in range(len(iso_df)):
if i > j:
a = iso_df.iloc[j : i + 1]
X = a.relp
y = a.bet
m, b, r_value, p_value, std_err = sp.stats.linregress(X, y)
slope[i, j] = m
intercept[i, j] = b
r[i, j] = r_value
c = 0
nm = 0
bet_c = 0
if b != 0:
c = m / b + 1 # avoiding divide by zero issues
nm = 1 / (b * c)
bet_c = (1 / (nm * c)) + (c - 1) * iso_df.relp / (nm * c)
spec_sa = nm * 6.022 * 10 ** 23 * a_o * 10 ** -20
ssa_array[i, j] = spec_sa
c_array[i, j] = c
nm_array[i, j] = nm
number_pts[i, j] = i - j + 1
errors = np.nan_to_num(abs(bet_c - iso_df.bet) / bet_c)
if i - j == 1:
err_array[i, j] = 0
else:
err_array[i, j] = 100 * sum(errors[j : i + 1]) / (i + 1 - j)
# error is normalized for the interval of relative pressures
# used to compute C, so, min and max error corresponds to the
# best and worst fit over the interval used in BET analysis,
# not the entire isotherm
results = namedtuple(
"results", "intercept iso_df nm slope ssa c err r num_pts info",
)
bet_results = results(
np.nan_to_num(intercept),
iso_df,
nm_array,
slope,
ssa_array,
c_array,
err_array,
r,
number_pts,
info,
)
return bet_results
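# Indexing sketch for the result arrays (assumes `isotherm_data` came from a
# beatmap.io import function): every 2D array is indexed [end, start], so the
# range spanning experimental points 4 through 10 lives at [9, 3].
#
#   bet_results = bet(isotherm_data.iso_df, isotherm_data.a_o, isotherm_data.info)
#   ssa_4_to_10 = bet_results.ssa[9, 3]
#   c_4_to_10 = bet_results.c[9, 3]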
def single_point_bet(df, a_o):
"""
Performs single point BET analysis on an isotherm data set for all
relative pressure ranges. Can be used to check for agreement between BET
and single point BET.
Parameters
----------
bet_results : namedtuple
Contains all information required for BET analysis. Results of BET
analysis are also stored in this named tuple.
        Relevant fields for single point BET analysis are:
        - ``bet_results.raw_data`` (dataframe) : experimental isotherm data.
        - ``bet_results.a_o`` (float) : the cross sectional area of the
          adsorbate molecule, in square angstrom.
    Returns
    -------
    singlept_results : namedtuple
        Contains the results of single point BET analysis. Relevant fields are:
        - ``singlept_results.ssa`` (array) : 2D array of specific surface
          area values, in m^2/g, indices correspond to first and last
          datapoint used in the analysis.
        - ``singlept_results.nm`` (array) : 2D array of monolayer adsorbed
          amounts, in mol/g, indices correspond to first and last datapoint
          used in the analysis.
"""
ssa_array = np.zeros((len(df), len(df)))
nm_array = np.zeros((len(df), len(df)))
for i in range(len(df)):
for j in range(len(df)):
if i > j:
n_range = df.n[j:i]
relp_range = df.relp[j:i]
n = np.ma.median(n_range)
relp = np.ma.median(relp_range)
nm_array[i, j] = n * (1 - relp)
ssa_array[i, j] = n * 6.022 * 10 ** 23 * a_o * 10 ** -20
singlept_results = namedtuple("singlept_results", ("ssa", "nm"))
singlept_results.ssa = ssa_array
singlept_results.nm = nm_array
return singlept_results
def check_1(intercept):
"""
Checks that y intercept of the BET plot's linear regression is positive.
Parameters
----------
intercept : array
2D array of y-intercept values.
Returns
-------
check1 : array
Array of 1s and 0s where 0 corresponds to relative pressure ranges
where the y-intercept is negative or zero, ie ranges that fail this
check.
"""
check1 = intercept[:, :] > 0
    if not np.any(check1):
logging.warning("All relative pressure ranges fail check 1.")
return check1
def check_2(df):
"""
Checks that n(p-po) aka check2 is increasing.
This is a necessary condition for linearity of the BET dataset.
Parameters
----------
df : dataframe
Dataframe of imported experimental isothermal adsorption data.
Returns
-------
check2 : array
Array of 1s and 0s where 0 corresponds to relative pressure ranges
where n(p-po) isn't consistently increasing with relative pressure, ie
ranges that fail this check.
"""
df["check2"] = df.n * (1 - df.relp)
check2 = np.ones((len(df), len(df)))
minus1 = np.concatenate(([0], df.check2[:-1]))
test = df.check2 - minus1 >= 0
test = np.tile(test, (len(df), 1))
check2 = check2 * test
check2 = check2.T
    if not np.any(check2):
logging.warning("All relative pressure ranges fail check 2.")
return check2
def check_3(df, nm):
"""
Checks that nm, amount adsorbed in the monolayer, is in the range of
data points used in BET analysis.
Parameters
----------
df : dataframe
Dataframe of imported experimental isothermal adsorption data.
nm : array
2D array of BET specific amount of adsorbate in the monolayer, the
coordinates of the array corresponding to relative pressures, units
[moles / gram].
Returns
-------
check3 : array
        Array of 1s and 0s where 0 corresponds to relative pressure ranges where
        nm is not included in the range of experimental n values, ie ranges that
        fail this check.
"""
check3 = np.zeros((len(df), len(df)))
for i in range(np.shape(check3)[0]):
for j in range(np.shape(check3)[1]):
if df.iloc[j, 1] <= nm[i, j] <= df.iloc[i, 1]:
check3[i, j] = 1
    if not np.any(check3):
logging.warning("All relative pressure ranges fail check 3.")
return check3
def check_4(df, nm, slope, intercept):
"""
Checks that relative pressure is consistent.
    The relative pressure corresponding to nm is found from linear
    interpolation of the experimental data.
A second relative pressure is found by setting n to nm in the BET equation
and solving for relative pressure.
The two relative pressures are compared and must agree within 10% to pass
this check.
Parameters
----------
df : dataframe
Dataframe of imported experimental isothermal adsorption data.
nm : array
2D array of BET specific amount of adsorbate in the monolayer,
the coordinates of the array corresponding to relative pressures,
units [moles / gram].
slope : array
2D array of slope values resulting from linear regression applied to
relevant experimental data.
intercept : array
2D array of y-intercept values resulting from linear regression applied
to relevant experimental data.
Returns
-------
check4 : array
Array of 1s and 0s where 0 corresponds to relative pressure values that
do not agree within 10%, ie ranges that fail this check.
"""
check4 = np.zeros((len(df), len(df)))
for i in range(np.shape(check4)[0]):
for j in range(np.shape(check4)[1]):
if nm[i, j] != 0 and i > 0 and j > 0:
# find relp corresponding to nm
relpm = util.lin_interp(df, nm[i, j])
# BET eq solved for relp is a quadratic, coeff = [a, b, c]
coeff = [
-1 * slope[i, j] * nm[i, j],
slope[i, j] * nm[i, j] - 1 - intercept[i, j] * nm[i, j],
intercept[i, j] * nm[i, j],
]
# find roots
# (relp value where nm occurs on theoretical isotherm)
roots = np.roots(coeff) # note: some roots are imaginary
roots = [item.real for item in roots if len(roots) == 2]
# find the difference between
relp_m_1 = roots[0]
diff_1 = abs((relp_m_1 - relpm) / relpm)
relp_m_2 = roots[1]
diff_2 = abs((relp_m_2 - relpm) / relpm)
diff = min(diff_1, diff_2)
if diff < 0.1:
check4[i, j] = 1
    if not np.any(check4):
logging.warning("All relative pressure ranges fail check 4.")
return check4
def check_5(df, points=5):
"""
    Checks that relative pressure ranges contain a minimum number of data points.
Parameters
----------
df : dataframe
Dataframe of imported experimental isothermal adsorption data.
points : int
Minimum number of data points required for BET analysis to be
considered valid, default value is 5.
Returns
-------
check5 : array
Array of 1s and 0s where 0 corresponds to ranges of experimental data
that contain less than the minimum number of points.
"""
check5 = np.ones((len(df), len(df)))
for i in range(len(df)):
for j in range(len(df)):
if i - j < points - 1:
check5[i, j] = 0
    if not np.any(check5):
logging.warning("All relative pressure ranges fail check 5.")
return check5
def rouq_mask(
intercept,
iso_df,
nm,
slope,
*args,
check1=True,
check2=True,
check3=True,
check4=True,
check5=True,
points=5
):
"""
    Calls all check functions and combines their masks into one "Rouquerol mask".
Rather than pass individual parameters, this function can accept
*bet_results (where bet_results is a named tuple output by the bet
function).
Parameters
----------
intercept : array
2D array of intercept values, used in check1.
iso_df : dataframe
Dataframe of isotherm data, used in check2.
nm : array
2D array of amount in the monolayer values, used in check3 and check4.
slope : array
2D array of slope values, used in check4
    check1 : boolean
        True means check1 will be evaluated, False means the check will not be
        evaluated.
    check2 : boolean
        True means check2 will be evaluated, False means the check will not be
        evaluated.
    check3 : boolean
        True means check3 will be evaluated, False means the check will not be
        evaluated.
    check4 : boolean
        True means check4 will be evaluated, False means the check will not be
        evaluated.
    check5 : boolean
        True means check5 will be evaluated, False means the check will not be
        evaluated.
points : int
The minimum number of experimental data points for a relative pressure
interval to be considered valid.
Returns
-------
rouq_mask : namedtuple
Contains arrays for the result of each check and a masked array that is
the result of all selected checks.
Fields of the named tuple are:
        - ``rouq_mask.mask`` (MaskedArray) : object where invalid BET results
          are masked.
        - ``rouq_mask.check1`` (array) : array of 1s and 0s where 0 corresponds
          to failing check1.
        - ``rouq_mask.check2`` (array) : array of 1s and 0s where 0 corresponds
          to failing check2.
        - ``rouq_mask.check3`` (array) : array of 1s and 0s where 0 corresponds
          to failing check3.
        - ``rouq_mask.check4`` (array) : array of 1s and 0s where 0 corresponds
          to failing check4.
        - ``rouq_mask.check5`` (array) : array of 1s and 0s where 0 corresponds
          to failing check5.
"""
mask = np.ones((len(iso_df), len(iso_df)))
for i in range(len(iso_df)):
for j in range(len(iso_df)):
if j >= i:
mask[i, j] = 0
if check1 is True:
check1 = check_1(intercept)
else:
check1 = np.ones((len(iso_df), len(iso_df)))
if check2 is True:
check2 = check_2(iso_df)
else:
check2 = np.ones((len(iso_df), len(iso_df)))
if check3 is True:
check3 = check_3(iso_df, nm)
else:
check3 = np.ones((len(iso_df), len(iso_df)))
if check4 is True:
check4 = check_4(iso_df, nm, slope, intercept)
else:
check4 = np.ones((len(iso_df), len(iso_df)))
if check5 is True:
check5 = check_5(iso_df, points)
else:
check5 = np.ones((len(iso_df), len(iso_df)))
mask = np.multiply(check1, mask)
mask = np.multiply(check2, mask)
mask = np.multiply(check3, mask)
mask = np.multiply(check4, mask)
mask = np.multiply(check5, mask)
mask.astype(bool) # converting mask to boolean
# inverting mask so that 0 = valid, 1 = invalid, to work well with numpy masks
invertedmask = np.logical_not(mask)
rouq_mask = namedtuple("rouq_mask", "mask check1 check2 check3 check4 check5")
mask_results = rouq_mask(invertedmask, check1, check2, check3, check4, check5)
return mask_results
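# Sketch of applying all five Rouquerol checks to an existing bet() result; the
# *bet_results expansion works because intercept, iso_df, nm and slope are the
# first four fields of the named tuple and the remaining fields are absorbed by
# *args.
#
#   mask_results = rouq_mask(*bet_results, points=5)
#   valid_ssa = np.ma.array(bet_results.ssa, mask=mask_results.mask)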
def ssa_answer(bet_results, mask_results, criterion="error"):
"""
Logs a single specific surface area answer from the valid relative
pressure range with the lowest error, most number of points, maximum
specific surface area, or minimum specific surface area.
Parameters
----------
bet_results : named tuple
``bet_results.ssa`` contains the array of specific surface values.
    mask_results : named tuple
        ``mask_results.mask`` contains the mask used to remove invalid specific
        surface area values from consideration.
    criterion : string
        Used to specify the criterion for a final specific surface area answer,
        either 'error', 'points', 'max', or 'min'. Defaults to 'error'.
Returns
-------
ssa_ans : float
Specific surface answer corresponding to user defined criteria.
"""
mask = mask_results.mask
if mask.all():
raise ValueError(
"No valid relative pressure ranges. Specific surface"
" area not calculated."
)
ssa = np.ma.array(bet_results.ssa, mask=mask)
if criterion == "points":
pts = np.ma.array(bet_results.num_pts, mask=mask)
max_pts = np.max(pts)
ssa_ans_array = np.ma.masked_where(pts < max_pts, ssa)
try:
ssa_ans = float(ssa_ans_array.compressed())
except ValueError:
            raise Exception(
                "Error, no single specific surface area answer. Multiple "
                "relative pressure ranges have the maximum number of points."
            )
logging.info(
"The specific surface area value, based on %s is %.2f m2/g."
% (criterion, ssa_ans)
)
return ssa_ans
if criterion == "error":
err = np.ma.array(bet_results.err, mask=mask)
errormax, error_max_idx, errormin, error_min_idx = util.max_min(err)
ssa_ans = ssa[int(error_min_idx[0]), int(error_min_idx[1])]
logging.info(
"The specific surface area value, based on %s is %.2f m2/g."
% (criterion, ssa_ans)
)
return ssa_ans
if criterion == "max":
ssa_ans = np.max(ssa)
logging.info(
"The specific surface area value, based on %s is %.2f m2/g."
% (criterion, ssa_ans)
)
return ssa_ans
if criterion == "min":
ssa_ans = np.min(ssa)
logging.info(
"The specific surface area value, based on %s is %.2f m2/g."
% (criterion, ssa_ans)
)
return ssa_ans
else:
raise ValueError("Invalid criterion, must be points, error, min, or max.")
def run_beatmap(
file=None,
info=None,
a_o=None,
check1=True,
check2=True,
check3=True,
check4=True,
check5=True,
points=5,
save_figures=True,
export_data=False,
ssa_criterion="error",
ssa_gradient="Greens",
err_gradient="Greys",
):
"""
    A single function that executes all necessary BEaTmap algorithms.
This function is built to be as user friendly as possible. The file
name/path of the isotherm data, information about the isotherm, and the
cross sectional surface area of the adsorbate can be passed using the
    file, info, and a_o parameters respectively. Or, if left empty, the user
    will be prompted to input them.
    e.g. ``run_beatmap('myfile.csv', 'nitrogen on carbon', 16.2)`` or
    ``run_beatmap()`` will execute the function. In the latter case the user
    will provide the parameters passed in the former through prompts in their
    console.
    Additional parameters to set which of the Rouquerol criteria are applied,
the minimum number of data points per valid relative pressure range,
the criteria used to select a single specific surface area, and more, are
defined and set to reasonable default values.
Parameters
----------
file : string
File name (if file is in parent directory) or file path.
info : string
Adsorbate-adsorbent information.
a_o : float
Cross sectional area of adsorbate, in square Angstrom.
check1 : boolean
If check1 is True any relative pressure ranges with a negative y
intercept are considered invalid.
check2 : boolean
If check2 is True any relative pressure ranges where n(p-po) is
decreasing are considered invalid.
check3 : boolean
If check3 is True any relative pressure ranges where the monolayer
amount falls outside of the relative pressure range are considered
invalid.
check4 : boolean
If check4 is True any relative pressure range where there is
disagreement of more than 10% between the actual relative pressure
where monolayer coverage occurs and the relative pressure where
monolayer coverage occurs on the theoretical isotherm are considered
invalid.
check5 : boolean
If check5 is True relative pressure ranges that contain fewer points
than specified by the user are considered invalid.
    points : integer
The minimum number of points for a valid relative pressure range.
save_figures : boolean
If save_figures is True any figures created by this function will be
saved as .png files in the parent directory.
export_data : boolean
If export data is True .csv files of the isotherm data and the BEaTmap
results will be created and saved in the parent directory.
ssa_criterion : string
Used to set which criterion is used to provide a single specific
surface area value. 'error' will output the valid ssa answer with the
lowest error, 'points' will output the ssa answer with the most
datapoints.
ssa_gradient : string
        Color gradient for heatmap, must be a valid color gradient name
in the seaborn package.
err_gradient : string
        Color gradient for heatmap, must be a valid color gradient name
in the seaborn package, default is grey.
Returns
-------
"""
# run_beatmap_import_data imports isotherm data from a .csv file and returns
# the results in the isotherm_data namedtuple
isotherm_data = io.import_data(file, info, a_o)
figs.experimental_data_plot(isotherm_data, save_file=save_figures)
# bet_results uses isotherm_data, applies BET analysis and returns the results
# in the bet_results namedtuple
bet_results = bet(isotherm_data.iso_df, isotherm_data.a_o, isotherm_data.info)
    # mask_results uses isotherm_data and bet_results, applies the Rouquerol
# criteria specified by the user, and returns the results in the
# mask_results named tuple
mask_results = rouq_mask(
bet_results.intercept,
bet_results.iso_df,
bet_results.nm,
bet_results.slope,
check1=check1,
check2=check2,
check3=check3,
check4=check4,
check5=check5,
points=points,
)
# mask_results are used to highlight the valid bet_results in the
# following functions
# ssa_ans = ssa_answer(bet_results, mask_results, ssa_criterion)
figs.ssa_heatmap(bet_results, mask_results, save_figures)
figs.err_heatmap(bet_results, mask_results, save_figures)
figs.bet_combo_plot(bet_results, mask_results, save_figures)
figs.iso_combo_plot(bet_results, mask_results, save_figures)
figs.ascii_tables(bet_results, mask_results)
if export_data is True:
io.export_raw_data(isotherm_data)
io.export_processed_data(bet_results, points)
combo_results = namedtuple(
"results",
"ssa c nm err intercept slope r mask"
" check1 check2 check3 check4 check5 num_pts",
)
results = combo_results(
bet_results.ssa,
bet_results.c,
bet_results.nm,
bet_results.err,
bet_results.intercept,
bet_results.slope,
bet_results.r,
mask_results.mask,
mask_results.check1,
mask_results.check2,
mask_results.check3,
mask_results.check4,
mask_results.check5,
bet_results.num_pts,
)
return results
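# Minimal end-to-end sketch; the file name, label and cross sectional area below
# are hypothetical example values, not ones shipped with this module.
#
#   results = run_beatmap(file='isotherm.csv', info='N2 on silica', a_o=16.2,
#                         save_figures=False, export_data=False)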
|
py
|
1a5c615a1eecd62a362059212b4505d726c476d3
|
# -*- coding: utf-8 -*-
"""Fram framework bootstrap module."""
import argparse
import sys
import six
__author__ = "Shawn Lee"
__email__ = "[email protected]"
def fram_plugins():
"""Go through all the loaded modules and look for fram plugins.
A plugin is defined by a module that defines a FRAM_PLUGIN variable that is
a dict."""
plugins = []
for mod_name in list(sys.modules):
instance = sys.modules[mod_name]
if hasattr(instance, "FRAM_PLUGIN"):
plugins.append(instance.FRAM_PLUGIN)
return plugins
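# Hypothetical plugin sketch, inferred from the keys this module reads below
# ("argparse", "main_decorator", and per-argument "help"/"action"/"default"/
# "required"/"additional_args"/"callback"); it is not a documented contract.
#
#   FRAM_PLUGIN = {
#       "argparse": {
#           "--verbose": {"help": "enable verbose output",
#                         "action": "store_true",
#                         "callback": bool},
#       },
#       "main_decorator": my_decorator,   # wraps the user's main(framework)
#   }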
def parser_from_plugins(plugins, description):
"""Go through all the loaded plugins and build out a cli parser."""
    parser = argparse.ArgumentParser(description=description, conflict_handler="resolve")
for plugin in plugins:
if "argparse" in plugin:
for argument, options in six.iteritems(plugin["argparse"]):
kwargs = {}
for option in ["help", "action", "default", "required"]:
val = options.get(option)
if val:
kwargs[option] = options.get(option)
args = [argument] + options.get(
"additional_args", [])
parser.add_argument(*args, **kwargs)
return parser
def decorated_main_from_plugins(plugins, func):
"""Go through all the loaded plugins and build the main decorators."""
for plugin in plugins:
if "main_decorator" in plugin:
func = plugin["main_decorator"](func)
return func
def parser_callbacks(plugins, parser):
"""Go through all the loaded plugins and run the parser callbacks."""
framework = {}
if parser:
try:
framework["argparse"] = parser.parse_args()
except AttributeError:
print (
"ERROR: Did you return parser.parse_args() in your argument\n"
" parser? Just return the parser. Fram framework will\n"
" call parse_args at a later time.")
sys.exit(1)
# Since we have parsed_args, go through all callbacks.
for plugin in plugins:
if "argparse" in plugin:
for argument, options in six.iteritems(plugin["argparse"]):
if "callback" in options:
framework[argument.strip("-")] = (
options["callback"](getattr(
framework["argparse"],
argument.strip("-"))))
return framework
def run(func, description=None, argument_parser=None):
"""Bootstrap up the library."""
plugins = fram_plugins()
parser = parser_from_plugins(plugins, description)
if argument_parser:
try:
parser = argument_parser(parser)
except TypeError:
print (
"ERROR: Did you return parser.parse_args() in your argument\n"
" parser? Just return the parser. Fram framework will\n"
" call parse_args at a later time.")
sys.exit(1)
func = decorated_main_from_plugins(plugins, func)
framework = parser_callbacks(plugins, parser)
return func(framework)
|
py
|
1a5c61af76d5cad67528b61ac53bd08e2a84f7ad
|
from django import forms
from django.db import models
from tank.settings import MEDIA_URL
# Create your models here.
def get_upload_path(instance, filename):
return '%s/%s-%s/%s' % (instance.artifact, instance.revision,
instance.timestamp, instance.name)
class Package(models.Model):
artifact = models.CharField(max_length=128)
name = models.CharField(max_length=256)
revision = models.CharField(max_length=64)
timestamp = models.CharField(max_length=128)
checksum = models.CharField(max_length=128)
file = models.FileField(upload_to=get_upload_path)
def __unicode__(self):
return u'%s %s %s %s %s' % (self.artifact, self.revision,
self.timestamp, self.name, self.checksum)
def __str__(self):
field_json_str = "{" \
"'artifact': '%s'," \
"'package_name': '%s'," \
"'revision': '%s'," \
"'timestamp': '%s'," \
"'checksum': '%s'" \
"}" % (
self.artifact, self.name,
self.revision, self.timestamp,
self.checksum)
return field_json_str
def download_link(self):
return '%s/%s/%s-%s/%s' % (MEDIA_URL.rstrip('/'), self.artifact,
self.revision, self.timestamp, self.name)
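# Resulting layout sketch (field values are hypothetical): a Package with
# artifact='webapp', revision='1.2', timestamp='20150101', name='webapp.tar.gz'
# is stored under 'webapp/1.2-20150101/webapp.tar.gz' (get_upload_path) and
# served from '<MEDIA_URL>/webapp/1.2-20150101/webapp.tar.gz' (download_link).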
|
py
|
1a5c637c2ebb6b144efe0a6c4578d2a2869531a3
|
from mitie import tokenize, named_entity_extractor, text_categorizer
from rasa_nlu import Interpreter
class MITIESklearnInterpreter(Interpreter):
def __init__(self,metadata):
self.extractor = named_entity_extractor(metadata["entity_extractor"])#,metadata["feature_extractor"])
self.classifier = text_categorizer(metadata["intent_classifier"])#,metadata["feature_extractor"])
def get_entities(self,tokens):
d = {}
entities = self.extractor.extract_entities(tokens)
for e in entities:
_range = e[0]
d[e[1]] = " ".join(tokens[i] for i in _range)
return d
def get_intent(self,tokens):
label, _ = self.classifier(tokens) # don't use the score
return label
def parse(self,text):
tokens = tokenize(text)
intent = self.get_intent(tokens)
entities = self.get_entities(tokens)
return {'intent':intent,'entities': entities}
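# Usage sketch (model paths are hypothetical; the metadata keys mirror what
# __init__ reads above):
#
#   metadata = {"entity_extractor": "ner_model.dat",
#               "intent_classifier": "intent_model.dat"}
#   interpreter = MITIESklearnInterpreter(metadata)
#   interpreter.parse(u"book a table for two")
#   # -> {'intent': ..., 'entities': {...}}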
|
py
|
1a5c64a7d162343d60d4d2a62b7bcfa3c2f4a32b
|
# Tango import
import tango
from tango import DevFailed, DevState
# Additional import
from ska.base.commands import BaseCommand
from tmc.common.tango_client import TangoClient
from tmc.common.tango_server_helper import TangoServerHelper
from . import const
from .attribute_callbacks import (
CbfHealthStateAttributeUpdator,
PssHealthStateAttributeUpdator,
PstHealthStateAttributeUpdator,
)
class TelescopeOn(BaseCommand):
"""
A class for CspMasterLeafNode's TelescopeOn() command. On command is inherited from BaseCommand.
    It sets the State to On.
"""
def check_allowed(self):
"""
Checks whether this command is allowed to be run in current device state
:return: True if this command is allowed to be run in current device state
:rtype: boolean
:raises: DevFailed if this command is not allowed to be run in current device state
"""
if self.state_model.op_state in [DevState.FAULT, DevState.UNKNOWN]:
tango.Except.throw_exception(
f"Command TelescopeOn is not allowed in current state {self.state_model.op_state}.",
"Failed to invoke On command on CspMasterLeafNode.",
"CspMasterLeafNode.TelescopeOn()",
tango.ErrSeverity.ERR,
)
return True
def telescope_on_cmd_ended_cb(self, event):
"""
Callback function immediately executed when the asynchronous invoked
command returns. Checks whether the Telescope On command has been successfully invoked on CSPMaster.
:param event: a CmdDoneEvent object. This class is used to pass data
to the callback method in asynchronous callback model for command
execution.
:type: CmdDoneEvent object
It has the following members:
- device : (DeviceProxy) The DeviceProxy object on which the call was executed.
- cmd_name : (str) The command name
- argout_raw : (DeviceData) The command argout
- argout : The command argout
- err : (bool) A boolean flag set to true if the command failed. False otherwise
- errors : (sequence<DevError>) The error stack
- ext
:return: none
"""
this_device = TangoServerHelper.get_instance()
if event.err:
log_msg = f"{const.ERR_INVOKING_CMD}{event.cmd_name}\n{event.errors}"
self.logger.error(log_msg)
this_device.write_attr("activityMessage", log_msg, False)
else:
log_msg = f"{const.STR_COMMAND}{event.cmd_name}{const.STR_INVOKE_SUCCESS}"
self.logger.info(log_msg)
this_device.write_attr("activityMessage", log_msg, False)
def do(self):
"""
Method to invoke On command on CSP Element.
param argin:
None
raises:
DevFailed on communication failure with CspMaster or CspMaster is in error state.
"""
device_data = self.target
this_device = TangoServerHelper.get_instance()
try:
csp_mln_client_obj = TangoClient(this_device.read_property("CspMasterFQDN")[0])
csp_mln_client_obj.send_command_async(
const.CMD_ON, [], self.telescope_on_cmd_ended_cb
)
self.logger.debug(const.STR_ON_CMD_ISSUED)
this_device.write_attr("activityMessage", const.STR_ON_CMD_ISSUED, False)
device_data.cbf_health_updator = CbfHealthStateAttributeUpdator()
device_data.cbf_health_updator.start()
device_data.pss_health_updator = PssHealthStateAttributeUpdator()
device_data.pss_health_updator.start()
device_data.pst_health_updator = PstHealthStateAttributeUpdator()
device_data.pst_health_updator.start()
except DevFailed as dev_failed:
log_msg = f"{const.ERR_EXE_ON_CMD}{dev_failed}"
self.logger.exception(dev_failed)
this_device.write_attr("activityMessage", const.ERR_EXE_ON_CMD, False)
tango.Except.re_throw_exception(
dev_failed,
const.STR_ON_EXEC,
log_msg,
"CspMasterLeafNode.TelescopeOnCommand",
tango.ErrSeverity.ERR,
)
|
py
|
1a5c64ebd01959c8872d091dbee76ed80bf6025c
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle_job import PaddleJob
import registry
__all__ = ["PaddleJob", "registry"]
|
py
|
1a5c65313ee1593e51bfd0b2c9a340b3c50c040b
|
#!/usr/bin/env python
""" Tests of Larch Scripts """
import unittest
import time
import ast
import numpy as np
from sys import version_info
from utils import TestCase
from larch import Interpreter
class TestScripts(TestCase):
    '''testing of larch scripts'''
def test01_basic(self):
self.runscript('a.lar', dirname='larch_scripts')
assert(len(self.session.get_errors()) == 0)
self.isTrue("n < 10")
self.isTrue("n > 5")
self.isTrue("x > 3")
def test02_autobk(self):
self.runscript('doc_autobk1.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("cu.e0 > 8950.0")
self.isTrue("len(cu.k) > 200")
self.isTrue("max(abs(cu.chi)) < 2.0")
def test03_autobk2(self):
self.runscript('doc_autobk2.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("dat.e0 > 10000.0")
self.isTrue("len(dat.k) > 200")
def test04_autobk_clamp(self):
self.runscript('doc_autobk3.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("dat.e0 > 11000.0")
self.isTrue("len(dat.k) > 200")
def test05_autobk_with_std(self):
self.runscript('doc_autobk4.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("cu2.e0 > 8950.0")
self.isTrue("len(cu2.k) > 200")
self.isTrue("max(abs(cu2.chi)) < 2.0")
def test06_ftwin1(self):
self.runscript('doc_ftwin1.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("len(hann_win1) == 401")
self.isTrue("hann_win3.sum() > 50.0")
self.runscript('doc_ftwin2.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("len(kai_win1) == 401")
self.isTrue("kai_win1.sum() > 20.0")
def test07_xafsft1(self):
self.runscript('doc_xafsft1.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("len(d2.k) > 200")
self.isTrue("len(d2.kwin) > 200")
self.isTrue("d1.chir_mag.sum() > 30")
self.isTrue("where(d1.chir_mag>1)[0][0] > 60")
def test08_xafsft2(self):
self.runscript('doc_xafsft2.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("len(d3.k) > 200")
self.isTrue("len(d3.kwin) > 200")
self.isTrue("len(d4.k) > 200")
self.isTrue("len(d4.kwin) > 200")
self.isTrue("len(d1.r) > 100")
self.isTrue("len(d1.chir_mag) > 100")
self.isTrue("len(d3.r) > 100")
self.isTrue("len(d3.chir_mag) > 100")
self.isTrue("len(d4.r) > 100")
self.isTrue("len(d4.chir_mag) > 100")
self.isTrue("len(d4.chir_re) > 100")
self.isTrue("len(d4.chir_im) > 100")
def test09_xafsft3(self):
self.runscript('doc_xafsft3.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("len(dat.k) > 200")
self.isTrue("len(dat.kwin) > 200")
def test10_xafsft3(self):
self.runscript('doc_xafsft4.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("len(dat.r) > 200")
self.isTrue("len(dat.rwin) > 200")
self.isTrue("len(dat.q) > 200")
self.isTrue("len(dat.chiq_re) > 200")
def test11_wavelet1(self):
self.runscript('wavelet_example.lar', dirname='../examples/xafs/')
assert(len(self.session.get_errors()) == 0)
self.isTrue("f.wcauchy_im.shape == (326, 318)")
self.isTrue("f.wcauchy_mag.sum() > 300")
def test12_feffit_kws(self):
self.runscript('test_epsk_kws.lar', dirname='../examples/feffit/')
assert(len(self.session.get_errors()) == 0)
out = self.session.run('out')
for row in out:
amp = row[5]
amp_err = row[5]
delr= row[7]
self.assertTrue(amp > 0.5)
self.assertTrue(amp < 2.0)
self.assertTrue(amp_err > 0.0)
self.assertTrue(amp_err < 2.0)
self.assertTrue(abs(delr) < 0.1)
if __name__ == '__main__': # pragma: no cover
for suite in (TestScripts,):
suite = unittest.TestLoader().loadTestsFromTestCase(suite)
unittest.TextTestRunner(verbosity=13).run(suite)
|
py
|
1a5c65df1238ff8b4638fe10ad4728df73ebe785
|
# -*- coding: utf-8 -*-
"""
Dora Team
Teknofest 2021 - Turkish Natural Language Processing Competition
"""
import numpy as np
import pandas as pd
import re
import string
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import accuracy_score
from sklearn.pipeline import Pipeline
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.metrics import classification_report
from sklearn.feature_selection import chi2
from wordcloud import WordCloud
import matplotlib.pyplot as plt
from tkinter import *
from PIL import ImageTk,Image
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import (FigureCanvasTkAgg, NavigationToolbar2Tk)
import numpy as np
# interface window
pencere = Tk()
pencere.geometry("900x600+100+50")
pencere.title("Dora-Şiir Türü Belirleme Programı")
pencere.configure(background="#B7CBF0")
# add the logo
canvas = Canvas(pencere, width = 100, height = 81, bg="#B7CBF0")
canvas.pack()
canvas.place(x=170,y=490)
img = ImageTk.PhotoImage(Image.open("logodoram2.png"))
canvas.create_image(0, 0, anchor=NW, image=img)
# interface widgets
label=Label(pencere,fg="blue", bg="#B7CBF0",font="bold")
label.config(text="Şiiri yazınız:",font=("Arial",14))
label.place(x=20,y=20)
siirtext=Text(pencere,width=50,height=20)
siirtext.place(x=20,y=50)
siirtext.insert(END,"Korkma, sönmez bu şafaklarda yüzen al sancak;\nSönmeden yurdumun üstünde tüten en son ocak.\nO benim milletimin yıldızıdır, parlayacak;\nO benimdir, o benim milletimindir ancak.\n\nÇatma, kurban olayım çehreni ey nazlı hilâl!\nKahraman ırkıma bir gül… ne bu şiddet bu celâl?\nSana olmaz dökülen kanlarımız sonra helâl,\nHakkıdır, Hakk’a tapan, milletimin istiklâl.")
sonuclabel=Label(pencere,fg="blue",bg="#B7CBF0",font="bold")
sonuclabel.config(text='Şiir Türü=',font=("Arial",14))
sonuclabel.place(x=450,y=50)
sonucsgd=Label(pencere,fg="blue",bg="#B7CBF0",font="bold")
sonucsgd.config(text='SGD Sonuc:',font=("Arial",14))
sonucsvc=Label(pencere,fg="blue",bg="#B7CBF0",font="bold")
sonucsvc.config(text='Linear SVC sonuc:',font=("Arial",14))
sonuc2=Label(pencere,fg="blue",bg="#B7CBF0",font="bold")
sonuc2.config(text='Linear SVC Doğruluk Oranı:',font=("Arial",14))
sonuc3=Label(pencere,fg="blue",bg="#B7CBF0",font="bold")
sonuc3.config(text='SGD Doğruluk Oranı:',font=("Arial",14))
# read the dataset
pd.set_option('display.max_colwidth', None)
df=pd.read_csv('siirdataset.csv', encoding = 'utf-8')
df['siir']=df['siir'].str.lower()
class_ = df['tur'].value_counts().keys()
sum_ = df['tur'].value_counts()
daf = pd.DataFrame(zip(class_,sum_), columns = ['tur', 'Toplam'])
def siir_kontrol():
pd.set_option('display.max_colwidth', None)
df=pd.read_csv('siirdataset.csv', encoding = 'utf-8')
df['siir']=df['siir'].str.lower()
class_ = df['tur'].value_counts().keys()
sum_ = df['tur'].value_counts()
daf = pd.DataFrame(zip(class_,sum_), columns = ['tur', 'Toplam'])
    # stop words - remove uninformative words
stop_words=pd.read_csv('stopwords.txt', sep=" ", header=None)
stop_words.columns=['words_list']
pat2 = r'\b(?:{})\b'.format('|'.join(list(stop_words['words_list'].str.lower())))
df['yenisiir'] = df['siir'].str.lower().str.replace(pat2, '')
    # strip punctuation marks
df=df.dropna()
PUNCT_TO_REMOVE = string.punctuation
def remove_punctuation(text):
return text.translate(str.maketrans('', '', PUNCT_TO_REMOVE))
df['siir_son'] = df['yenisiir'].apply(lambda text: remove_punctuation(text))
    # create the train/test split
X_train, X_test, y_train, y_test = train_test_split(df['siir_son'], df['tur'], random_state = 0)
trial = Pipeline([('vect', CountVectorizer()),('tfidf', TfidfTransformer()), ('clf', SGDClassifier())])
parameters = {'vect__max_df': (0.5, 0.75, 1.0),'vect__max_features': (None, 5000, 10000, 50000),'vect__ngram_range':((1, 1),(1, 2)),
'clf__max_iter': (20,),
'clf__alpha': (0.00001, 0.000001),
'clf__penalty': ('l2', 'elasticnet'),
# ‘clf__max_iter’: (10, 50, 80),
}
    # show the best score
grid_search = GridSearchCV(trial, parameters, n_jobs=-1, verbose=1)
grid_search.fit(X_train, y_train)
print("En iyi Skor: %0.3f" % grid_search.best_score_)
print("Best parameters")
best_parameters = grid_search.best_estimator_.get_params()
print(best_parameters)
#SGD
trial = Pipeline([('vect', CountVectorizer(max_df=0.75, ngram_range=(1, 2))),
('tfidf', TfidfTransformer()),
('clf', SGDClassifier(loss='modified_huber',alpha=1e-05, max_iter=20, penalty='elasticnet')),])
trial.fit(X_train, y_train)
print("SGDC Doğruluk Oranı: " + str(trial.score(X_test, y_test)))
sonuc3["text"]="SGDC Doğruluk Oranı: "+str(trial.score(X_test, y_test))
#linear SVC
trial2 = Pipeline([('vectorizer',CountVectorizer()),
('tfidf', TfidfTransformer()),
('classifier', LinearSVC())])
trial2.fit(X_train, y_train)
print("Linear SVC Doğruluk Oranı= " + str(trial2.score(X_test, y_test)))
sonuc2["text"]="Linear SVC Doğruluk Oranı= "+str(trial2.score(X_test, y_test))
    # classification report
y_pred=trial.predict(X_test)
print(classification_report(y_test, y_pred))
#test1-SGD
cv = CountVectorizer(ngram_range=(1,2))
siirsoz=siirtext.get(1.0,END)
data = [siirsoz]
n=3
tahmin1 = trial.predict(data)
if tahmin1=='epik':
sonuct1='Epik'
elif tahmin1=='lirik':
sonuct1='Lirik'
elif tahmin1=='didaktik':
sonuct1='Didaktik'
elif tahmin1=='pastoral':
sonuct1='Pastoral'
elif tahmin1=='satirik':
sonuct1='Satirik'
else:
sonuct1='Dramatik'
print(sonuct1)
sonucsvc["text"]="Linear SVC Sonucu="+str(sonuct1)
#test2-linear svc
tahmin2 = trial2.predict(data)
if tahmin2=='epik':
sonuct2='Epik'
elif tahmin2=='lirik':
sonuct2='Lirik'
elif tahmin2=='didaktik':
sonuct2='Didaktik'
elif tahmin2=='pastoral':
sonuct2='Pastoral'
elif tahmin2=='satirik':
sonuct2='Satirik'
else:
sonuct2='Dramatik'
print(sonuct2)
sonucsgd["text"]="SGDC Sonucu="+str(sonuct2)
if str(tahmin1)==str(tahmin2):
sonuclabel["text"]="Şiir Türü="+str(sonuct2)
else:
sonuclabel["text"]=str(sonuct1)+" veya "+str(sonuct2)
# clear button
def temizle():
siirtext.delete('1.0', END)
sonucsgd.place_forget()
sonucsvc.place_forget()
sonuc2.place_forget()
sonuc3.place_forget()
sonuclabel["text"]="Şiir Türü="
# show the Linear SVC and SGD accuracy scores
def oran_goster():
sonucsgd.place(x=450,y=150)
sonucsvc.place(x=450,y=190)
sonuc2.place(x=450,y=230)
sonuc3.place(x=450,y=270)
# draw a bar chart of poem counts by genre
def grafik_cizdir():
pencere2 = Tk()
pencere2.geometry("600x300+600+300")
pencere2.title("Türlerine Göre Şiir Sayıları Grafiği")
pd.set_option('display.max_colwidth', None)
df=pd.read_csv('siirdataset.csv', encoding = 'utf-8')
df['siir']=df['siir'].str.lower()
class_ = df['tur'].value_counts().keys()
sum_ = df['tur'].value_counts()
daf = pd.DataFrame(zip(class_,sum_), columns = ['tur', 'Toplam'])
fig,ax=plt.subplots(figsize=(15,5))
ax.bar(daf.tur,daf.Toplam,width=.8)
plt.xlabel('şiir türleri')
plt.ylabel('Şiir Sayıları')
plt.show()
"""
    # pie chart of poem counts by genre
fig, ax = plt.subplots(figsize=(15, 10))
ax.pie(daf.Toplam, labels =daf.tur, autopct = '%1.1f%%', startangle = 90 )
ax.axis('equal')
plt.show()
"""
canvas = FigureCanvasTkAgg(fig,master = pencere2)
canvas.draw()
canvas.get_tk_widget().pack()
toolbar = NavigationToolbar2Tk(canvas,pencere2)
toolbar.update()
canvas.get_tk_widget().pack()
def kelimebulutu_ciz():
    # build a word cloud of the most frequently used words - all genres
pd.set_option('display.max_colwidth', None)
df=pd.read_csv('siirdataset.csv', encoding = 'utf-8')
df['siir']=df['siir'].str.lower()
class_ = df['tur'].value_counts().keys()
sum_ = df['tur'].value_counts()
daf = pd.DataFrame(zip(class_,sum_), columns = ['tur', 'Toplam'])
    # stop words - remove uninformative words
stop_words=pd.read_csv('stopwords.txt', sep=" ", header=None)
stop_words.columns=['words_list']
pat2 = r'\b(?:{})\b'.format('|'.join(list(stop_words['words_list'].str.lower())))
df['yenisiir'] = df['siir'].str.lower().str.replace(pat2, '')
    # strip punctuation marks
df=df.dropna()
PUNCT_TO_REMOVE = string.punctuation
def remove_punctuation(text):
return text.translate(str.maketrans('', '', PUNCT_TO_REMOVE))
df['siir_son'] = df['yenisiir'].apply(lambda text: remove_punctuation(text))
pencere3 = Tk()
pencere3.geometry("600x300+600+300")
pencere3.title("Word Cloud")
wordcloud = WordCloud(width=1000, height=500).generate("+".join(df['siir_son']))
fig,ax=plt.subplots(figsize=(15,5))
plt.imshow(wordcloud, interpolation='bilinear')
plt.axis("off")
plt.show()
canvas = FigureCanvasTkAgg(fig,master = pencere3)
canvas.draw()
canvas.get_tk_widget().pack()
toolbar = NavigationToolbar2Tk(canvas,pencere3)
toolbar.update()
canvas.get_tk_widget().pack()
# buttons
buton=Button(pencere)
buton.config(text="Şiir Türü Bul",bg="blue",fg="white",font="bold",command=siir_kontrol,width=20)
buton.place(x=20,y=400)
butonoran=Button(pencere)
butonoran.config(text="Doğruluk Oranları",bg="blue",fg="white",font="bold",command=oran_goster,width="20")
butonoran.place(x=450,y=100)
butontemizle=Button(pencere)
butontemizle.config(text="Temizle",bg="blue",fg="white",font="bold",command=temizle,width="20")
butontemizle.place(x=235,y=400)
butoncikis=Button(pencere)
butoncikis.config(text="Çıkış",bg="blue",fg="white",command=pencere.destroy,width="17")
butoncikis.place(x=295,y=450)
butongrafik=Button(pencere)
butongrafik.config(text="Şiir Sayıları Grafiği",bg="blue",fg="white",width="17",command=grafik_cizdir)
butongrafik.place(x=20,y=450)
butonbulut=Button(pencere)
butonbulut.config(text="Kelime Bulutu Çiz",bg="blue",fg="white",width="17",command=kelimebulutu_ciz)
butonbulut.place(x=157,y=450)
mainloop()
|
py
|
1a5c665810187f93569019dea45d87b9bddfc05d
|
from trust_monitor.verifier.structs import *
from trust_monitor.verifier.statistics import *
from suds.client import Client
from trust_monitor.verifier.parser import IRParser, IMAMeasureHandler
from trust_monitor.verifier.parser import ContainerCheckAnalysis
import logging
import gc
import xmltodict
import ssl
# use logging system of django.
logger = logging.getLogger('driver')
class ParsingOAT():
def __init__(self):
logger.info('Parsing OAT Set structures')
Digest.digests_dict = {}
Digest.digests_query_done = False
Digest.packages_query_done = False
Digest.packages_query = set()
Package.pkg_dict = {}
IMARecord.records = []
Subject.subj_label_dict = {}
Object.obj_label_dict = {}
ssl._create_default_https_context = ssl._create_unverified_context
def parsing(self, analysis, checked_containers,
report_url, report_id, infoDigest):
doCheckContAnalysis = False
containers = {}
if 'cont-check' in analysis:
doCheckContAnalysis = True
logger.info('Understand what kind of analysis to do')
for item in analysis.split(','):
if item.startswith('cont-list'):
logger.info('Analysis include containters')
checked_containers = item.split('=')[1]
break
try:
if report_url is not None and report_id != 0:
client = Client(report_url)
logger.info('report url ' + str(report_url))
logger.info('report id ' + str(report_id))
report_str = client.service.fetchReport(report_id)
logger.info('Start to parser IR %s', str(report_id))
IRParser(report_str, ContainerCheckAnalysis(doCheckContAnalysis,
containers,
checked_containers,
infoDigest))
logger.info('Parsing of IR done.')
try:
data_xml = xmltodict.parse(report_str)
host_name = (data_xml['ns3:Report']['ns3:QuoteData']
['ns3:TpmSignature']['ns3:KeyInfo']['KeyName'])
except Exception:
host_name = (data_xml['ns3:Report']['ns3:QuoteData']
['ns3:TpmSignature']['ns3:KeyInfo']
['ns2:KeyName'])
logger.info(host_name)
infoDigest.host = host_name
gc.collect()
except Exception as e:
logger.error('Error opening IR, %s', e)
del report_str
gc.collect()
return 2
return 0
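# Usage sketch (the WSDL URL, report id and info_digest below are hypothetical
# placeholders for an OAT attestation service and its digest container):
#
#   parser = ParsingOAT()
#   ret = parser.parsing('cont-check,cont-list=all', 'all',
#                        'https://oat-server/wsdl', 42, info_digest)
#   # 0 on success, 2 if the integrity report could not be fetched or parsed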
|
py
|
1a5c6811248b528a954fd4a2600471ce7d28a8f5
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import JobEventDetail, JobEventChildrenList
urls = [
re_path(r'^(?P<pk>[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'),
re_path(r'^(?P<pk>[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'),
]
__all__ = ['urls']
|
py
|
1a5c6812415e8886e3cfe9e9b66923c2c15061c6
|
"""
Typeclass for Player objects
Note that this object is primarily intended to
store OOC information, not game info! This
object represents the actual user (not their
character) and has NO actual presence in the
game world (this is handled by the associated
character object, so you should customize that
instead for most things).
"""
from django.conf import settings
from django.utils import timezone
from evennia.typeclasses.models import TypeclassBase
from evennia.players.manager import PlayerManager
from evennia.players.models import PlayerDB
from evennia.comms.models import ChannelDB
from evennia.commands import cmdhandler
from evennia.utils import logger
from evennia.utils.utils import (lazy_property, to_str,
make_iter, to_unicode,
variable_from_module)
from evennia.typeclasses.attributes import NickHandler
from evennia.scripts.scripthandler import ScriptHandler
from evennia.commands.cmdsethandler import CmdSetHandler
from django.utils.translation import ugettext as _
__all__ = ("DefaultPlayer",)
_SESSIONS = None
_AT_SEARCH_RESULT = variable_from_module(*settings.SEARCH_AT_RESULT.rsplit('.', 1))
_MULTISESSION_MODE = settings.MULTISESSION_MODE
_CMDSET_PLAYER = settings.CMDSET_PLAYER
_CONNECT_CHANNEL = None
class DefaultPlayer(PlayerDB):
"""
This is the base Typeclass for all Players. Players represent
the person playing the game and tracks account info, password
etc. They are OOC entities without presence in-game. A Player
can connect to a Character Object in order to "enter" the
game.
Player Typeclass API:
* Available properties (only available on initiated typeclass objects)
key (string) - name of player
name (string)- wrapper for user.username
aliases (list of strings) - aliases to the object. Will be saved to
database as AliasDB entries but returned as strings.
dbref (int, read-only) - unique #id-number. Also "id" can be used.
date_created (string) - time stamp of object creation
permissions (list of strings) - list of permission strings
user (User, read-only) - django User authorization object
obj (Object) - game object controlled by player. 'character' can also
be used.
sessions (list of Sessions) - sessions connected to this player
is_superuser (bool, read-only) - if the connected user is a superuser
* Handlers
locks - lock-handler: use locks.add() to add new lock strings
db - attribute-handler: store/retrieve database attributes on this
self.db.myattr=val, val=self.db.myattr
ndb - non-persistent attribute handler: same as db but does not
create a database entry when storing data
scripts - script-handler. Add new scripts to object with scripts.add()
cmdset - cmdset-handler. Use cmdset.add() to add new cmdsets to object
nicks - nick-handler. New nicks with nicks.add().
* Helper methods
msg(outgoing_string, from_obj=None, **kwargs)
#swap_character(new_character, delete_old_character=False)
execute_cmd(raw_string)
search(ostring, global_search=False, attribute_name=None,
use_nicks=False, location=None,
ignore_errors=False, player=False)
is_typeclass(typeclass, exact=False)
swap_typeclass(new_typeclass, clean_attributes=False, no_default=True)
access(accessing_obj, access_type='read', default=False)
check_permstring(permstring)
* Hook methods
basetype_setup()
at_player_creation()
- note that the following hooks are also found on Objects and are
usually handled on the character level:
at_init()
at_access()
at_cmdset_get(**kwargs)
at_first_login()
at_post_login(sessid=None)
at_disconnect()
at_message_receive()
at_message_send()
at_server_reload()
at_server_shutdown()
"""
__metaclass__ = TypeclassBase
objects = PlayerManager()
# properties
@lazy_property
def cmdset(self):
return CmdSetHandler(self, True)
@lazy_property
def scripts(self):
return ScriptHandler(self)
@lazy_property
def nicks(self):
return NickHandler(self)
# session-related methods
def get_session(self, sessid):
"""
Return session with given sessid connected to this player.
note that the sessionhandler also accepts sessid as an iterable.
"""
global _SESSIONS
if not _SESSIONS:
from evennia.server.sessionhandler import SESSIONS as _SESSIONS
return _SESSIONS.session_from_player(self, sessid)
def get_all_sessions(self):
"Return all sessions connected to this player"
global _SESSIONS
if not _SESSIONS:
from evennia.server.sessionhandler import SESSIONS as _SESSIONS
return _SESSIONS.sessions_from_player(self)
sessions = property(get_all_sessions) # alias shortcut
def disconnect_session_from_player(self, sessid):
"""
Access method for disconnecting a given session from the player
(connection happens automatically in the sessionhandler)
"""
# this should only be one value, loop just to make sure to
# clean everything
sessions = (session for session in self.get_all_sessions()
if session.sessid == sessid)
for session in sessions:
# this will also trigger unpuppeting
session.sessionhandler.disconnect(session)
# puppeting operations
def puppet_object(self, sessid, obj):
"""
Use the given session to control (puppet) the given object (usually
a Character type).
Args:
sessid (int): session id of session to connect
obj (Object): the object to start puppeting
Raises:
RuntimeError with message if puppeting is not possible
returns True if successful, False otherwise
"""
# safety checks
if not obj:
raise RuntimeError("Object not found")
session = self.get_session(sessid)
if not session:
raise RuntimeError("Session not found")
if self.get_puppet(sessid) == obj:
# already puppeting this object
raise RuntimeError("You are already puppeting this object.")
if not obj.access(self, 'puppet'):
# no access
raise RuntimeError("You don't have permission to puppet '%s'." % obj.key)
if obj.player:
# object already puppeted
if obj.player == self:
if obj.sessid.count():
# we may take over another of our sessions
# output messages to the affected sessions
if _MULTISESSION_MODE in (1, 3):
txt1 = "{c%s{n{G is now shared from another of your sessions.{n"
txt2 = "Sharing {c%s{n with another of your sessions."
else:
txt1 = "{c%s{n{R is now acted from another of your sessions.{n"
txt2 = "Taking over {c%s{n from another of your sessions."
self.unpuppet_object(obj.sessid.get())
self.msg(txt1 % obj.name, sessid=obj.sessid.get(), _forced_nomulti=True)
self.msg(txt2 % obj.name, sessid=sessid, _forced_nomulti=True)
elif obj.player.is_connected:
# controlled by another player
raise RuntimeError("{R{c%s{R is already puppeted by another Player.")
# do the puppeting
if session.puppet:
# cleanly unpuppet eventual previous object puppeted by this session
self.unpuppet_object(sessid)
# if we get to this point the character is ready to puppet or it
# was left with a lingering player/sessid reference from an unclean
# server kill or similar
obj.at_pre_puppet(self, sessid=sessid)
# do the connection
obj.sessid.add(sessid)
obj.player = self
session.puid = obj.id
session.puppet = obj
# validate/start persistent scripts on object
obj.scripts.validate()
obj.at_post_puppet()
# re-cache locks to make sure superuser bypass is updated
obj.locks.cache_lock_bypass(obj)
def unpuppet_object(self, sessid):
"""
Disengage control over an object
Args:
sessid(int): the session id to disengage
Raises:
RuntimeError with message about error.
"""
if _MULTISESSION_MODE == 1:
sessions = self.get_all_sessions()
else:
sessions = self.get_session(sessid)
if not sessions:
raise RuntimeError("No session was found.")
for session in make_iter(sessions):
obj = session.puppet or None
if not obj:
raise RuntimeError("No puppet was found to disconnect from.")
elif obj:
# do the disconnect, but only if we are the last session to puppet
obj.at_pre_unpuppet()
obj.sessid.remove(session.sessid)
if not obj.sessid.count():
del obj.player
obj.at_post_unpuppet(self, sessid=sessid)
# Just to be sure we're always clear.
session.puppet = None
session.puid = None
def unpuppet_all(self):
"""
Disconnect all puppets. This is called by server
before a reset/shutdown.
"""
for session in (sess for sess in self.get_all_sessions() if sess.puppet):
self.unpuppet_object(session.sessid)
def get_puppet(self, sessid, return_dbobj=False):
"""
Get an object puppeted by this session through this player. This is
the main method for retrieving the puppeted object from the
player's end.
sessid - return character connected to this sessid,
"""
session = self.get_session(sessid)
if not session:
return None
if return_dbobj:
return session.puppet
return session.puppet and session.puppet or None
def get_all_puppets(self):
"""
Get all currently puppeted objects as a list.
"""
return list(set(session.puppet for session in self.get_all_sessions()
if session.puppet))
def __get_single_puppet(self):
"""
This is a legacy convenience link for users of
MULTISESSION_MODE 0 or 1. It will return
only the first puppet. For mode 2, this returns
a list of all characters.
"""
puppets = self.get_all_puppets()
if _MULTISESSION_MODE in (0, 1):
return puppets and puppets[0] or None
return puppets
character = property(__get_single_puppet)
puppet = property(__get_single_puppet)
# utility methods
def delete(self, *args, **kwargs):
"""
Deletes the player permanently.
"""
for session in self.get_all_sessions():
# unpuppeting all objects and disconnecting the user, if any
# sessions remain (should usually be handled from the
# deleting command)
try:
self.unpuppet_object(session.sessid)
except RuntimeError:
# no puppet to disconnect from
pass
session.sessionhandler.disconnect(session, reason=_("Player being deleted."))
self.scripts.stop()
self.attributes.clear()
self.nicks.clear()
self.aliases.clear()
super(PlayerDB, self).delete(*args, **kwargs)
## methods inherited from database model
def msg(self, text=None, from_obj=None, sessid=None, **kwargs):
"""
Evennia -> User
This is the main route for sending data back to the user from the
server.
Args:
text (str, optional): text data to send
from_obj (Object or Player, optional): object sending. If given,
its at_msg_send() hook will be called.
sessid (int or list, optional): session id or ids to receive this
send. If given, overrules MULTISESSION_MODE.
Notes:
All other keywords are passed on to the protocol.
"""
text = to_str(text, force_string=True) if text else ""
if from_obj:
# call hook
try:
from_obj.at_msg_send(text=text, to_obj=self, **kwargs)
except Exception:
pass
# session relay
if sessid:
# this could still be an iterable if sessid is an iterable
sessions = self.get_session(sessid)
if sessions:
# this is a special instruction to ignore MULTISESSION_MODE
# and only relay to this given session.
kwargs["_nomulti"] = True
for session in make_iter(sessions):
session.msg(text=text, **kwargs)
return
# we only send to the first of any connected sessions - the sessionhandler
# will disperse this to the other sessions based on MULTISESSION_MODE.
sessions = self.get_all_sessions()
if sessions:
sessions[0].msg(text=text, **kwargs)
def execute_cmd(self, raw_string, sessid=None, **kwargs):
"""
Do something as this player. This method is never called normally,
but only when the player object itself is supposed to execute the
command. It takes player nicks into account, but not nicks of
eventual puppets.
raw_string - raw command input coming from the command line.
sessid - the optional session id to be responsible for the command-send
**kwargs - other keyword arguments will be added to the found command
                   object instance as variables before it executes. This is
                   unused by default Evennia but may be used to set flags and
                   change operating parameters for commands at run-time.
"""
raw_string = to_unicode(raw_string)
raw_string = self.nicks.nickreplace(raw_string,
categories=("inputline", "channel"), include_player=False)
if not sessid and _MULTISESSION_MODE in (0, 1):
# in this case, we should either have only one sessid, or the sessid
# should not matter (since the return goes to all of them we can
# just use the first one as the source)
try:
sessid = self.get_all_sessions()[0].sessid
except IndexError:
# this can happen for bots
sessid = None
return cmdhandler.cmdhandler(self, raw_string,
callertype="player", sessid=sessid, **kwargs)
def search(self, searchdata, return_puppet=False,
nofound_string=None, multimatch_string=None, **kwargs):
"""
This is similar to the ObjectDB search method but will search for
Players only. Errors will be echoed, and None returned if no Player
is found.
searchdata - search criterion, the Player's key or dbref to search for
return_puppet - will try to return the object the player controls
instead of the Player object itself. If no
puppeted object exists (since Player is OOC), None will
be returned.
nofound_string - optional custom string for not-found error message.
multimatch_string - optional custom string for multimatch error header.
Extra keywords are ignored, but are allowed in call in order to make
API more consistent with objects.models.TypedObject.search.
"""
# handle me, self and *me, *self
if isinstance(searchdata, basestring):
# handle wrapping of common terms
if searchdata.lower() in ("me", "*me", "self", "*self",):
return self
matches = self.__class__.objects.player_search(searchdata)
matches = _AT_SEARCH_RESULT(self, searchdata, matches, global_search=True,
nofound_string=nofound_string,
multimatch_string=multimatch_string)
if matches and return_puppet:
try:
return matches.puppet
except AttributeError:
return None
return matches
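# Illustrative sketch (assumption): looking up another Player by key or
# dbref; the name used here is made up. Errors are echoed to `self` by
# _AT_SEARCH_RESULT, so the caller only needs to handle None.
#
#     target = player.search("Griatch")
#     if target:
#         target.msg("You have mail.")
#     # return_puppet=True instead returns the puppeted Character,
#     # or None if the target is OOC.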
def access(self, accessing_obj, access_type='read', default=False, **kwargs):
"""
Determines if another object has permission to access this object
in whatever way.
Args:
accessing_obj (Object): Object trying to access this one
access_type (str): Type of access sought
default (bool): What to return if no lock of access_type was found
Kwargs:
Passed to the at_access hook along with the result.
"""
result = super(DefaultPlayer, self).access(accessing_obj, access_type=access_type, default=default)
self.at_access(result, accessing_obj, access_type, **kwargs)
return result
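# Illustrative sketch (assumption): a lock check against this Player. The
# access_type must match a lock added via self.locks.add(), such as the
# "boot" lock installed by basetype_setup() below.
#
#     if player.access(caller, access_type="boot"):
#         # caller passed the "boot:perm(Wizards)" lock
#         do_boot(player)   # hypothetical helper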
## player hooks
def basetype_setup(self):
"""
This sets up the basic properties for a player.
Overload this with at_player_creation rather than
changing this method.
"""
# A basic security setup
lockstring = "examine:perm(Wizards);edit:perm(Wizards);delete:perm(Wizards);boot:perm(Wizards);msg:all()"
self.locks.add(lockstring)
# The ooc player cmdset
self.cmdset.add_default(_CMDSET_PLAYER, permanent=True)
def at_player_creation(self):
"""
This is called once, the very first time
the player is created (i.e. first time they
register with the game). It's a good place
to store attributes all players should have,
like configuration values etc.
"""
# set an (empty) attribute holding the characters this player has
lockstring = "attrread:perm(Admins);attredit:perm(Admins);attrcreate:perm(Admins)"
self.attributes.add("_playable_characters", [], lockstring=lockstring)
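# Illustrative sketch (assumption): a game-specific override in a custom
# typeclass. It calls the parent so the _playable_characters attribute is
# still set up, then adds its own defaults; the attribute added here is
# made up for the example.
#
#     class Player(DefaultPlayer):
#         def at_player_creation(self):
#             super(Player, self).at_player_creation()
#             self.db.prefs = {"color": True}   # hypothetical per-player prefs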
def at_init(self):
"""
This is always called whenever this object is initiated --
that is, whenever its typeclass is cached from memory. This
happens on-demand first time the object is used or activated
in some way after being created but also after each server
restart or reload. In the case of player objects, this usually
happens the moment the player logs in or reconnects after a
reload.
"""
pass
# Note that the hooks below also exist in the character object's
# typeclass. You can often ignore these and rely on the character
# ones instead, unless you are implementing a multi-character game
# and have some things that should be done regardless of which
# character is currently connected to this player.
def at_first_save(self):
"""
This is a generic hook called by Evennia when this object is
saved to the database the very first time. You generally
don't override this method but the hooks called by it.
"""
self.basetype_setup()
self.at_player_creation()
permissions = settings.PERMISSION_PLAYER_DEFAULT
if hasattr(self, "_createdict"):
# this will only be set if the utils.create_player
# function was used to create the object.
cdict = self._createdict
if cdict.get("locks"):
self.locks.add(cdict["locks"])
if cdict.get("permissions"):
permissions = cdict["permissions"]
del self._createdict
self.permissions.add(permissions)
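# Illustrative sketch (assumption): the _createdict consumed above is set by
# the utils.create_player helper mentioned in the comment, roughly like
# this (argument names are from memory and may differ):
#
#     player = create_player("Anna", "anna@test.com", "password",
#                            permissions=["Players"],
#                            locks="boot:perm(Wizards)")
#     # at_first_save() then applies those locks/permissions and removes
#     # the temporary _createdict.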
def at_access(self, result, accessing_obj, access_type, **kwargs):
"""
This is called with the result of an access call, along with
any kwargs used for that call. The return of this method does
not affect the result of the lock check. It can be used e.g. to
customize error messages in a central location or other effects
based on the access result.
"""
pass
def at_cmdset_get(self, **kwargs):
"""
Called just before cmdsets on this player are requested by the
command handler. If changes need to be done on the fly to the
cmdset before passing them on to the cmdhandler, this is the
place to do it. This is also called if the player currently
has no cmdsets. kwargs are usually not used unless the
cmdset is generated dynamically.
"""
pass
def at_first_login(self):
"""
Called the very first time this player logs into the game.
"""
pass
def at_pre_login(self):
"""
Called every time the user logs in, just before the actual
login-state is set.
"""
pass
def _send_to_connect_channel(self, message):
"Helper method for loading the default comm channel"
global _CONNECT_CHANNEL
if not _CONNECT_CHANNEL:
try:
_CONNECT_CHANNEL = ChannelDB.objects.filter(db_key=settings.DEFAULT_CHANNELS[1]["key"])[0]
except Exception:
logger.log_trace()
now = timezone.now()
now = "%02i-%02i-%02i(%02i:%02i)" % (now.year, now.month,
now.day, now.hour, now.minute)
if _CONNECT_CHANNEL:
_CONNECT_CHANNEL.tempmsg("[%s, %s]: %s" % (_CONNECT_CHANNEL.key, now, message))
else:
logger.log_infomsg("[%s]: %s" % (now, message))
def at_post_login(self, sessid=None):
"""
Called at the end of the login process, just before letting
the player loose. This is called before an eventual Character's
at_post_login hook.
"""
self._send_to_connect_channel("{G%s connected{n" % self.key)
if _MULTISESSION_MODE == 0:
# in this mode we should have only one character available. We
# try to auto-connect to our last connected object, if any
self.puppet_object(sessid, self.db._last_puppet)
elif _MULTISESSION_MODE == 1:
# in this mode all sessions connect to the same puppet.
self.puppet_object(sessid, self.db._last_puppet)
elif _MULTISESSION_MODE in (2, 3):
# In this mode we by default end up at a character selection
# screen. We execute look on the player.
self.execute_cmd("look", sessid=sessid)
def at_disconnect(self, reason=None):
"""
Called just before user is disconnected.
"""
reason = reason and "(%s)" % reason or ""
self._send_to_connect_channel("{R%s disconnected %s{n" % (self.key, reason))
def at_post_disconnect(self):
"""
This is called after disconnection is complete. No messages
can be relayed to the player from here. After this call, the
player should not be accessed any more, making this a good
spot for deleting it (in the case of a guest player account,
for example).
"""
pass
def at_message_receive(self, message, from_obj=None):
"""
Called when any text is emitted to this
object. If it returns False, no text
will be sent automatically.
"""
return True
def at_message_send(self, message, to_object):
"""
Called whenever this object tries to send text
to another object. Only called if the object supplied
itself as a sender in the msg() call.
"""
pass
def at_server_reload(self):
"""
This hook is called whenever the server is shutting down for
restart/reboot. If you want to, for example, save non-persistent
properties across a restart, this is the place to do it.
"""
pass
def at_server_shutdown(self):
"""
This hook is called whenever the server is shutting down fully
(i.e. not for a restart).
"""
pass
class DefaultGuest(DefaultPlayer):
"""
This class is used for guest logins. Unlike Players, Guests and their
characters are deleted after disconnection.
"""
def at_post_login(self, sessid=None):
"""
In theory, guests only have one character regardless of which
MULTISESSION_MODE we're in. They don't get a choice.
"""
self._send_to_connect_channel("{G%s connected{n" % self.key)
self.puppet_object(sessid, self.db._last_puppet)
def at_disconnect(self):
"""
A Guest's characters aren't meant to linger on the server. When a
Guest disconnects, we remove its character.
"""
super(DefaultGuest, self).at_disconnect()
characters = self.db._playable_characters
for character in filter(None, characters):
character.delete()
def at_server_shutdown(self):
"""
We repeat at_disconnect() here just to be on the safe side.
"""
super(DefaultGuest, self).at_server_shutdown()
characters = self.db._playable_characters
for character in filter(None, characters):
character.delete()
def at_post_disconnect(self):
"""
Guests aren't meant to linger on the server, either. We need to wait
until after the Guest disconnects to delete it, though.
"""
super(DefaultGuest, self).at_post_disconnect()
self.delete()
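# Illustrative sketch (assumption): wiring guest logins to this class from
# the settings file. The setting names below are from memory and may differ
# between Evennia versions.
#
#     # in the game's settings.py
#     GUEST_ENABLED = True
#     BASE_GUEST_TYPECLASS = "typeclasses.players.Guest"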
|
py
|
1a5c68b7f9579f4181cbe6ab4575419553adedbc
|
from django.apps import AppConfig
class Gs8BihuConfig(AppConfig):
name = 'gs8bihu'
|