max_stars_repo_path (stringlengths 4-245) | max_stars_repo_name (stringlengths 7-115) | max_stars_count (int64 101-368k) | id (stringlengths 2-8) | content (stringlengths 6-1.03M) |
---|---|---|---|---|
samples/order/capture.py | Hey-Marvelous/PayPal-Python-SDK | 653 | 12703758 | from paypalrestsdk import Order
import logging
logging.basicConfig(level=logging.INFO)
order = Order.find("<ORDER_ID>")
capture = order.capture({
"amount": {
"currency": "USD",
"total": "4.54"},
"is_final_capture": True})
if capture.success():
print("Capture[%s] successfully" % (capture.id))
else:
print(capture.error)
|
ote_cli/ote_cli/datasets/__init__.py | opencv/openvino_training_extensions | 775 | 12703765 | """
File system based datasets registry.
"""
# Copyright (C) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
from ote_sdk.entities.model_template import TaskType
# pylint: disable=too-many-return-statements
def get_dataset_class(task_type):
"""
Returns a dataset class by task type.
Args:
task_type: A task type such as ANOMALY_CLASSIFICATION, ANOMALY_DETECTION, ANOMALY_SEGMENTATION,
CLASSIFICATION, DETECTION, INSTANCE_SEGMENTATION, ROTATED_DETECTION, SEGMENTATION.
"""
if task_type == TaskType.ANOMALY_CLASSIFICATION:
from ote_anomalib.data.dataset import AnomalyClassificationDataset
return AnomalyClassificationDataset
if task_type == TaskType.ANOMALY_DETECTION:
from ote_anomalib.data.dataset import AnomalyDetectionDataset
return AnomalyDetectionDataset
if task_type == TaskType.ANOMALY_SEGMENTATION:
from ote_anomalib.data.dataset import AnomalySegmentationDataset
return AnomalySegmentationDataset
if task_type == TaskType.CLASSIFICATION:
from .image_classification.dataset import ImageClassificationDataset
return ImageClassificationDataset
if task_type == TaskType.DETECTION:
from .object_detection.dataset import ObjectDetectionDataset
return ObjectDetectionDataset
if task_type == TaskType.INSTANCE_SEGMENTATION:
from .instance_segmentation.dataset import InstanceSegmentationDataset
return InstanceSegmentationDataset
if task_type == TaskType.ROTATED_DETECTION:
from .rotated_detection.dataset import RotatedDetectionDataset
return RotatedDetectionDataset
if task_type == TaskType.SEGMENTATION:
from .semantic_segmentation.dataset import SemanticSegmentationDataset
return SemanticSegmentationDataset
raise ValueError(f"Invalid task type: {task_type}")
|
tests/unit/firewalls/test_domain.py | DoctorJohn/hcloud-python | 156 | 12703776 | import datetime
from dateutil.tz import tzoffset
from hcloud.firewalls.domain import Firewall
class TestFirewall(object):
def test_created_is_datetime(self):
firewall = Firewall(id=1, created="2016-01-30T23:50+00:00")
assert firewall.created == datetime.datetime(
2016, 1, 30, 23, 50, tzinfo=tzoffset(None, 0)
)
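# Hedged note: tzoffset(None, 0) is dateutil's fixed zero-offset timezone, i.e. the
# +00:00 suffix of the ISO 8601 string above, so both sides of the assertion are
# timezone-aware datetimes.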
|
rest-service/manager_rest/rest/resources_v1/executions.py | cloudify-cosmo/cloudify-manager | 124 | 12703808 | #########
# Copyright (c) 2017 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import uuid
from datetime import datetime
from flask_restful.reqparse import Argument
from flask_restful_swagger import swagger
from flask_restful.inputs import boolean
from cloudify.models_states import ExecutionState
from manager_rest import manager_exceptions, workflow_executor
from manager_rest.maintenance import is_bypass_maintenance_mode
from manager_rest.resource_manager import (
ResourceManager,
get_resource_manager,
)
from manager_rest.rest import requests_schema
from manager_rest.rest.rest_decorators import (
marshal_with,
not_while_cancelling
)
from manager_rest.rest.rest_utils import (
get_args_and_verify_arguments,
get_json_and_verify_params,
verify_and_convert_bool,
parse_datetime_string
)
from manager_rest.security import SecuredResource
from manager_rest.security.authorization import authorize
from manager_rest.storage import (
get_storage_manager,
models,
)
class Executions(SecuredResource):
@swagger.operation(
responseClass='List[{0}]'.format(models.Execution.__name__),
nickname="list",
notes="Returns a list of executions for the optionally provided "
"deployment id.",
parameters=[{'name': 'deployment_id',
'description': 'List executions of a specific deployment',
'required': False,
'allowMultiple': False,
'dataType': 'string',
'defaultValue': None,
'paramType': 'query'},
{'name': 'include_system_workflows',
'description': 'Include executions of system workflows',
'required': False,
'allowMultiple': False,
'dataType': 'bool',
'defaultValue': False,
'paramType': 'query'}]
)
@authorize('execution_list')
@marshal_with(models.Execution)
def get(self, _include=None, **kwargs):
"""List executions"""
args = get_args_and_verify_arguments(
[Argument('deployment_id', required=False),
Argument('include_system_workflows', type=boolean,
default=False)]
)
deployment_id_filter = ResourceManager.create_filters_dict(
deployment_id=args.deployment_id)
return get_resource_manager().list_executions(
is_include_system_workflows=args.include_system_workflows,
include=_include,
filters=deployment_id_filter).items
@authorize('execution_start')
@not_while_cancelling
@marshal_with(models.Execution)
def post(self, **kwargs):
"""Execute a workflow"""
request_dict = get_json_and_verify_params({'deployment_id',
'workflow_id'})
allow_custom_parameters = verify_and_convert_bool(
'allow_custom_parameters',
request_dict.get('allow_custom_parameters', False))
force = verify_and_convert_bool(
'force',
request_dict.get('force', False))
dry_run = verify_and_convert_bool(
'dry_run',
request_dict.get('dry_run', False))
queue = verify_and_convert_bool(
'queue',
request_dict.get('queue', False))
deployment_id = request_dict['deployment_id']
workflow_id = request_dict['workflow_id']
parameters = request_dict.get('parameters', None)
wait_after_fail = request_dict.get('wait_after_fail', 600)
scheduled_time = request_dict.get('scheduled_time', None)
if scheduled_time:
sm = get_storage_manager()
schedule = models.ExecutionSchedule(
id='{}_{}'.format(workflow_id, uuid.uuid4().hex),
deployment=sm.get(models.Deployment, deployment_id),
created_at=datetime.utcnow(),
since=self._parse_scheduled_time(scheduled_time),
rule={'count': 1},
slip=0,
workflow_id=workflow_id,
parameters=parameters,
execution_arguments={
'allow_custom_parameters': allow_custom_parameters,
'force': force,
'is_dry_run': dry_run,
'wait_after_fail': wait_after_fail,
},
stop_on_fail=False,
)
schedule.next_occurrence = schedule.compute_next_occurrence()
sm.put(schedule)
return models.Execution(status=ExecutionState.SCHEDULED), 201
if parameters is not None and not isinstance(parameters, dict):
raise manager_exceptions.BadParametersError(
f"request body's 'parameters' field must be a dict but"
f" is of type {parameters.__class__.__name__}")
sm = get_storage_manager()
rm = get_resource_manager()
with sm.transaction():
deployment = sm.get(models.Deployment, deployment_id)
rm.verify_deployment_environment_created_successfully(deployment)
execution = models.Execution(
workflow_id=workflow_id,
deployment=deployment,
parameters=parameters,
is_dry_run=dry_run,
status=ExecutionState.PENDING,
allow_custom_parameters=allow_custom_parameters,
)
sm.put(execution)
messages = rm.prepare_executions(
[execution],
bypass_maintenance=is_bypass_maintenance_mode(),
force=force,
queue=queue,
wait_after_fail=wait_after_fail,
)
workflow_executor.execute_workflow(messages)
return execution, 201
def _parse_scheduled_time(self, scheduled_time):
scheduled_utc = parse_datetime_string(scheduled_time)
if scheduled_utc <= datetime.utcnow():
raise manager_exceptions.BadParametersError(
'Date `{0}` has already passed; please provide a'
' valid date.\nExpected format: YYYYMMDDHHMM+HHMM or'
' YYYYMMDDHHMM-HHMM, e.g. 201801012230-0500'
' (Jan-01-18 10:30pm EST)'.format(scheduled_time))
return scheduled_utc
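# Hedged example of a POST body that reaches the scheduling branch above (the
# deployment and workflow ids are hypothetical; the date format mirrors the error
# message in _parse_scheduled_time):
#
#     {"deployment_id": "dep-1", "workflow_id": "install",
#      "scheduled_time": "203001012230+0000"}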
class ExecutionsId(SecuredResource):
@swagger.operation(
responseClass=models.Execution,
nickname="getById",
notes="Returns the execution state by its id.",
)
@authorize('execution_get')
@marshal_with(models.Execution)
def get(self, execution_id, _include=None, **kwargs):
"""
Get execution by id
"""
return get_storage_manager().get(
models.Execution,
execution_id,
include=_include
)
@swagger.operation(
responseClass=models.Execution,
nickname="modify_state",
notes="Modifies a running execution state (currently, only cancel"
" and force-cancel are supported)",
parameters=[{'name': 'body',
'description': 'json with an action key. '
'Legal values for action are: [cancel,'
' force-cancel, kill, resume,'
' force-resume, requeue]',
'required': True,
'allowMultiple': False,
'dataType': requests_schema.ModifyExecutionRequest.__name__, # NOQA
'paramType': 'body'}],
consumes=[
"application/json"
]
)
@authorize('execution_cancel')
@marshal_with(models.Execution)
def post(self, execution_id, **kwargs):
"""
Apply an execution action (cancel, force-cancel, kill, resume, force-resume, requeue) by id
"""
request_dict = get_json_and_verify_params({'action'})
action = request_dict['action']
valid_actions = ['cancel', 'force-cancel', 'kill', 'resume',
'force-resume', 'requeue']
if action not in valid_actions:
raise manager_exceptions.BadParametersError(
'Invalid action: {0}, Valid action values are: {1}'.format(
action, valid_actions))
if action in ('resume', 'force-resume'):
return get_resource_manager().resume_execution(
execution_id, force=action == 'force-resume')
return get_resource_manager().cancel_execution(
execution_id, action == 'force-cancel', action == 'kill')
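# Hedged example of a request body accepted by the POST handler above (any member of
# valid_actions is handled the same way):
#
#     {"action": "force-cancel"}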
@swagger.operation(
responseClass=models.Execution,
nickname="updateExecutionStatus",
notes="Updates the execution's status",
parameters=[{'name': 'status',
'description': "The execution's new status",
'required': True,
'allowMultiple': False,
'dataType': 'string',
'paramType': 'body'},
{'name': 'error',
'description': "An error message. If omitted, "
"error will be updated to an empty "
"string",
'required': False,
'allowMultiple': False,
'dataType': 'string',
'paramType': 'body'}],
consumes=[
"application/json"
]
)
@authorize('execution_status_update')
@marshal_with(models.Execution)
def patch(self, execution_id, **kwargs):
"""
Update execution status by id
"""
request_dict = get_json_and_verify_params({'status'})
return get_resource_manager().update_execution_status(
execution_id,
request_dict['status'],
request_dict.get('error', '')
)
|
examples/adc/01-closed_shell.py | QuESt-Calculator/pyscf | 501 | 12703832 | #!/usr/bin/env python
'''
IP/EA-RADC calculations for closed-shell N2
'''
from pyscf import gto, scf, adc
mol = gto.Mole()
r = 1.098
mol.atom = [
['N', ( 0., 0. , -r/2 )],
['N', ( 0., 0. , r/2)],]
mol.basis = {'N':'aug-cc-pvdz'}
mol.build()
mf = scf.RHF(mol)
mf.conv_tol = 1e-12
mf.kernel()
myadc = adc.ADC(mf)
#IP-RADC(2) for 1 root
myadc.verbose = 6
eip,vip,pip,xip = myadc.kernel()
#EA-RADC(2)-x for 1 root
myadc.method = "adc(2)-x"
myadc.method_type = "ea"
eea,vea,pea,xea = myadc.kernel()
#Get EA-RADC(2)-x eigenvector analysis only
myadc.compute_properties = False
myadc.analyze()
#EA-RADC(3) for 3 roots and properties
myadc.compute_properties = True
myadc.method = "adc(3)"
myadc.method_type = "ea"
eea,vea,pea,xea = myadc.kernel(nroots = 3)
myadc.analyze()
|
etl/parsers/etw/Microsoft_Windows_AppxPackagingOM.py | IMULMUL/etl-parser | 104 | 12703885 | # -*- coding: utf-8 -*-
"""
Microsoft-Windows-AppxPackagingOM
GUID : ba723d81-0d0c-4f1e-80c8-54740f508ddf
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
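# Hedged usage sketch: each class below exposes a construct Struct as `pattern`, so a
# raw event user-data payload can be parsed directly (the 4-byte hex payload here is
# made up purely for illustration):
#
#     raw = bytes.fromhex("05000780")
#     parsed = Microsoft_Windows_AppxPackagingOM_132_0.pattern.parse(raw)
#     print(parsed.errorCode)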
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=100, version=0)
class Microsoft_Windows_AppxPackagingOM_100_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=101, version=0)
class Microsoft_Windows_AppxPackagingOM_101_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"name" / WString,
"xmlNamespace" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=102, version=0)
class Microsoft_Windows_AppxPackagingOM_102_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=103, version=0)
class Microsoft_Windows_AppxPackagingOM_103_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=104, version=0)
class Microsoft_Windows_AppxPackagingOM_104_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=105, version=0)
class Microsoft_Windows_AppxPackagingOM_105_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=106, version=0)
class Microsoft_Windows_AppxPackagingOM_106_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=107, version=0)
class Microsoft_Windows_AppxPackagingOM_107_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=110, version=0)
class Microsoft_Windows_AppxPackagingOM_110_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=111, version=0)
class Microsoft_Windows_AppxPackagingOM_111_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"logo" / WString,
"field" / WString,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=112, version=0)
class Microsoft_Windows_AppxPackagingOM_112_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=113, version=0)
class Microsoft_Windows_AppxPackagingOM_113_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=114, version=0)
class Microsoft_Windows_AppxPackagingOM_114_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=115, version=0)
class Microsoft_Windows_AppxPackagingOM_115_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=116, version=0)
class Microsoft_Windows_AppxPackagingOM_116_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=117, version=0)
class Microsoft_Windows_AppxPackagingOM_117_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=118, version=0)
class Microsoft_Windows_AppxPackagingOM_118_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=119, version=0)
class Microsoft_Windows_AppxPackagingOM_119_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=120, version=0)
class Microsoft_Windows_AppxPackagingOM_120_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=121, version=0)
class Microsoft_Windows_AppxPackagingOM_121_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=122, version=0)
class Microsoft_Windows_AppxPackagingOM_122_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=123, version=0)
class Microsoft_Windows_AppxPackagingOM_123_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=124, version=0)
class Microsoft_Windows_AppxPackagingOM_124_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=126, version=0)
class Microsoft_Windows_AppxPackagingOM_126_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"fullValue" / WString,
"fieldValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=127, version=0)
class Microsoft_Windows_AppxPackagingOM_127_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"fieldName" / WString,
"fieldValue" / WString,
"duplicateLineNumber" / Int32ul,
"duplicateColumnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=128, version=0)
class Microsoft_Windows_AppxPackagingOM_128_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=129, version=0)
class Microsoft_Windows_AppxPackagingOM_129_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=130, version=0)
class Microsoft_Windows_AppxPackagingOM_130_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=131, version=0)
class Microsoft_Windows_AppxPackagingOM_131_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=132, version=0)
class Microsoft_Windows_AppxPackagingOM_132_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=133, version=0)
class Microsoft_Windows_AppxPackagingOM_133_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"attributeName1" / WString,
"attributeName2" / WString,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=134, version=0)
class Microsoft_Windows_AppxPackagingOM_134_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"attributeName1" / WString,
"attributeName2" / WString,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=135, version=0)
class Microsoft_Windows_AppxPackagingOM_135_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"xmlNamespace" / WString,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=139, version=0)
class Microsoft_Windows_AppxPackagingOM_139_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=140, version=0)
class Microsoft_Windows_AppxPackagingOM_140_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"attributeName1" / WString,
"attributeName2" / WString,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=141, version=0)
class Microsoft_Windows_AppxPackagingOM_141_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=142, version=0)
class Microsoft_Windows_AppxPackagingOM_142_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=143, version=0)
class Microsoft_Windows_AppxPackagingOM_143_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=144, version=0)
class Microsoft_Windows_AppxPackagingOM_144_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=145, version=0)
class Microsoft_Windows_AppxPackagingOM_145_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=146, version=0)
class Microsoft_Windows_AppxPackagingOM_146_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=147, version=0)
class Microsoft_Windows_AppxPackagingOM_147_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=150, version=0)
class Microsoft_Windows_AppxPackagingOM_150_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"expectedValue" / WString,
"actualValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=151, version=0)
class Microsoft_Windows_AppxPackagingOM_151_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"expectedValue" / WString,
"actualValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=152, version=0)
class Microsoft_Windows_AppxPackagingOM_152_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=153, version=0)
class Microsoft_Windows_AppxPackagingOM_153_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=154, version=0)
class Microsoft_Windows_AppxPackagingOM_154_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=155, version=0)
class Microsoft_Windows_AppxPackagingOM_155_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=156, version=0)
class Microsoft_Windows_AppxPackagingOM_156_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=157, version=0)
class Microsoft_Windows_AppxPackagingOM_157_0(Etw):
pattern = Struct(
"subjectName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=158, version=0)
class Microsoft_Windows_AppxPackagingOM_158_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"elementName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=159, version=0)
class Microsoft_Windows_AppxPackagingOM_159_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=160, version=0)
class Microsoft_Windows_AppxPackagingOM_160_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"expectedValue" / WString,
"actualValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=161, version=0)
class Microsoft_Windows_AppxPackagingOM_161_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"expectedValue" / WString,
"actualValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=162, version=0)
class Microsoft_Windows_AppxPackagingOM_162_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"packageName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=163, version=0)
class Microsoft_Windows_AppxPackagingOM_163_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=164, version=0)
class Microsoft_Windows_AppxPackagingOM_164_0(Etw):
pattern = Struct(
"subjectName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=165, version=0)
class Microsoft_Windows_AppxPackagingOM_165_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileId" / Int64ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=170, version=0)
class Microsoft_Windows_AppxPackagingOM_170_0(Etw):
pattern = Struct(
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=171, version=0)
class Microsoft_Windows_AppxPackagingOM_171_0(Etw):
pattern = Struct(
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=172, version=0)
class Microsoft_Windows_AppxPackagingOM_172_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=173, version=0)
class Microsoft_Windows_AppxPackagingOM_173_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"currentFileName" / WString,
"currentPackageFullName" / WString,
"conflictingFileName" / WString,
"conflictingPackageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=174, version=0)
class Microsoft_Windows_AppxPackagingOM_174_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"currentFileName" / WString,
"currentPackageFullName" / WString,
"conflictingFileName" / WString,
"conflictingPackageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=175, version=0)
class Microsoft_Windows_AppxPackagingOM_175_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString,
"expectedValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=176, version=0)
class Microsoft_Windows_AppxPackagingOM_176_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString,
"expectedValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=177, version=0)
class Microsoft_Windows_AppxPackagingOM_177_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString,
"expectedValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=178, version=0)
class Microsoft_Windows_AppxPackagingOM_178_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=179, version=0)
class Microsoft_Windows_AppxPackagingOM_179_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=180, version=0)
class Microsoft_Windows_AppxPackagingOM_180_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"extensionCategoryName" / WString,
"attributeName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=182, version=0)
class Microsoft_Windows_AppxPackagingOM_182_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=183, version=0)
class Microsoft_Windows_AppxPackagingOM_183_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=184, version=0)
class Microsoft_Windows_AppxPackagingOM_184_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=185, version=0)
class Microsoft_Windows_AppxPackagingOM_185_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=186, version=0)
class Microsoft_Windows_AppxPackagingOM_186_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=187, version=0)
class Microsoft_Windows_AppxPackagingOM_187_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=188, version=0)
class Microsoft_Windows_AppxPackagingOM_188_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=189, version=0)
class Microsoft_Windows_AppxPackagingOM_189_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=190, version=0)
class Microsoft_Windows_AppxPackagingOM_190_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=191, version=0)
class Microsoft_Windows_AppxPackagingOM_191_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=192, version=0)
class Microsoft_Windows_AppxPackagingOM_192_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=193, version=0)
class Microsoft_Windows_AppxPackagingOM_193_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"targetDeviceFamily" / WString,
"version" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=195, version=0)
class Microsoft_Windows_AppxPackagingOM_195_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=196, version=0)
class Microsoft_Windows_AppxPackagingOM_196_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=197, version=0)
class Microsoft_Windows_AppxPackagingOM_197_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=198, version=0)
class Microsoft_Windows_AppxPackagingOM_198_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=199, version=0)
class Microsoft_Windows_AppxPackagingOM_199_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=200, version=0)
class Microsoft_Windows_AppxPackagingOM_200_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=201, version=0)
class Microsoft_Windows_AppxPackagingOM_201_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=202, version=0)
class Microsoft_Windows_AppxPackagingOM_202_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=203, version=0)
class Microsoft_Windows_AppxPackagingOM_203_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=204, version=0)
class Microsoft_Windows_AppxPackagingOM_204_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=205, version=0)
class Microsoft_Windows_AppxPackagingOM_205_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=206, version=0)
class Microsoft_Windows_AppxPackagingOM_206_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=207, version=0)
class Microsoft_Windows_AppxPackagingOM_207_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=208, version=0)
class Microsoft_Windows_AppxPackagingOM_208_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=209, version=0)
class Microsoft_Windows_AppxPackagingOM_209_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=210, version=0)
class Microsoft_Windows_AppxPackagingOM_210_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=211, version=0)
class Microsoft_Windows_AppxPackagingOM_211_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=212, version=0)
class Microsoft_Windows_AppxPackagingOM_212_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=213, version=0)
class Microsoft_Windows_AppxPackagingOM_213_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=214, version=0)
class Microsoft_Windows_AppxPackagingOM_214_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=215, version=0)
class Microsoft_Windows_AppxPackagingOM_215_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=216, version=0)
class Microsoft_Windows_AppxPackagingOM_216_0(Etw):
pattern = Struct(
"namespace" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=217, version=0)
class Microsoft_Windows_AppxPackagingOM_217_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=218, version=0)
class Microsoft_Windows_AppxPackagingOM_218_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=219, version=0)
class Microsoft_Windows_AppxPackagingOM_219_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=220, version=0)
class Microsoft_Windows_AppxPackagingOM_220_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=221, version=0)
class Microsoft_Windows_AppxPackagingOM_221_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=222, version=0)
class Microsoft_Windows_AppxPackagingOM_222_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"mainPackageName" / WString,
"mainPackagePublisher" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=223, version=0)
class Microsoft_Windows_AppxPackagingOM_223_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=224, version=0)
class Microsoft_Windows_AppxPackagingOM_224_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=225, version=0)
class Microsoft_Windows_AppxPackagingOM_225_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=226, version=0)
class Microsoft_Windows_AppxPackagingOM_226_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=227, version=0)
class Microsoft_Windows_AppxPackagingOM_227_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=228, version=0)
class Microsoft_Windows_AppxPackagingOM_228_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=229, version=0)
class Microsoft_Windows_AppxPackagingOM_229_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=230, version=0)
class Microsoft_Windows_AppxPackagingOM_230_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=231, version=0)
class Microsoft_Windows_AppxPackagingOM_231_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=232, version=0)
class Microsoft_Windows_AppxPackagingOM_232_0(Etw):
pattern = Struct(
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"ignoredElement" / WString,
"xpathToRequiredChildElement" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=233, version=0)
class Microsoft_Windows_AppxPackagingOM_233_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"extensionCategoryName" / WString,
"attributeName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=234, version=0)
class Microsoft_Windows_AppxPackagingOM_234_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=235, version=0)
class Microsoft_Windows_AppxPackagingOM_235_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=236, version=0)
class Microsoft_Windows_AppxPackagingOM_236_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=237, version=0)
class Microsoft_Windows_AppxPackagingOM_237_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=238, version=0)
class Microsoft_Windows_AppxPackagingOM_238_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=239, version=0)
class Microsoft_Windows_AppxPackagingOM_239_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=240, version=0)
class Microsoft_Windows_AppxPackagingOM_240_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=241, version=0)
class Microsoft_Windows_AppxPackagingOM_241_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=242, version=0)
class Microsoft_Windows_AppxPackagingOM_242_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=243, version=0)
class Microsoft_Windows_AppxPackagingOM_243_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"extensionCategoryName" / WString,
"attributeName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=244, version=0)
class Microsoft_Windows_AppxPackagingOM_244_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=245, version=0)
class Microsoft_Windows_AppxPackagingOM_245_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=246, version=0)
class Microsoft_Windows_AppxPackagingOM_246_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=247, version=0)
class Microsoft_Windows_AppxPackagingOM_247_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=248, version=0)
class Microsoft_Windows_AppxPackagingOM_248_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"reason" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=249, version=0)
class Microsoft_Windows_AppxPackagingOM_249_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=250, version=0)
class Microsoft_Windows_AppxPackagingOM_250_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=251, version=0)
class Microsoft_Windows_AppxPackagingOM_251_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=252, version=0)
class Microsoft_Windows_AppxPackagingOM_252_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=253, version=0)
class Microsoft_Windows_AppxPackagingOM_253_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=254, version=0)
class Microsoft_Windows_AppxPackagingOM_254_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=255, version=0)
class Microsoft_Windows_AppxPackagingOM_255_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=256, version=0)
class Microsoft_Windows_AppxPackagingOM_256_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=257, version=0)
class Microsoft_Windows_AppxPackagingOM_257_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=258, version=0)
class Microsoft_Windows_AppxPackagingOM_258_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=259, version=0)
class Microsoft_Windows_AppxPackagingOM_259_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"value" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=260, version=0)
class Microsoft_Windows_AppxPackagingOM_260_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"currentFileName" / WString,
"currentPackageFullName" / WString,
"conflictingFileName" / WString,
"conflictingPackageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=261, version=0)
class Microsoft_Windows_AppxPackagingOM_261_0(Etw):
pattern = Struct(
"errorCode" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=262, version=0)
class Microsoft_Windows_AppxPackagingOM_262_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=263, version=0)
class Microsoft_Windows_AppxPackagingOM_263_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"extensionCategoryName" / WString,
"attributeName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=264, version=0)
class Microsoft_Windows_AppxPackagingOM_264_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=265, version=0)
class Microsoft_Windows_AppxPackagingOM_265_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"lineNumber" / Int32ul,
"columnNumber" / Int32ul,
"attributeName" / WString,
"attributeValue" / WString,
"attributeLength" / Int32ul,
"buildVersion" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=300, version=0)
class Microsoft_Windows_AppxPackagingOM_300_0(Etw):
pattern = Struct(
"zipMode" / Int8ul,
"hashMethod" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=301, version=0)
class Microsoft_Windows_AppxPackagingOM_301_0(Etw):
pattern = Struct(
"fileCount" / Int64ul,
"totalSize" / Int64ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=302, version=0)
class Microsoft_Windows_AppxPackagingOM_302_0(Etw):
pattern = Struct(
"fileName" / WString,
"contentType" / WString,
"compressionOption" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=308, version=0)
class Microsoft_Windows_AppxPackagingOM_308_0(Etw):
pattern = Struct(
"fileName" / WString,
"size" / Int64ul,
"compressionOption" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=310, version=0)
class Microsoft_Windows_AppxPackagingOM_310_0(Etw):
pattern = Struct(
"readerOptions" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=312, version=0)
class Microsoft_Windows_AppxPackagingOM_312_0(Etw):
pattern = Struct(
"requestCount" / Int32ul,
"priority" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=318, version=0)
class Microsoft_Windows_AppxPackagingOM_318_0(Etw):
pattern = Struct(
"value" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=319, version=0)
class Microsoft_Windows_AppxPackagingOM_319_0(Etw):
pattern = Struct(
"value" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=362, version=0)
class Microsoft_Windows_AppxPackagingOM_362_0(Etw):
pattern = Struct(
"capabilitySid" / Sid
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=363, version=0)
class Microsoft_Windows_AppxPackagingOM_363_0(Etw):
pattern = Struct(
"capabilitySid" / Sid,
"result" / Int32ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=390, version=0)
class Microsoft_Windows_AppxPackagingOM_390_0(Etw):
pattern = Struct(
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=391, version=0)
class Microsoft_Windows_AppxPackagingOM_391_0(Etw):
pattern = Struct(
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=392, version=0)
class Microsoft_Windows_AppxPackagingOM_392_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=393, version=0)
class Microsoft_Windows_AppxPackagingOM_393_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=394, version=0)
class Microsoft_Windows_AppxPackagingOM_394_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString,
"expectedValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=395, version=0)
class Microsoft_Windows_AppxPackagingOM_395_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=396, version=0)
class Microsoft_Windows_AppxPackagingOM_396_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"currentFileName" / WString,
"currentPackageFullName" / WString,
"attributeName" / WString,
"expectedValue" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=397, version=0)
class Microsoft_Windows_AppxPackagingOM_397_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"fileName" / WString,
"packageFullName" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=398, version=0)
class Microsoft_Windows_AppxPackagingOM_398_0(Etw):
pattern = Struct(
"errorCode" / Int32ul,
"currentFileName" / WString,
"currentPackageFullName" / WString,
"currentManifestLineNumber" / Int32ul,
"currentManifestColumnNumber" / Int32ul,
"referenceFileName" / WString,
"referencePackageFullName" / WString,
"referenceManifestLineNumber" / Int32ul,
"referenceManifestColumnNumber" / Int32ul,
"xPathToMismatchLocation" / WString
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=424, version=0)
class Microsoft_Windows_AppxPackagingOM_424_0(Etw):
pattern = Struct(
"fileCount" / Int64ul
)
@declare(guid=guid("ba723d81-0d0c-4f1e-80c8-54740f508ddf"), event_id=426, version=0)
class Microsoft_Windows_AppxPackagingOM_426_0(Etw):
pattern = Struct(
"fileName" / WString,
"contentType" / WString,
"compressionOption" / Int32ul
)
|
src/test/expect_in_atomic_printf.py | jalapenopuzzle/rr | 5,156 | 12703900 | from util import *
import re
send_gdb('bt')
expect_gdb('atomic_printf')
ok()
|
nncf/experimental/tensorflow/graph/transformations/commands.py | GreenWaves-Technologies/nncf | 136 | 12703902 | """
Copyright (c) 2022 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Dict
from typing import Any
from nncf.common.graph.transformations.commands import TargetPoint
from nncf.common.graph.transformations.commands import TargetType
from nncf.common.stateful_classes_registry import TF_STATEFUL_CLASSES
class TFTargetPointStateNames:
OP_NAME = 'op_name'
OP_TYPE_NAME = 'op_type_name'
PORT_ID = 'port_id'
TARGET_TYPE = 'target_type'
@TF_STATEFUL_CLASSES.register()
class TFTargetPoint(TargetPoint):
"""
Describes where the compression operation should be placed.
"""
_state_names = TFTargetPointStateNames
def __init__(self,
op_name: str,
op_type_name: str,
port_id: int,
target_type: TargetType):
"""
Initializes target point for TensorFlow backend.
:param op_name: Name of a node in the `FuncGraph`.
:param op_type_name: Type of operation.
:param port_id: Port id.
:param target_type: Type of the target point.
"""
super().__init__(target_type)
self.op_name = op_name
self.op_type_name = op_type_name
self.port_id = port_id
def __eq__(self, other: 'TFTargetPoint') -> bool:
return isinstance(other, TFTargetPoint) and \
self.type == other.type and \
self.op_name == other.op_name and \
self.op_type_name == other.op_type_name and \
self.port_id == other.port_id
def __str__(self) -> str:
items = [
super().__str__(),
self.op_name,
self.op_type_name,
str(self.port_id),
]
return ' '.join(items)
def get_state(self) -> Dict[str, Any]:
"""
Returns a dictionary with Python data structures (dict, list, tuple, str, int, float, True, False, None) that
represents state of the object.
:return: State of the object.
"""
state = {
self._state_names.OP_NAME: self.op_name,
self._state_names.OP_TYPE_NAME: self.op_type_name,
self._state_names.PORT_ID: self.port_id,
self._state_names.TARGET_TYPE: self.type.get_state(),
}
return state
@classmethod
def from_state(cls, state: Dict[str, Any]) -> 'TFTargetPoint':
"""
Creates the object from its state.
:param state: Output of `get_state()` method.
"""
kwargs = {
cls._state_names.OP_NAME: state[cls._state_names.OP_NAME],
cls._state_names.OP_TYPE_NAME: state[cls._state_names.OP_TYPE_NAME],
cls._state_names.PORT_ID: state[cls._state_names.PORT_ID],
cls._state_names.TARGET_TYPE: TargetType.from_state(state[cls._state_names.TARGET_TYPE]),
}
return cls(**kwargs)
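# Hedged round-trip sketch (the op names and the TargetType member are assumptions
# made for illustration only; any valid TargetType member would do):
#
#     tp = TFTargetPoint('conv2d/Conv2D', 'Conv2D', 0, TargetType.OPERATION_WITH_WEIGHTS)
#     assert TFTargetPoint.from_state(tp.get_state()) == tp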
|
components/cronet/tools_unittest.py | google-ar/chromium | 2,151 | 12703914 | #!/usr/bin/python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Run tools/ unittests."""
import sys
import unittest
if __name__ == '__main__':
suite = unittest.TestLoader().discover('tools', pattern = "*_unittest.py")
sys.exit(0 if unittest.TextTestRunner().run(suite).wasSuccessful() else 1)
|
ch08/08_15.py | leeseedong/book-cryptocurrency | 121 | 12703943 | import pykorbit
email = "<EMAIL>"
password = "<PASSWORD>"
key = "<KEY>"
secret = "<KEY>"
korbit = pykorbit.Korbit(email, password, key, secret)
balance = korbit.get_balances()
print(balance)
|
scripts/rectify.py | d-m-bailey/openlane-openroad | 354 | 12703958 | #!/usr/bin/env python3
# Copyright 2020 Efabless Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
takes a lef file and a rectangle => trims all RECT statements within the area
"""
import re
import sys
ARGV = sys.argv
if len(ARGV) < 5:
print("Usage " + ARGV[0] + " llx lly urx ury")
sys.exit(-1)
LLX = float(ARGV[1])
LLY = float(ARGV[2])
URX = float(ARGV[3])
URY = float(ARGV[4])
LAYERS = ["li1", "met1", "met2", "met3", "met4", "met5"]
RECT_REGEX = r"^\s*RECT\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+;$"
# SIZE_REGEX = r"^\s*SIZE\s+(-?\d+\.?\d*)\s+BY\s+\s+(-?\d+\.?\d*);$"
def get_cut_rect_x(rect, axis):
"""
cuts one rect about an x axis
"""
rects = [rect]
llx, lly, urx, ury = rect
if llx < axis and urx > axis:
rects = [(llx, lly, axis, ury),
(axis, lly, urx, ury)]
return rects
def get_cut_rect_y(rect, axis):
"""
    cuts one rect about a y axis
"""
rects = [rect]
llx, lly, urx, ury = rect
if lly < axis and ury > axis:
rects = [(llx, lly, urx, axis),
(llx, axis, urx, ury)]
return rects
def rects2cutrects(rects, axis, direction):
"""
    cuts a list of rects (4-tuples) and returns another list of rects (4-tuples)
    cut by an x or y axis
    axis is a position
direction is either 'x' or 'y'
"""
rects_cut = []
if direction == 'x':
for rect in rects:
rects_cut += get_cut_rect_x(rect, axis)
else:
for rect in rects:
rects_cut += get_cut_rect_y(rect, axis)
return rects_cut
def get_all_cut_rects(rect):
"""
    cut a rect about the 4 axes LLX, LLY, URX, URY
"""
rects = [rect]
rects = rects2cutrects(rects, LLX, 'x')
rects = rects2cutrects(rects, URX, 'x')
rects = rects2cutrects(rects, LLY, 'y')
rects = rects2cutrects(rects, URY, 'y')
return rects
def rectify(rects):
"""
    gets a list of already cut rects (4-tuples) and returns another list of
    rects (4-tuples) that are not within LLX, LLY, URX, URY
"""
rect_outside = []
for rect in rects:
llx, lly, urx, ury = rect
if llx < LLX or llx > URX or urx > URX or urx < LLX \
or lly < LLY or lly > URY or ury > URY or ury < LLY:
rect_outside += [rect]
return rect_outside
def print_rects(prefix, rects):
for rect in rects:
llx, lly, urx, ury = rect
print( prefix + "RECT %f %f %f %f ;" % (llx, lly, urx, ury))
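# Illustrative example of the trimming flow above: with LLX LLY URX URY = 0 0 10 10,
# a "RECT -5 2 5 4 ;" statement is split at x=0 into (-5,2,0,4) and (0,2,5,4) by
# get_all_cut_rects(); rectify() then keeps only (-5,2,0,4), the piece lying outside
# the trim area, and that is what the loop below prints.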
layer = ""
for line in sys.stdin:
if line.isspace():
continue
rect_match = re.search(RECT_REGEX, line)
if rect_match:
llx, lly, urx, ury = float(rect_match.group(1)), float(rect_match.group(2)), float(rect_match.group(3)), float(rect_match.group(4))
if (lly < LLY and ury < LLY) or (lly > URY and ury > URY) \
or (llx < LLX and urx < LLX) or (llx > URX and urx > URX): # outside the whole thing
rects = [(llx, lly, urx, ury)]
else:
rects = rectify(get_all_cut_rects((llx, lly, urx, ury)))
if len(rects) > 0:
print(layer)
if layer != "": # LAYER printed, clear it
layer = ""
print_rects(line[:line.find('R')], rects)
else:
if line.find("LAYER") != -1: # print it only if there're RECTs
layer = line
else:
print(line, end='')
|
visualize/grid_attention_visualization/__init__.py | rentainhe/visualization | 169 | 12703961 | from .visualize_attention_map import visualize_grid_attention
from .visualize_attention_map_V2 import visualize_grid_attention_v2 |
veros/core/external/solvers/scipy.py | AkasDutta/veros | 111 | 12703982 | import numpy as onp
import scipy.sparse
import scipy.sparse.linalg as spalg
from veros import logger, veros_kernel, veros_routine, distributed, runtime_state as rst
from veros.variables import allocate
from veros.core.operators import update, at, numpy as npx
from veros.core.external.solvers.base import LinearSolver
from veros.core.external.poisson_matrix import assemble_poisson_matrix
class SciPySolver(LinearSolver):
@veros_routine(
local_variables=(
"hu",
"hv",
"hvr",
"hur",
"dxu",
"dxt",
"dyu",
"dyt",
"cosu",
"cost",
"isle_boundary_mask",
"maskT",
),
dist_safe=False,
)
def __init__(self, state):
self._matrix, self._boundary_mask = self._assemble_poisson_matrix(state)
jacobi_precon = self._jacobi_preconditioner(state, self._matrix)
self._matrix = jacobi_precon * self._matrix
self._rhs_scale = jacobi_precon.diagonal()
self._extra_args = {}
logger.info("Computing ILU preconditioner...")
ilu_preconditioner = spalg.spilu(self._matrix.tocsc(), drop_tol=1e-6, fill_factor=100)
self._extra_args["M"] = spalg.LinearOperator(self._matrix.shape, ilu_preconditioner.solve)
def _scipy_solver(self, state, rhs, x0, boundary_val):
orig_shape = x0.shape
orig_dtype = x0.dtype
rhs = npx.where(self._boundary_mask, rhs, boundary_val) # set right hand side on boundaries
rhs = onp.asarray(rhs.reshape(-1) * self._rhs_scale, dtype="float64")
x0 = onp.asarray(x0.reshape(-1), dtype="float64")
linear_solution, info = spalg.bicgstab(
self._matrix,
rhs,
x0=x0,
atol=1e-8,
tol=0,
maxiter=1000,
**self._extra_args,
)
if info > 0:
logger.warning("Streamfunction solver did not converge after {} iterations", info)
return npx.asarray(linear_solution, dtype=orig_dtype).reshape(orig_shape)
def solve(self, state, rhs, x0, boundary_val=None):
"""
Main solver for streamfunction. Solves a 2D Poisson equation. Uses scipy.sparse.linalg
linear solvers.
Arguments:
rhs: Right-hand side vector
x0: Initial guess
boundary_val: Array containing values to set on boundary elements. Defaults to `x0`.
"""
rhs_global, x0_global, boundary_val = gather_variables(state, rhs, x0, boundary_val)
if rst.proc_rank == 0:
linear_solution = self._scipy_solver(state, rhs_global, x0_global, boundary_val=boundary_val)
else:
linear_solution = npx.empty_like(rhs)
return scatter_variables(state, linear_solution)
@staticmethod
def _jacobi_preconditioner(state, matrix):
"""
Construct a simple Jacobi preconditioner
"""
settings = state.settings
eps = 1e-20
precon = allocate(state.dimensions, ("xu", "yu"), fill=1, local=False)
diag = npx.reshape(matrix.diagonal().copy(), (settings.nx + 4, settings.ny + 4))[2:-2, 2:-2]
precon = update(precon, at[2:-2, 2:-2], npx.where(npx.abs(diag) > eps, 1.0 / (diag + eps), 1.0))
precon = onp.asarray(precon)
return scipy.sparse.dia_matrix((precon.reshape(-1), 0), shape=(precon.size, precon.size)).tocsr()
@staticmethod
def _assemble_poisson_matrix(state):
settings = state.settings
diags, offsets, boundary_mask = assemble_poisson_matrix(state)
# flatten offsets (as expected by scipy.sparse)
offsets = tuple(-dx * diags[0].shape[1] - dy for dx, dy in offsets)
if settings.enable_cyclic_x:
# add cyclic boundary conditions as additional matrix diagonals
# (only works in single-process mode)
wrap_diag_east, wrap_diag_west = (allocate(state.dimensions, ("xu", "yu"), local=False) for _ in range(2))
wrap_diag_east = update(wrap_diag_east, at[2, 2:-2], diags[2][2, 2:-2] * boundary_mask[2, 2:-2])
wrap_diag_west = update(wrap_diag_west, at[-3, 2:-2], diags[1][-3, 2:-2] * boundary_mask[-3, 2:-2])
diags[2] = update(diags[2], at[2, 2:-2], 0.0)
diags[1] = update(diags[1], at[-3, 2:-2], 0.0)
offsets += (-diags[0].shape[1] * (settings.nx - 1), diags[0].shape[1] * (settings.nx - 1))
diags += (wrap_diag_east, wrap_diag_west)
diags = tuple(onp.asarray(diag.reshape(-1)) for diag in (diags))
matrix = scipy.sparse.dia_matrix(
(diags, offsets),
shape=(diags[0].size, diags[0].size),
dtype="float64",
).T.tocsr()
return matrix, boundary_mask
@veros_kernel
def gather_variables(state, rhs, x0, boundary_val):
rhs_global = distributed.gather(rhs, state.dimensions, ("xt", "yt"))
x0_global = distributed.gather(x0, state.dimensions, ("xt", "yt"))
if boundary_val is None:
boundary_val = x0_global
else:
boundary_val = distributed.gather(boundary_val, state.dimensions, ("xt", "yt"))
return rhs_global, x0_global, boundary_val
@veros_kernel
def scatter_variables(state, linear_solution):
return distributed.scatter(linear_solution, state.dimensions, ("xt", "yt"))
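# Design note: the solver gathers the global right-hand side and initial guess onto
# rank 0, solves there with Jacobi row scaling plus ILU-preconditioned BiCGSTAB from
# scipy.sparse.linalg, and scatters the solution back, so only the root process ever
# holds the assembled SciPy matrix.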
|
test/normalizer_issue_files/E71.py | bryanforbes/parso | 6,989 | 12704000 | #: E711:7
if res == None:
pass
#: E711:7
if res != None:
pass
#: E711:8
if None == res:
pass
#: E711:8
if None != res:
pass
#: E711:10
if res[1] == None:
pass
#: E711:10
if res[1] != None:
pass
#: E711:8
if None != res[1]:
pass
#: E711:8
if None == res[1]:
pass
#
#: E712:7
if res == True:
pass
#: E712:7
if res != False:
pass
#: E712:8
if True != res:
pass
#: E712:9
if False == res:
pass
#: E712:10
if res[1] == True:
pass
#: E712:10
if res[1] != False:
pass
if x is False:
pass
#
#: E713:9
if not X in Y:
pass
#: E713:11
if not X.B in Y:
pass
#: E713:9
if not X in Y and Z == "zero":
pass
#: E713:24
if X == "zero" or not Y in Z:
pass
#
#: E714:9
if not X is Y:
pass
#: E714:11
if not X.B is Y:
pass
#
# Okay
if x not in y:
pass
if not (X in Y or X is Z):
pass
if not (X in Y):
pass
if x is not y:
pass
if TrueElement.get_element(True) == TrueElement.get_element(False):
pass
if (True) == TrueElement or x == TrueElement:
pass
assert (not foo) in bar
assert {'x': not foo} in bar
assert [42, not foo] in bar
|
maya/Tests/AbcExport_subframes_test.py | ryu-sw/alembic | 921 | 12704029 |
##-*****************************************************************************
##
## Copyright (c) 2009-2013,
## Sony Pictures Imageworks, Inc. and
## Industrial Light & Magic, a division of Lucasfilm Entertainment Company Ltd.
##
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above
## copyright notice, this list of conditions and the following disclaimer
## in the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Sony Pictures Imageworks, nor
## Industrial Light & Magic nor the names of their contributors may be used
## to endorse or promote products derived from this software without specific
## prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##
##-*****************************************************************************
from maya import cmds as MayaCmds
import os
import subprocess
import unittest
import util
class subframesTest(unittest.TestCase):
def setUp(self):
MayaCmds.file(new=True, force=True)
self.__files = []
def tearDown(self):
for f in self.__files:
os.remove(f)
def testRangeFlag(self):
MayaCmds.createNode('transform', name='node')
MayaCmds.setKeyframe('node.translateX', time=1.0, v=1.0)
MayaCmds.setKeyframe('node.translateX', time=11.0, v=11.0)
self.__files.append(util.expandFileName('rangeTest.abc'))
MayaCmds.AbcExport(j='-fr 1 11 -step 0.25 -root node -file ' + self.__files[-1])
MayaCmds.AbcImport(self.__files[-1], m='open')
abcNodeName = MayaCmds.ls(exactType='AlembicNode')
MayaCmds.currentTime(0, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessEqual(MayaCmds.getAttr('node.translateX'), 1)
MayaCmds.currentTime(1, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessEqual(MayaCmds.getAttr('node.translateX'), 1)
MayaCmds.currentTime(1.0003, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessEqual(MayaCmds.getAttr('node.translateX'), 1)
MayaCmds.currentTime(1.333333, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessAlmostEqual(MayaCmds.getAttr('node.translateX'),
1.333333333, 2)
MayaCmds.currentTime(9.66667, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessAlmostEqual(MayaCmds.getAttr('node.translateX'),
9.6666666666, 2)
MayaCmds.currentTime(11, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessEqual(MayaCmds.getAttr('node.translateX'), 11)
MayaCmds.currentTime(12, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessEqual(MayaCmds.getAttr('node.translateX'), 11)
def testPreRollStartFrameFlag(self):
MayaCmds.createNode('transform', name='node')
MayaCmds.setAttr('node.tx', 0.0)
MayaCmds.expression(
string="if(time==0)\n\tnode.tx=0;\n\nif (time*24 > 6 && node.tx > 0.8)\n\tnode.tx = 10;\n\nnode.tx = node.tx + time;\n",
name="startAtExp", ae=1, uc=all)
self.__files.append(util.expandFileName('startAtTest.abc'))
MayaCmds.AbcExport(j='-fr 1 10 -root node -file ' + self.__files[-1], prs=0, duf=True)
MayaCmds.AbcImport(self.__files[-1], m='open')
abcNodeName = MayaCmds.ls(exactType='AlembicNode')
# if the evaluation doesn't start at frame 0, node.tx < 10
MayaCmds.currentTime(10, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnless(MayaCmds.getAttr('node.translateX')-10 > 0)
def testSkipFrames(self):
MayaCmds.createNode('transform', name='node')
MayaCmds.setKeyframe('node.translateX', time=1.0, v=1.0)
MayaCmds.setKeyframe('node.translateX', time=10.0, v=10.0)
MayaCmds.duplicate(name='dupNode')
MayaCmds.setAttr('dupNode.tx', 0.0)
MayaCmds.expression(
string="if(time==11)\n\tdupNode.tx=-50;\n\ndupNode.tx = dupNode.tx + time;\n",
name="startAtExp", ae=1, uc=all)
self.__files.append(util.expandFileName('skipFrameTest1.abc'))
self.__files.append(util.expandFileName('skipFrameTest2.abc'))
MayaCmds.AbcExport(j=['-fr 1 10 -root node -file ' + self.__files[-2],
'-fr 20 25 -root dupNode -file ' + self.__files[-1]])
MayaCmds.AbcImport(self.__files[-2], m='open')
abcNodeName = MayaCmds.ls(exactType='AlembicNode')
        # make sure all the frames needed are written out correctly
for val in range(1, 11):
MayaCmds.currentTime(val, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessAlmostEqual(MayaCmds.getAttr('node.tx'), val, 3)
# also make sure nothing extra gets written out
MayaCmds.currentTime(11, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessEqual(MayaCmds.getAttr('node.tx'), 10.0)
MayaCmds.AbcImport(self.__files[-1], m='open')
abcNodeName = MayaCmds.ls(exactType='AlembicNode')
# if dontSkipFrames flag is not set maya would evaluate frame 11 and
# set dupNode.tx to a big negative number
MayaCmds.currentTime(20, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnless(MayaCmds.getAttr('dupNode.tx') > 0)
def testWholeFrameGeoFlag(self):
MayaCmds.polyCube(name='node')
MayaCmds.setKeyframe('node.translateX', time=1.0, v=1.0)
MayaCmds.setKeyframe('node.translateX', time=2.0, v=-3.0)
MayaCmds.setKeyframe('node.translateX', time=5.0, v=9.0)
MayaCmds.select('node.vtx[0:8]')
MayaCmds.setKeyframe(time=1.0)
MayaCmds.scale(1.5, 1.5, 1.8)
MayaCmds.setKeyframe(time=5.0)
self.__files.append(util.expandFileName('noSampleGeoTest.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -wfg -frs 0 -frs 0.9 -root node -file ' + self.__files[-1])
MayaCmds.AbcImport(self.__files[-1], m='open')
abcNodeName = MayaCmds.ls(exactType='AlembicNode')
setTime = MayaCmds.currentTime(1, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
val_1 = MayaCmds.getAttr('node.vt[0]')[0][0]
MayaCmds.currentTime(2.0, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
MayaCmds.getAttr('node.vt[0]')
val_2 = MayaCmds.getAttr('node.vt[0]')[0][0]
self.failUnlessAlmostEqual(val_2, -0.5625, 3)
setTime = MayaCmds.currentTime(1.9, update=True)
MayaCmds.dgeval(abcNodeName, verbose=False)
self.failUnlessAlmostEqual(MayaCmds.getAttr('node.tx'), -3.086, 3)
# the vertex will get linearly interpolated
alpha = (setTime - 1) / (2 - 1)
self.failUnlessAlmostEqual(MayaCmds.getAttr('node.vt[0]')[0][0],
(1-alpha)*val_1+alpha*val_2, 3)
# convenience functions for the tests following
def noFrameRangeExists(self, fileName):
#TODO make sure we just have the default time sampling (0)
pass
def isFrameRangeExists(self, fileName):
#TODO make sure we have 1 other time sampling
pass
def isFrameRangeTransAndFrameRangeShapeExists(self, fileName):
#TODO make sure we have 2 other time samplings
pass
def test_agat(self):
# animated geometry, animated transform node
nodename = 'agat_node'
MayaCmds.polyCube(name=nodename)
MayaCmds.setKeyframe(nodename+'.translateX', time=1.0, v=1.0)
MayaCmds.setKeyframe(nodename+'.translateX', time=5.0, v=10.0)
MayaCmds.select(nodename+'.vtx[0:8]')
MayaCmds.setKeyframe(time=1.0)
MayaCmds.scale(1.5, 1.5, 1.8)
MayaCmds.setKeyframe(time=5.0)
self.__files.append(util.expandFileName('agat_motionblur_noSampleGeo_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -wfg -step 0.5 -root %s -file %s' % (
nodename, self.__files[-1]))
# frameRangeShape: 1, 2, 3, 4, 5, 6
# frameRangeTrans: 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6
self.isFrameRangeTransAndFrameRangeShapeExists(self.__files[-1])
self.__files.append(util.expandFileName('agat_motionblur_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -step 0.5 -root %s -file %s' % (
nodename, self.__files[-1]))
# frameRange: 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6
self.isFrameRangeExists(self.__files[-1])
self.__files.append(util.expandFileName('agat_norange_Test.abc'))
MayaCmds.AbcExport(j='-root %s -f %s' % (nodename,self.__files[-1]))
# no frameRange
self.noFrameRangeExists(self.__files[-1])
def test_agst(self):
# animated geometry, static transform node
nodename = 'agst_node'
MayaCmds.polyCube(name=nodename)
MayaCmds.select(nodename+'.vtx[0:8]')
MayaCmds.setKeyframe(time=1.0)
MayaCmds.scale(1.5, 1.5, 1.8)
MayaCmds.setKeyframe(time=5.0)
self.__files.append(util.expandFileName('agst_motionblur_noSampleGeo_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -step 0.5 -wfg -root %s -file %s' % (
nodename, self.__files[-1]))
# frameRange: 1, 2, 3, 4, 5, 6
self.isFrameRangeTransAndFrameRangeShapeExists(self.__files[-1])
self.__files.append(util.expandFileName('agst_motionblur_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -step 0.5 -root %s -f %s' % (
nodename, self.__files[-1]))
# frameRange: 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6
self.isFrameRangeExists(self.__files[-1])
self.__files.append(util.expandFileName('agst_noSampleGeo_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -wfg -root %s -f %s' % (nodename,
self.__files[-1]))
# frameRange: 1, 2, 3, 4, 5
self.isFrameRangeExists(self.__files[-1])
def test_sgat(self):
# static geometry, animated transform node
nodename = 'sgat_node'
MayaCmds.polyCube(name=nodename)
MayaCmds.setKeyframe(nodename+'.translateX', time=1.0, v=1.0)
MayaCmds.setKeyframe(nodename+'.translateX', time=5.0, v=10.0)
self.__files.append(util.expandFileName('sgat_motionblur_noSampleGeo_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -step 0.5 -wfg -root %s -f %s' % (
nodename, self.__files[-1]))
# frameRange: 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6
self.isFrameRangeTransAndFrameRangeShapeExists(self.__files[-1])
self.__files.append(util.expandFileName('sgat_motionblur_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -step 0.5 -root %s -f %s ' % (
nodename, self.__files[-1]))
# frameRange: 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6
self.isFrameRangeExists(self.__files[-1])
def test_sgst(self):
# static geometry, static transform node
nodename = 'sgst_node'
MayaCmds.polyCube(name=nodename)
self.__files.append(util.expandFileName('sgst_motionblur_noSampleGeo_Test.abc'))
MayaCmds.AbcExport(j='-fr 1 5 -step 0.5 -wfg -root %s -file %s ' % (
nodename, self.__files[-1]))
self.failIf(MayaCmds.AbcImport(self.__files[-1]) != "")
self.__files.append(util.expandFileName('sgst_moblur_noSampleGeo_norange_Test.abc'))
MayaCmds.AbcExport(j='-step 0.5 -wfg -root %s -file %s' % (
nodename, self.__files[-1]))
# frameRange: NA
self.noFrameRangeExists(self.__files[-1])
|
pypy/module/zipimport/moduledef.py | nanjekyejoannah/pypy | 333 | 12704052 |
""" Zipimport module
"""
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
interpleveldefs = {
'zipimporter':'interp_zipimport.W_ZipImporter',
'_zip_directory_cache' : 'space.wrap(interp_zipimport.zip_cache)',
'ZipImportError': 'interp_zipimport.get_error(space)',
}
appleveldefs = {
}
def setup_after_space_initialization(self):
"""NOT_RPYTHON"""
space = self.space
# install zipimport hook
w_path_hooks = space.sys.get('path_hooks')
from pypy.module.zipimport.interp_zipimport import W_ZipImporter
w_zipimporter = space.gettypefor(W_ZipImporter)
space.call_method(w_path_hooks, 'append', w_zipimporter)
|
plugins/modules/oci_os_management_event_report_facts.py | slmjy/oci-ansible-collection | 108 | 12704063 | #!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_os_management_event_report_facts
short_description: Fetches details about a EventReport resource in Oracle Cloud Infrastructure
description:
- Fetches details about a EventReport resource in Oracle Cloud Infrastructure
- Get summary information about events on this instance.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
managed_instance_id:
description:
- Instance Oracle Cloud identifier (ocid)
type: str
aliases: ["id"]
required: true
compartment_id:
description:
- The ID of the compartment in which to list resources.
type: str
required: true
latest_timestamp_less_than:
description:
- "filter event occurrence. Selecting only those last occurred before given date in ISO 8601 format
Example: 2017-07-14T02:40:00.000Z"
type: str
latest_timestamp_greater_than_or_equal_to:
description:
- "filter event occurrence. Selecting only those last occurred on or after given date in ISO 8601 format
Example: 2017-07-14T02:40:00.000Z"
type: str
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Get a specific event_report
oci_os_management_event_report_facts:
# required
managed_instance_id: "ocid1.managedinstance.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
# optional
latest_timestamp_less_than: 2013-10-20T19:20:30+01:00
latest_timestamp_greater_than_or_equal_to: 2013-10-20T19:20:30+01:00
"""
RETURN = """
event_report:
description:
- EventReport resource
returned: on success
type: complex
contains:
count:
description:
- count of events currently registered on the system.
returned: on success
type: int
sample: 56
sample: {
"count": 56
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.os_management import EventClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class EventReportFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: get"""
def get_required_params_for_get(self):
return [
"managed_instance_id",
"compartment_id",
]
def get_resource(self):
optional_get_method_params = [
"latest_timestamp_less_than",
"latest_timestamp_greater_than_or_equal_to",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_get_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.call_with_backoff(
self.client.get_event_report,
managed_instance_id=self.module.params.get("managed_instance_id"),
compartment_id=self.module.params.get("compartment_id"),
**optional_kwargs
)
EventReportFactsHelperCustom = get_custom_class("EventReportFactsHelperCustom")
class ResourceFactsHelper(EventReportFactsHelperCustom, EventReportFactsHelperGen):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
managed_instance_id=dict(aliases=["id"], type="str", required=True),
compartment_id=dict(type="str", required=True),
latest_timestamp_less_than=dict(type="str"),
latest_timestamp_greater_than_or_equal_to=dict(type="str"),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="event_report",
service_client_class=EventClient,
namespace="os_management",
)
result = []
if resource_facts_helper.is_get():
result = resource_facts_helper.get()
else:
resource_facts_helper.fail()
module.exit_json(event_report=result)
if __name__ == "__main__":
main()
|
tests/test_instantiation.py | theblackcat102/REL | 210 | 12704065 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pathlib import Path
import torch
from REL.entity_disambiguation import EntityDisambiguation
from REL.mention_detection import MentionDetection
from REL.mulrel_ranker import MulRelRanker, PreRank
from REL.ner import Cmns
def test_entity_disambiguation_instantiation():
return EntityDisambiguation(
Path(__file__).parent,
"wiki_test",
{
"mode": "eval",
"model_path": Path(__file__).parent / "wiki_test" / "generated" / "model",
},
)
def test_cmns_instantiation():
return Cmns(Path(__file__).parent, "wiki_test")
def test_mention_detection_instantiation():
return MentionDetection(Path(__file__).parent, "wiki_test")
def test_prerank_instantiation():
# NOTE: this is basically just a blank constructor; if this fails, something is
# seriously wrong
return PreRank({})
def test_mulrel_ranker_instantiation():
# minimal config to make the constructor run
config = {
"emb_dims": 300,
"hid_dims": 100,
"dropout_rate": 0.3,
"n_rels": 3,
"use_local": True,
"use_pad_ent": True,
}
return MulRelRanker(config, torch.device("cpu"))
|
tests/adapters/test_r_adapter.py | pietervans/viewflow | 106 | 12704070 |
import viewflow
from unittest.mock import patch, ANY, call
@patch("viewflow.parsers.dependencies_r_patterns.custom_get_dependencies")
@patch("viewflow.parsers.dependencies_r_patterns.get_dependencies_default")
def test_default_dependencies_pattern(get_default_mock, get_custom_mock):
viewflow.create_dag("./tests/projects/r/pattern_default")
# Dependencies must have been retrieved for all possible schema's
calls = [call(ANY, "viewflow"), call(ANY, "public")]
get_default_mock.assert_has_calls(calls, any_order=True)
get_custom_mock.assert_not_called()
@patch("viewflow.parsers.dependencies_r_patterns.custom_get_dependencies")
@patch("viewflow.parsers.dependencies_r_patterns.get_dependencies_default")
def test_custom_dependencies_pattern(get_default_mock, get_custom_mock):
viewflow.create_dag("./tests/projects/r/pattern_custom")
# Dependencies must have been retrieved for all possible schema's
get_default_mock.assert_not_called()
calls = [call(ANY, "viewflow"), call(ANY, "public")]
get_custom_mock.assert_has_calls(calls, any_order=True)
|
candlestick/patterns/candlestick_finder.py | michalk21/candlestick-patterns | 212 | 12704077 |
import pandas as pd
from pandas.api.types import is_numeric_dtype
class CandlestickFinder(object):
def __init__(self, name, required_count, target=None):
self.name = name
self.required_count = required_count
self.close_column = 'close'
self.open_column = 'open'
self.low_column = 'low'
self.high_column = 'high'
self.data = None
self.is_data_prepared = False
self.multi_coeff = -1
if target:
self.target = target
else:
self.target = self.name
def get_class_name(self):
return self.__class__.__name__
def logic(self, row_idx):
raise Exception('Implement the logic of ' + self.get_class_name())
def has_pattern(self,
candles_df,
ohlc,
is_reversed):
self.prepare_data(candles_df,
ohlc)
if self.is_data_prepared:
results = []
rows_len = len(candles_df)
idxs = candles_df.index.values
if is_reversed:
self.multi_coeff = 1
for row_idx in range(rows_len - 1, -1, -1):
if row_idx <= rows_len - self.required_count:
results.append([idxs[row_idx], self.logic(row_idx)])
else:
results.append([idxs[row_idx], None])
else:
self.multi_coeff = -1
for row in range(0, rows_len, 1):
if row >= self.required_count - 1:
results.append([idxs[row], self.logic(row)])
else:
results.append([idxs[row], None])
candles_df = candles_df.join(pd.DataFrame(results, columns=['row', self.target]).set_index('row'),
how='outer')
return candles_df
else:
raise Exception('Data is not prepared to detect patterns')
def prepare_data(self, candles_df, ohlc):
if isinstance(candles_df, pd.DataFrame):
if len(candles_df) >= self.required_count:
if ohlc and len(ohlc) == 4:
if not set(ohlc).issubset(candles_df.columns):
                        raise Exception('Provided columns do not exist in the given data frame')
self.open_column = ohlc[0]
self.high_column = ohlc[1]
self.low_column = ohlc[2]
self.close_column = ohlc[3]
else:
                    raise Exception('Provide a list of four strings indicating the columns. '
'Default: [open, high, low, close]')
self.data = candles_df.copy()
if not is_numeric_dtype(self.data[self.close_column]):
self.data[self.close_column] = pd.to_numeric(self.data[self.close_column])
if not is_numeric_dtype(self.data[self.open_column]):
self.data[self.open_column] = pd.to_numeric(self.data[self.open_column])
if not is_numeric_dtype(self.data[self.low_column]):
self.data[self.low_column] = pd.to_numeric(self.data[self.low_column])
if not is_numeric_dtype(self.data[self.high_column]):
self.data[self.high_column] = pd.to_numeric(candles_df[self.high_column])
self.is_data_prepared = True
else:
raise Exception('{0} requires at least {1} data'.format(self.name,
self.required_count))
else:
            raise Exception('Candles must be a pandas DataFrame')
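# Illustrative usage sketch: a minimal subclass run through has_pattern(). The
# close-above-open rule and the column names are assumptions for demonstration,
# not one of the library's shipped patterns.
if __name__ == '__main__':
    class BullishClose(CandlestickFinder):
        def __init__(self, target=None):
            super().__init__('BullishClose', 1, target=target)
        def logic(self, row_idx):
            row = self.data.iloc[row_idx]
            return row[self.close_column] > row[self.open_column]
    demo = pd.DataFrame({'open': [1.0, 2.0], 'high': [2.5, 2.5],
                         'low': [0.5, 1.5], 'close': [2.0, 1.8]})
    print(BullishClose().has_pattern(demo, ['open', 'high', 'low', 'close'], False))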
|
samples/migrateADCGen1/mappers/sqlserver.py | daniel-dqsdatalabs/pyapacheatlas | 104 | 12704100 |
from .assetmapper import AssetMapper
class SqlServerTableMapper(AssetMapper):
def __init__(self, asset, terms, typeName="azure_sql_table", columnTypeName="azure_sql_column"):
super().__init__(asset, terms, typeName, columnTypeName)
_address = self.asset["properties"]["dsl"]["address"]
self.server = _address["server"]
self.database = _address["database"]
self.schema = _address["schema"]
self.table = _address["object"]
self.friendlyName = _address["object"]
def entity(self, guid):
local_entity = super().entity(guid)
# Need to add the required relationship attributes
db_schema = {
"typeName":"azure_sql_schema",
"uniqueAttributes":{"qualifiedName": self.qualified_name("schema")}
}
local_entity.addRelationship(dbSchema = db_schema)
return local_entity
def qualified_name(self, level="table"):
output = f"mssql://{self.server}"
if level in ["database", "schema", "table"]:
output = output + "/" + self.database
if level in ["schema", "table"]:
output = output + "/" + self.schema
if level in ["table"]:
output = output + "/" + self.table
return output
def column_qualified_name_pattern(self, columnName):
return self.qualified_name() + "#" + columnName
class SqlServerDatabaseMapper(AssetMapper):
def __init__(self, asset, terms, typeName="azure_sql_db", columnTypeName="azure_sql_column"):
super().__init__(asset, terms, typeName, columnTypeName)
_address = self.asset["properties"]["dsl"]["address"]
self.server = _address["server"]
self.database = _address["database"]
self.friendlyName = _address["database"]
def entity(self, guid):
local_entity = super().entity(guid)
# Need to add the required relationship attributes
server = {
"typeName":"azure_sql_server",
"uniqueAttributes":{"qualifiedName": self.qualified_name("server")}
}
local_entity.addRelationship(server = server)
return local_entity
def qualified_name(self, level="database"):
output = f"mssql://{self.server}"
if level in ["database"]:
output = output + "/" + self.database
return output
def column_qualified_name_pattern(self, columnName):
return "BADDATA"
|
dashlivesim/vodanalyzer/dashanalyzer.py | Dash-Industry-Forum/dash-live-source-simulator | 133 | 12704101 |
"""Analyze DASH content in live profile and extract parameters for VoD-config file for live source simulator.
"""
# The copyright in this software is being made available under the BSD License,
# included below. This software may be subject to other third party and contributor
# rights, including patent rights, and no such rights are granted under this license.
#
# Copyright (c) 2015, Dash Industry Forum.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# * Neither the name of Dash Industry Forum nor the names of its
# contributors may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import sys
import os
import time
import re
from struct import pack
from dashlivesim.dashlib import configprocessor
from dashlivesim.dashlib import initsegmentfilter, mediasegmentfilter
from dashlivesim.dashlib.mpdprocessor import MpdProcessor
DEFAULT_DASH_NAMESPACE = "urn:mpeg:dash:schema:mpd:2011"
MUX_TYPE_NONE = 0
MUX_TYPE_FRAGMENT = 1
MUX_TYPE_SAMPLES = 2
## Utility functions
def makeTimeStamp(t):
return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(t))
def makeDurationFromS(nrSeconds):
return "PT%dS" % nrSeconds
class DashAnalyzerError(Exception):
"""Error in DashAnalyzer."""
class DashAnalyzer(object):
def __init__(self, mpd_filepath, verbose=1):
self.mpd_filepath = mpd_filepath
path_parts = mpd_filepath.split('/')
self.base_name = 'content'
if len(path_parts) >= 2:
self.base_name = path_parts[-2]
self.config_filename = self.base_name + ".cfg"
self.base_path = os.path.split(mpd_filepath)[0]
self.verbose = verbose
self.as_data = {} # List of adaptation sets (one for each media)
self.muxedRep = None
self.muxedPaths = {}
self.mpdSegStartNr = -1
self.segDuration = None
self.firstSegmentInLoop = -1
self.lastSegmentInLoop = -1
self.nrSegmentsInLoop = -1
self.mpdProcessor = MpdProcessor(self.mpd_filepath)
self.loopTime = self.mpdProcessor.media_presentation_duration_in_s
def analyze(self):
self.initMedia()
self.checkAndUpdateMediaData()
self.write_config(self.config_filename)
def initMedia(self):
"Init media by analyzing the MPD and the media files."
for adaptation_set in self.mpdProcessor.get_adaptation_sets():
content_type = adaptation_set.content_type
if content_type is None:
print("No contentType for adaptation set")
sys.exit(1)
if content_type in self.as_data:
raise DashAnalyzerError("Multiple adaptation sets for contentType " + content_type)
as_data = {'as' : adaptation_set, 'reps' : []}
as_data['presentationDurationInS'] = self.mpdProcessor.media_presentation_duration_in_s
self.as_data[content_type] = as_data
for rep in adaptation_set.representations:
rep_data = {'representation' : rep, 'id' : rep.rep_id}
as_data['reps'].append(rep_data)
initPath = rep.initialization_path
rep_data['relInitPath'] = initPath
rep_data['absInitPath'] = os.path.join(self.base_path, initPath)
init_filter = initsegmentfilter.InitFilter(rep_data['absInitPath'])
init_filter.filter()
rep_data['trackID'] = init_filter.track_id
print("%s trackID = %d" % (content_type, rep_data['trackID']))
rep_data['relMediaPath'] = rep.get_media_path()
rep_data['absMediaPath'] = os.path.join(self.base_path, rep.get_media_path())
rep_data['default_sample_duration'] = \
init_filter.default_sample_duration
self.getSegmentRange(rep_data)
track_timescale = init_filter.track_timescale
if 'track_timescale' not in as_data:
as_data['track_timescale'] = track_timescale
elif track_timescale != as_data['track_timescale']:
raise DashAnalyzerError("Timescales not consistent between %s tracks" % content_type)
if self.verbose:
print("%s data: " % content_type)
for (k, v) in rep_data.items():
print(" %s=%s" % (k, v))
def getSegmentRange(self, rep_data):
"Search the directory for the first and last segment and set firstNumber and lastNumber for this MediaType."
rep_id = rep_data['id']
mediaDir, mediaName = os.path.split(rep_data['absMediaPath'])
        mediaRegexp = mediaName.replace("%d", r"(\d+)").replace(".", r"\.")
mediaReg = re.compile(mediaRegexp)
files = os.listdir(mediaDir)
numbers = []
for f in files:
matchObj = mediaReg.match(f)
if matchObj:
number = int(matchObj.groups(1)[0])
numbers.append(number)
numbers.sort()
for i in range(1, len(numbers)):
if numbers[i] != numbers[i-1] + 1:
raise DashAnalyzerError("%s segment missing between %d and %d" % (rep_id, numbers[i-1], numbers[i]))
print("Found %s segments %d - %d" % (rep_id, numbers[0], numbers[-1]))
rep_data['firstNumber'] = numbers[0]
rep_data['lastNumber'] = numbers[-1]
def checkAndUpdateMediaData(self):
"""Check all segments for good values and return startTimes and total duration."""
lastGoodSegments = []
print("Checking all the media segment durations for deviations.")
def writeSegTiming(ofh, firstSegmentInRepeat, firstStartTimeInRepeat, duration, repeatCount):
data = pack(configprocessor.SEGTIMEFORMAT, firstSegmentInRepeat, repeatCount,
firstStartTimeInRepeat, duration)
ofh.write(data)
for content_type in self.as_data.keys():
as_data = self.as_data[content_type]
as_data['datFile'] = "%s_%s.dat" % (self.base_name, content_type)
adaptation_set = as_data['as']
print("Checking %s with timescale %d" % (content_type, as_data['track_timescale']))
if self.segDuration is None:
self.segDuration = adaptation_set.duration
else:
assert self.segDuration == adaptation_set.duration
track_timescale = as_data['track_timescale']
with open(as_data['datFile'], "wb") as ofh:
for (rep_nr, rep_data) in enumerate(as_data['reps']):
rep_id = rep_data['id']
rep_data['endNr'] = None
rep_data['startTick'] = None
rep_data['endTick'] = None
if self.firstSegmentInLoop >= 0:
assert rep_data['firstNumber'] == self.firstSegmentInLoop
else:
self.firstSegmentInLoop = rep_data['firstNumber']
if self.mpdSegStartNr >= 0:
assert adaptation_set.start_number == self.mpdSegStartNr
else:
self.mpdSegStartNr = adaptation_set.start_number
segTicks = self.segDuration*track_timescale
maxDiffInTicks = int(track_timescale*0.1) # Max 100ms
segNr = rep_data['firstNumber']
repeatCount = -1
firstSegmentInRepeat = -1
firstStartTimeInRepeat = -1
lastDuration = 0
while (True):
segmentPath = rep_data['absMediaPath'] % segNr
if not os.path.exists(segmentPath):
if self.verbose:
print("\nLast good %s segment is %d, endTime=%.3fs, totalTime=%.3fs" % (
rep_id, rep_data['endNr'], rep_data['endTime'],
rep_data['endTime']-rep_data['startTime']))
break
msf = mediasegmentfilter.MediaSegmentFilter(
segmentPath, default_sample_duration = rep_data[
'default_sample_duration'])
msf.filter()
tfdt = msf.get_tfdt_value()
duration = msf.get_duration()
print("{0} {1:8d} {2} {3}".format(content_type, segNr, tfdt, duration))
if duration == lastDuration:
repeatCount += 1
else:
if lastDuration != 0 and rep_nr == 0:
writeSegTiming(ofh, firstSegmentInRepeat,
firstStartTimeInRepeat,
lastDuration, repeatCount)
repeatCount = 0
lastDuration = duration
firstSegmentInRepeat = segNr
firstStartTimeInRepeat = tfdt
if rep_data['startTick'] is None:
rep_data['startTick'] = tfdt
rep_data['startTime'] = rep_data['startTick']/float(track_timescale)
print("First %s segment is %d starting at time %.3fs" % (rep_id, segNr,
rep_data['startTime']))
# Check that there is not too much drift. We want to end with at most maxDiffInTicks
endTick = tfdt + duration
idealTicks = (segNr - rep_data['firstNumber'] + 1)*segTicks + rep_data['startTick']
absDiffInTicks = abs(idealTicks - endTick)
if absDiffInTicks < maxDiffInTicks:
# This is a good wrap point
rep_data['endTick'] = tfdt + duration
rep_data['endTime'] = rep_data['endTick']/float(track_timescale)
rep_data['endNr'] = segNr
else:
raise DashAnalyzerError("Too much drift in the duration of the segments")
segNr += 1
if self.verbose:
sys.stdout.write(".")
if rep_nr == 0:
writeSegTiming(ofh, firstSegmentInRepeat, firstStartTimeInRepeat, duration, repeatCount)
lastGoodSegments.append(rep_data['endNr'])
as_data['totalTicks'] = rep_data['endTick'] - rep_data['startTick']
self.lastSegmentInLoop = min(lastGoodSegments)
self.nrSegmentsInLoop = self.lastSegmentInLoop-self.firstSegmentInLoop+1
self.loopTime = self.nrSegmentsInLoop*self.segDuration
if self.verbose:
print("")
print("Will loop segments %d-%d with loop time %ds" % (self.firstSegmentInLoop, self.lastSegmentInLoop,
self.loopTime))
def write_config(self, config_file):
"""Write a config file for the analyzed content, that can then be used to serve it efficiently."""
cfg_data = {'version' : '1.1', 'first_segment_in_loop' : self.firstSegmentInLoop,
'nr_segments_in_loop' : self.nrSegmentsInLoop, 'segment_duration_s' : self.segDuration}
media_data = {}
for content_type in ('video', 'audio'):
if content_type in self.as_data:
mdata = self.as_data[content_type]
media_data[content_type] = {'representations' : [rep['id'] for rep in mdata['reps']],
'timescale' : mdata['track_timescale'],
'totalDuration' : mdata['totalTicks'],
'datFile' : mdata['datFile']}
cfg_data['media_data'] = media_data
vod_cfg = configprocessor.VodConfig()
vod_cfg.write_config(config_file, cfg_data)
def processMpd(self):
"""Process the MPD and make an appropriate live version."""
mpdData = {"availabilityStartTime" :makeTimeStamp(self.mpdAvailabilityStartTIme),
"timeShiftBufferDepth" : makeDurationFromS(self.timeShiftBufferDepthInS),
"minimumUpdatePeriod" : "PT30M"}
        if self.muxType == MUX_TYPE_NONE:
self.mpdProcessor.makeLiveMpd(mpdData)
else:
self.mpdProcessor.makeLiveMultiplexedMpd(mpdData, self.media_data)
self.muxedRep = self.mpdProcessor.getMuxedRep()
targetMpdNamespace = None
if self.fixNamespace:
targetMpdNamespace = DEFAULT_DASH_NAMESPACE
self.mpd = self.mpdProcessor.getCleanString(True, targetMpdNamespace)
def main():
from optparse import OptionParser
verbose = 0
usage = "usage: %prog [options] mpdPath"
parser = OptionParser(usage)
parser.add_option("-v", "--verbose", dest="verbose", action="store_true")
(options, args) = parser.parse_args()
if options.verbose:
verbose = 1
if len(args) != 1:
parser.error("incorrect number of arguments")
mpdFile = args[0]
dashAnalyzer = DashAnalyzer(mpdFile, verbose)
dashAnalyzer.analyze()
if __name__ == "__main__":
main()
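# Illustrative invocation (paths are placeholders): running
#   python dashanalyzer.py -v /path/to/content_dir/manifest.mpd
# analyzes the looping VoD segments and writes content_dir.cfg plus one
# content_dir_<media>.dat timing file per adaptation set into the working directory.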
|
test/losses_test.py | timgates42/theanets | 314 | 12704105 |
import pytest
import theanets
import util as u
class TestBuild:
def test_mse(self):
l = theanets.Loss.build('mse', target=2)
assert callable(l)
assert len(l.variables) == 1
def test_mse_weighted(self):
l = theanets.Loss.build('mse', target=2, weighted=True)
assert callable(l)
assert len(l.variables) == 2
@pytest.mark.parametrize('loss', ['xe', 'hinge'])
def test_classification(loss):
net = theanets.Classifier([
u.NUM_INPUTS, u.NUM_HID1, u.NUM_CLASSES], loss=loss)
u.assert_progress(net, u.CLF_DATA)
@pytest.mark.parametrize('loss', ['mse', 'mae', 'mmd'])
def test_regression(loss):
net = theanets.Regressor([
u.NUM_INPUTS, u.NUM_HID1, u.NUM_OUTPUTS], loss=loss)
u.assert_progress(net, u.REG_DATA)
def test_kl():
net = theanets.Regressor([
u.NUM_INPUTS, u.NUM_HID1, (u.NUM_OUTPUTS, 'softmax')], loss='kl')
u.assert_progress(net, [u.INPUTS, abs(u.OUTPUTS)])
def test_gll():
net = theanets.Regressor([
u.NUM_INPUTS,
dict(name='hid', size=u.NUM_HID1),
dict(name='covar', activation='relu', inputs='hid', size=u.NUM_OUTPUTS),
dict(name='mean', activation='linear', inputs='hid', size=u.NUM_OUTPUTS),
])
net.set_loss('gll', target=2, mean_name='mean', covar_name='covar')
u.assert_progress(net, u.REG_DATA)
|
src/genie/libs/parser/iosxe/tests/ShowBgpSummary/cli/equal/golden_output1_expected.py | balmasea/genieparser | 204 | 12704113 | expected_output = {
"bgp_id": 5918,
"vrf": {
"default": {
"neighbor": {
"192.168.10.253": {
"address_family": {
"vpnv4 unicast": {
"activity_paths": "23637710/17596802",
"activity_prefixes": "11724891/9708585",
"as": 65555,
"attribute_entries": "5101/4700",
"bgp_table_version": 33086714,
"cache_entries": {
"filter-list": {"memory_usage": 0, "total_entries": 0},
"route-map": {"memory_usage": 0, "total_entries": 0},
},
"community_entries": {
"memory_usage": 60120,
"total_entries": 2303,
},
"entries": {
"AS-PATH": {"memory_usage": 4824, "total_entries": 201},
"rrinfo": {"memory_usage": 20080, "total_entries": 502},
},
"input_queue": 0,
"local_as": 5918,
"msg_rcvd": 619,
"msg_sent": 695,
"output_queue": 0,
"path": {"memory_usage": 900480, "total_entries": 7504},
"prefixes": {"memory_usage": 973568, "total_entries": 3803},
"route_identifier": "192.168.10.254",
"routing_table_version": 33086714,
"scan_interval": 60,
"state_pfxrcd": "100",
"tbl_ver": 33086714,
"total_memory": 3305736,
"up_down": "05:07:45",
"version": 4,
}
}
}
}
}
},
}
|
watchmen/pipeline/core/case/function/case_then_for_storage.py | Insurance-Metrics-Measure-Advisory/watchman-data-connector | 125 | 12704124 | from typing import List
from watchmen_boot.config.config import settings
from watchmen.pipeline.core.case.model.parameter import Parameter
MYSQL = "mysql"
MONGO = "mongo"
ORACLE = "oracle"
def find_case_then_template():
if settings.STORAGE_ENGINE == MONGO:
from watchmen.pipeline.core.case.function import case_then_for_mongo
return case_then_for_mongo
elif settings.STORAGE_ENGINE == MYSQL:
from watchmen.pipeline.core.case.function import case_then_for_oracle
return case_then_for_oracle
elif settings.STORAGE_ENGINE == ORACLE:
from watchmen.pipeline.core.case.function import case_then_for_oracle
return case_then_for_oracle
case_then_template = find_case_then_template()
def parse_storage_case_then(parameters_: List[Parameter]):
return case_then_template.parse_storage_case_then(parameters_)
|
tensorflow_model_analysis/export_only/__init__.py | yifanmai/model-analysis | 1,118 | 12704134 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Init module for TensorFlow Model Analysis export-only modules.
This module contains only the export-related modules of TFMA, for use in your
Trainer. You may want to use this in your Trainer as it has a smaller import
footprint (it doesn't include Apache Beam which is required for analysis but not
for export). To evaluate your model using TFMA, you should use the full TFMA
module instead, i.e. import tensorflow_model_analysis
Example usage:
import tensorflow_model_analysis.export_only as tfma_export
def eval_input_receiver_fn():
...
return tfma_export.export.EvalInputReceiver(...)
tfma_export.export.export_eval_saved_model(...)
"""
from tensorflow_model_analysis.eval_saved_model import export
from tensorflow_model_analysis.eval_saved_model import exporter
|
dynaconf/vendor/box/box.py | sephiartlist/dynaconf | 2,293 | 12704151 | _Z='keys'
_Y='box_settings'
_X='default_box_attr'
_W='Box is frozen'
_V='modify_tuples_box'
_U='box_safe_prefix'
_T='default_box_none_transform'
_S='__created'
_R='box_dots'
_Q='box_duplicates'
_P='ignore'
_O='.'
_N='strict'
_M='box_recast'
_L='box_intact_types'
_K='default_box'
_J='_'
_I='utf-8'
_H='_box_config'
_G=True
_F='camel_killer_box'
_E='conversion_box'
_D='frozen_box'
_C='__safe_keys'
_B=False
_A=None
import copy,re,string,warnings
from collections.abc import Iterable,Mapping,Callable
from keyword import kwlist
from pathlib import Path
from typing import Any,Union,Tuple,List,Dict
from dynaconf.vendor import box
from .converters import _to_json,_from_json,_from_toml,_to_toml,_from_yaml,_to_yaml,BOX_PARAMETERS
from .exceptions import BoxError,BoxKeyError,BoxTypeError,BoxValueError,BoxWarning
__all__=['Box']
_first_cap_re=re.compile('(.)([A-Z][a-z]+)')
_all_cap_re=re.compile('([a-z0-9])([A-Z])')
_list_pos_re=re.compile('\\[(\\d+)\\]')
NO_DEFAULT=object()
def _camel_killer(attr):D='\\1_\\2';A=attr;A=str(A);B=_first_cap_re.sub(D,A);C=_all_cap_re.sub(D,B);return re.sub(' *_+',_J,C.lower())
def _recursive_tuples(iterable,box_class,recreate_tuples=_B,**E):
D=recreate_tuples;C=box_class;B=[]
for A in iterable:
if isinstance(A,dict):B.append(C(A,**E))
elif isinstance(A,list)or D and isinstance(A,tuple):B.append(_recursive_tuples(A,C,D,**E))
else:B.append(A)
return tuple(B)
def _parse_box_dots(item):
A=item
for (B,C) in enumerate(A):
if C=='[':return A[:B],A[B:]
elif C==_O:return A[:B],A[B+1:]
raise BoxError('Could not split box dots properly')
def _get_box_config():return{_S:_B,_C:{}}
class Box(dict):
_protected_keys=['to_dict','to_json','to_yaml','from_yaml','from_json','from_toml','to_toml','merge_update']+[A for A in dir({})if not A.startswith(_J)]
def __new__(A,*D,box_settings=_A,default_box=_B,default_box_attr=NO_DEFAULT,default_box_none_transform=_G,frozen_box=_B,camel_killer_box=_B,conversion_box=_G,modify_tuples_box=_B,box_safe_prefix='x',box_duplicates=_P,box_intact_types=(),box_recast=_A,box_dots=_B,**E):C=default_box_attr;B=super(Box,A).__new__(A,*D,**E);B._box_config=_get_box_config();B._box_config.update({_K:default_box,_X:A.__class__ if C is NO_DEFAULT else C,_T:default_box_none_transform,_E:conversion_box,_U:box_safe_prefix,_D:frozen_box,_F:camel_killer_box,_V:modify_tuples_box,_Q:box_duplicates,_L:tuple(box_intact_types),_M:box_recast,_R:box_dots,_Y:box_settings or{}});return B
def __init__(A,*B,box_settings=_A,default_box=_B,default_box_attr=NO_DEFAULT,default_box_none_transform=_G,frozen_box=_B,camel_killer_box=_B,conversion_box=_G,modify_tuples_box=_B,box_safe_prefix='x',box_duplicates=_P,box_intact_types=(),box_recast=_A,box_dots=_B,**F):
E=default_box_attr;super().__init__();A._box_config=_get_box_config();A._box_config.update({_K:default_box,_X:A.__class__ if E is NO_DEFAULT else E,_T:default_box_none_transform,_E:conversion_box,_U:box_safe_prefix,_D:frozen_box,_F:camel_killer_box,_V:modify_tuples_box,_Q:box_duplicates,_L:tuple(box_intact_types),_M:box_recast,_R:box_dots,_Y:box_settings or{}})
if not A._box_config[_E]and A._box_config[_Q]!=_P:raise BoxError('box_duplicates are only for conversion_boxes')
if len(B)==1:
if isinstance(B[0],str):raise BoxValueError('Cannot extrapolate Box from string')
if isinstance(B[0],Mapping):
for (D,C) in B[0].items():
if C is B[0]:C=A
if C is _A and A._box_config[_K]and A._box_config[_T]:continue
A.__setitem__(D,C)
elif isinstance(B[0],Iterable):
for (D,C) in B[0]:A.__setitem__(D,C)
else:raise BoxValueError('First argument must be mapping or iterable')
elif B:raise BoxTypeError(f"Box expected at most 1 argument, got {len(B)}")
for (D,C) in F.items():
if B and isinstance(B[0],Mapping)and C is B[0]:C=A
A.__setitem__(D,C)
A._box_config[_S]=_G
def __add__(C,other):
A=other;B=C.copy()
if not isinstance(A,dict):raise BoxTypeError(f"Box can only merge two boxes or a box and a dictionary.")
B.merge_update(A);return B
def __hash__(A):
if A._box_config[_D]:
B=54321
for C in A.items():B^=hash(C)
return B
raise BoxTypeError('unhashable type: "Box"')
def __dir__(B):
D=string.ascii_letters+string.digits+_J;C=set(super().__dir__())
for A in B.keys():
A=str(A)
if' 'not in A and A[0]not in string.digits and A not in kwlist:
for E in A:
if E not in D:break
else:C.add(A)
for A in B.keys():
if A not in C:
if B._box_config[_E]:
A=B._safe_attr(A)
if A:C.add(A)
return list(C)
def get(B,key,default=NO_DEFAULT):
C=key;A=default
if C not in B:
if A is NO_DEFAULT:
if B._box_config[_K]and B._box_config[_T]:return B.__get_default(C)
else:return _A
if isinstance(A,dict)and not isinstance(A,Box):return Box(A,box_settings=B._box_config.get(_Y))
if isinstance(A,list)and not isinstance(A,box.BoxList):return box.BoxList(A)
return A
return B[C]
def copy(A):return Box(super().copy(),**A.__box_config())
def __copy__(A):return Box(super().copy(),**A.__box_config())
def __deepcopy__(A,memodict=_A):
B=memodict;E=A._box_config[_D];D=A.__box_config();D[_D]=_B;C=A.__class__(**D);B=B or{};B[id(A)]=C
for (F,G) in A.items():C[copy.deepcopy(F,B)]=copy.deepcopy(G,B)
C._box_config[_D]=E;return C
def __setstate__(A,state):B=state;A._box_config=B[_H];A.__dict__.update(B)
def keys(A):return super().keys()
def values(A):return[A[B]for B in A.keys()]
def items(A):return[(B,A[B])for B in A.keys()]
def __get_default(B,item):
A=B._box_config[_X]
if A in(B.__class__,dict):C=B.__class__(**B.__box_config())
elif isinstance(A,dict):C=B.__class__(**B.__box_config(),**A)
elif isinstance(A,list):C=box.BoxList(**B.__box_config())
elif isinstance(A,Callable):C=A()
elif hasattr(A,'copy'):C=A.copy()
else:C=A
B.__convert_and_store(item,C);return C
def __box_config(C):
A={}
for (B,D) in C._box_config.copy().items():
if not B.startswith('__'):A[B]=D
return A
def __recast(A,item,value):
C=value;B=item
if A._box_config[_M]and B in A._box_config[_M]:
try:return A._box_config[_M][B](C)
except ValueError:raise BoxValueError(f"Cannot convert {C} to {A._box_config[_M][B]}") from _A
return C
def __convert_and_store(B,item,value):
C=item;A=value
if B._box_config[_E]:D=B._safe_attr(C);B._box_config[_C][D]=C
if isinstance(A,(int,float,str,bytes,bytearray,bool,complex,set,frozenset)):return super().__setitem__(C,A)
if B._box_config[_L]and isinstance(A,B._box_config[_L]):return super().__setitem__(C,A)
if isinstance(A,dict)and not isinstance(A,Box):A=B.__class__(A,**B.__box_config())
elif isinstance(A,list)and not isinstance(A,box.BoxList):
if B._box_config[_D]:A=_recursive_tuples(A,B.__class__,recreate_tuples=B._box_config[_V],**B.__box_config())
else:A=box.BoxList(A,box_class=B.__class__,**B.__box_config())
elif B._box_config[_V]and isinstance(A,tuple):A=_recursive_tuples(A,B.__class__,recreate_tuples=_G,**B.__box_config())
super().__setitem__(C,A)
def __getitem__(B,item,_ignore_default=_B):
A=item
try:return super().__getitem__(A)
except KeyError as E:
if A==_H:raise BoxKeyError('_box_config should only exist as an attribute and is never defaulted') from _A
if B._box_config[_R]and isinstance(A,str)and(_O in A or'['in A):
C,F=_parse_box_dots(A)
if C in B.keys():
if hasattr(B[C],'__getitem__'):return B[C][F]
if B._box_config[_F]and isinstance(A,str):
D=_camel_killer(A)
if D in B.keys():return super().__getitem__(D)
if B._box_config[_K]and not _ignore_default:return B.__get_default(A)
raise BoxKeyError(str(E)) from _A
def __getattr__(A,item):
B=item
try:
try:C=A.__getitem__(B,_ignore_default=_G)
except KeyError:C=object.__getattribute__(A,B)
except AttributeError as E:
if B=='__getstate__':raise BoxKeyError(B) from _A
if B==_H:raise BoxError('_box_config key must exist') from _A
if A._box_config[_E]:
D=A._safe_attr(B)
if D in A._box_config[_C]:return A.__getitem__(A._box_config[_C][D])
if A._box_config[_K]:return A.__get_default(B)
raise BoxKeyError(str(E)) from _A
return C
def __setitem__(A,key,value):
C=value;B=key
if B!=_H and A._box_config[_S]and A._box_config[_D]:raise BoxError(_W)
if A._box_config[_R]and isinstance(B,str)and _O in B:
D,E=_parse_box_dots(B)
if D in A.keys():
if hasattr(A[D],'__setitem__'):return A[D].__setitem__(E,C)
C=A.__recast(B,C)
if B not in A.keys()and A._box_config[_F]:
if A._box_config[_F]and isinstance(B,str):B=_camel_killer(B)
if A._box_config[_E]and A._box_config[_Q]!=_P:A._conversion_checks(B)
A.__convert_and_store(B,C)
def __setattr__(A,key,value):
C=value;B=key
if B!=_H and A._box_config[_D]and A._box_config[_S]:raise BoxError(_W)
if B in A._protected_keys:raise BoxKeyError(f'Key name "{B}" is protected')
if B==_H:return object.__setattr__(A,B,C)
C=A.__recast(B,C);D=A._safe_attr(B)
if D in A._box_config[_C]:B=A._box_config[_C][D]
A.__setitem__(B,C)
def __delitem__(A,key):
B=key
if A._box_config[_D]:raise BoxError(_W)
if B not in A.keys()and A._box_config[_R]and isinstance(B,str)and _O in B:
C,E=B.split(_O,1)
if C in A.keys()and isinstance(A[C],dict):return A[C].__delitem__(E)
if B not in A.keys()and A._box_config[_F]:
if A._box_config[_F]and isinstance(B,str):
for D in A:
if _camel_killer(B)==D:B=D;break
super().__delitem__(B)
def __delattr__(A,item):
B=item
if A._box_config[_D]:raise BoxError(_W)
if B==_H:raise BoxError('"_box_config" is protected')
if B in A._protected_keys:raise BoxKeyError(f'Key name "{B}" is protected')
try:A.__delitem__(B)
except KeyError as D:
if A._box_config[_E]:
C=A._safe_attr(B)
if C in A._box_config[_C]:A.__delitem__(A._box_config[_C][C]);del A._box_config[_C][C];return
raise BoxKeyError(D)
def pop(B,key,*C):
A=key
if C:
if len(C)!=1:raise BoxError('pop() takes only one optional argument "default"')
try:D=B[A]
except KeyError:return C[0]
else:del B[A];return D
try:D=B[A]
except KeyError:raise BoxKeyError('{0}'.format(A)) from _A
else:del B[A];return D
def clear(A):super().clear();A._box_config[_C].clear()
def popitem(A):
try:B=next(A.__iter__())
except StopIteration:raise BoxKeyError('Empty box') from _A
return B,A.pop(B)
def __repr__(A):return f"<Box: {A.to_dict()}>"
def __str__(A):return str(A.to_dict())
def __iter__(A):
for B in A.keys():yield B
def __reversed__(A):
for B in reversed(list(A.keys())):yield B
def to_dict(D):
A=dict(D)
for (C,B) in A.items():
if B is D:A[C]=A
elif isinstance(B,Box):A[C]=B.to_dict()
elif isinstance(B,box.BoxList):A[C]=B.to_list()
return A
def update(C,__m=_A,**D):
B=__m
if B:
if hasattr(B,_Z):
for A in B:C.__convert_and_store(A,B[A])
else:
for (A,E) in B:C.__convert_and_store(A,E)
for A in D:C.__convert_and_store(A,D[A])
def merge_update(A,__m=_A,**E):
C=__m
def D(k,v):
B=A._box_config[_L]and isinstance(v,A._box_config[_L])
if isinstance(v,dict)and not B:
v=A.__class__(v,**A.__box_config())
if k in A and isinstance(A[k],dict):
if isinstance(A[k],Box):A[k].merge_update(v)
else:A[k].update(v)
return
if isinstance(v,list)and not B:v=box.BoxList(v,**A.__box_config())
A.__setitem__(k,v)
if C:
if hasattr(C,_Z):
for B in C:D(B,C[B])
else:
for (B,F) in C:D(B,F)
for B in E:D(B,E[B])
def setdefault(B,item,default=_A):
C=item;A=default
if C in B:return B[C]
if isinstance(A,dict):A=B.__class__(A,**B.__box_config())
if isinstance(A,list):A=box.BoxList(A,box_class=B.__class__,**B.__box_config())
B[C]=A;return A
def _safe_attr(C,attr):
B=attr;G=string.ascii_letters+string.digits+_J
if isinstance(B,tuple):B=_J.join([str(A)for A in B])
B=B.decode(_I,_P)if isinstance(B,bytes)else str(B)
if C.__box_config()[_F]:B=_camel_killer(B)
A=[];D=0
for (E,F) in enumerate(B):
if F in G:D=E;A.append(F)
elif not A:continue
elif D==E-1:A.append(_J)
A=''.join(A)[:D+1]
try:int(A[0])
except (ValueError,IndexError):pass
else:A=f"{C.__box_config()[_U]}{A}"
if A in kwlist:A=f"{C.__box_config()[_U]}{A}"
return A
def _conversion_checks(A,item):
B=A._safe_attr(item)
if B in A._box_config[_C]:
C=[f"{item}({B})",f"{A._box_config[_C][B]}({B})"]
if A._box_config[_Q].startswith('warn'):warnings.warn(f"Duplicate conversion attributes exist: {C}",BoxWarning)
else:raise BoxError(f"Duplicate conversion attributes exist: {C}")
def to_json(A,filename=_A,encoding=_I,errors=_N,**B):return _to_json(A.to_dict(),filename=filename,encoding=encoding,errors=errors,**B)
@classmethod
def from_json(E,json_string=_A,filename=_A,encoding=_I,errors=_N,**A):
D={}
for B in A.copy():
if B in BOX_PARAMETERS:D[B]=A.pop(B)
C=_from_json(json_string,filename=filename,encoding=encoding,errors=errors,**A)
if not isinstance(C,dict):raise BoxError(f"json data not returned as a dictionary, but rather a {type(C).__name__}")
return E(C,**D)
def to_yaml(A,filename=_A,default_flow_style=_B,encoding=_I,errors=_N,**B):return _to_yaml(A.to_dict(),filename=filename,default_flow_style=default_flow_style,encoding=encoding,errors=errors,**B)
@classmethod
def from_yaml(E,yaml_string=_A,filename=_A,encoding=_I,errors=_N,**A):
D={}
for B in A.copy():
if B in BOX_PARAMETERS:D[B]=A.pop(B)
C=_from_yaml(yaml_string=yaml_string,filename=filename,encoding=encoding,errors=errors,**A)
if not isinstance(C,dict):raise BoxError(f"yaml data not returned as a dictionary but rather a {type(C).__name__}")
return E(C,**D)
def to_toml(A,filename=_A,encoding=_I,errors=_N):return _to_toml(A.to_dict(),filename=filename,encoding=encoding,errors=errors)
@classmethod
def from_toml(D,toml_string=_A,filename=_A,encoding=_I,errors=_N,**B):
C={}
for A in B.copy():
if A in BOX_PARAMETERS:C[A]=B.pop(A)
E=_from_toml(toml_string=toml_string,filename=filename,encoding=encoding,errors=errors);return D(E,**C) |
smt/utils/__init__.py | jbussemaker/smt | 354 | 12704158 | from .misc import compute_rms_error
|
python/seldon/shell/zk_utils.py | smsahu/seldon-server | 1,645 | 12704204 | import json
def is_json_data(data):
if (data != None) and (len(data)>0):
return data[0] == '{' or data[0] == '['
else:
return False
def json_compress(json_data):
d = json.loads(json_data)
return json.dumps(d, sort_keys=True, separators=(',',':'))
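# Hypothetical usage sketch (not part of the original module): json_compress
# normalizes JSON before it is written to a zookeeper node, so semantically
# identical payloads compare equal. The sample payload below is made up.
def _example_json_compress():
    raw = '{ "b": 2,  "a": 1 }'
    compact = json_compress(raw)
    assert compact == '{"a":1,"b":2}'
    return compact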
def node_set(zk_client, node_path, node_value):
if is_json_data(node_value):
node_value = json_compress(node_value)
node_value = node_value.strip() if node_value != None else node_value
if zk_client.exists(node_path):
retVal = zk_client.set(node_path,node_value)
else:
retVal = zk_client.create(node_path,node_value,makepath=True)
print "updated zk node[{node_path}]".format(node_path=node_path)
def node_get(zk_client, node_path):
theValue = None
if zk_client.exists(node_path):
theValue = zk_client.get(node_path)
theValue = theValue[0]
return theValue.strip() if theValue != None else theValue
|
src/models/__init__.py | ashok-arjun/LCFCN-AI | 170 | 12704276 | import torch
import tqdm
import argparse
import pandas as pd
import pickle, os
import numpy as np
from . import base_networks
from haven import haven_results as hr
from haven import haven_chk as hc
from . import lcfcn
def get_model(model_dict, exp_dict=None, train_set=None):
if model_dict['name'] in ["lcfcn"]:
model = lcfcn.LCFCN(exp_dict, train_set=train_set)
return model
|
quickbooks/helpers.py | varunbheemaiah/python-quickbooks | 234 | 12704289 |
def qb_date_format(input_date):
"""
Converts date to quickbooks date format
:param input_date:
:return:
"""
return input_date.strftime("%Y-%m-%d")
def qb_datetime_format(input_date):
"""
Converts datetime to quickbooks datetime format
:param input_date:
:return:
"""
return input_date.strftime("%Y-%m-%dT%H:%M:%S")
def qb_datetime_utc_offset_format(input_date, utc_offset):
"""
Converts datetime to quickbooks datetime format including UTC offset
:param input_date:
:param utc_offset: Formatted +/-HH:MM example: -08:00
:return:
"""
return "{0}{1}".format(qb_datetime_format(input_date), utc_offset)
|
python/cpumenu.py | 3mdeb/bits | 215 | 12704306 |
# Copyright (c) 2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""CPU menu generation"""
from __future__ import print_function
from cpudetect import cpulib
import bits
import ttypager
created_cpu_menu = False
def generate_cpu_menu():
global created_cpu_menu
if created_cpu_menu:
return
cfg = ""
cfg += 'menuentry "{}: {}" {{\n'.format(cpulib.name, bits.brandstring())
cfg += """ py 'import cpumenu; cpumenu.callback()'\n"""
cfg += '}\n'
bits.pyfs.add_static("cpumenu.cfg", cfg)
created_cpu_menu = True
def callback():
with ttypager.page():
print(bits.brandstring())
if cpulib.__name__ == "cpu_gen":
print("No processor-specific test exists!")
print("Menu entries will only include generic tests that apply to all processors.")
else:
print("Detected as CPU codename: {}".format(cpulib.name))
print("Menu entries have been tailored to target this specific processor")
|
qa/rpc-tests/electrum_transaction_get.py | gandrewstone/bitcoin | 535 | 12704312 | #!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Unlimited developers
"""
Tests the electrum call 'blockchain.transaction.get'
"""
import asyncio
from test_framework.util import assert_equal, p2p_port
from test_framework.electrumutil import ElectrumTestFramework, ElectrumConnection
from test_framework.nodemessages import ToHex
from test_framework.blocktools import create_transaction, pad_tx
from test_framework.script import (
CScript,
OP_CHECKSIG,
OP_DROP,
OP_DUP,
OP_EQUAL,
OP_EQUALVERIFY,
OP_FALSE,
OP_HASH160,
OP_TRUE,
)
from test_framework.nodemessages import COIN
TX_GET = "blockchain.transaction.get"
DUMMY_HASH = 0x1111111111111111111111111111111111111111
class ElectrumTransactionGet(ElectrumTestFramework):
def run_test(self):
n = self.nodes[0]
self.bootstrap_p2p()
coinbases = self.mine_blocks(n, 104)
# non-coinbase transactions
prevtx = coinbases[0]
nonstandard_tx = create_transaction(
prevtx = prevtx,
value = prevtx.vout[0].nValue, n = 0,
sig = CScript([OP_TRUE]),
out = CScript([OP_FALSE, OP_DROP]))
prevtx = coinbases[1]
p2sh_tx = create_transaction(
prevtx = prevtx,
value = prevtx.vout[0].nValue, n = 0,
sig = CScript([OP_TRUE]),
out = CScript([OP_HASH160, DUMMY_HASH, OP_EQUAL]))
prevtx = coinbases[2]
p2pkh_tx = create_transaction(
prevtx = prevtx,
value = prevtx.vout[0].nValue, n = 0,
sig = CScript([OP_TRUE]),
out = CScript([OP_DUP, OP_HASH160, DUMMY_HASH, OP_EQUALVERIFY, OP_CHECKSIG]))
prevtx = coinbases[3]
unconfirmed_tx = create_transaction(
prevtx = prevtx,
value = prevtx.vout[0].nValue, n = 0,
sig = CScript([OP_TRUE]),
out = CScript([OP_DUP, OP_HASH160, DUMMY_HASH, OP_EQUALVERIFY, OP_CHECKSIG]))
for tx in [nonstandard_tx, p2sh_tx, p2pkh_tx, unconfirmed_tx]:
pad_tx(tx)
coinbases.extend(self.mine_blocks(n, 1, [nonstandard_tx, p2sh_tx, p2pkh_tx]))
self.sync_height()
n.sendrawtransaction(ToHex(unconfirmed_tx))
self.wait_for_mempool_count(count = 1)
async def async_tests(loop):
cli = ElectrumConnection(loop)
await cli.connect()
return await asyncio.gather(
self.test_verbose(n, cli, nonstandard_tx.hash, p2sh_tx.hash, p2pkh_tx.hash, unconfirmed_tx.hash),
self.test_non_verbose(cli, coinbases, unconfirmed_tx)
)
loop = asyncio.get_event_loop()
loop.run_until_complete(async_tests(loop))
async def test_non_verbose(self, cli, coinbases, unconfirmed):
for tx in coinbases + [unconfirmed]:
assert_equal(ToHex(tx), await cli.call(TX_GET, tx.hash))
async def test_verbose(self, n, cli, nonstandard_tx, p2sh_tx, p2pkh_tx, unconfirmed_tx):
"""
The spec is unclear. It states:
"whatever the coin daemon returns when asked for a
verbose form of the raw transaction"
We should test for defacto "common denominators" between bitcoind
implementations.
"""
# All confirmed transactions are confirmed in the tip
block = n.getbestblockhash()
tipheight = n.getblockcount()
coinbase_tx = n.getblock(block)['tx'][0]
async def check_tx(txid, is_confirmed = True, check_output_type = False):
electrum = await cli.call(TX_GET, txid, True)
bitcoind = n.getrawtransaction(txid, True, block)
is_coinbase = 'coinbase' in bitcoind['vin'][0]
if not is_confirmed:
# Transaction is unconfirmed. We handle this slightly different
# than bitcoind.
assert_equal(None, electrum['blockhash'])
assert_equal(None, electrum['confirmations'])
assert_equal(None, electrum['time'])
assert_equal(None, electrum['height'])
else:
assert_equal(n.getbestblockhash(), electrum['blockhash'])
assert_equal(1, electrum['confirmations'])
assert_equal(bitcoind['time'], electrum['time'])
assert_equal(tipheight, electrum['height'])
assert_equal(bitcoind['txid'], electrum['txid'])
assert_equal(bitcoind['locktime'], electrum['locktime'])
assert_equal(bitcoind['size'], electrum['size'])
assert_equal(bitcoind['hex'], electrum['hex'])
assert_equal(bitcoind['version'], electrum['version'])
# inputs
            assert_equal(len(bitcoind['vin']), len(electrum['vin']))
for i in range(len(bitcoind['vin'])):
if 'coinbase' in bitcoind['vin'][i]:
                    # bitcoind drops txid and other fields, but adds 'coinbase' for coinbase
# inputs
assert_equal(bitcoind['vin'][i]['coinbase'], electrum['vin'][i]['coinbase'])
assert_equal(bitcoind['vin'][i]['sequence'], electrum['vin'][i]['sequence'])
continue
assert_equal(
bitcoind['vin'][i]['txid'],
electrum['vin'][i]['txid'])
assert_equal(
bitcoind['vin'][i]['vout'],
electrum['vin'][i]['vout'])
assert_equal(
bitcoind['vin'][i]['sequence'],
electrum['vin'][i]['sequence'])
assert_equal(
bitcoind['vin'][i]['scriptSig']['hex'],
electrum['vin'][i]['scriptSig']['hex'])
# There is more than one way to represent script as assembly.
# For instance '51' can be represented as '1' or 'OP_PUSHNUM_1'.
# Just check for existance.
assert('asm' in electrum['vin'][i]['scriptSig'])
# outputs
            assert_equal(len(bitcoind['vout']), len(electrum['vout']))
for i in range(len(bitcoind['vout'])):
assert_equal(
bitcoind['vout'][i]['n'],
electrum['vout'][i]['n'])
assert_equal(
bitcoind['vout'][i]['value'],
electrum['vout'][i]['value_coin'])
assert_equal(
bitcoind['vout'][i]['value'] * COIN,
electrum['vout'][i]['value_satoshi'])
assert_equal(
bitcoind['vout'][i]['scriptPubKey']['hex'],
electrum['vout'][i]['scriptPubKey']['hex'])
assert('asm' in electrum['vout'][i]['scriptPubKey'])
if 'addresses' in bitcoind['vout'][i]['scriptPubKey']:
assert_equal(
bitcoind['vout'][i]['scriptPubKey']['addresses'],
electrum['vout'][i]['scriptPubKey']['addresses'])
else:
assert_equal([], electrum['vout'][i]['scriptPubKey']['addresses'])
if check_output_type:
assert_equal(
bitcoind['vout'][i]['scriptPubKey']['type'],
electrum['vout'][i]['scriptPubKey']['type'])
await asyncio.gather(
# ElectrsCash cannot tell if it's nonstandard
check_tx(nonstandard_tx, check_output_type = False),
check_tx(p2sh_tx),
check_tx(p2pkh_tx),
check_tx(coinbase_tx),
check_tx(unconfirmed_tx, is_confirmed = False),
)
if __name__ == '__main__':
ElectrumTransactionGet().main()
|
export_model_mediapipe.py | saj940826/youtube-8m | 2,341 | 12704314 |
# Lint as: python3
import numpy as np
import tensorflow as tf
from tensorflow import app
from tensorflow import flags
FLAGS = flags.FLAGS
def main(unused_argv):
# Get the input tensor names to be replaced.
tf.reset_default_graph()
meta_graph_location = FLAGS.checkpoint_file + ".meta"
tf.train.import_meta_graph(meta_graph_location, clear_devices=True)
input_tensor_name = tf.get_collection("input_batch_raw")[0].name
num_frames_tensor_name = tf.get_collection("num_frames")[0].name
# Create output graph.
saver = tf.train.Saver()
tf.reset_default_graph()
input_feature_placeholder = tf.placeholder(
tf.float32, shape=(None, None, 1152))
num_frames_placeholder = tf.placeholder(tf.int32, shape=(None, 1))
saver = tf.train.import_meta_graph(
meta_graph_location,
input_map={
input_tensor_name: input_feature_placeholder,
num_frames_tensor_name: tf.squeeze(num_frames_placeholder, axis=1)
},
clear_devices=True)
predictions_tensor = tf.get_collection("predictions")[0]
with tf.Session() as sess:
print("restoring variables from " + FLAGS.checkpoint_file)
saver.restore(sess, FLAGS.checkpoint_file)
tf.saved_model.simple_save(
sess,
FLAGS.output_dir,
inputs={'rgb_and_audio': input_feature_placeholder,
'num_frames': num_frames_placeholder},
outputs={'predictions': predictions_tensor})
# Try running inference.
predictions = sess.run(
[predictions_tensor],
feed_dict={
input_feature_placeholder: np.zeros((3, 7, 1152), dtype=np.float32),
num_frames_placeholder: np.array([[7]], dtype=np.int32)})
print('Test inference:', predictions)
print('Model saved to ', FLAGS.output_dir)
if __name__ == '__main__':
flags.DEFINE_string('checkpoint_file', None, 'Path to the checkpoint file.')
flags.DEFINE_string('output_dir', None, 'SavedModel output directory.')
app.run(main)
|
xfdnn/rt/xdnn_io.py | yarenty/ml-suite | 334 | 12704324 |
#!/usr/bin/env python
#
# // SPDX-License-Identifier: BSD-3-CLAUSE
#
# (C) Copyright 2018, Xilinx, Inc.
#
import os
import json
import argparse
from collections import OrderedDict
import h5py
import ntpath
import cv2
import numpy as np
from xfdnn.rt.xdnn_util import literal_eval
from ext.PyTurboJPEG import imread as _imread
class image_preprocessing(object):
def __init__(self, resize=[], crop=[], pxlscale=[], meansub=[], chtranspose=None, chswap=None,
plot=None):
self.resize = resize
self.crop = crop
self.pxlscale = pxlscale
self.meansub = meansub
self.chtranspose = chtranspose
self.chswap = chswap
def max_batch_size(x):
maxb = 16
if int(x) > maxb:
print ("Limiting batch size to %d" % maxb)
x = min( int(x), maxb)
return x
def extant_file(x):
"""
'Type' for argparse - checks that file exists but does not open.
"""
if x == "-":
# skip file check and allow empty string
return ""
if not os.path.exists(x):
# Argparse uses the ArgumentTypeError to give a rejection message like:
# error: argument input: x does not exist
raise argparse.ArgumentTypeError("{0} does not exist".format(x))
return x
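# Hypothetical usage sketch (not part of the original module): extant_file is meant
# to be passed as an argparse "type" so missing paths are rejected at parse time.
# The option name below is made up.
def _example_extant_file_parser():
    example = argparse.ArgumentParser(description='extant_file demo')
    example.add_argument('--netcfg', type=extant_file, metavar="FILE",
                         help='path that must already exist (or "-" to skip the check)')
    return example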
def default_parser_args():
parser = argparse.ArgumentParser(description='pyXDNN')
parser.add_argument('--xclbin', help='.xclbin file', required=True, type=extant_file, metavar="FILE")
parser.add_argument('--batch_sz', type=max_batch_size, default=-1, help='batch size')
parser.add_argument('--dsp', type=int, default=28, help="xclbin's DSP array width")
parser.add_argument('--netcfg', help='FPGA instructions generated by compiler for the network',
required=True, type=extant_file, metavar="FILE")
parser.add_argument('--quantizecfg', help="Network's quantization parameters file",
required=True, type=extant_file, metavar="FILE")
parser.add_argument('--net_def', help='prototxt file for caffe',
type=extant_file, metavar="FILE")
parser.add_argument('--net_weights', help="caffe model file",
type=extant_file, metavar="FILE")
parser.add_argument('--xlnxlib',
help='FPGA xfDNN lib .so (deprecated)', type=extant_file, metavar="FILE")
parser.add_argument('--outsz', type=int, default=1000,
help='size of last layer\'s output blob')
parser.add_argument('--weights',
help="Folder path to network parameters/weights",
required=True, type=extant_file, metavar="FILE")
parser.add_argument('--labels',
help='result -> labels translation file', type=extant_file, metavar="FILE")
parser.add_argument('--golden', help='file idx -> expected label file', type=extant_file, metavar="FILE")
parser.add_argument('--jsoncfg',
help='json file with nets, data and PEs to use',
type=extant_file, metavar="FILE")
parser.add_argument('--images', nargs='*',
help='directory or raw image files to use as input', required=True, type=extant_file, metavar="FILE")
parser.add_argument('--scaleA', type=int, default=10000,
help='weights scaling value')
parser.add_argument('--scaleB', type=int, default=30,
help='activation scaling value ')
parser.add_argument('--img_raw_scale', type=float, default=255.0,
help='image raw scale value ')
parser.add_argument('--img_mean', type=int, nargs=3, default=[104.007,116.669,122.679], # BGR for Caffe
help='image mean values ')
parser.add_argument('--img_input_scale', type=float, default=1.0,
help='image input scale value ')
parser.add_argument('--zmqpub', default=False, action='store_true',
help='publish predictions to zmq port 5555')
parser.add_argument('--perpetual', default=False, action='store_true',
help='loop over input images forever')
parser.add_argument('--PE', nargs='?', type=int, default=-1,
help='preferred PE to run the classification on. Default is auto-select')
parser.add_argument('--endLayerName', default="",
help='layer name till the network should be run, helpful for debugging')
parser.add_argument('--diffStartLayer', type=int, default=0,
help="if 1 then we can run from any given layer ignoring the X's of first layers")
parser.add_argument('--v2WeightsFormat', type=bool, default=False,
help="Weights File specified as KernSizex KernSizey instead of only KernSize, supporting rectangle kernels")
parser.add_argument('--layerName', default="",
help='layername until which pyfpga should run, if left default, would run the entire model')
parser.add_argument('--binaryFormatWeights', type=bool, default=False,
help="Binary Format Weights Files")
return parser
def default_xdnn_arg_parser_compiled(base='TF'):
parser = argparse.ArgumentParser(description='XDLF_compiled')
parser.add_argument("--base", type=str, default="TF")
parser.add_argument("--compilerjson", type=str, default=None)
parser.add_argument("--weights", type=str, default=None)
parser.add_argument("--data_format", type=str, default='NCHW')
parser.add_argument("--input_shape", type=str, default=None)
parser.add_argument("--labels", type=str, default=None)
parser.add_argument("--image_path", type=str, default=None)
parser.add_argument('--images', type=extant_file, metavar='FILE', nargs='*', help='directory or raw image files to use as input')
parser.add_argument("--image", type=str, default=None)
parser.add_argument('--batch_sz', type=max_batch_size, default=-1, help='batch size')
parser.add_argument("--image_transforms", nargs='+', type=str, help="""None if no
                        preprocessing is needed. <name> if using prespecified preprocessings; or a list of
preprocesses.""")
parser.add_argument("--val", type=str, default=None)
parser.add_argument("--num_batches", type=int, default=-1)
parser.add_argument("--batch", type=int, default=4)
parser.add_argument("--xclbin", type=str, default='')
parser.add_argument('--netcfg', type=extant_file, metavar='FILE', help="""FPGA instructions
generated by compiler for the network""")
parser.add_argument('--jsoncfg', type=extant_file, metavar='FILE', help='json file with nets, data and PEs to use')
parser.add_argument('--quantizecfg', type=extant_file, metavar='FILE', help="""Network's
quantization parameters file""")
parser.add_argument('--outsz', type=int, default=1000, help='size of last layer\'s output blob')
parser.add_argument('--datadir', type=extant_file, metavar='FILE', help='Folder path to network parameters/weights')
parser.add_argument("--xdnnv3", action='store_true', default=False)
parser.add_argument("--usedeephi", action='store_true', default=False)
parser.add_argument("--device", type=str, default='CPU')
parser.add_argument("--quant_cfgfile", type=str, default=None)
parser.add_argument("--quant_recipe", type=str, default=None)
parser.add_argument("--fpga_recipe", type=str, default=None)
parser.add_argument("--save", type=str, default=None)
parser.add_argument("--verify_dir", type=str, default=None)
parser.add_argument('--save2modeldir', action='store_true', default=False, help="""store network
                        partitions and compiler outputs at model directory (not at script's
directory.)""")
parser.add_argument('--scaleA', type=int, default=10000, help='weights scaling value')
parser.add_argument('--scaleB', type=int, default=30, help='activation scaling value ')
parser.add_argument('--img_raw_scale',type=float, default=255.0, help='image raw scale value ')
parser.add_argument('--img_mean', type=int, nargs=3, default=[104.007,116.669,122.679], # BGR for Caffe
help='image mean values ')
parser.add_argument('--img_input_scale', type=float, default=1.0, help='image input scale value ')
parser.add_argument('--zmqpub', action='store_true', default=False, help='publish predictions to zmq port 5555')
parser.add_argument('--perpetual', action='store_true', default=False, help='loop over input images forever')
parser.add_argument('--PE', type=int, nargs='?', default=-1, help='preferred PE to run the classification on. Default is auto-select')
parser.add_argument('--endLayerName', type=str, default='', help='layer name till the network should be run, helpful for debugging')
parser.add_argument('--diffStartLayer', type=int, default=0, help="if 1 then we can run from any given layer ignoring the X's of first layers")
parser.add_argument('--v2WeightsFormat', action='store_true', default=False, help="Weights File specified as KernSizex KernSizey instead of only KernSize, supporting rectangle kernels")
parser.add_argument('--layerName', type=str, default='', help='layername until which pyfpga should run, if left default, would run the entire model')
parser.add_argument('--binaryFormatWeights', action='store_true', default=False, help="Binary Format Weights Files")
return parser
def default_xdnn_arg_parser(base='TF'):
if base.lower() == 'tf':
## FIXME: Hack to by pass caffe and tensorflow co-existance issues
from xfdnn.tools.compile.bin.xfdnn_compiler_tensorflow import default_compiler_arg_parser as default_TF_compiler_arg_parser
parser = default_TF_compiler_arg_parser()
elif base.lower() == 'caffe':
## FIXME: Hack to by pass caffe and tensorflow co-existance issues
from xfdnn.tools.compile.bin.xfdnn_compiler_caffe import default_compiler_arg_parser as default_CAFFE_compiler_arg_parser
parser = default_CAFFE_compiler_arg_parser()
else:
raise AttributeError('unsupported paltform')
parser.add_argument("--base", type=str, default="TF")
parser.add_argument("--data_format", type=str, default='NCHW')
parser.add_argument("--input_shape", type=str, default=None)
parser.add_argument('--golden', type=extant_file, metavar='FILE', help='file idx -> expected label file')
parser.add_argument("--labels", type=str, default=None)
parser.add_argument("--image_path", type=str, default=None)
parser.add_argument('--images', type=extant_file, metavar='FILE', nargs='*', help='directory or raw image files to use as input')
parser.add_argument("--image", type=str, default=None)
parser.add_argument('--batch_sz', type=max_batch_size, default=-1, help='batch size')
parser.add_argument("--image_transforms", nargs='+', type=str, help="""None if no
                        preprocessing is needed. <name> if using prespecified preprocessings; or a list of
preprocesses.""")
parser.add_argument("--val", type=str, default=None)
parser.add_argument("--num_batches", type=int, default=-1)
parser.add_argument("--batch", type=int, default=4)
parser.add_argument("--xclbin", type=str, default='')
parser.add_argument('--netcfg', type=extant_file, metavar='FILE', help="""FPGA instructions
generated by compiler for the network""")
parser.add_argument('--jsoncfg', type=extant_file, metavar='FILE', help='json file with nets, data and PEs to use')
parser.add_argument('--quantizecfg', type=extant_file, metavar='FILE', help="""Network's
quantization parameters file""")
parser.add_argument('--outsz', type=int, default=1000, help='size of last layer\'s output blob')
parser.add_argument('--datadir', type=extant_file, metavar='FILE', help='Folder path to network parameters/weights')
parser.add_argument("--xdnnv3", action='store_true', default=False)
parser.add_argument("--device", type=str, default='CPU')
parser.add_argument("--quant_recipe", type=str, default=None)
parser.add_argument("--fpga_recipe", type=str, default=None)
parser.add_argument("--save", type=str, default=None)
parser.add_argument("--verify_dir", type=str, default=None)
parser.add_argument('--save2modeldir', action='store_true', default=False, help="""store network
                        partitions and compiler outputs at model directory (not at script's
directory.)""")
parser.add_argument('--scaleA', type=int, default=10000, help='weights scaling value')
parser.add_argument('--scaleB', type=int, default=30, help='activation scaling value ')
parser.add_argument('--img_raw_scale',type=float, default=255.0, help='image raw scale value ')
parser.add_argument('--img_mean', type=int, nargs=3, default=[104.007,116.669,122.679], # BGR for Caffe
help='image mean values ')
parser.add_argument('--img_input_scale', type=float, default=1.0, help='image input scale value ')
parser.add_argument('--zmqpub', action='store_true', default=False, help='publish predictions to zmq port 5555')
parser.add_argument('--perpetual', action='store_true', default=False, help='loop over input images forever')
parser.add_argument('--PE', type=int, nargs='?', default=-1, help='preferred PE to run the classification on. Default is auto-select')
parser.add_argument('--endLayerName', type=str, default='', help='layer name till the network should be run, helpful for debugging')
parser.add_argument('--diffStartLayer', type=int, default=0, help="if 1 then we can run from any given layer ignoring the X's of first layers")
parser.add_argument('--v2WeightsFormat', action='store_true', default=False, help="Weights File specified as KernSizex KernSizey instead of only KernSize, supporting rectangle kernels")
parser.add_argument('--layerName', type=str, default='', help='layername until which pyfpga should run, if left default, would run the entire model')
parser.add_argument('--binaryFormatWeights', action='store_true', default=False, help="Binary Format Weights Files")
return parser
def make_dict_args(args):
def find_all_images(input_dict):
if 'images' in input_dict and input_dict['images'] is not None:
inputFiles = []
for dir_or_image in literal_eval(str(input_dict['images'])):
if os.path.isdir(dir_or_image):
inputFiles += [os.path.join(dir_or_image, f) for f in os.listdir(dir_or_image) if os.path.isfile(os.path.join(dir_or_image, f))]
else:
inputFiles += [dir_or_image]
input_dict['images'] = inputFiles
def eval_string(input_dict):
for key, val in list(input_dict.items()):
try:
input_dict[key] = literal_eval(str(val))
except:
pass
#if val and str(val).isdigit():
# input_dict[key] = int(val)
def ingest_xclbin_json_config(input_dict):
fname = input_dict['xclbin'] + ".json"
with open(fname) as data:
xclbinJson = json.load(data)
input_dict['overlaycfg'] = xclbinJson
isV3 = False
if 'XDNN_VERSION_MAJOR' in xclbinJson \
and xclbinJson['XDNN_VERSION_MAJOR'] == "3":
isV3 = True
if isV3:
input_dict['xdnnv3'] = True
libPath = os.environ['LIBXDNN_PATH'] + ".v3"
if os.path.isfile(libPath):
os.environ['LIBXDNN_PATH'] = libPath
if 'XDNN_CSR_BASE' in xclbinJson and input_dict['batch_sz'] == -1:
csrAddrs = xclbinJson['XDNN_CSR_BASE'].split(",")
input_dict['batch_sz'] = len(csrAddrs)
if not isV3:
input_dict['batch_sz'] *= 2
try:
args_dict = vars(args)
except:
args_dict = args
find_all_images(args_dict)
eval_string(args_dict)
ingest_xclbin_json_config(args_dict)
jsoncfg_exists = args_dict.get('jsoncfg')
if jsoncfg_exists:
with open(args_dict['jsoncfg']) as jsoncfgFile:
jsoncfgs = json.load(jsoncfgFile)['confs']
for jsoncfg in jsoncfgs:
find_all_images(jsoncfg)
eval_string(jsoncfg)
# include all args not in args_dict['jsoncfg'] from original args_dict
for key, value in list(args_dict.items()):
if key not in jsoncfg:
jsoncfg[key] = value
args_dict['jsoncfg'] = jsoncfgs
return args_dict
def processCommandLine(argv=None, base='TF'):
"""
Invoke command line parser for command line deployment flows.
"""
#parser = default_xdnn_arg_parser(base=base)
parser = default_parser_args()
args = parser.parse_args(argv)
return make_dict_args(args)
# Generic list of image manipulation functions for simplifying preprocess code
def loadImageBlobFromFileScriptBase(imgFile, cmdSeq):
if isinstance(imgFile, str):
img = _imread(imgFile)
else:
img = imgFile
orig_shape = img.shape
for (cmd,param) in cmdSeq:
#print "command:",cmd,"param:",param
#print "imshape:",img.shape
if cmd == 'resize':
img = cv2.resize(img, (param[0], param[1]))
elif cmd == 'resize2mindim':
height, width, __ = img.shape
newdim = min(height, width)
scalew = float(width) / newdim
scaleh = float(height) / newdim
mindim = min(param[0], param[1])
neww = int(mindim * scalew)
newh = int(mindim * scaleh)
img = cv2.resize(img, (neww, newh))
elif cmd == 'resize2maxdim':
# Currently doesn't work for rectangular output dimensions...
height, width, __ = img.shape
newdim = max(height, width)
scalew = float(width) / newdim
scaleh = float(height) / newdim
maxdim = max(param)
neww = int(maxdim * scalew)
newh = int(maxdim * scaleh)
img = cv2.resize(img, (neww, newh))
elif cmd == 'crop_letterbox':
height, width, channels = img.shape
newdim = max(height, width)
letter_image = np.zeros((newdim, newdim, channels))
letter_image[:, :, :] = param
if newdim == width:
                letter_image[(newdim-height)//2:((newdim-height)//2+height),0:width] = img
else:
                letter_image[0:height,(newdim-width)//2:((newdim-width)//2+width)] = img
img = letter_image
elif cmd == 'crop_center':
size_x = img.shape[0]
size_y = img.shape[1]
ll_x = size_x//2 - param[0]//2
ll_y = size_y//2 - param[1]//2
img = img[ll_x:ll_x+param[0],ll_y:ll_y+param[1]]
        elif cmd == 'plot':
            import matplotlib.pyplot as plt  # lazy import: matplotlib is only needed for the optional 'plot' step
            toshow = img.astype(np.uint8)
if param is not None:
toshow = np.transpose(toshow, (param[0], param[1], param[2]))
plt.imshow(toshow, cmap = 'gray', interpolation = 'bicubic')
plt.xticks([]), plt.yticks([]) # to hide tick values on X and Y axis
plt.show()
elif cmd == 'pxlscale':
if img.dtype != np.float32:
img = img.astype(np.float32, order='C')
if param != 1.0:
img = img * param
elif cmd == 'meansub':
if img.dtype != np.float32:
img = img.astype(np.float32, order='C')
if isinstance(param, np.ndarray):
img -= param
else:
img -= np.array(param, dtype = np.float32, order='C')
elif cmd == 'chtranspose':
# HWC->CWH = 2,0,1
# CWH->HWC = 1,2,0
img = np.transpose(img, (param[0], param[1], param[2]))
elif cmd == 'chswap':
# BGR->RGB = 2,1,0
# RGB->BGR = 2,1,0
ch = 3*[None]
if img.shape[0] == 3:
ch[0] = img[0,:,:]
ch[1] = img[1,:,:]
ch[2] = img[2,:,:]
img = np.stack((ch[param[0]],ch[param[1]],ch[param[2]]), axis=0)
else:
ch[0] = img[:,:,0]
ch[1] = img[:,:,1]
ch[2] = img[:,:,2]
img = np.stack((ch[param[0]],ch[param[1]],ch[param[2]]), axis=2)
else:
raise NotImplementedError(cmd)
# print "final imshape:",img.shape
return img, orig_shape
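# Hypothetical usage sketch (not part of the original module): a command sequence is
# a list of (name, param) steps applied in order by loadImageBlobFromFileScriptBase.
# The 224x224 BGR-mean pipeline below is only an illustration, not a shipped recipe.
def _example_preprocess_script(imgFile):
    cmdseqExample = [
        ('resize2mindim', (256, 256)),              # shorter side to 256, keep aspect ratio
        ('crop_center',   (224, 224)),              # central 224x224 patch
        ('meansub',       [104.0, 117.0, 123.0]),   # subtract per-channel BGR mean
        ('chtranspose',   (2, 0, 1)),               # HWC -> CHW
    ]
    return loadImageBlobFromFileScriptBase(imgFile, cmdseqExample)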
# This runs image manipulation script
def loadImageBlobFromFile(imgFile, raw_scale, mean, input_scale, img_h, img_w):
# Direct resize only
cmdseqResize = [
('resize',(img_w,img_h)),
('pxlscale',float(raw_scale)/255),
('meansub', mean),
('pxlscale', input_scale),
('chtranspose',(2,0,1))
]
img, orig_shape = loadImageBlobFromFileScriptBase(imgFile, cmdseqResize)
# Change initial resize to match network training (shown as {alpha x 256 or 256 x alpha}->224,224,
# alpha being at least 256 such that the original aspect ratio is maintained)
#cmdseqCenterCrop = [
# ('resize2mindim',(256,256)),
# ('crop_center',(img_h,img_w)),
# ('pxlscale',float(raw_scale)/255),
# ('meansub', mean),
# ('pxlscale', input_scale),
# ('chtranspose',(2,0,1))
# ]
#img, orig_shape = loadImageBlobFromFileScriptBase(imgFile, cmdseqCenterCrop)
img = img[ np.newaxis, ...]
np.ascontiguousarray(img, dtype=np.float32)
return img, None
def loadYoloImageBlobFromFile(imgFile, img_h, img_w):
# This first loads the image
# letterboxes/resizes
# divides by 255 to create values from 0.0 to 1.0
# Letter boxing
# When given a rectangular image
# If the network expects a square input
# Reshape the image such that its longer dimension fits exactly in the square
# i.e.
# ----------
# |--------|
# | IMAGE |
# |--------|
# ----------
cmdseqYolov2 = [
('resize2maxdim',(img_w,img_h)),
('pxlscale',(1.0/255.0)),
('crop_letterbox',(0.5)),
('chtranspose',(2,0,1)),
('chswap',(2,1,0))
]
img, orig_shape = loadImageBlobFromFileScriptBase(imgFile, cmdseqYolov2)
img = img[ np.newaxis, ...]
np.ascontiguousarray(img, dtype=np.float32)
return img, orig_shape
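# Hypothetical usage sketch (not part of the original module): the YOLO loader above
# letterboxes to the network input size and scales pixels to [0, 1]; the file name
# and the 608x608 input size below are made up.
def _example_yolo_blob(imgFile='dog.jpg'):
    blob, orig_shape = loadYoloImageBlobFromFile(imgFile, 608, 608)
    # blob is NCHW float32, ready to feed to the graph; orig_shape is the original
    # HxWxC shape, needed later to rescale detected boxes back to the input image.
    return blob, orig_shape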
def getFilePaths(paths_list):
ext = (".jpg",".jpeg",".JPG",".JPEG")
img_paths = []
for p in paths_list:
if os.path.isfile(p) and p.endswith(ext):
img_paths.append( os.path.abspath(p) )
else:
for dirpath,_,filenames in os.walk(p):
for f in filenames:
if f.endswith(ext):
img_paths.append( os.path.abspath(os.path.join(dirpath, f)))
return img_paths
def getTopK(output, labels, topK):
output = output.flatten()
topKIdx = np.argsort(output)[-topK:]
topKVals = [output[ti] for ti in topKIdx]
    topKList = list(zip(topKVals, topKIdx))  # materialize: zip() returns an iterator on Python 3
    topKList.reverse()
return [(topKList[j][0], labels[topKList[j][1]]) for j in range(topK)]
def getGoldenMap(goldenFile):
goldenMap = OrderedDict()
with open(goldenFile, 'r') as f:
for line in f:
fname = line[:line.rfind(' ')]
goldenIdx = int(line[line.rfind(' ')+1:])
goldenMap[fname] = goldenIdx
return goldenMap
def isTopK ( out, goldenMap, fileName, labels, topK = 5):
f = ntpath.basename(fileName)
topKs = getTopK(out, labels, topK)
for (_, label) in topKs:
if ( label == labels[goldenMap[f]]):
return True
return False
def get_labels (label_file):
labels = None
if (label_file):
with open(label_file, 'r') as f:
labels = [line.strip() for line in f]
return labels
def printClassification(output, img_paths, labels, topK = 5):
if labels is not None:
print ( getClassification ( output, img_paths, labels, topK))
def getClassification(output, img_paths, labels, topK = 5, zmqPub = False):
"""
Print the result of classification given class scores, and a synset labels file.
:param output: Class scores, typically the output of the softmax layer.
:type output: numpy.ndarray.
:param img_paths: list of path(s) to image(s)
:param label_file: path to label file
:type args: dict.
"""
ret = ""
if not isinstance(img_paths, list):
img_paths = [img_paths]
for i,p in enumerate(img_paths):
topXs = getTopK(output[i,...], labels, topK)
inputImage = "for {:s} ".format(p if isinstance(p, str) else 'raw_input')
if zmqPub :
ret += (img_paths[i] + '\n')
else :
ret += "---------- Prediction {:d}/{:d} {:s}----------\n".format(i+1, output.shape[0], inputImage)
for (prob, label) in topXs:
ret += ("{:.4f} \"{:s}\"\n".format(prob, label))
return ret
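# Hypothetical usage sketch (not part of the original module): getClassification only
# needs the raw class-score array, the image paths and the label strings; the scores,
# file name and label names below are made up.
def _example_classification_report():
    fake_scores = np.random.rand(1, 5)          # one image, five classes
    fake_labels = ['cat', 'dog', 'bird', 'fish', 'horse']
    return getClassification(fake_scores, ['sample.jpg'], fake_labels, topK=3)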
def getNearFileMatchWithPrefix(path, prefix, index = 0):
nearMatches = [f for f in os.listdir(path) if f.startswith(prefix)]
nearMatches.sort()
if len(nearMatches) > 0:
return "%s/%s" % (path, nearMatches[index])
return None
def loadFCWeightsBias(arg, index = 0):
data_dir = arg['weights']
if ".h5" in data_dir:
with h5py.File(data_dir,'r') as f:
#keys = f.keys()
#print (keys)
key = list(f.keys())[0]
weight = list(np.array(f.get(key)).flatten())
key = list(f.keys())[1]
bias = list(np.array(f.get(key)).flatten())
else:
fname = "%s/fc" % data_dir
if not os.path.exists(fname):
nearMatch = getNearFileMatchWithPrefix(data_dir, "fc", index)
if nearMatch:
fname = nearMatch
if os.path.exists(fname):
with open(fname, 'r') as f:
line = f.read()
vals = line.strip().split(' ')
weight = [float(v) for v in vals]
else:
print("No FC layers found in {:s}".format(data_dir))
return (None, None)
fname = "%s/fc_bias" % data_dir
if not os.path.exists(fname):
nearMatch = getNearFileMatchWithPrefix(data_dir, "fc_bias", index)
if nearMatch:
fname = nearMatch
with open(fname, 'r') as f:
line = f.read()
vals = line.strip().split(' ')
bias = [float(v) for v in vals]
return (np.asarray(weight, dtype=np.float32), np.asarray(bias, dtype=np.float32))
|
HeapInspect.py | IMULMUL/heapinspect | 218 | 12704328 |
import argparse
from heapinspect.core import *
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog='HeapInspect.py',
description='''Inspect your heap by a given pid.
Author:matrix1001
Github:https://github.com/matrix1001/heapinspect''')
parser.add_argument(
'--raw',
action='store_true',
help='show more detailed chunk info'
)
parser.add_argument(
'--rela',
action='store_true',
help='show relative detailed chunk info'
)
parser.add_argument(
'pid',
type=int,
help='pid of the process'
)
parser.add_argument(
'-x',
action='store_false',
help='''ignore: heapchunks'''
)
args = parser.parse_args()
pid = args.pid
hi = HeapInspector(pid)
if args.rela:
hs = HeapShower(hi)
hs.relative = True
if args.x:
print(hs.heap_chunks)
print(hs.fastbins)
print(hs.unsortedbins)
print(hs.smallbins)
print(hs.largebins)
print(hs.tcache_chunks)
elif args.raw:
hs = HeapShower(hi)
if args.x:
print(hs.heap_chunks)
print(hs.fastbins)
print(hs.unsortedbins)
print(hs.smallbins)
print(hs.largebins)
print(hs.tcache_chunks)
else:
pp = PrettyPrinter(hi)
print(pp.all)
|
tests/pytests/unit/pillar/test_netbox.py | waynegemmell/salt | 9,425 | 12704330 | """
:codeauthor: <NAME> <<EMAIL>>
"""
import pytest
import salt.pillar.netbox as netbox
from tests.support.mock import patch
@pytest.fixture
def default_kwargs():
return {
"minion_id": "minion1",
"pillar": None,
"api_url": "http://netbox.example.com",
"api_token": "ye<PASSWORD>6oob3uWiey9a",
"api_query_result_limit": 65535,
}
@pytest.fixture
def headers():
return {"Authorization": "Token <PASSWORD>"}
@pytest.fixture
def device_results():
return {
"dict": {
"count": 1,
"next": None,
"previous": None,
"results": [
{
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"display_name": "minion1",
"device_type": {
"id": 4,
"url": "https://netbox.example.com/api/dcim/device-types/4/",
"manufacturer": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/manufacturers/1/",
"name": "Cisco",
"slug": "cisco",
},
"model": "ISR2901",
"slug": "isr2901",
"display_name": "Cisco ISR2901",
},
"device_role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"node_type": "device",
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"serial": "",
"asset_tag": None,
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"rack": None,
"position": None,
"face": None,
"parent_device": None,
"status": {"value": "active", "label": "Active"},
"primary_ip": {
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": 4,
"address": "192.0.2.1/24",
},
"primary_ip4": {
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": 4,
"address": "192.0.2.1/24",
},
"primary_ip6": None,
"cluster": None,
"virtual_chassis": None,
"vc_position": None,
"vc_priority": None,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.171105Z",
}
],
}
}
@pytest.fixture
def multiple_device_results():
return {
"dict": {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"display_name": "minion1",
"device_type": {
"id": 4,
"url": "https://netbox.example.com/api/dcim/device-types/4/",
"manufacturer": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/manufacturers/1/",
"name": "Cisco",
"slug": "cisco",
},
"model": "ISR2901",
"slug": "isr2901",
"display_name": "Cisco ISR2901",
},
"device_role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"node_type": "device",
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"serial": "",
"asset_tag": None,
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"rack": None,
"position": None,
"face": None,
"parent_device": None,
"status": {"value": "active", "label": "Active"},
"primary_ip": {
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": 4,
"address": "192.0.2.1/24",
},
"primary_ip4": {
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": 4,
"address": "192.0.2.1/24",
},
"primary_ip6": None,
"cluster": None,
"virtual_chassis": None,
"vc_position": None,
"vc_priority": None,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.171105Z",
},
{
"id": 512,
"url": "https://netbox.example.com/api/dcim/devices/512/",
"name": "minion1",
"display_name": "minion1",
"device_type": {
"id": 4,
"url": "https://netbox.example.com/api/dcim/device-types/4/",
"manufacturer": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/manufacturers/1/",
"name": "Cisco",
"slug": "cisco",
},
"model": "ISR2901",
"slug": "isr2901",
"display_name": "Cisco ISR2901",
},
"device_role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"node_type": "device",
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"serial": "",
"asset_tag": None,
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"rack": None,
"position": None,
"face": None,
"parent_device": None,
"status": {"value": "active", "label": "Active"},
"primary_ip": {
"id": 1150,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1150/",
"family": 4,
"address": "192.0.2.3/24",
},
"primary_ip4": {
"id": 1150,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1150/",
"family": 4,
"address": "192.0.2.3/24",
},
"primary_ip6": None,
"cluster": None,
"virtual_chassis": None,
"vc_position": None,
"vc_priority": None,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.171105Z",
},
],
}
}
@pytest.fixture
def virtual_machine_results():
return {
"dict": {
"count": 1,
"next": None,
"previous": None,
"results": [
{
"id": 222,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/222/",
"name": "minion1",
"status": {"value": "active", "label": "Active"},
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"cluster": {
"id": 1,
"url": "https://netbox.example.com/api/virtualization/clusters/1/",
"name": "Cluster",
},
"role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"node_type": "virtual-machine",
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"primary_ip": {
"id": 1148,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1148/",
"family": 4,
"address": "192.0.2.2/24",
},
"primary_ip4": {
"id": 1148,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1148/",
"family": 4,
"address": "192.0.2.2/24",
},
"primary_ip6": None,
"vcpus": 1,
"memory": 1024,
"disk": 30,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:05.799541Z",
}
],
}
}
@pytest.fixture
def multiple_virtual_machine_results():
return {
"dict": {
"count": 1,
"next": None,
"previous": None,
"results": [
{
"id": 222,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/222/",
"name": "minion1",
"status": {"value": "active", "label": "Active"},
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"cluster": {
"id": 1,
"url": "https://netbox.example.com/api/virtualization/clusters/1/",
"name": "Cluster",
},
"role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"node_type": "virtual-machine",
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"primary_ip": {
"id": 1148,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1148/",
"family": 4,
"address": "192.0.2.2/24",
},
"primary_ip4": {
"id": 1148,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1148/",
"family": 4,
"address": "192.0.2.2/24",
},
"primary_ip6": None,
"vcpus": 1,
"memory": 1024,
"disk": 30,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:05.799541Z",
},
{
"id": 223,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/223/",
"name": "minion1",
"status": {"value": "active", "label": "Active"},
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"cluster": {
"id": 1,
"url": "https://netbox.example.com/api/virtualization/clusters/1/",
"name": "Cluster",
},
"role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"node_type": "virtual-machine",
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"primary_ip": {
"id": 1152,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1152/",
"family": 4,
"address": "192.0.2.4/24",
},
"primary_ip4": {
"id": 1152,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1152/",
"family": 4,
"address": "192.0.2.4/24",
},
"primary_ip6": None,
"vcpus": 1,
"memory": 1024,
"disk": 30,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:05.799541Z",
},
],
}
}
@pytest.fixture
def no_results():
return {"dict": {"count": 0, "next": None, "previous": None, "results": []}}
@pytest.fixture
def http_error():
return {"error": "HTTP 404: Not Found", "status": 404}
@pytest.fixture
def device_interface_results():
return {
"dict": {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 8158,
"url": "https://netbox.example.com/api/dcim/interfaces/8158/",
"device": {
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"display_name": "minion1",
},
"name": "GigabitEthernet0/0",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
{
"id": 8159,
"url": "https://netbox.example.com/api/dcim/interfaces/8159/",
"device": {
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"display_name": "minion1",
},
"name": "GigabitEthernet0/1",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
],
}
}
@pytest.fixture
def device_interfaces_list():
return [
{
"id": 8158,
"url": "https://netbox.example.com/api/dcim/interfaces/8158/",
"name": "GigabitEthernet0/0",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
{
"id": 8159,
"url": "https://netbox.example.com/api/dcim/interfaces/8159/",
"name": "GigabitEthernet0/1",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
]
@pytest.fixture
def virtual_machine_interface_results():
return {
"dict": {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 668,
"url": "https://netbox.example.com/api/virtualization/interfaces/668/",
"virtual_machine": {
"id": 222,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/222/",
"name": "minion1",
},
"name": "GigabitEthernet0/0",
"enabled": True,
"mtu": None,
"mac_address": None,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"tags": [],
},
{
"id": 669,
"url": "https://netbox.example.com/api/virtualization/interfaces/669/",
"virtual_machine": {
"id": 222,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/222/",
"name": "minion1",
},
"name": "GigabitEthernet0/1",
"enabled": True,
"mtu": None,
"mac_address": None,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"tags": [],
},
],
}
}
@pytest.fixture
def virtual_machine_interfaces_list():
return [
{
"id": 668,
"url": "https://netbox.example.com/api/virtualization/interfaces/668/",
"name": "GigabitEthernet0/0",
"enabled": True,
"mtu": None,
"mac_address": None,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"tags": [],
},
{
"id": 669,
"url": "https://netbox.example.com/api/virtualization/interfaces/669/",
"name": "GigabitEthernet0/1",
"enabled": True,
"mtu": None,
"mac_address": None,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"tags": [],
},
]
@pytest.fixture
def device_ip_results():
return {
"dict": {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": {"value": 4, "label": "IPv4"},
"address": "192.0.2.1/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"assigned_object_type": "dcim.interface",
"assigned_object_id": 8158,
"assigned_object": {
"id": 8158,
"url": "https://netbox.example.com/api/dcim/interfaces/8158/",
"device": {
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"display_name": "minion1",
},
"name": "GigabitEthernet0/0",
"cable": None,
},
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.153386Z",
},
{
"id": 1147,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1147/",
"family": {"value": 4, "label": "IPv4"},
"address": "198.51.100.1/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"assigned_object_type": "dcim.interface",
"assigned_object_id": 8159,
"assigned_object": {
"id": 8159,
"url": "https://netbox.example.com/api/dcim/interfaces/8159/",
"device": {
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"display_name": "minion1",
},
"name": "GigabitEthernet0/1",
"cable": None,
},
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:40.508154Z",
},
],
}
}
@pytest.fixture
def virtual_machine_ip_results():
return {
"dict": {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 1148,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1148/",
"family": {"value": 4, "label": "IPv4"},
"address": "192.0.2.2/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"assigned_object_type": "virtualization.vminterface",
"assigned_object_id": 668,
"assigned_object": {
"id": 668,
"url": "https://netbox.example.com/api/virtualization/interfaces/668/",
"virtual_machine": {
"id": 222,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/222/",
"name": "minion1",
},
"name": "GigabitEthernet0/0",
},
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:05.784281Z",
},
{
"id": 1149,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1149/",
"family": {"value": 4, "label": "IPv4"},
"address": "198.51.100.2/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"assigned_object_type": "virtualization.vminterface",
"assigned_object_id": 669,
"assigned_object": {
"id": 669,
"url": "https://netbox.example.com/api/virtualization/interfaces/669/",
"virtual_machine": {
"id": 222,
"url": "https://netbox.example.com/api/virtualization/virtual-machines/222/",
"name": "minion1",
},
"name": "GigabitEthernet0/1",
},
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:29.607428Z",
},
],
}
}
@pytest.fixture
def device_interfaces_ip_list():
return [
{
"id": 8158,
"ip_addresses": [
{
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": {"value": 4, "label": "IPv4"},
"address": "192.0.2.1/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.153386Z",
},
],
"url": "https://netbox.example.com/api/dcim/interfaces/8158/",
"name": "GigabitEthernet0/0",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
{
"id": 8159,
"ip_addresses": [
{
"id": 1147,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1147/",
"family": {"value": 4, "label": "IPv4"},
"address": "198.51.100.1/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:40.508154Z",
},
],
"url": "https://netbox.example.com/api/dcim/interfaces/8159/",
"name": "GigabitEthernet0/1",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
]
@pytest.fixture
def virtual_machine_interfaces_ip_list():
return [
{
"id": 668,
"ip_addresses": [
{
"id": 1148,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1148/",
"family": {"value": 4, "label": "IPv4"},
"address": "192.0.2.2/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:05.784281Z",
},
],
"url": "https://netbox.example.com/api/virtualization/interfaces/668/",
"name": "GigabitEthernet0/0",
"enabled": True,
"mtu": None,
"mac_address": None,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"tags": [],
},
{
"id": 669,
"ip_addresses": [
{
"id": 1149,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1149/",
"family": {"value": 4, "label": "IPv4"},
"address": "198.51.100.2/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:23:29.607428Z",
},
],
"url": "https://netbox.example.com/api/virtualization/interfaces/669/",
"name": "GigabitEthernet0/1",
"enabled": True,
"mtu": None,
"mac_address": None,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"tags": [],
},
]
@pytest.fixture
def site_results():
return {
"dict": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
"status": {"value": "active", "label": "Active"},
"region": None,
"tenant": None,
"facility": "",
"asn": None,
"time_zone": None,
"description": "",
"physical_address": "",
"shipping_address": "",
"latitude": None,
"longitude": None,
"contact_name": "",
"contact_phone": "",
"contact_email": "",
"comments": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T14:21:07.898957Z",
"circuit_count": 0,
"device_count": 1,
"prefix_count": 2,
"rack_count": 0,
"virtualmachine_count": 1,
"vlan_count": 0,
}
}
@pytest.fixture
def site_prefixes_results():
return {
"dict": {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 284,
"url": "https://netbox.example.com/api/ipam/prefixes/284/",
"family": {"value": 4, "label": "IPv4"},
"prefix": "192.0.2.0/24",
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"vrf": None,
"tenant": None,
"vlan": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"is_pool": False,
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T15:08:27.136305Z",
},
{
"id": 285,
"url": "https://netbox.example.com/api/ipam/prefixes/285/",
"family": {"value": 4, "label": "IPv4"},
"prefix": "198.51.100.0/24",
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
},
"vrf": None,
"tenant": None,
"vlan": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"is_pool": False,
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T15:08:59.880440Z",
},
],
}
}
@pytest.fixture
def site_prefixes():
return [
{
"id": 284,
"url": "https://netbox.example.com/api/ipam/prefixes/284/",
"family": {"value": 4, "label": "IPv4"},
"prefix": "192.0.2.0/24",
"vrf": None,
"tenant": None,
"vlan": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"is_pool": False,
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T15:08:27.136305Z",
},
{
"id": 285,
"url": "https://netbox.example.com/api/ipam/prefixes/285/",
"family": {"value": 4, "label": "IPv4"},
"prefix": "198.51.100.0/24",
"vrf": None,
"tenant": None,
"vlan": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"is_pool": False,
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T15:08:59.880440Z",
},
]
@pytest.fixture
def proxy_details_results():
return {
"dict": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
"manufacturer": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/manufacturers/1/",
"name": "Cisco",
"slug": "cisco",
},
"napalm_driver": "ios",
"napalm_args": None,
"description": "",
"device_count": 152,
"virtualmachine_count": 1,
}
}
@pytest.fixture
def proxy_details():
return {
"host": "192.0.2.1",
"driver": "ios",
"proxytype": "napalm",
}
@pytest.fixture
def pillar_results():
return {
"netbox": {
"id": 511,
"url": "https://netbox.example.com/api/dcim/devices/511/",
"name": "minion1",
"node_type": "device",
"display_name": "minion1",
"device_type": {
"id": 4,
"url": "https://netbox.example.com/api/dcim/device-types/4/",
"manufacturer": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/manufacturers/1/",
"name": "Cisco",
"slug": "cisco",
},
"model": "ISR2901",
"slug": "isr2901",
"display_name": "Cisco ISR2901",
},
"device_role": {
"id": 45,
"url": "https://netbox.example.com/api/dcim/device-roles/45/",
"name": "Network",
"slug": "network",
},
"interfaces": [
{
"id": 8158,
"ip_addresses": [
{
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": {"value": 4, "label": "IPv4"},
"address": "192.0.2.1/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.153386Z",
},
],
"url": "https://netbox.example.com/api/dcim/interfaces/8158/",
"name": "GigabitEthernet0/0",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
{
"id": 8159,
"ip_addresses": [
{
"id": 1147,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1147/",
"family": {"value": 4, "label": "IPv4"},
"address": "198.51.100.1/24",
"vrf": None,
"tenant": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"nat_inside": None,
"nat_outside": None,
"dns_name": "",
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:40.508154Z",
},
],
"url": "https://netbox.example.com/api/dcim/interfaces/8159/",
"name": "GigabitEthernet0/1",
"label": "",
"type": {"value": "1000base-t", "label": "1000BASE-T (1GE)"},
"enabled": True,
"lag": None,
"mtu": None,
"mac_address": None,
"mgmt_only": False,
"description": "",
"mode": None,
"untagged_vlan": None,
"tagged_vlans": [],
"cable": None,
"cable_peer": None,
"cable_peer_type": None,
"connected_endpoint": None,
"connected_endpoint_type": None,
"connected_endpoint_reachable": None,
"tags": [],
"count_ipaddresses": 1,
},
],
"tenant": None,
"platform": {
"id": 1,
"url": "https://netbox.example.com/api/dcim/platforms/1/",
"name": "Cisco IOS",
"slug": "ios",
},
"serial": "",
"asset_tag": None,
"site": {
"id": 18,
"url": "https://netbox.example.com/api/dcim/sites/18/",
"name": "Site 1",
"slug": "site1",
"status": {"value": "active", "label": "Active"},
"region": None,
"tenant": None,
"facility": "",
"asn": None,
"time_zone": None,
"description": "",
"physical_address": "",
"shipping_address": "",
"latitude": None,
"longitude": None,
"contact_name": "",
"contact_phone": "",
"contact_email": "",
"comments": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T14:21:07.898957Z",
"circuit_count": 0,
"device_count": 1,
"prefix_count": 2,
"rack_count": 0,
"virtualmachine_count": 1,
"vlan_count": 0,
"prefixes": [
{
"id": 284,
"url": "https://netbox.example.com/api/ipam/prefixes/284/",
"family": {"value": 4, "label": "IPv4"},
"prefix": "192.0.2.0/24",
"vrf": None,
"tenant": None,
"vlan": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"is_pool": False,
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T15:08:27.136305Z",
},
{
"id": 285,
"url": "https://netbox.example.com/api/ipam/prefixes/285/",
"family": {"value": 4, "label": "IPv4"},
"prefix": "198.51.100.0/24",
"vrf": None,
"tenant": None,
"vlan": None,
"status": {"value": "active", "label": "Active"},
"role": None,
"is_pool": False,
"description": "",
"tags": [],
"custom_fields": {},
"created": "2021-02-25",
"last_updated": "2021-02-25T15:08:59.880440Z",
},
],
},
"rack": None,
"position": None,
"face": None,
"parent_device": None,
"status": {"value": "active", "label": "Active"},
"primary_ip": {
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": 4,
"address": "192.0.2.1/24",
},
"primary_ip4": {
"id": 1146,
"url": "https://netbox.example.com/api/ipam/ip-addresses/1146/",
"family": 4,
"address": "192.0.2.1/24",
},
"primary_ip6": None,
"cluster": None,
"virtual_chassis": None,
"vc_position": None,
"vc_priority": None,
"comments": "",
"local_context_data": None,
"tags": [],
"custom_fields": {},
"config_context": {},
"created": "2021-02-19",
"last_updated": "2021-02-19T06:12:04.171105Z",
},
"proxy": {"host": "192.0.2.1", "driver": "ios", "proxytype": "napalm"},
}
def test_when_minion_id_is_star_then_result_should_be_empty_dict(default_kwargs):
expected_result = {}
default_kwargs["minion_id"] = "*"
actual_result = netbox.ext_pillar(**default_kwargs)
assert actual_result == expected_result
def test_when_api_url_is_not_http_or_https_then_error_message_should_be_logged(
default_kwargs,
):
default_kwargs["api_url"] = "ftp://netbox.example.com"
with patch("salt.pillar.netbox.log.error", autospec=True) as fake_error:
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
'Provided URL for api_url "%s" is malformed or is not an http/https URL',
"ftp://netbox.example.com",
)
def test_when_neither_devices_or_virtual_machines_requested_then_error_message_should_be_logged(
default_kwargs,
):
default_kwargs["devices"] = default_kwargs["virtual_machines"] = False
with patch("salt.pillar.netbox.log.error", autospec=True) as fake_error:
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
"At least one of devices or virtual_machines must be True"
)
def test_when_interface_ips_requested_but_not_interfaces_then_error_message_should_be_logged(
default_kwargs,
):
default_kwargs["interfaces"] = False
default_kwargs["interface_ips"] = True
with patch("salt.pillar.netbox.log.error", autospec=True) as fake_error:
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
"The value for interfaces must be True if interface_ips is True"
)
def test_when_api_query_result_limit_set_but_not_a_positive_integer_then_error_message_should_be_logged(
default_kwargs,
):
default_kwargs["api_query_result_limit"] = -1
with patch("salt.pillar.netbox.log.error", autospec=True) as fake_error:
netbox.ext_pillar(**default_kwargs)
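        # Note: "postive" below is intentional; the assertion matches the message
        # string as emitted by salt.pillar.netbox, including its misspelling.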
fake_error.assert_called_with(
"The value for api_query_result_limit must be a postive integer if set"
)
def test_when_api_token_not_set_then_error_message_should_be_logged(
default_kwargs,
):
default_kwargs["api_token"] = ""
with patch("salt.pillar.netbox.log.error", autospec=True) as fake_error:
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with("The value for api_token is not set")
def test_when_we_retrieve_a_single_device_then_return_list(
default_kwargs, headers, device_results
):
expected_result = device_results["dict"]["results"]
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = device_results
actual_result = netbox._get_devices(
default_kwargs["api_url"],
default_kwargs["minion_id"],
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_a_device_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_devices(
default_kwargs["api_url"],
default_kwargs["minion_id"],
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_a_single_virtual_machine_then_return_list(
default_kwargs, headers, virtual_machine_results
):
expected_result = virtual_machine_results["dict"]["results"]
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = virtual_machine_results
actual_result = netbox._get_virtual_machines(
default_kwargs["api_url"],
default_kwargs["minion_id"],
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_a_virtual_machine_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_virtual_machines(
default_kwargs["api_url"],
default_kwargs["minion_id"],
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_device_interfaces_then_return_list(
default_kwargs, headers, device_interface_results, device_interfaces_list
):
expected_result = device_interfaces_list
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = device_interface_results
actual_result = netbox._get_interfaces(
default_kwargs["api_url"],
default_kwargs["minion_id"],
511,
"device",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_device_interfaces_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_interfaces(
default_kwargs["api_url"],
default_kwargs["minion_id"],
511,
"device",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_virtual_machine_interfaces_then_return_list(
default_kwargs,
headers,
virtual_machine_interface_results,
virtual_machine_interfaces_list,
):
expected_result = virtual_machine_interfaces_list
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = virtual_machine_interface_results
actual_result = netbox._get_interfaces(
default_kwargs["api_url"],
default_kwargs["minion_id"],
222,
"virtual-machine",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_virtual_machine_interfaces_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_interfaces(
default_kwargs["api_url"],
default_kwargs["minion_id"],
222,
"virtual-machine",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_device_interface_ips_then_return_list(
default_kwargs, headers, device_ip_results
):
expected_result = device_ip_results["dict"]["results"]
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = device_ip_results
actual_result = netbox._get_interface_ips(
default_kwargs["api_url"],
default_kwargs["minion_id"],
511,
"device",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_device_interface_ips_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_interface_ips(
default_kwargs["api_url"],
default_kwargs["minion_id"],
511,
"device",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_virtual_machine_interface_ips_then_return_list(
default_kwargs, headers, virtual_machine_ip_results
):
expected_result = virtual_machine_ip_results["dict"]["results"]
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = virtual_machine_ip_results
actual_result = netbox._get_interface_ips(
default_kwargs["api_url"],
default_kwargs["minion_id"],
222,
"virtual-machine",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_virtual_machine_interface_ips_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_interface_ips(
default_kwargs["api_url"],
default_kwargs["minion_id"],
222,
"virtual-machine",
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_associate_ips_to_interfaces_then_return_list(
default_kwargs, device_interfaces_list, device_ip_results, device_interfaces_ip_list
):
expected_result = device_interfaces_ip_list
interfaces_list = device_interfaces_list
interface_ips_list = device_ip_results["dict"]["results"]
actual_result = netbox._associate_ips_to_interfaces(
interfaces_list, interface_ips_list
)
assert actual_result == expected_result
def test_associate_empty_ip_list_to_interfaces_then_return_list(
default_kwargs, device_interfaces_list, device_ip_results
):
expected_result = device_interfaces_list
interfaces_list = device_interfaces_list
interface_ips_list = []
actual_result = netbox._associate_ips_to_interfaces(
interfaces_list, interface_ips_list
)
assert actual_result == expected_result
def test_when_we_retrieve_site_details_then_return_dict(
default_kwargs, headers, site_results
):
expected_result = site_results["dict"]
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = site_results
actual_result = netbox._get_site_details(
default_kwargs["api_url"],
default_kwargs["minion_id"],
"Site 1",
18,
headers,
)
assert actual_result == expected_result
def test_when_we_retrieve_site_details_and_get_http_error_then_return_empty_dict(
default_kwargs, headers, http_error
):
expected_result = {}
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_site_details(
default_kwargs["api_url"],
default_kwargs["minion_id"],
"Site 1",
18,
headers,
)
assert actual_result == expected_result
def test_when_we_retrieve_site_prefixes_then_return_list(
default_kwargs, headers, site_prefixes_results, site_prefixes
):
expected_result = site_prefixes
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = site_prefixes_results
actual_result = netbox._get_site_prefixes(
default_kwargs["api_url"],
default_kwargs["minion_id"],
"Site 1",
18,
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_site_prefixes_and_get_http_error_then_return_empty_list(
default_kwargs, headers, http_error
):
expected_result = []
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_site_prefixes(
default_kwargs["api_url"],
default_kwargs["minion_id"],
"Site 1",
18,
headers,
default_kwargs["api_query_result_limit"],
)
assert actual_result == expected_result
def test_when_we_retrieve_proxy_details_then_return_dict(
default_kwargs, headers, proxy_details_results, proxy_details
):
expected_result = proxy_details
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = proxy_details_results
actual_result = netbox._get_proxy_details(
default_kwargs["api_url"],
default_kwargs["minion_id"],
"192.0.2.1/24",
1,
headers,
)
assert actual_result == expected_result
def test_when_we_retrieve_proxy_details_and_get_http_error_then_dont_return(
default_kwargs, headers, http_error
):
expected_result = None
with patch("salt.utils.http.query", autospec=True) as query:
query.return_value = http_error
actual_result = netbox._get_proxy_details(
default_kwargs["api_url"],
default_kwargs["minion_id"],
"192.0.2.1/24",
1,
headers,
)
assert actual_result == expected_result
def test_when_we_retrieve_multiple_devices_then_error_message_should_be_logged(
default_kwargs, multiple_device_results
):
with patch(
"salt.pillar.netbox._get_devices", autospec=True
) as multiple_devices, patch(
"salt.pillar.netbox.log.error", autospec=True
) as fake_error:
multiple_devices.return_value = multiple_device_results["dict"]["results"]
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
'More than one node found for "%s"',
"minion1",
)
def test_when_we_retrieve_multiple_virtual_machines_then_error_message_should_be_logged(
default_kwargs, multiple_virtual_machine_results
):
default_kwargs["devices"] = False
default_kwargs["virtual_machines"] = True
with patch(
"salt.pillar.netbox._get_virtual_machines", autospec=True
) as multiple_virtual_machines, patch(
"salt.pillar.netbox.log.error", autospec=True
) as fake_error:
multiple_virtual_machines.return_value = multiple_virtual_machine_results[
"dict"
]["results"]
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
'More than one node found for "%s"',
"minion1",
)
def test_when_we_retrieve_a_device_and_a_virtual_machine_then_error_message_should_be_logged(
default_kwargs, device_results, virtual_machine_results
):
default_kwargs["virtual_machines"] = True
with patch("salt.pillar.netbox._get_devices", autospec=True) as device, patch(
"salt.pillar.netbox._get_virtual_machines", autospec=True
) as virtual_machine, patch(
"salt.pillar.netbox.log.error", autospec=True
) as fake_error:
device.return_value = device_results["dict"]["results"]
virtual_machine.return_value = virtual_machine_results["dict"]["results"]
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
'More than one node found for "%s"',
"minion1",
)
def test_when_we_retrieve_no_devices_then_error_message_should_be_logged(
default_kwargs, no_results
):
with patch("salt.pillar.netbox._get_devices", autospec=True) as devices, patch(
"salt.pillar.netbox.log.error", autospec=True
) as fake_error:
devices.return_value = no_results["dict"]["results"]
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
'Unable to pull NetBox data for "%s"',
"minion1",
)
def test_when_we_retrieve_no_virtual_machines_then_error_message_should_be_logged(
default_kwargs, no_results
):
default_kwargs["devices"] = False
default_kwargs["virtual_machines"] = True
with patch(
"salt.pillar.netbox._get_virtual_machines", autospec=True
) as virtual_machines, patch(
"salt.pillar.netbox.log.error", autospec=True
) as fake_error:
virtual_machines.return_value = no_results["dict"]["results"]
netbox.ext_pillar(**default_kwargs)
fake_error.assert_called_with(
'Unable to pull NetBox data for "%s"',
"minion1",
)
def test_when_we_retrieve_everything_successfully_then_return_dict(
default_kwargs,
device_results,
no_results,
device_interfaces_list,
device_ip_results,
site_results,
site_prefixes,
proxy_details,
pillar_results,
):
expected_result = pillar_results
default_kwargs["virtual_machines"] = False
default_kwargs["interfaces"] = True
default_kwargs["interface_ips"] = True
default_kwargs["site_details"] = True
default_kwargs["site_prefixes"] = True
default_kwargs["proxy_return"] = True
with patch("salt.pillar.netbox._get_devices", autospec=True) as get_devices, patch(
"salt.pillar.netbox._get_virtual_machines", autospec=True
) as get_virtual_machines, patch(
"salt.pillar.netbox._get_interfaces", autospec=True
) as get_interfaces, patch(
"salt.pillar.netbox._get_interface_ips", autospec=True
) as get_interface_ips, patch(
"salt.pillar.netbox._get_site_details", autospec=True
) as get_site_details, patch(
"salt.pillar.netbox._get_site_prefixes", autospec=True
) as get_site_prefixes, patch(
"salt.pillar.netbox._get_proxy_details", autospec=True
) as get_proxy_details:
get_devices.return_value = device_results["dict"]["results"]
get_virtual_machines.return_value = no_results["dict"]["results"]
get_interfaces.return_value = device_interfaces_list
get_interface_ips.return_value = device_ip_results["dict"]["results"]
get_site_details.return_value = site_results["dict"]
get_site_prefixes.return_value = site_prefixes
get_proxy_details.return_value = proxy_details
actual_result = netbox.ext_pillar(**default_kwargs)
assert actual_result == expected_result
|
procrastinate/utils.py | ignaciocabeza/procrastinate | 129 | 12704334 | import asyncio
import contextlib
import datetime
import functools
import importlib
import inspect
import logging
import pathlib
import types
from typing import Any, Awaitable, Callable, Iterable, Optional, Type, TypeVar
import dateutil.parser
from procrastinate import exceptions
T = TypeVar("T")
U = TypeVar("U")
logger = logging.getLogger(__name__)
def load_from_path(path: str, allowed_type: Type[T]) -> T:
"""
    Import and return the object at the given full Python path.
"""
if "." not in path:
raise exceptions.LoadFromPathError(f"{path} is not a valid path")
module_path, name = path.rsplit(".", 1)
try:
module = importlib.import_module(module_path)
except ImportError as exc:
raise exceptions.LoadFromPathError(str(exc)) from exc
try:
imported = getattr(module, name)
except AttributeError as exc:
raise exceptions.LoadFromPathError(str(exc)) from exc
if not isinstance(imported, allowed_type):
raise exceptions.LoadFromPathError(
f"Object at {path} is not of type {allowed_type.__name__} "
f"but {type(imported).__name__}"
)
return imported
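# Illustrative usage sketch; the dotted path and expected type are hypothetical:
#
#     try:
#         func = load_from_path("some_app.tasks.heavy_task", types.FunctionType)
#     except exceptions.LoadFromPathError:
#         ...  # missing module, missing attribute, or a type mismatch all end up here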
def import_all(import_paths: Iterable[str]) -> None:
"""
Given a list of paths, just import them all
"""
for import_path in import_paths:
logger.debug(
f"Importing module {import_path}",
extra={"action": "import_module", "module_name": import_path},
)
importlib.import_module(import_path)
def add_sync_api(cls: Type) -> Type:
"""
    Applying this decorator to a class with async methods named "<name>_async"
    will create, for each such method, a synchronous version named "<name>" that
    performs the same operation but blocks until it completes.
"""
# Iterate on all class attributes
for attribute_name in dir(cls):
add_method_sync_api(cls=cls, method_name=attribute_name)
return cls
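# Illustrative usage sketch; the class and method names are hypothetical:
#
#     @add_sync_api
#     class Demo:
#         async def answer_async(self):
#             return 42
#
#     Demo().answer()  # -> 42, runs answer_async() to completion on an event loop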
SYNC_ADDENDUM = """
This method is the synchronous counterpart of `{}`.
"""
ASYNC_ADDENDUM = """
This method is the asynchronous counterpart of `{}`.
"""
def add_method_sync_api(*, cls: Type, method_name: str, suffix: str = "_async"):
if method_name.startswith("_") or not method_name.endswith(suffix):
return
attribute, function = get_raw_method(cls=cls, method_name=method_name)
# Keep only async def methods
if not asyncio.iscoroutinefunction(function):
return
if isinstance(attribute, types.FunctionType): # classic method
method_type = "method"
elif isinstance(attribute, classmethod):
method_type = "classmethod"
elif isinstance(attribute, staticmethod):
method_type = "staticmethod"
else:
raise ValueError(f"Invalid object of type {type(attribute)}")
attribute.__doc__ = attribute.__doc__ or ""
# Create a wrapper that will call the method in a run_until_complete
@functools.wraps(function)
def wrapper(*args, **kwargs):
if method_type == "method":
final_class = type(args[0])
elif method_type == "classmethod":
final_class = args[0]
else:
final_class = cls
_, function = get_raw_method(cls=final_class, method_name=method_name)
awaitable = function(*args, **kwargs)
return sync_await(awaitable=awaitable)
sync_name = method_name[: -len(suffix)]
attribute.__doc__ += ASYNC_ADDENDUM.format(sync_name)
final_wrapper: Any
if method_type == "method":
final_wrapper = wrapper
elif method_type == "classmethod":
final_wrapper = classmethod(wrapper)
else:
final_wrapper = staticmethod(wrapper)
# Save this new method on the class
wrapper.__name__ = sync_name
final_wrapper.__doc__ += SYNC_ADDENDUM.format(method_name)
setattr(cls, sync_name, final_wrapper)
def get_raw_method(cls: Type, method_name: str):
"""
Extract a method from the class, without triggering the descriptor.
Return 2 objects:
- the method itself stored on the class (which may be a function, a classmethod or
a staticmethod)
- The real function underneath (the same function as above for a normal method,
and the wrapped function for static and class methods).
"""
# Methods are descriptors so using getattr here will not give us the real method
cls_vars = vars(cls)
method = cls_vars[method_name]
# If method is a classmethod or staticmethod, its real function, that may be
# async, is stored in __func__.
wrapped = getattr(method, "__func__", method)
return method, wrapped
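# Illustrative sketch with a hypothetical class:
#
#     class Demo:
#         @classmethod
#         async def ping_async(cls): ...
#
# get_raw_method(Demo, "ping_async") returns the classmethod object stored in
# Demo.__dict__ together with the underlying coroutine function, without
# triggering the descriptor protocol.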
def sync_await(awaitable: Awaitable[T]) -> T:
"""
Given an awaitable, awaits it synchronously. Returns the result after it's done.
"""
loop = asyncio.get_event_loop()
if loop.is_closed():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop.run_until_complete(awaitable)
def causes(exc: Optional[BaseException]):
"""
From a single exception with a chain of causes and contexts, make an iterable
going through every exception in the chain.
"""
while exc:
yield exc
exc = exc.__cause__ or exc.__context__
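# Illustrative sketch with hypothetical exceptions:
#
#     try:
#         try:
#             raise KeyError("inner")
#         except KeyError as inner:
#             raise ValueError("outer") from inner
#     except ValueError as exc:
#         [type(e) for e in causes(exc)]  # -> [ValueError, KeyError]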
def _get_module_name(obj: Any) -> str:
module_name = obj.__module__
if module_name != "__main__":
return module_name
module = inspect.getmodule(obj)
# obj could be None, or has no __file__ if in an interactive shell
# in which case, there's not a lot we can do.
if not module or not hasattr(module, "__file__"):
return module_name
path = pathlib.Path(module.__file__)
# If the path is absolute, it probably means __main__ is an executable from an
# installed package
if path.is_absolute():
return module_name
# Creating the dotted path from the path
return ".".join([*path.parts[:-1], path.stem])
def get_full_path(obj: Any) -> str:
return f"{_get_module_name(obj)}.{obj.__name__}"
@contextlib.contextmanager
def task_context(awaitable: Awaitable, name: str):
"""
Take an awaitable, return a context manager.
On enter, launch the awaitable as a task that will execute in parallel in the
event loop. On exit, cancel the task (and log). If the task ends with an exception
log it.
A name is required for logging purposes.
"""
nice_name = name.replace("_", " ").title()
async def wrapper():
try:
logger.debug(f"Started {nice_name}", extra={"action": f"{name}_start"})
await awaitable
except asyncio.CancelledError:
logger.debug(f"Stopped {nice_name}", extra={"action": f"{name}_stop"})
raise
except Exception:
logger.exception(f"{nice_name} error", extra={"action": f"{name}_error"})
try:
task = asyncio.ensure_future(wrapper())
yield task
finally:
task.cancel()
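# Usage sketch; "heartbeat()" stands for a hypothetical coroutine:
#
#     with task_context(heartbeat(), name="heartbeat") as task:
#         ...  # the coroutine runs concurrently on the event loop
#     # leaving the block cancels the task; cancellation and errors are logged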
def utcnow() -> datetime.datetime:
return datetime.datetime.now(tz=datetime.timezone.utc)
def parse_datetime(raw: str) -> datetime.datetime:
try:
# this parser is the stricter one, so we try it first
dt = dateutil.parser.isoparse(raw)
if not dt.tzinfo:
dt = dt.replace(tzinfo=datetime.timezone.utc)
return dt
except ValueError:
pass
# this parser is quite forgiving, and will attempt to return
# a value in most circumstances, so we use it as last option
dt = dateutil.parser.parse(raw)
dt = dt.replace(tzinfo=datetime.timezone.utc)
return dt
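# Illustrative sketch: both strict ISO 8601 and looser formats are accepted, and
# naive results are coerced to UTC:
#
#     parse_datetime("2021-02-19T06:12:04Z")   # tz-aware, via the isoparse branch
#     parse_datetime("19 Feb 2021 06:12:04")   # tz-aware UTC, via the fallback parser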
class AwaitableContext:
"""
    Provides an object that can be used in either of these ways:
    - value = await AwaitableContext(...)
    - async with AwaitableContext(...) as value: ...
    open_coro and close_coro are functions taking no arguments and returning coroutines.
"""
def __init__(
self,
open_coro: Callable[[], Awaitable],
close_coro: Callable[[], Awaitable],
return_value: U,
):
self._open_coro = open_coro
self._close_coro = close_coro
self._return_value = return_value
async def __aenter__(self) -> U:
await self._open_coro()
return self._return_value
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self._close_coro()
def __await__(self):
async def _inner_coro() -> U:
await self._open_coro()
return self._return_value
return _inner_coro().__await__()
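# Usage sketch; the pool object and its open/close coroutines are hypothetical:
#
#     ctx = AwaitableContext(open_coro=pool.open, close_coro=pool.close, return_value=pool)
#
#     pool = await ctx              # opens, then hands back the return value
#     # or
#     async with ctx as pool:       # opens on enter ...
#         ...                       # ... and closes on exit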
|
monitoring/prometheus/aliyun-exporter/aliyun_exporter/utils.py | smthkissinger/docker-images | 264 | 12704340 | def format_metric(text: str):
return text.replace('.', '_')
def format_period(text: str):
return text.split(',', 1)[0]
def try_or_else(op, default):
try:
return op()
except:
return default
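# Illustrative usage: try_or_else(lambda: int("not-a-number"), 0) returns 0
# because the ValueError raised by int() is swallowed and the default is used.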
|
tests/framework/DataObjects/DataobjectsAttributes/massflowrate_fake.py | rinelson456/raven | 159 | 12704366 | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import numpy
def run(self, Input):
number_of_steps = 20
self.time = numpy.zeros(number_of_steps)
dt = 0.0001
Tw = Input["Tw"]
dummy1 = Input["Dummy1"]
self.pipe_Area = numpy.zeros(number_of_steps)
self.pipe_Tw = numpy.zeros(number_of_steps)
self.pipe_Hw = numpy.zeros(number_of_steps)
for i in range(len(self.time)):
self.time[i] = dt*i
time = self.time[i]
self.pipe_Area[i] = 0.25 + time
self.pipe_Tw[i] = Tw + time
self.pipe_Hw[i] = dummy1 + time
|
clif/python/slots.py | wangxf123456/clif | 966 | 12704376 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Process Python __methods__ into XX_slots in CLASS_Type struct."""
# TODO: Decide if we need as_buffer slots implemented.
import copy
import itertools
from clif.python import py3slots
I = ' '
_SLOT_MAP = {} # Known slots cache: {py_slot_name: slot_description}
def _SplitSlots(orig_methods):
"""Remove slot methods from orig_methods and return them."""
if not _SLOT_MAP:
_SLOT_MAP.update(_COMMON_SLOT_MAP)
methods = []
slots = []
for sc in _special_case: sc.data = {} # {c_slot_name : s.init data} # pylint: disable=multiple-statements
for m in orig_methods:
name = m[0]
try:
as_slot = _SLOT_MAP[name]
except KeyError:
methods.append(m)
else:
if as_slot:
slots.extend(_SlotsFuncAddress(name.rstrip('#'), as_slot, m[1]))
else:
raise NameError('Redefining %s is not allowed.' % name)
if slots:
orig_methods[:] = methods
return slots
def _SlotsFuncAddress(py_slot_name, slot_descr, cfunc_name):
"""Expand a [list of] slot(s) to (c_slot_name, func_addr)... generator.
Args:
py_slot_name: user-defined method name like __str__
slot_descr: Slot description from the _SLOT_MAP
cfunc_name: generated CFunction name like wrapFoo_as__str__
Yields:
('c_slot_name', 'c_slot_func_address' | special_case_data_struct)
"""
special = False
if isinstance(slot_descr, tuple):
tag = slot_descr[0]
if isinstance(tag, int): # A special case slot.
special = _special_case[tag]
c_slot_name = slot_descr[1]
data = special.data.setdefault(c_slot_name, copy.copy(special.init))
special(py_slot_name, cfunc_name, slot_descr, data)
slot_descr = (c_slot_name, '')
else:
assert len(slot_descr) == 2, 'Wrong slot:' + str(slot_descr)
else:
assert isinstance(slot_descr, (list, str))
slot_descr = (slot_descr, '')
slots = slot_descr[0]
if not isinstance(slots, list): slots = [slots]
for c_slot_name in slots:
yield c_slot_name, (special.data[c_slot_name] if special else
_SlotFunc(c_slot_name, cfunc_name, slot_descr[1]))
def _SlotFunc(cslot_name, cfunc_name, converter):
"""Compute c_slot_function name."""
if cslot_name == 'tp_call':
assert not converter
return cfunc_name
ret, args = _SlotFuncSignature(cslot_name)
if isinstance(args, list) or args.strip('O'):
    assert converter, 'Non-PyObject* args need a converter.'
assert not converter.startswith('+'), 'Not a special processor.'
return 'slot::'+converter+'<%s>' % cfunc_name
args = cfunc_name + ', PyObject*'*len(args) + '>'
if ret == 'O':
assert not converter, ('PyObject* return from %s does not need a processor.'
% cfunc_name)
return 'slot::adapter<' + args
else:
assert converter.startswith('+'), cslot_name+' needs a special processor.'
return 'slot::adapter<%s, slot::%s, ' % (ret, converter[1:]) + args
def GenRichCompare(rcslots):
"""Generate tp_richcmp slot implementation.
Args:
rcslots: {'Py_LT': '__lt__ wrap function name'}
Yields:
C++ source
"""
yield ''
yield 'PyObject* slot_richcmp(PyObject* self, PyObject* other, int op) {'
yield I+'switch (op) {'
for op_func in sorted(rcslots.items()):
yield I+I+'case %s: return slot::adapter<%s>(self, other);' % op_func
yield I+I+'default: Py_RETURN_NOTIMPLEMENTED;'
yield I+'}'
yield '}'
GenRichCompare.name = 'slot_richcmp' # Generated C++ name.
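# Illustrative sketch of the C++ generated for rcslots={'Py_LT': 'wrapFoo_as__lt__'}
# (the wrapper function name is hypothetical):
#
#     PyObject* slot_richcmp(PyObject* self, PyObject* other, int op) {
#       switch (op) {
#         case Py_LT: return slot::adapter<wrapFoo_as__lt__>(self, other);
#         default: Py_RETURN_NOTIMPLEMENTED;
#       }
#     }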
def GenSetAttr(setattr_slots):
"""Generate slot implementation for __set*__ / __del*__ user functions."""
assert len(setattr_slots) == 2, 'Need 2-slot input.'
set_attr, del_attr = setattr_slots
  assert set_attr or del_attr, 'Need one or both set/del funcs.'
yield ''
yield 'int slot_seto(PyObject* self, PyObject* name, PyObject* value) {'
yield I+'if (value != nullptr) {'
if set_attr:
yield I+I+'PyObject* args = PyTuple_Pack(2, name, value);'
yield I+I+'if (args == nullptr) return -1;'
yield I+I+'int r = slot::ignore(%s(self, args, nullptr));' % set_attr
yield I+I+'Py_DECREF(args);'
yield I+I+'return r;'
else:
yield I+I+'PyErr_SetNone(PyExc_NotImplementedError);'
yield I+I+'return -1;'
yield I+'} else {'
if del_attr:
yield I+I+'PyObject* args = PyTuple_Pack(1, name);'
yield I+I+'if (args == nullptr) return -1;'
yield I+I+'int r = slot::ignore(%s(self, args, nullptr));' % del_attr
yield I+I+'Py_DECREF(args);'
yield I+I+'return r;'
else:
yield I+I+'PyErr_SetNone(PyExc_NotImplementedError);'
yield I+I+'return -1;'
yield I+'}'
yield '}'
GenSetAttr.name = 'slot_seto' # Generated C++ name.
def GenSetItem(setitem_slots):
"""Combine __setitem__ / __delitem__ funcs into one xx_setitem slot."""
assert len(setitem_slots) == 2, 'Need __setitem__ / __delitem__ funcs.'
setitem, delitem = setitem_slots
assert setitem or delitem, 'Need one or both __setitem__ / __delitem__ funcs.'
yield ''
yield 'int slot_seti(PyObject* self, Py_ssize_t idx, PyObject* value) {'
yield I+'idx = slot::item_index(self, idx);'
yield I+'if (idx < 0) return -1;'
yield I+'PyObject* i = PyLong_FromSize_t(idx);'
yield I+'if (i == nullptr) return -1;'
yield I+'if (value != nullptr) {'
if setitem:
yield I+I+'PyObject* args = PyTuple_Pack(2, i, value);'
yield I+I+'Py_DECREF(i);'
yield I+I+'if (args == nullptr) return -1;'
yield I+I+'PyObject* res = %s(self, args, nullptr);' % setitem
yield I+I+'Py_DECREF(args);'
yield I+I+'return slot::ignore(res);'
else:
yield I+I+'PyErr_SetNone(PyExc_NotImplementedError);'
yield I+I+'return -1;'
yield I+'} else {'
if delitem:
yield I+I+'PyObject* args = PyTuple_Pack(1, i);'
yield I+I+'Py_DECREF(i);'
yield I+I+'if (args == nullptr) return -1;'
yield I+I+'PyObject* res = %s(self, args, nullptr);' % delitem
yield I+I+'Py_DECREF(args);'
yield I+I+'return slot::ignore(res);'
else:
yield I+I+'PyErr_SetNone(PyExc_NotImplementedError);'
yield I+I+'return -1;'
yield I+'}'
yield '}'
GenSetItem.name = 'slot_seti' # Generated C++ name.
def GenRopSlot(name, op, rop):
"""Generate nb_* slot implementation.
Args:
name: c_slot_name like 'nb_add'
op: __op__ wrapper function name
rop: __rop__ wrapper function name
Yields:
C++ source
"""
yield ''
yield 'extern PyTypeObject* wrapper_Type;'
yield 'PyObject* slot_%s(PyObject* v, PyObject* w) {' % name
yield I+'if (PyObject_TypeCheck(v, wrapper_Type))'
yield I+I+'return slot::adapter<%s, PyObject*>(v, w);' % op
yield I+'if (PyObject_TypeCheck(w, wrapper_Type))'
yield I+I+'return slot::adapter<%s, PyObject*>(v, w);' % rop
yield I+'Py_INCREF(Py_NotImplemented);'
yield I+'return Py_NotImplemented;'
yield '}'
GenRopSlot.name = 'slot_' # Generated C++ name prefix.
def _UpdateSlotsToRopFunc(slots):
for c_slot_name, ropslots in slots.items():
if c_slot_name in _special_case[_ROP].data:
op, rop = ropslots
if not (op and rop):
slots[c_slot_name] = 'slot::adapter<%s, PyObject*>' % (op or rop)
else:
for s in GenRopSlot(c_slot_name, op, rop): yield s # pylint: disable=multiple-statements
slots[c_slot_name] = GenRopSlot.name + c_slot_name
_SUBSLOT_INFO = (
# xx, tp_slot, Python_C_API_struct, PyCLIF_StaticAlloc_instance
('nb', 'tp_as_number', 'PyNumberMethods', 'AsNumberStaticAlloc'),
('sq', 'tp_as_sequence', 'PySequenceMethods', 'AsSequenceStaticAlloc'),
('mp', 'tp_as_mapping', 'PyMappingMethods', 'AsMappingStaticAlloc'),
# New in Python 3.5, currently not supported.
# ('am', 'tp_as_async', PyAsyncMethods, None),
)
def GenTypeSlotsHeaptype(tracked_slot_groups, tp_group):
"""Assign slot values to dynamically allocated type object."""
# tp_name:
# Following the approach of pybind11 (ignoring the Python docs):
# tp_name = module_path + qualname
# (qualname = Outer.Inner for nested types).
# tp_methods:
# Using MethodsStaticAlloc. In contrast, pybind11 uses setattr to add
# methods. Doing the same here requires significant extra work on this
# code generator, but has no known practical benefit. Note that the
# motivation for using Py_TPFLAGS_HEAPTYPE is NOT actually to allocate
# the PyTypeObject data on the heap, but to unlock its side-effect of
# enabling injection of methods from Python.
for slot in py3slots.PyTypeObject:
value = tp_group.get(slot)
if slot in ('tp_as_number',
'tp_as_sequence',
'tp_as_mapping',
'tp_as_async'):
# Using heap_type->as_number, ... slots, assigned in gen.py.
continue
if slot == 'tp_flags':
value += ' | Py_TPFLAGS_HEAPTYPE'
if value is not None and value != '0':
yield I+'ty->%s = %s;' % (slot, value)
for xx, tp_slot, stype, unused_sname in _SUBSLOT_INFO:
xx_slots = tracked_slot_groups.get(xx)
if xx_slots is not None:
for subslot in getattr(py3slots, stype):
value = xx_slots.get(subslot)
if value is not None:
yield I+'ty->%s->%s = %s;' % (tp_slot, subslot, value)
def GenSlots(methods, tp_slots, tracked_groups=None):
"""Generate extra slots structs and update tp_slots dict."""
if tracked_groups is None:
tracked_groups = {}
all_slots = _SplitSlots(methods)
if all_slots:
tp_flags = tp_slots['tp_flags']
for xx, it in itertools.groupby(sorted(all_slots), lambda s: s[0][:2]):
xx_slots = tracked_groups[xx] = dict(it)
if xx == 'tp':
for s in _UpdateSlotToGeneratedFunc(
xx_slots, 'tp_setattro', GenSetAttr):
yield s
for s in _UpdateSlotToGeneratedFunc(
xx_slots, 'tp_richcompare', GenRichCompare):
yield s
tp_slots.update(xx_slots)
elif xx == 'mp':
for s in _UpdateSlotToGeneratedFunc(
xx_slots, 'mp_ass_subscript', GenSetAttr):
yield s
elif xx == 'sq':
for s in _UpdateSlotToGeneratedFunc(
xx_slots, 'sq_ass_item', GenSetItem):
yield s
elif xx == 'nb':
for s in _UpdateSlotsToRopFunc(xx_slots):
yield s
for xx, tp_slot, unused_stype, sname in _SUBSLOT_INFO:
xx_slots = tracked_groups.get(xx)
if xx_slots:
tp_slots[tp_slot] = '&' + sname
# Update tp_flags.
if 'tp_finalize' in tp_slots:
tp_flags.append('Py_TPFLAGS_HAVE_FINALIZE')
def _UpdateSlotToGeneratedFunc(slots, name, gen_func):
data = slots.get(name)
if data:
for s in gen_func(data):
yield s
slots[name] = gen_func.name
def _ATTRcase(slot, func, unused_slot_info, case_data):
"""Adapter for use in _special_case list below."""
if slot.startswith('__set'):
case_data[0] = func
elif slot.startswith('__del'):
case_data[1] = func
else:
    assert False, 'Slot %s should not be _ATTR special.' % slot
_ATTRcase.init = [None, None]
def _ITEMcase(slot, func, unused_slot_info, case_data):
"""Adapter for use in _special_case list below."""
if slot == '__setitem__':
case_data[0] = func
elif slot == '__delitem__':
case_data[1] = func
else:
    assert False, 'Slot %s should not be _ITEM special.' % slot
_ITEMcase.init = [None, None]
def _ROPcase(slot, func, slot_info, case_data):
"""Adapter for use in _special_case list below."""
assert len(slot_info) == 3, 'expect (ROP, c_slot, op), got %s' % slot_info
assert slot_info[1].startswith('nb_'), ('Only nb_ slots can be _ROP, not %s'
% slot_info[1])
op = slot_info[-1]
if slot == '__%s__' % op:
case_data[0] = func
elif slot == '__r%s__' % op:
case_data[1] = func
else:
    assert False, 'Slot %s should not be _ROP special.' % slot
_ROPcase.init = [None, None]
def _RICHcase(unused_slot, func, slot_info, case_data):
"""Adapter for use in _special_case list below."""
assert len(slot_info) == 3
case_data[slot_info[-1]] = func
_RICHcase.init = {}
FORBIDDEN = () # Redefining those slots gets an error.
# Special cases.
_ATTR, _ITEM, _ROP, _RICH = range(4)
_special_case = [_ATTRcase, _ITEMcase, _ROPcase, _RICHcase]
# Some known "slots" are not in the map, they are ignored and if defined just
# will be methods (to be called from a Python class inherited from ours).
_COMMON_SLOT_MAP = {
# name : 'slot' or ['slots',...] call adapter<>
# name : (slot(s), 'processor') call processor
# name : (slot(s), '+converter') call adapter<> with result converter
# name : (CASE, slot) use _special_case[CASE] func
# name : (CASE, slots, op) use _special_case[CASE] func
# name : FORBIDDEN
# name is a user-defined method name like __str__ (may end with #,
# indicating sq_ slots, see pytd2proto.py for more details)
'__new__': FORBIDDEN,
'__del__': FORBIDDEN,
'__getattr__': 'tp_getattro',
'__setattr__': (_ATTR, 'tp_setattro'),
'__delattr__': (_ATTR, 'tp_setattro'),
'__getattribute__': FORBIDDEN,
'__dir__': FORBIDDEN,
'__get__': FORBIDDEN,
'__set__': FORBIDDEN,
'__delete__': FORBIDDEN,
'__len__': (['sq_length', 'mp_length'], '+as_size'),
'__hash__': ('tp_hash', '+as_hash'),
'__getitem__': 'mp_subscript',
'__setitem__': (_ATTR, 'mp_ass_subscript'),
'__delitem__': (_ATTR, 'mp_ass_subscript'),
'__getitem__#': ('sq_item', 'getitem'),
'__setitem__#': (_ITEM, 'sq_ass_item'),
'__delitem__#': (_ITEM, 'sq_ass_item'),
'__contains__': ('sq_contains', '+as_bool'),
'__str__': 'tp_str',
'__repr__': 'tp_repr',
'__format__': FORBIDDEN,
'__sizeof__': FORBIDDEN,
'__pos__': 'nb_positive',
'__neg__': 'nb_negative',
'__abs__': 'nb_absolute',
'__add__#': 'sq_concat',
'__iadd__#': 'sq_inplace_concat',
'__add__': (_ROP, 'nb_add', 'add'),
'__radd__': (_ROP, 'nb_add', 'add'),
'__iadd__': 'nb_inplace_add',
'__sub__': (_ROP, 'nb_subtract', 'sub'),
'__rsub__': (_ROP, 'nb_subtract', 'sub'),
'__isub__': 'nb_inplace_subtract',
'__mul__': (_ROP, 'nb_multiply', 'mul'),
'__rmul__': (_ROP, 'nb_multiply', 'mul'),
'__imul__': 'nb_inplace_multiply',
'__mul__#': ('sq_repeat', 'repeat'),
'__imul__#': ('sq_inplace_repeat', 'repeat'),
'__truediv__': (_ROP, 'nb_true_divide', 'truediv'),
'__rtruediv__': (_ROP, 'nb_true_divide', 'truediv'),
'__itruediv__': 'nb_inplace_true_divide',
'__floordiv__': (_ROP, 'nb_floor_divide', 'floordiv'),
'__rfloordiv__': (_ROP, 'nb_floor_divide', 'floordiv'),
'__ifloordiv__': 'nb_inplace_floor_divide',
'__divmod__': 'nb_divmod',
'__mod__': (_ROP, 'nb_remainder', 'mod'),
'__rmod__': (_ROP, 'nb_remainder', 'mod'),
'__imod__': 'nb_inplace_remainder',
'__pow__': 'nb_power',
'__ipow__': 'nb_inplace_power',
'__lshift__': (_ROP, 'nb_lshift', 'lshift'),
'__rlshift__': (_ROP, 'nb_lshift', 'lshift'),
'__ilshift__': 'nb_inplace_lshift',
'__rshift__': (_ROP, 'nb_rshift', 'rshift'),
'__rrshift__': (_ROP, 'nb_rshift', 'rshift'),
'__irshift__': 'nb_inplace_rshift',
'__and__': (_ROP, 'nb_and', 'and'),
'__rand__': (_ROP, 'nb_and', 'and'),
'__iand__': 'nb_inplace_and',
'__xor__': (_ROP, 'nb_xor', 'xor'),
'__rxor__': (_ROP, 'nb_xor', 'xor'),
'__ixor__': 'nb_inplace_xor',
'__or__': (_ROP, 'nb_or', 'or'),
'__ror__': (_ROP, 'nb_or', 'or'),
'__ior__': 'nb_inplace_or',
'__invert__': 'nb_invert',
'__int__': 'nb_int',
'__float__': 'nb_float',
'__index__': 'nb_index',
'__iter__': 'tp_iter',
'__next__': 'tp_iternext',
'__call__': 'tp_call',
'__lt__': (_RICH, 'tp_richcompare', 'Py_LT'),
'__le__': (_RICH, 'tp_richcompare', 'Py_LE'),
'__gt__': (_RICH, 'tp_richcompare', 'Py_GT'),
'__ge__': (_RICH, 'tp_richcompare', 'Py_GE'),
'__eq__': (_RICH, 'tp_richcompare', 'Py_EQ'),
'__ne__': (_RICH, 'tp_richcompare', 'Py_NE'),
'__bool__': ('nb_bool', '+as_bool'),
# Enable for Python 3.5+
# '__matmul__': 'nb_matrix_multiply',
# '__imatmul__': 'nb_inplace_matrix_multiply',
# '__await__': 'am_await',
# '__aiter__': 'am_aiter',
# '__anext__': 'am_anext',
}
def _SlotFuncSignature(slot):
return py3slots.SIGNATURES[slot]
|
ivy/test/implement1.py | b1f6c1c4/cfg-enum | 113 | 12704411 |
from ivy import ivy_module as im
from ivy.ivy_compiler import ivy_from_string
from ivy.tk_ui import new_ui
from ivy import ivy_utils as iu
from ivy import ivy_check as ick
prog = """#lang ivy1.5
type packet
object intf = {
action send(x:packet)
action recv(x:packet)
}
object spec = {
relation sent(X:packet)
init ~sent(X)
before intf.send {
sent(x) := true
}
before intf.recv {
assert sent(x)
}
}
object protocol = {
implement intf.send {
call intf.recv(x)
}
}
import intf.recv
export intf.send
"""
with im.Module():
iu.set_parameters({'mode':'induction','show_compiled':'true'})
ivy_from_string(prog,create_isolate=False)
ick.check_module()
|
src/tree/1028.recover-a-tree-from-preorder-traversal/recover-a-tree-from-preorder-traversal.py | lyphui/Just-Code | 782 | 12704419 | class Solution:
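    # Assumes the judge-provided binary tree node class, roughly:
    #     class TreeNode:
    #         def __init__(self, val=0, left=None, right=None): ...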
def recoverFromPreorder(self, S: str) -> TreeNode:
if not S: return
l = S.split('-')
s, depth = [[TreeNode(l[0]), 0]], 1
for item in l[1:]:
if not item:
depth += 1
continue
node = TreeNode(item)
while s[-1][1] != depth - 1:
s.pop()
if not s[-1][0].left:
s[-1][0].left = node
else:
s[-1][0].right = node
s.append([node, depth])
depth = 1
return s[0][0] |
CircuitPython_OLED_Watch/CircuitPython_OLED_Watch.py | joewalk102/Adafruit_Learning_System_Guides | 665 | 12704428 | import board
import displayio
import adafruit_displayio_ssd1306
import terminalio
import adafruit_ds3231
from adafruit_display_text import label
font = terminalio.FONT
displayio.release_displays()
i2c = board.I2C()
display_bus = displayio.I2CDisplay(i2c, device_address=0x3c)
oled = adafruit_displayio_ssd1306.SSD1306(display_bus, width=128, height=32)
rtc = adafruit_ds3231.DS3231(i2c)
# The first time you run this code, you must set the time!
# You must set year, month, date, hour, minute, second and weekday.
# struct_time order: year, month, day (date), hour, minute, second, weekday , yearday, isdst
# yearday is not supported, isdst can be set but we don't do anything with it at this time
# UNCOMMENT THE FOLLOWING FOUR LINES THE FIRST TIME YOU RUN THE CODE TO SET THE TIME!
# import time
# set_time = time.struct_time((2019, 8, 16, 23, 59, 45, 4, -1, -1))
# print("Setting time to:", set_time)
# rtc.datetime = set_time
# Comment out the above four lines again after setting the time!
while True:
current = rtc.datetime
hour = current.tm_hour % 12
if hour == 0:
hour = 12
am_pm = "AM"
if current.tm_hour / 12 >= 1:
am_pm = "PM"
time_display = "{:d}:{:02d}:{:02d} {}".format(hour, current.tm_min, current.tm_sec, am_pm)
date_display = "{:d}/{:d}/{:d}".format(current.tm_mon, current.tm_mday, current.tm_year)
text_display = "CircuitPython Time"
clock = label.Label(font, text=time_display)
date = label.Label(font, text=date_display)
text = label.Label(font, text=text_display)
(_, _, width, _) = clock.bounding_box
clock.x = oled.width // 2 - width // 2
clock.y = 5
(_, _, width, _) = date.bounding_box
date.x = oled.width // 2 - width // 2
date.y = 15
(_, _, width, _) = text.bounding_box
text.x = oled.width // 2 - width // 2
text.y = 25
watch_group = displayio.Group()
watch_group.append(clock)
watch_group.append(date)
watch_group.append(text)
oled.show(watch_group)
|
test/programytest/storage/stores/sql/store/test_conversations.py | cdoebler1/AIML2 | 345 | 12704431 | import unittest
from unittest.mock import patch
import programytest.storage.engines as Engines
from programy.dialog.conversation import Conversation
from programy.dialog.question import Question
from programy.parser.pattern.match import Match
from programy.parser.pattern.matchcontext import MatchContext
from programy.parser.pattern.nodes.word import PatternWordNode
from programy.storage.stores.sql.config import SQLStorageConfiguration
from programy.storage.stores.sql.engine import SQLStorageEngine
from programy.storage.stores.sql.store.conversations import SQLConversationStore
from programytest.client import TestClient
from programytest.storage.asserts.store.assert_conversations import ConverstionStoreAsserts
from programy.storage.stores.sql.dao.conversation import Conversation as ConversationDAO
from programy.storage.stores.sql.dao.conversation import Question as QuestionDAO
from programy.storage.stores.sql.dao.conversation import Sentence as SentenceDAO
from programy.storage.stores.sql.dao.conversation import ConversationProperty as ConversationPropertyDAO
from programy.storage.stores.sql.dao.conversation import Match as MatchDAO
from programy.storage.stores.sql.dao.conversation import MatchNode as MatchNodeDAO
class SQLConversationStoreTests(ConverstionStoreAsserts):
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_initialise(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_get_all(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
with self.assertRaises(Exception):
store._get_all()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_read_write_conversation_properties_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
client = TestClient()
client_context = client.create_client_context("user1")
store.empty()
properties1 = {"key1": "value1", "key2": "value2"}
store._write_properties_to_db(client_context, 1, 2, ConversationPropertyDAO.CONVERSATION, properties1)
store.commit()
properties2 = {}
store._read_properties_from_db(client_context, 1, 2, ConversationPropertyDAO.CONVERSATION, properties2)
self.assertEqual({"key1": "value1", "key2": "value2"}, properties2)
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_read_write_question_properties_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
client = TestClient()
client_context = client.create_client_context("user1")
store.empty()
properties1 = {"key1": "value1", "key2": "value2"}
store._write_properties_to_db(client_context, 1, 2, ConversationPropertyDAO.QUESTION, properties1)
store.commit()
properties2 = {}
store._read_properties_from_db(client_context, 1, 2, ConversationPropertyDAO.QUESTION, properties2)
self.assertEqual({"key1": "value1", "key2": "value2"}, properties2)
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_read_write_matches_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
store.empty()
client = TestClient()
client_context = client.create_client_context("user1")
matched_context1 = MatchContext(100, 100, sentence="Hello", response="Hi There")
matched_context1.matched_nodes.append(Match(Match.WORD, PatternWordNode("Hello"), "Hello"))
store._write_matches_to_db(client_context, matched_context1, 1)
store.commit()
matched_context2 = MatchContext(0, 0)
store._read_matches_from_db(client_context, matched_context2, 1)
self.assertEqual(1, len(matched_context2.matched_nodes))
self.assertEqual(Match.WORD, matched_context2.matched_nodes[0].matched_node_type)
self.assertEqual("WORD [Hello]", matched_context2.matched_nodes[0].matched_node_str)
self.assertFalse(matched_context2.matched_nodes[0].matched_node_multi_word)
self.assertFalse(matched_context2.matched_nodes[0].matched_node_wildcard)
self.assertEqual(1, len(matched_context2.matched_nodes[0].matched_node_words))
self.assertEqual(["Hello"], matched_context2.matched_nodes[0].matched_node_words)
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_match_context_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
store.empty()
client = TestClient()
client_context = client.create_client_context("user1")
matched_context1 = MatchContext(100, 100, sentence="Hello", response="Hi There")
matched_context1._matched_nodes = []
matched_context1._template_node = None
store._write_match_context_to_db(client_context, 1, matched_context1)
store.commit()
matched_context2 = MatchContext(100, 100)
store._read_match_context_from_db(client_context, 1, matched_context2)
self.assertEqual(100, matched_context2.max_search_timeout)
self.assertEqual(100, matched_context2.max_search_depth)
self.assertEqual("Hello", matched_context2.sentence)
self.assertEqual("Hi There", matched_context2.response)
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_sentences_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
store.empty()
client = TestClient()
client_context = client.create_client_context("user1")
question1 = Question.create_from_text(client_context, "Hello There")
question1.sentence(0).response = "Hi"
question1.sentence(0)._positivity = 0.5
question1.sentence(0)._subjectivity = 0.6
store._write_sentences_to_db(client_context, 1, question1)
store.commit()
question2 = Question()
store._read_sentences_from_db(client_context, 1, question2)
self.assertEqual(1, len(question2.sentences))
self.assertEqual(0.5, question2.sentences[0].positivity)
self.assertEqual(0.6, question2.sentences[0].subjectivity)
self.assertEqual(["Hello", "There"], question2.sentences[0].words)
self.assertEqual("Hi", question2.sentences[0].response)
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_questions_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
store.empty()
client = TestClient()
client_context = client.create_client_context("user1")
conversation1 = Conversation(client_context)
question1 = Question.create_from_text(client_context, "Hello There")
question1.sentence(0).response = "Hi"
question1.sentence(0)._positivity = 0.5
question1.sentence(0)._subjectivity = 0.6
conversation1.record_dialog(question1)
store._write_questions_to_db(client_context, 1, conversation1)
store.commit()
conversation2 = Conversation(client_context)
store._read_questions_from_db(client_context, 1, conversation2)
self.assertEqual(1, len(conversation2.questions))
self.assertEqual(1, len(conversation2.questions[0].sentences))
self.assertEqual(0.5, conversation2.questions[0].sentences[0].positivity)
self.assertEqual(0.6, conversation2.questions[0].sentences[0].subjectivity)
self.assertEqual(["Hello", "There"], conversation2.questions[0].sentences[0].words)
self.assertEqual("Hi", conversation2.questions[0].sentences[0].response)
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_conversation_in_db(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
store.empty()
client = TestClient()
client_context = client.create_client_context("user1")
conversation1 = Conversation(client_context)
conversation1.properties['ckey1'] = "cvalue1"
        conversation1.properties['ckey2'] = "cvalue2"
question1 = Question.create_from_text(client_context, "Hello There")
question1.sentence(0).response = "Hi"
question1.sentence(0)._positivity = 0.5
question1.sentence(0)._subjectivity = 0.6
question1.properties['qkey1'] = "qvalue1"
question1.properties['qkey2'] = "qvalue2"
conversation1.record_dialog(question1)
store.store_conversation(client_context, conversation1)
        store.commit()
conversation2 = Conversation(client_context)
        store.load_conversation(client_context, conversation2)
self.assertEqual(conversation2.properties['ckey1'], "cvalue1")
self.assertEqual(conversation2.properties['ckey2'], "cvalue2")
self.assertEqual(conversation2.questions[0].sentence(0).response, "Hi")
self.assertEqual(conversation2.questions[0].sentence(0)._positivity, 0.5)
self.assertEqual(conversation2.questions[0].sentence(0)._subjectivity, 0.6)
self.assertEqual(conversation2.questions[0].properties['qkey1'], "qvalue1")
self.assertEqual(conversation2.questions[0].properties['qkey2'], "qvalue2")
store.empty()
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_conversation_storage(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_conversation_storage(store)
def patch_get_conversation_dao(self, client_context):
return ConversationDAO(id=1,
clientid="client1",
userid="user1",
botid="bot1",
brainid="brain1",
maxhistories=100)
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_conversation_dao", patch_get_conversation_dao)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_conversation(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_conversation_dao2(self, client_context):
return None
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_conversation_dao", patch_get_conversation_dao2)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_no_conversation(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_question_dao(self, conversationid, question_no):
return QuestionDAO(id=1,
conversationid=conversationid,
questionno=question_no,
srai=False)
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_question_dao", patch_get_question_dao)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_question(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_question_dao2(self, conversationid, question_no):
return None
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_question_dao", patch_get_question_dao2)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_noquestion(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_sentence_dao(self, questionid, sentence_no):
return SentenceDAO(id=1,
questionid = questionid,
sentenceno = sentence_no,
sentence = "Hello",
response = "Hi There",
positivity = "0.5",
subjectivity = "0.5")
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_sentence_dao", patch_get_sentence_dao)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_sentence(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_sentence_dao2(self, questionid, sentence_no):
return None
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_sentence_dao", patch_get_sentence_dao2)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_no_sentence(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_match_dao(self, sentenceid):
return MatchDAO(id=1,
sentenceid = sentenceid,
max_search_depth = 99,
max_search_timeout = 99,
sentence = "Hello",
response = "Hi there",
score = "1.0")
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_match_dao", patch_get_match_dao)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_match(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_match_dao2(self, sentenceid):
return None
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_match_dao", patch_get_match_dao2)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_no_match(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_matchnode_dao(self, matchid, match_count):
return MatchNodeDAO(id = 1,
matchid = matchid,
matchcount = match_count,
matchtype = "WORD",
matchnode = "WORD",
matchstr = "HELLO",
wildcard = False,
multiword = False)
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_matchnode_dao", patch_get_matchnode_dao)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_matchnode(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_matchnode_dao2(self, matchid, match_count):
return None
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_matchnode_dao", patch_get_matchnode_dao2)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_no_matchnode(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_property_dao(self, conversationid, questionid, proptype, name):
return ConversationPropertyDAO(id=1,
conversationid = conversationid,
questionid =questionid,
type = proptype,
name = name,
value = "value")
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_property_dao", patch_get_property_dao)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_property_unmatched(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_property_dao2(self, conversationid, questionid, proptype, name):
return ConversationPropertyDAO(id=1,
conversationid = conversationid,
questionid =questionid,
type = proptype,
name = "topic",
value = "*")
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_property_dao", patch_get_property_dao2)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_existing_property_matched(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
def patch_get_property_dao3(self, conversationid, questionid, proptype, name):
return None
@patch("programy.storage.stores.sql.store.conversations.SQLConversationStore._get_property_dao", patch_get_property_dao3)
@unittest.skipIf(Engines.sql is False, Engines.sql_disabled)
def test_storage_where_no_property(self):
config = SQLStorageConfiguration()
engine = SQLStorageEngine(config)
engine.initialise()
store = SQLConversationStore(engine)
self.assertEqual(store.storage_engine, engine)
self.assert_just_conversation_storage(store)
|
docker-desktop/vnc/docker-ubuntu-vnc-desktop/image/usr/local/lib/web/backend/vnc/log.py | smthkissinger/docker-images | 2,917 | 12704477 | import logging
log = logging.getLogger('novnc2')
|
workflows/pipe-common/pipeline/utils/ssh.py | ZMaratovna/cloud-pipeline | 126 | 12704502 |
# Copyright 2017-2021 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABCMeta, abstractmethod
import paramiko
class SSHError(RuntimeError):
pass
class CloudPipelineSSH:
__metaclass__ = ABCMeta
@abstractmethod
def execute(self, command, user=None, logger=None):
pass
class LogSSH(CloudPipelineSSH):
def __init__(self, logger, inner):
self._logger = logger
self._inner = inner
def execute(self, command, user=None, logger=None):
self._inner.execute(command, user=user, logger=logger or self._logger)
class UserSSH(CloudPipelineSSH):
def __init__(self, user, inner):
self._user = user
self._inner = inner
def execute(self, command, user=None, logger=None):
self._inner.execute(command, user=user or self._user, logger=logger)
class HostSSH(CloudPipelineSSH):
def __init__(self, host, private_key_path):
self._host = host
self._private_key_path = private_key_path
def execute(self, command, user=None, logger=None):
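        # Open a short-lived SSH session to the host with the configured private key,
        # run the command, stream stdout/stderr to the logger (info/warning) and
        # raise SSHError on a non-zero exit code.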
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.MissingHostKeyPolicy())
client.connect(self._host, username=user, key_filename=self._private_key_path)
_, stdout, stderr = client.exec_command(command)
exit_code = stdout.channel.recv_exit_status()
if logger:
for line in stdout:
stripped_line = line.strip('\n')
logger.info(stripped_line)
for line in stderr:
stripped_line = line.strip('\n')
logger.warning(stripped_line)
if exit_code != 0:
raise SSHError('Command has finished with exit code ' + str(exit_code))
client.close()
|
vel/api/metrics/__init__.py | tigerwlin/vel | 273 | 12704505 |
from .base_metric import BaseMetric
from .averaging_metric import AveragingMetric, AveragingNamedMetric, AveragingSupervisedMetric
from .value_metric import ValueMetric
from .summing_metric import SummingMetric, SummingNamedMetric
|
python/fate_client/pipeline/param/psi_param.py | hubert-he/FATE | 3,787 | 12704515 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class PSIParam(BaseParam):
def __init__(self, max_bin_num=20, need_run=True, dense_missing_val=None):
super(PSIParam, self).__init__()
self.max_bin_num = max_bin_num
self.need_run = need_run
self.dense_missing_val = dense_missing_val
def check(self):
assert type(self.max_bin_num) == int and self.max_bin_num > 0, 'max bin must be an integer larger than 0'
assert type(self.need_run) == bool
if self.dense_missing_val is not None:
assert type(self.dense_missing_val) == str or type(self.dense_missing_val) == int or \
type(self.dense_missing_val) == float, \
'missing value type {} not supported'.format(type(self.dense_missing_val))
|
ecosystem_tools/mindconverter/mindconverter/graph_based_converter/mapper/onnx/ops/clip_mapper.py | mindspore-ai/mindinsight | 216 | 12704584 |
# Copyright 2021 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mapper module."""
from mindconverter.graph_based_converter.constant import ExchangeMessageKeywords, \
TemplateKeywords
from mindconverter.graph_based_converter.mapper.base import ONNXToMindSporeMapper
class ClipMapper(ONNXToMindSporeMapper):
"""Clip mapper."""
@staticmethod
def _operation_name_in_ms(*args, **kwargs):
return "P.clip_by_value"
@staticmethod
def _convert_params(**kwargs):
return dict()
@staticmethod
def _convert_trained_weights(**kwargs):
return dict()
@staticmethod
def _generate_snippet_template(**kwargs):
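        # Map ONNX Clip onto MindSpore's P.clip_by_value: the two weight initializers
        # become the min/max constants declared in __init__, and the construct
        # statement clamps the input tensor between them.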
op = kwargs.get("operation").replace("onnx::", "onnx.")
weights = kwargs.get("weights")
if not op:
raise ValueError("Can not get MindSpore operation name.")
min_val = ClipMapper._find_val_by_index(0, weights).tolist()
max_val = ClipMapper._find_val_by_index(1, weights).tolist()
variable_slot = "var_0"
args = {"min": min_val, "max": max_val}
min_val_decl_stmt = f"self.{{{variable_slot}}}_min = {{min}}"
max_val_decl_stmt = f"self.{{{variable_slot}}}_max = {{max}}"
construct_template = f"opt_{{{variable_slot}}} = {op}" \
f"({{{ExchangeMessageKeywords.VariableScope.value.INPUTS.value}}}, " \
f"self.{{{variable_slot}}}_min, self.{{{variable_slot}}}_max)"
template = {
variable_slot: {
TemplateKeywords.INIT.value: [min_val_decl_stmt, max_val_decl_stmt],
TemplateKeywords.CONSTRUCT.value: [construct_template]
}
}
exchange_msg = {
variable_slot: {
ExchangeMessageKeywords.VariableScope.value.OPERATION.value: op,
ExchangeMessageKeywords.VariableScope.value.VARIABLE_NAME.value: None,
ExchangeMessageKeywords.VariableScope.value.OUTPUT_TYPE.value:
ExchangeMessageKeywords.VariableScope.value.TSR_TYPE.value,
ExchangeMessageKeywords.VariableScope.value.INPUTS.value: [],
ExchangeMessageKeywords.VariableScope.value.ARGS.value: args,
ExchangeMessageKeywords.VariableScope.value.WEIGHTS.value: dict(),
ExchangeMessageKeywords.VariableScope.value.TRAINABLE_PARAMS.value: dict()
}
}
outputs_list = [f"opt_{{{variable_slot}}}"]
outputs_mapping = ((0, 0),)
return template, exchange_msg, outputs_list, outputs_mapping
|
astroquery/jplspec/setup_package.py | rickynilsson/astroquery | 577 | 12704625 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
def get_package_data():
paths_test = [os.path.join('data', 'CO.data'),
os.path.join('data', 'CO_6.data'),
os.path.join('data', 'multi.data')]
paths_data = [os.path.join('data', 'catdir.cat')]
return {'astroquery.jplspec.tests': paths_test,
'astroquery.jplspec': paths_data, }
|
Stock/Data/Viewer/IndexConsecutiveDayLineStats/DyStockDataIndexConsecutiveDayLineStatsTabWidget.py | Leonardo-YXH/DevilYuan | 135 | 12704631 | from PyQt5.QtWidgets import QTabWidget
from ....Common.DyStockCommon import *
from .DyStockDataIndexConsecutiveDayLineStatsWidget import *
class DyStockDataIndexConsecutiveDayLineStatsTabWidget(QTabWidget):
def __init__(self, dataWindow, startDate, endDate, indexCountedDfs, greenLine=True):
super().__init__()
for index, df in indexCountedDfs.items():
self.addTab(DyStockDataIndexConsecutiveDayLineStatsWidget(dataWindow, index, df),
DyStockCommon.indexes[index]
)
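        # Window title translates to "Consecutive daily bearish/bullish candle statistics
        # for index [start, end]" (阴 = bearish/green line, 阳 = bullish/red line).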
self.setWindowTitle('指数连续日{0}线统计[{1},{2}]'.format('阴' if greenLine else '阳', startDate, endDate)) |
src/main/APIClient.py | BMW-InnovationLab/BMW-Anonymization-API | 108 | 12704636 |
import os
import time
import io
import sys
import json
import requests
import jsonschema
from exceptions import InvalidUrlConfiguration, ApplicationError
class ApiClient:
def __init__(self):
self.configuration = []
self.url_list = self.get_url_configuration()
self.get_api_configuration()
def get_configuration(self):
try:
return self.configuration
except ApplicationError as e:
raise e
@staticmethod
def get_url_configuration():
"""
:return: List of all the api urls provided in the url_configuration file
"""
with open('../jsonFiles/url_configuration.json') as f:
data = json.load(f)
urls = data["urls"]
try:
validate_url_configuration(data)
except Exception as e:
raise InvalidUrlConfiguration
return urls
def get_api_configuration(self):
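        """Populate self.configuration with the models exposed by every configured api url."""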
for url in self.url_list:
self.get_models(url)
@staticmethod
def get_model_names(url: str):
time.sleep(5)
response = requests.get(
url=url + "models")
models_list = response.json()["data"]["models"]
return models_list
def get_models(self, url: str):
"""
        Appends to self.configuration one json object per model served by the given
        api url, holding the model's name, labels, type, url and palette
        :param url: One of the api urls from the url_configuration file
"""
models_list = self.get_model_names(url)
for model_name in models_list:
labels_list = self.get_labels(url, model_name)
model_type = self.get_model_configuration(url, model_name)
palette = None
if "segmentation" in model_type:
palette = self.get_palette(url, model_name)
self.configuration.append({
"name": model_name,
"labels": labels_list,
"type": model_type,
"url": url,
"palette": palette
})
@staticmethod
def get_palette(url: str, model_name: str):
response = requests.get(
url=url + "models/" + model_name + "/palette"
)
return response.json()["data"]
@staticmethod
def get_labels(url: str, model_name: str):
response = requests.get(
url=url + "models/" + model_name + "/labels"
)
return response.json()["data"]
@staticmethod
def get_model_configuration(url: str, model_name: str):
response = requests.get(
url=url + "models/" + model_name + "/config"
)
return response.json()["data"]["type"]
@staticmethod
def get_detection_response(url: str, model_name: str, im):
response = requests.post(
url=url + "models/" + model_name + "/predict",
files={'input_data': io.BytesIO(im.tobytes())})
return response.json()
@staticmethod
def get_segmentation_response(url: str, model_name: str, im):
response = requests.post(
url=url + "models/" + model_name + "/inference",
files={'input_data': io.BytesIO(im.tobytes())}
)
return response
def validate_url_configuration(data):
"""
Validate the url_configuration file by comparing it to the urlConfigurationSchema
:param data: The data from the url_configuration file
"""
with open('urlConfigurationSchema') as f:
schema = json.load(f)
try:
jsonschema.validate(data, schema)
except Exception as e:
raise InvalidUrlConfiguration(e)
|
core/clonesf.py | faslan1234/socialfish | 2,970 | 12704652 |
import requests
import re
import os
# CLONING FUNCTIONS --------------------------------------------------------------------------------------------
def clone(url, user_agent, beef):
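    # Fetch the target page with the given User-Agent, rewrite every form action
    # to the local /login route, optionally inject the BeEF hook script before
    # </body>, and save the result as templates/fake/<user_agent>/<url>/index.html.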
try:
u = url.replace('://', '-')
q = 'templates/fake/{}/{}'.format(user_agent, u)
os.makedirs(q, exist_ok=True)
temp_ind_path = 'templates/fake/{}/{}/index.html'.format(user_agent, u)
headers = {'User-Agent': user_agent}
r = requests.get(url, headers=headers)
html = r.text
old_regular = re.findall(r'action="([^ >"]*)"',html)
new_regular = '/login'
for r in old_regular:
print(r)
html = html.replace(r, new_regular)
if beef == 'yes':
inject = '<script src=":3000/hook.js" type="text/javascript"></script></body>'
html = html.replace("</body>", inject)
new_html = open(temp_ind_path, 'w')
new_html.write(html.encode('ascii', 'ignore').decode('ascii'))
new_html.close()
except:
pass
#-------------------------------------------------------------------------------------------------------------------- |
la/oblas/data/dpotrf01.py | wtsia/gosl | 1,811 | 12704682 |
import numpy as np
import scipy.linalg as la
from auxiliary import *
a = np.matrix([
[+3, +0, -3, +0],
[+0, +3, +1, +2],
[-3, +1, +4, +1],
[+0, +2, +1, +3],
],dtype=float)
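# Compute the upper and lower Cholesky factors of the symmetric positive-definite
# matrix and print them (via mprint) as reference values for the dpotrf tests.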
res = la.cholesky(a, lower=False)
mprint('aUp', res)
res = la.cholesky(a, lower=True)
mprint('aLo', res)
|
mmdet/models/detectors/reasoning_rcnn.py | TaoBowoa180011/Reasoning-RCNN | 178 | 12704683 | from __future__ import division
import torch
import torch.nn as nn
from .base import BaseDetector
from .test_mixins import RPNTestMixin
from .. import builder
from ..registry import DETECTORS
from mmdet.core import (assign_and_sample, bbox2roi, bbox2result, multi_apply,
merge_aug_masks)
import numpy as np
import pickle
from ..utils import ConvModule
import torch.nn.functional as F
@DETECTORS.register_module
class ReasoningRCNN(BaseDetector, RPNTestMixin):
def __init__(self,
num_stages,
backbone,
neck=None,
upper_neck=None,
rpn_head=None,
bbox_roi_extractor=None,
bbox_head=None,
mask_roi_extractor=None,
mask_head=None,
train_cfg=None,
test_cfg=None,
pretrained=None,
adj_gt=None,
graph_out_channels=256,
normalize=None,
roi_feat_size=7,
shared_num_fc=2):
assert bbox_roi_extractor is not None
assert bbox_head is not None
super(ReasoningRCNN, self).__init__()
self.num_stages = num_stages
self.backbone = builder.build_backbone(backbone)
if neck is not None:
self.neck = builder.build_neck(neck)
else:
assert upper_neck is not None
if rpn_head is not None:
self.rpn_head = builder.build_rpn_head(rpn_head)
if upper_neck is not None:
if isinstance(upper_neck, list):
self.upper_neck = nn.ModuleList()
assert len(upper_neck) == self.num_stages
for neck in upper_neck:
self.upper_neck.append(builder.build_upper_neck(neck))
else:
self.upper_neck = builder.build_upper_neck(upper_neck)
if bbox_head is not None:
self.bbox_roi_extractor = nn.ModuleList()
self.bbox_head = nn.ModuleList()
if not isinstance(bbox_roi_extractor, list):
bbox_roi_extractor = [
bbox_roi_extractor for _ in range(num_stages)
]
if not isinstance(bbox_head, list):
bbox_head = [bbox_head for _ in range(num_stages)]
assert len(bbox_roi_extractor) == len(bbox_head) == self.num_stages
for roi_extractor, head in zip(bbox_roi_extractor, bbox_head):
self.bbox_roi_extractor.append(
builder.build_roi_extractor(roi_extractor))
self.bbox_head.append(builder.build_bbox_head(head))
if mask_head is not None:
self.mask_head = nn.ModuleList()
if not isinstance(mask_head, list):
mask_head = [mask_head for _ in range(num_stages)]
assert len(mask_head) == self.num_stages
for head in mask_head:
self.mask_head.append(builder.build_mask_head(head))
if mask_roi_extractor is not None:
self.mask_roi_extractor = nn.ModuleList()
if not isinstance(mask_roi_extractor, list):
mask_roi_extractor = [
mask_roi_extractor for _ in range(num_stages)
]
assert len(mask_roi_extractor) == self.num_stages
for roi_extractor in mask_roi_extractor:
self.mask_roi_extractor.append(
builder.build_roi_extractor(roi_extractor))
self.normalize = normalize
self.with_bias = normalize is None
if adj_gt is not None:
self.adj_gt = pickle.load(open(adj_gt, 'rb'))
self.adj_gt = np.float32(self.adj_gt)
self.adj_gt = nn.Parameter(torch.from_numpy(self.adj_gt), requires_grad=False)
# init cmp attention
self.cmp_attention = nn.ModuleList()
self.cmp_attention.append(
ConvModule(1024, 1024 // 16,
3, stride=2, padding=1, normalize=self.normalize, bias=self.with_bias))
self.cmp_attention.append(
nn.Linear(1024 // 16, bbox_head[0]['in_channels'] + 1))
# init graph w
self.graph_out_channels = graph_out_channels
self.graph_weight_fc = nn.Linear(bbox_head[0]['in_channels'] + 1, self.graph_out_channels)
self.relu = nn.ReLU(inplace=True)
# shared upper neck
in_channels = rpn_head['in_channels']
if shared_num_fc > 0:
in_channels *= (roi_feat_size * roi_feat_size)
self.branch_fcs = nn.ModuleList()
for i in range(shared_num_fc):
fc_in_channels = (in_channels
if i == 0 else bbox_head[0]['in_channels'])
self.branch_fcs.append(
nn.Linear(fc_in_channels, bbox_head[0]['in_channels']))
self.train_cfg = train_cfg
self.test_cfg = test_cfg
self.init_weights(pretrained=pretrained)
@property
def with_rpn(self):
return hasattr(self, 'rpn_head') and self.rpn_head is not None
def init_weights(self, pretrained=None):
super(ReasoningRCNN, self).init_weights(pretrained)
self.backbone.init_weights(pretrained=pretrained)
if self.with_neck:
if isinstance(self.neck, nn.Sequential):
for m in self.neck:
m.init_weights()
else:
self.neck.init_weights()
if self.with_rpn:
self.rpn_head.init_weights()
for i in range(self.num_stages):
if self.with_bbox:
self.bbox_roi_extractor[i].init_weights()
self.bbox_head[i].init_weights()
if self.with_mask_roi_extractor:
self.mask_roi_extractor[i].init_weights()
if self.with_mask:
self.mask_head[i].init_weights()
def extract_feat(self, img):
x = self.backbone(img)
if self.with_neck:
x = self.neck(x)
return x
def forward_upper_neck(self, x, stage):
if self.with_share_upper_neck:
x = self.upper_neck(x)
elif self.with_unshare_upper_neck:
x = self.upper_neck[stage](x)
return x
def forward_train(self,
img,
img_meta,
gt_bboxes,
gt_bboxes_ignore,
gt_labels,
gt_masks=None,
proposals=None):
x = self.extract_feat(img)
# precmp attention
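        # Resize every feature map in x[1:] to the spatial size of x[2] and concatenate
        # them channel-wise into the global feature fed to the attention layers.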
if len(x) > 1:
base_feat = []
for b_f in x[1:]:
base_feat.append(
F.interpolate(b_f, scale_factor=(x[2].size(2) / b_f.size(2), x[2].size(3) / b_f.size(3))))
base_feat = torch.cat(base_feat, 1)
else:
base_feat = torch.cat(x, 1)
for ops in self.cmp_attention:
base_feat = ops(base_feat)
if len(base_feat.size()) > 2:
base_feat = base_feat.mean(3).mean(2)
else:
base_feat = self.relu(base_feat)
losses = dict()
if self.with_rpn:
rpn_outs = self.rpn_head(x)
rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta,
self.train_cfg.rpn)
rpn_losses = self.rpn_head.loss(*rpn_loss_inputs)
losses.update(rpn_losses)
proposal_inputs = rpn_outs + (img_meta, self.test_cfg.rpn)
proposal_list = self.rpn_head.get_proposals(*proposal_inputs)
else:
proposal_list = proposals
for i in range(self.num_stages):
rcnn_train_cfg = self.train_cfg.rcnn[i]
lw = self.train_cfg.stage_loss_weights[i]
# add reasoning process
if i > 0:
# 1.build global semantic pool
global_semantic_pool = torch.cat((bbox_head.fc_cls.weight,
bbox_head.fc_cls.bias.unsqueeze(1)), 1).detach()
# 2.compute graph attention
attention_map = nn.Softmax(1)(torch.mm(base_feat, torch.transpose(global_semantic_pool, 0, 1)))
# 3.adaptive global reasoning
alpha_em = attention_map.unsqueeze(-1) * torch.mm(self.adj_gt, global_semantic_pool).unsqueeze(0)
alpha_em = alpha_em.view(-1, global_semantic_pool.size(-1))
alpha_em = self.graph_weight_fc(alpha_em)
alpha_em = self.relu(alpha_em)
# enhanced_feat = torch.mm(nn.Softmax(1)(cls_score), alpha_em)
n_classes = bbox_head.fc_cls.weight.size(0)
cls_prob = nn.Softmax(1)(cls_score).view(len(img_meta), -1, n_classes)
enhanced_feat = torch.bmm(cls_prob, alpha_em.view(len(img_meta), -1, self.graph_out_channels))
enhanced_feat = enhanced_feat.view(-1, self.graph_out_channels)
# assign gts and sample proposals
assign_results, sampling_results = multi_apply(
assign_and_sample,
proposal_list,
gt_bboxes,
gt_bboxes_ignore,
gt_labels,
cfg=rcnn_train_cfg)
# bbox head forward and loss
bbox_roi_extractor = self.bbox_roi_extractor[i]
bbox_head = self.bbox_head[i]
rois, rois_index = bbox2roi(
[(res.pos_bboxes, res.neg_bboxes) for res in sampling_results],
return_index=True)
bbox_feats = bbox_roi_extractor(x[:bbox_roi_extractor.num_inputs],
rois)
# without upperneck
bbox_feats = bbox_feats.view(bbox_feats.size(0), -1)
for fc in self.branch_fcs:
bbox_feats = self.relu(fc(bbox_feats))
# cat with enhanced feature
if i > 0:
bbox_feats = torch.cat([bbox_feats, enhanced_feat], 1)
cls_score, bbox_pred = bbox_head(bbox_feats)
bbox_targets = bbox_head.get_target(sampling_results, gt_bboxes,
gt_labels, rcnn_train_cfg)
loss_bbox = bbox_head.loss(cls_score, bbox_pred, *bbox_targets)
for name, value in loss_bbox.items():
losses['s{}.{}'.format(
i, name)] = (value * lw if 'loss' in name else value)
# mask head forward and loss
if self.with_mask:
if self.with_mask_roi_extractor:
mask_roi_extractor = self.mask_roi_extractor[i]
pos_rois = bbox2roi(
[res.pos_bboxes for res in sampling_results])
mask_feats = mask_roi_extractor(
x[:mask_roi_extractor.num_inputs], pos_rois)
mask_feats = self.forward_upper_neck(mask_feats, i)
else:
pos_inds = (rois_index == 0)
mask_feats = bbox_feats[pos_inds]
mask_head = self.mask_head[i]
mask_pred = mask_head(mask_feats)
mask_targets = mask_head.get_target(sampling_results, gt_masks,
rcnn_train_cfg)
pos_labels = torch.cat(
[res.pos_gt_labels for res in sampling_results])
loss_mask = mask_head.loss(mask_pred, mask_targets, pos_labels)
for name, value in loss_mask.items():
losses['s{}.{}'.format(
i, name)] = (value * lw if 'loss' in name else value)
# refine bboxes
if i < self.num_stages - 1:
pos_is_gts = [res.pos_is_gt for res in sampling_results]
roi_labels = bbox_targets[0] # bbox_targets is a tuple
with torch.no_grad():
proposal_list = bbox_head.refine_bboxes(
rois, roi_labels, bbox_pred, pos_is_gts, img_meta)
return losses
def simple_test(self, img, img_meta, proposals=None, rescale=False):
x = self.extract_feat(img)
# precmp attention
if len(x) > 1:
base_feat = []
for b_f in x[1:]:
base_feat.append(
F.interpolate(b_f, scale_factor=(x[2].size(2) / b_f.size(2), x[2].size(3) / b_f.size(3))))
base_feat = torch.cat(base_feat, 1)
else:
base_feat = torch.cat(x, 1)
for ops in self.cmp_attention:
base_feat = ops(base_feat)
if len(base_feat.size()) > 2:
base_feat = base_feat.mean(3).mean(2)
else:
base_feat = self.relu(base_feat)
proposal_list = self.simple_test_rpn(
x, img_meta, self.test_cfg.rpn) if proposals is None else proposals
img_shape = img_meta[0]['img_shape']
ori_shape = img_meta[0]['ori_shape']
scale_factor = img_meta[0]['scale_factor']
# "ms" in variable names means multi-stage
ms_bbox_result = {}
ms_segm_result = {}
ms_scores = []
rcnn_test_cfg = self.test_cfg.rcnn
rois = bbox2roi(proposal_list)
for i in range(self.num_stages):
# add reasoning process
if i > 0:
# transform CxC classes graph to region
# 1.build global semantic pool
global_semantic_pool = torch.cat((bbox_head.fc_cls.weight,
bbox_head.fc_cls.bias.unsqueeze(1)), 1).detach()
# 2.compute graph attention
attention_map = nn.Softmax(1)(torch.mm(base_feat, torch.transpose(global_semantic_pool, 0, 1)))
# 3.adaptive global reasoning
alpha_em = attention_map.unsqueeze(-1) * torch.mm(self.adj_gt, global_semantic_pool).unsqueeze(0)
alpha_em = alpha_em.view(-1, global_semantic_pool.size(-1))
alpha_em = self.graph_weight_fc(alpha_em)
alpha_em = self.relu(alpha_em)
n_classes = bbox_head.fc_cls.weight.size(0)
cls_prob = nn.Softmax(1)(cls_score).view(len(img_meta), -1, n_classes)
enhanced_feat = torch.bmm(cls_prob, alpha_em.view(len(img_meta), -1, self.graph_out_channels))
enhanced_feat = enhanced_feat.view(-1, self.graph_out_channels)
bbox_roi_extractor = self.bbox_roi_extractor[i]
bbox_head = self.bbox_head[i]
bbox_feats = bbox_roi_extractor(
x[:len(bbox_roi_extractor.featmap_strides)], rois)
# bbox_feats = self.forward_upper_neck(bbox_feats, i)
# without upperneck
bbox_feats = bbox_feats.view(bbox_feats.size(0), -1)
for fc in self.branch_fcs:
bbox_feats = self.relu(fc(bbox_feats))
# cat with enhanced feature
if i > 0:
bbox_feats = torch.cat([bbox_feats, enhanced_feat], 1)
cls_score, bbox_pred = bbox_head(bbox_feats)
ms_scores.append(cls_score)
if self.test_cfg.keep_all_stages:
det_bboxes, det_labels = bbox_head.get_det_bboxes(
rois,
cls_score,
bbox_pred,
img_shape,
scale_factor,
rescale=rescale,
cfg=rcnn_test_cfg)
bbox_result = bbox2result(det_bboxes, det_labels,
bbox_head.num_classes)
ms_bbox_result['stage{}'.format(i)] = bbox_result
if self.with_mask:
if self.with_mask_roi_extractor:
mask_roi_extractor = self.mask_roi_extractor[i]
else:
mask_roi_extractor = self.bbox_roi_extractor[i]
mask_head = self.mask_head[i]
if det_bboxes.shape[0] == 0:
segm_result = [
[] for _ in range(mask_head.num_classes - 1)
]
else:
_bboxes = (det_bboxes[:, :4] * scale_factor
if rescale else det_bboxes)
mask_rois = bbox2roi([_bboxes])
mask_feats = mask_roi_extractor(
x[:len(mask_roi_extractor.featmap_strides)],
mask_rois)
mask_feats = self.forward_upper_neck(mask_feats, i)
mask_pred = mask_head(mask_feats)
segm_result = mask_head.get_seg_masks(
mask_pred, _bboxes, det_labels, rcnn_test_cfg,
ori_shape, scale_factor, rescale)
ms_segm_result['stage{}'.format(i)] = segm_result
if i < self.num_stages - 1:
bbox_label = cls_score.argmax(dim=1)
rois = bbox_head.regress_by_class(rois, bbox_label, bbox_pred,
img_meta[0])
cls_score = sum(ms_scores) / self.num_stages
det_bboxes, det_labels = self.bbox_head[-1].get_det_bboxes(
rois,
cls_score,
bbox_pred,
img_shape,
scale_factor,
rescale=rescale,
cfg=rcnn_test_cfg)
bbox_result = bbox2result(det_bboxes, det_labels,
self.bbox_head[-1].num_classes)
ms_bbox_result['ensemble'] = bbox_result
if self.with_mask:
if det_bboxes.shape[0] == 0:
segm_result = [
[] for _ in range(self.mask_head[-1].num_classes - 1)
]
else:
_bboxes = (det_bboxes[:, :4] * scale_factor
if rescale else det_bboxes)
mask_rois = bbox2roi([_bboxes])
aug_masks = []
for i in range(self.num_stages):
if self.with_mask_roi_extractor:
mask_roi_extractor = self.mask_roi_extractor[i]
else:
mask_roi_extractor = self.bbox_roi_extractor[i]
mask_feats = mask_roi_extractor(
x[:len(mask_roi_extractor.featmap_strides)], mask_rois)
mask_feats = self.forward_upper_neck(mask_feats, i)
mask_pred = self.mask_head[i](mask_feats)
aug_masks.append(mask_pred.sigmoid().cpu().numpy())
merged_masks = merge_aug_masks(aug_masks,
[img_meta] * self.num_stages,
self.test_cfg.rcnn)
segm_result = self.mask_head[-1].get_seg_masks(
merged_masks, _bboxes, det_labels, rcnn_test_cfg,
ori_shape, scale_factor, rescale)
ms_segm_result['ensemble'] = segm_result
if not self.test_cfg.keep_all_stages:
if self.with_mask:
results = (ms_bbox_result['ensemble'],
ms_segm_result['ensemble'])
else:
results = ms_bbox_result['ensemble']
else:
if self.with_mask:
results = {
stage: (ms_bbox_result[stage], ms_segm_result[stage])
for stage in ms_bbox_result
}
else:
results = ms_bbox_result
return results
def aug_test(self, img, img_meta, proposals=None, rescale=False):
raise NotImplementedError
def show_result(self, data, result, img_norm_cfg, **kwargs):
if self.with_mask:
ms_bbox_result, ms_segm_result = result
if isinstance(ms_bbox_result, dict):
result = (ms_bbox_result['ensemble'],
ms_segm_result['ensemble'])
else:
if isinstance(result, dict):
result = result['ensemble']
super(ReasoningRCNN, self).show_result(data, result, img_norm_cfg,
**kwargs) |
wikipron/__init__.py | wannaphong/wikipron | 111 | 12704689 |
"""Scrapes grapheme-to-phoneme data from Wiktionary."""
import pkg_resources
from wikipron.config import Config
from wikipron.scrape import scrape
__version__ = pkg_resources.get_distribution("wikipron").version
__all__ = ["__version__", "Config", "scrape"]
|
scripts/legend.py | oscargus/cheatsheets | 6,062 | 12704774 | # -----------------------------------------------------------------------------
# Matplotlib cheat sheet
# Released under the BSD License
# -----------------------------------------------------------------------------
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(4, 4))
ax = fig.add_axes([0.15, 0.15, .7, .7], frameon=True, aspect=1,
xticks=[], yticks=[])
def text(x, y, _text):
color= "C1"
if not 0 < x < 1 or not 0 < y < 1: color = "C0"
size = 0.15
ax.text(x, y, _text, color="white", # bbox={"color": "C1"},
size="xx-large", weight="bold", ha="center", va="center")
rect = plt.Rectangle((x-size/2, y-size/2), size, size, facecolor=color,
zorder=-10, clip_on=False)
ax.add_patch(rect)
def point(x, y):
ax.scatter([x], [y], facecolor="C0", edgecolor="white",
zorder=10, clip_on=False)
d = .1
e = .15/2
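# d: inset of the numbered/lettered boxes from the axes edge; e: half the box size,
# used to offset the anchor dots so they sit on the box corners and edge midpoints.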
text( d, d, "3"), text( 0.5, d, "8"), text(1-d, d, "4")
text( d, 0.5, "6"), text( 0.5, 0.5, "10"), text(1-d, 0.5, "7")
text( d, 1-d, "2"), text( 0.5, 1-d, "9"), text(1-d, 1-d, "1")
text( -d, 1-d, "A"), text( -d, 0.5, "B"), text( -d, d, "C")
point(-d+e, 1-d+e), point(-d+e, 0.5), point(-d+e, d-e),
text( d, -d, "D"), text(0.5, -d, "E"), text( 1-d, -d, "F")
point(d-e, -d+e), point(0.5, -d+e), point(1-d+e, -d+e),
text(1+d, d, "G"), text(1+d, 0.5, "H"), text( 1+d, 1-d, "I")
point(1+d-e, d-e), point(1+d-e, .5), point(1+d-e, 1-d+e),
text(1-d, 1+d, "J"), text(0.5, 1+d, "K"), text( d, 1+d, "L")
point(1-d+e, 1+d-e), point(0.5, 1+d-e), point(d-e, 1+d-e),
plt.xlim(0, 1), plt.ylim(0, 1)
plt.savefig("../figures/legend-placement.pdf")
# plt.show()
|
setup.py | skypjsfly/bustag | 4,197 | 12704807 | import io
import os
import re
from setuptools import find_packages
from setuptools import setup
from bustag import __version__
def read(filename):
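    # Read a file next to setup.py and rewrite Sphinx :role:`target` references
    # to plain ``target`` literals so the long description renders outside Sphinx.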
filename = os.path.join(os.path.dirname(__file__), filename)
text_type = type(u"")
with io.open(filename, mode="r", encoding='utf-8') as fd:
return re.sub(text_type(r':[a-z]+:`~?(.*?)`'), text_type(r'``\1``'), fd.read())
setup(
name="bustag",
version=__version__,
url="https://github.com/gxtrobot/bustag",
license='MIT',
author="gxtrobot",
author_email="<EMAIL>",
description="a tag and recommend system for old bus driver",
long_description=read("README.md"),
packages=find_packages(exclude=('tests',)),
install_requires=[],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
|
tests/test_tls.py | mguc/aiocoap | 229 | 12704824 |
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 <NAME> <http://sixpinetrees.blogspot.com/>,
# 2013-2014 <NAME> <<EMAIL>>
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import json
import tempfile
import shutil
import subprocess
import unittest
import sys
import aiocoap
from .test_server import WithClient, WithTestServer, run_fixture_as_standalone_server
from .fixtures import no_warnings, asynctest
from .common import tcp_disabled
IS_STANDALONE = False
class WithTLSServer(WithTestServer):
def setUp(self):
self.keydir = tempfile.mkdtemp(suffix="-testkeypair")
self.keyfile = self.keydir + '/key.pem'
self.certfile = self.keydir + '/cert.pem'
self.credentialsfile = self.keydir + '/credentials.json'
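        # Generate a throw-away self-signed certificate (valid for 5 days) whose
        # CN is the test server's name alias.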
subprocess.check_call([
'openssl',
'req',
'-x509',
'-newkey', 'rsa:4096',
'-keyout', self.keyfile,
'-out', self.certfile,
'-days', '5',
'-nodes', '-subj', '/CN=%s' % self.servernamealias
],
stderr=subprocess.DEVNULL,
)
# Write out for the benefit of standalone clients during debugging
with open(self.credentialsfile, 'w') as of:
json.dump({
'coaps+tcp://%s/*' % self.servernamealias: {'tlscert': { 'certfile': self.certfile }}
}, of)
if IS_STANDALONE:
print("To test, run ./aiocoap-client coaps+tcp://%s/whoami --credentials %s" % (self.servernamealias, self.credentialsfile,))
super().setUp()
def tearDown(self):
super().tearDown()
shutil.rmtree(self.keydir)
def get_server_ssl_context(self):
import ssl
# FIXME: copied from aiocoap.cli.common
ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ssl_context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile)
ssl_context.set_alpn_protocols(["coap"])
if hasattr(ssl_context, 'sni_callback'): # starting python 3.7
ssl_context.sni_callback = lambda obj, name, context: setattr(obj, "indicated_server_name", name)
return ssl_context
class WithTLSClient(WithClient):
# This expects that something -- typically the colocated WithTestServer -- sets certfile first
def setUp(self):
super().setUp()
self.client.client_credentials['coaps+tcp://%s/*' % self.servernamealias] = aiocoap.credentials.TLSCert(certfile=self.certfile)
@unittest.skipIf(tcp_disabled, "TCP disabled in environment")
class TestTLS(WithTLSServer, WithTLSClient):
@no_warnings
@asynctest
async def test_tls(self):
request = aiocoap.Message(code=aiocoap.GET)
request.set_request_uri('coaps+tcp://%s/whoami' % self.servernamealias, set_uri_host=False)
response = await self.client.request(request).response_raising
response = json.loads(response.payload)
self.assertEqual(response['requested_uri'], 'coaps+tcp://%s/whoami' % self.servernamealias, "SNI name was not used by the server")
if sys.version_info < (3, 7):
test_tls = unittest.expectedFailure(test_tls) # SNI support was only added in Python 3.7
if 'PyPy' in sys.version:
# For PyPy exclusion, see https://foss.heptapod.net/pypy/pypy/-/issues/3359
# Completely skipping a test that causes segfaults
test_tls = None
if __name__ == "__main__":
# due to the imports, you'll need to run this as `python3 -m tests.test_server`
IS_STANDALONE = True
import logging
logging.basicConfig(level=logging.DEBUG)
run_fixture_as_standalone_server(TestTLS)
|
tests/test_parser/test_rdfxml.py | cthoyt/pronto | 182 | 12704843 |
import io
import os
import unittest
import warnings
import xml.etree.ElementTree as etree
import pronto
class TestRdfXMLParser(unittest.TestCase):
@staticmethod
def get_ontology(content):
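        # Wrap the snippet in a minimal RDF/XML envelope with the usual OBO
        # namespaces and parse it as an in-memory ontology.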
xml = f"""
<rdf:RDF xmlns="http://purl.obolibrary.org/obo/TEMP#"
xml:base="http://purl.obolibrary.org/obo/TEMP"
xmlns:obo="http://purl.obolibrary.org/obo/"
xmlns:owl="http://www.w3.org/2002/07/owl#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:doap="http://usefulinc.com/ns/doap#"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:oboInOwl="http://www.geneontology.org/formats/oboInOwl#">
{content}
</rdf:RDF>
"""
s = io.BytesIO(xml.encode('utf-8'))
return pronto.Ontology(s, import_depth=0)
def setUp(self):
warnings.simplefilter("error")
def tearDown(self):
warnings.simplefilter(warnings.defaultaction)
# ---
def test_iao(self):
warnings.simplefilter("ignore")
path = os.path.join(__file__, "..", "..", "data", "iao.owl")
iao = pronto.Ontology(os.path.realpath(path))
self.assertEqual(len(iao.terms()), 245)
def test_aeo(self):
warnings.simplefilter("ignore")
path = os.path.join(__file__, "..", "..", "data", "aeo.owl")
aeo = pronto.Ontology(os.path.realpath(path))
self.assertEqual(len(aeo.terms()), 250)
self.assertEqual(len(aeo.relationships()), 11)
self.assertEqual(aeo["AEO:0000099"].name, "keratin-based structure")
self.assertEqual(len(aeo["AEO:0000099"].definition.xrefs), 1)
def test_invalid_xml_file(self):
self.assertRaises(ValueError, self.get_ontology, "")
# ------------------------------------------------------------------------
def test_metadata_auto_generated_by(self):
ont = self.get_ontology(
"""
<owl:Ontology>
<oboInOwl:auto-generated-by>pronto</oboInOwl:auto-generated-by>
</owl:Ontology>
"""
)
self.assertEqual(ont.metadata.auto_generated_by, "pronto")
def test_metadata_default_namespace(self):
ont = self.get_ontology(
"""
<owl:Ontology>
<oboInOwl:hasDefaultNamespace rdf:datatype="http://www.w3.org/2001/XMLSchema#string">thing</oboInOwl:hasDefaultNamespace>
</owl:Ontology>
"""
)
self.assertEqual(ont.metadata.default_namespace, "thing")
def test_metadata_data_version(self):
# owl:versionrIRI
ont = self.get_ontology(
"""
<owl:Ontology rdf:about="http://purl.obolibrary.org/obo/ms.owl">
<owl:versionIRI rdf:resource="http://purl.obolibrary.org/obo/ms/4.1.30/ms.owl"/>
</owl:Ontology>
"""
)
self.assertEqual(ont.metadata.ontology, "ms")
self.assertEqual(ont.metadata.data_version, "4.1.30")
# doap:Version
ont2 = self.get_ontology(
"<owl:Ontology><doap:Version>0.1.0</doap:Version></owl:Ontology>"
)
self.assertEqual(ont2.metadata.data_version, "0.1.0")
def test_metadata_format_version(self):
ont = self.get_ontology(
"""
<owl:Ontology>
<oboInOwl:hasOBOFormatVersion>1.2</oboInOwl:hasOBOFormatVersion>
</owl:Ontology>
"""
)
self.assertEqual(ont.metadata.format_version, "1.2")
def test_metadata_imports(self):
ont = self.get_ontology(
"""
<owl:Ontology>
<owl:imports rdf:resource="http://purl.obolibrary.org/obo/ms.obo"/>
</owl:Ontology>
"""
)
self.assertIn("http://purl.obolibrary.org/obo/ms.obo", ont.metadata.imports)
def test_metadata_saved_by(self):
ont = self.get_ontology(
"""
<owl:Ontology>
<oboInOwl:savedBy><NAME></oboInOwl:savedBy>
</owl:Ontology>
"""
)
self.assertEqual(ont.metadata.saved_by, "<NAME>")
# ------------------------------------------------------------------------
def test_term_consider(self):
# Extract from `oboInOwl:consider` text
ont = self.get_ontology(
"""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:consider rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:002</oboInOwl:consider>
</owl:Class>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002"/>
"""
)
self.assertIn("TST:001", ont)
self.assertIn("TST:002", ont)
self.assertIn(ont["TST:002"], ont["TST:001"].consider)
# Extract from `oboInOwl:consider` RDF resource
ont2 = self.get_ontology(
"""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:consider rdf:resource="http://purl.obolibrary.org/obo/TST_002"/>
</owl:Class>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002"/>
"""
)
self.assertIn("TST:001", ont2)
self.assertIn("TST:002", ont2)
self.assertIn(ont2["TST:002"], ont2["TST:001"].consider)
def test_term_definition_as_property(self):
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<obo:IAO_0000115 rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a term</obo:IAO_0000115>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
""")
self.assertIn("TST:001", ont)
self.assertEqual(ont["TST:001"].definition, "a term")
self.assertEqual(len(ont["TST:001"].definition.xrefs), 0)
def test_term_definition_as_axiom(self):
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<obo:IAO_0000115 rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a term</obo:IAO_0000115>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
<owl:Axiom>
<owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/>
<owl:annotatedProperty rdf:resource="http://purl.obolibrary.org/obo/IAO_0000115"/>
<owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a term</owl:annotatedTarget>
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref>
</owl:Axiom>
""")
self.assertIn("TST:001", ont)
self.assertEqual(ont["TST:001"].definition, "a term")
self.assertEqual(list(ont["TST:001"].definition.xrefs)[0], pronto.Xref("ISBN:1234"))
def test_term_multiple_labels(self):
txt = """
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<rdfs:label>A</rdfs:label>
<rdfs:label>B</rdfs:label>
</owl:Class>
"""
# check multiple labels is a syntax error in error mode
with warnings.catch_warnings():
warnings.simplefilter("error", pronto.warnings.SyntaxWarning)
with self.assertRaises(SyntaxError):
ont = self.get_ontology(txt)
# check multiple labels is fine in ignore mode
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology(txt)
self.assertIn(ont['TST:001'].name, ["A", "B"])
def test_term_subclass_of(self):
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002">
<rdfs:subClassOf rdf:resource="http://purl.obolibrary.org/obo/TST_001"/>
</owl:Class>
""")
self.assertIn(ont["TST:001"], ont["TST:002"].superclasses().to_set())
self.assertIn(ont["TST:002"], ont["TST:001"].subclasses().to_set())
def test_term_subset(self):
ont = self.get_ontology("""
<owl:Ontology rdf:about="http://purl.obolibrary.org/obo/tst.owl"/>
<owl:AnnotationProperty rdf:about="http://purl.obolibrary.org/obo/tst#ss">
<rdfs:comment rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a subset</rdfs:comment>
<rdfs:subPropertyOf rdf:resource="http://www.geneontology.org/formats/oboInOwl#SubsetProperty"/>
</owl:AnnotationProperty>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
<oboInOwl:inSubset rdf:resource="http://purl.obolibrary.org/obo/tst#ss"/>
</owl:Class>
""")
self.assertIn("TST:001", ont)
self.assertEqual(ont["TST:001"].subsets, {"ss"})
def test_term_synonym_as_property(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:hasExactSynonym rdf:datatype="http://www.w3.org/2001/XMLSchema#string">stuff</oboInOwl:hasExactSynonym>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
""")
self.assertIn("TST:001", ont)
self.assertEqual(len(ont["TST:001"].synonyms), 1)
syn = next(iter(ont["TST:001"].synonyms))
self.assertEqual(syn.description, "stuff")
self.assertEqual(syn.scope, "EXACT")
self.assertEqual(syn.xrefs, set())
def test_term_synonym_as_axiom(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:hasExactSynonym rdf:datatype="http://www.w3.org/2001/XMLSchema#string">stuff</oboInOwl:hasExactSynonym>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
<owl:Axiom>
<owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/>
<owl:annotatedProperty rdf:resource="http://www.geneontology.org/formats/oboInOwl#hasExactSynonym"/>
<owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">stuff</owl:annotatedTarget>
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref>
</owl:Axiom>
""")
self.assertIn("TST:001", ont)
self.assertEqual(len(ont["TST:001"].synonyms), 1)
syn = next(iter(ont["TST:001"].synonyms))
self.assertEqual(syn.description, "stuff")
self.assertEqual(syn.scope, "EXACT")
self.assertEqual(syn.xrefs, {pronto.Xref("ISBN:1234")})
def test_term_relationship(self):
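        # An owl:someValuesFrom restriction over RO_0002202 should surface as a
        # `develops_from` relationship from TST:002 to TST:001 (asserted below).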
ont = self.get_ontology("""
<owl:Ontology/>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/RO_0002202">
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#TransitiveProperty"/>
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">RO:0002202</oboInOwl:hasDbXref>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string"></oboInOwl:id>
<oboInOwl:shorthand rdf:datatype="http://www.w3.org/2001/XMLSchema#string">develops_from</oboInOwl:shorthand>
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">develops from</rdfs:label>
</owl:ObjectProperty>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001"/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_002">
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://purl.obolibrary.org/obo/RO_0002202"/>
<owl:someValuesFrom rdf:resource="http://purl.obolibrary.org/obo/TST_001"/>
</owl:Restriction>
</rdfs:subClassOf>
</owl:Class>
""")
self.assertIn("develops_from", [r.id for r in ont.relationships()])
develops_from = ont.get_relationship("develops_from")
self.assertIn(ont["TST:001"], ont["TST:002"].relationships[develops_from])
def test_term_xref_as_property_resource(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref>
<oboInOwl:id rdf:resource="http://purl.obolibrary.org/obo/ISBN_1234"/>
</owl:Class>
""")
self.assertEqual(len(ont["TST:001"].xrefs), 1)
self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234")
def test_term_xref_as_property_text(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
""")
self.assertEqual(len(ont["TST:001"].xrefs), 1)
self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234")
def test_term_xref_as_axiom_without_description(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
<owl:Axiom>
<owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/>
<owl:annotatedProperty rdf:resource="http://www.geneontology.org/formats/oboInOwl#hasDbXref"/>
<owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</owl:annotatedTarget>
</owl:Axiom>
""")
self.assertEqual(len(ont["TST:001"].xrefs), 1)
self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234")
self.assertEqual(list(ont["TST:001"].xrefs)[0].description, None)
def test_term_xref_as_axiom_with_description(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology("""
<owl:Ontology/>
<owl:Class rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:hasDbXref rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</oboInOwl:hasDbXref>
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
</owl:Class>
<owl:Axiom>
<owl:annotatedSource rdf:resource="http://purl.obolibrary.org/obo/TST_001"/>
<owl:annotatedProperty rdf:resource="http://www.geneontology.org/formats/oboInOwl#hasDbXref"/>
<owl:annotatedTarget rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ISBN:1234</owl:annotatedTarget>
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a great book</rdfs:label>
</owl:Axiom>
""")
self.assertEqual(len(ont["TST:001"].xrefs), 1)
self.assertEqual(list(ont["TST:001"].xrefs)[0].id, "ISBN:1234")
self.assertEqual(list(ont["TST:001"].xrefs)[0].description, "a great book")
# ------------------------------------------------------------------------
def test_relationship_cyclic(self):
ont = self.get_ontology(
"""
<owl:Ontology/>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
<oboInOwl:is_cyclic rdf:datatype="http://www.w3.org/2001/XMLSchema#boolean">true</oboInOwl:is_cyclic>
</owl:ObjectProperty>
"""
)
self.assertIn("TST:001", ont.relationships())
self.assertTrue(ont.get_relationship("TST:001").cyclic)
def test_relationship_functional(self):
ont = self.get_ontology(
"""
<owl:Ontology/>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#FunctionalProperty"/>
</owl:ObjectProperty>
"""
)
self.assertIn("TST:001", ont.relationships())
self.assertTrue(ont.get_relationship("TST:001").functional)
def test_relationship_multiple_labels(self):
txt = """
<owl:Ontology/>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001">
<rdfs:label>A</rdfs:label>
<rdfs:label>B</rdfs:label>
</owl:ObjectProperty>
"""
# check multiple labels is a syntax error in error mode
with warnings.catch_warnings():
warnings.simplefilter("error", pronto.warnings.SyntaxWarning)
with self.assertRaises(SyntaxError):
ont = self.get_ontology(txt)
# check multiple labels is fine in ignore mode
with warnings.catch_warnings():
warnings.simplefilter("ignore", pronto.warnings.SyntaxWarning)
ont = self.get_ontology(txt)
self.assertIn(ont.get_relationship('TST:001').name, ["A", "B"])
def test_relationship_reflexive(self):
ont = self.get_ontology(
"""
<owl:Ontology/>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#ReflexiveProperty"/>
</owl:ObjectProperty>
"""
)
self.assertIn("TST:001", ont.relationships())
self.assertTrue(ont.get_relationship("TST:001").reflexive)
def test_relationship_subset(self):
ont = self.get_ontology("""
<owl:Ontology rdf:about="http://purl.obolibrary.org/obo/tst.owl"/>
<owl:AnnotationProperty rdf:about="http://purl.obolibrary.org/obo/tst#ss">
<rdfs:comment rdf:datatype="http://www.w3.org/2001/XMLSchema#string">a subset</rdfs:comment>
<rdfs:subPropertyOf rdf:resource="http://www.geneontology.org/formats/oboInOwl#SubsetProperty"/>
</owl:AnnotationProperty>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/tst#friend_of">
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">friend_of</oboInOwl:id>
<oboInOwl:inSubset rdf:resource="http://purl.obolibrary.org/obo/tst#ss"/>
</owl:ObjectProperty>
""")
self.assertIn("friend_of", ont.relationships())
self.assertEqual(ont.get_relationship("friend_of").subsets, {"ss"})
def test_relationship_symmetric(self):
ont = self.get_ontology(
"""
<owl:Ontology/>
<owl:ObjectProperty rdf:about="http://purl.obolibrary.org/obo/TST_001">
<oboInOwl:id rdf:datatype="http://www.w3.org/2001/XMLSchema#string">TST:001</oboInOwl:id>
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#SymmetricProperty"/>
</owl:ObjectProperty>
"""
)
self.assertIn("TST:001", ont.relationships())
self.assertTrue(ont.get_relationship("TST:001").symmetric)
|
examples/comparison.py | dojitza/ddnn | 110 | 12704846 | import os
import sys
sys.path.append('..')
import argparse
import chainer
from elaas.elaas import Collection
from elaas.family.simple import SimpleHybridFamily
from elaas.family.binary import BinaryFamily
from elaas.family.float import FloatFamily
from elaas.family.multi_input_edge_with_dropout import MultiInputEdgeDropoutFamily
from visualize import visualize
import deepopt.chooser
import matplotlib
matplotlib.rcParams['font.size'] = 20.0
import matplotlib.pyplot as plt
def max_acc(trace):
acc = 0
best_idx = 0
for i, t in enumerate(trace):
if t['action'] == 'add_point':
            if t['y'] > acc:
                acc = t['y']
                best_idx = i  # track the index of the best point, not just the last one
return acc, best_idx
model_dict = {
"binary": BinaryFamily,
"float": FloatFamily
}
def train_model(args, model_type, nfilters):
trainer = Collection(model_type, args.save_dir, nepochs=args.epochs, verbose=args.verbose)
trainer.set_model_family(model_dict[model_type])
train, test = chainer.datasets.get_mnist(ndim=3)
data_shape = train._datasets[0].shape[1:]
trainer.add_trainset(train)
trainer.add_testset(test)
trainer.set_searchspace(
nfilters_embeded=[nfilters],
nlayers_embeded=[2],
lr=[1e-3]
)
res = trainer.train(niters=args.iters, bootstrap_nepochs=args.bootstrap_epochs)
return max_acc(res)[0]
parser = argparse.ArgumentParser(description='Training Simple eBNN model')
parser.add_argument('-s', '--save_dir', default='_models')
parser.add_argument('-c', '--c_file', default=os.path.join('c', 'simple.h'))
parser.add_argument('--inter_file', default=os.path.join('c', 'inter_simple.h'))
parser.add_argument('-i', '--iters', type=int, default=10)
parser.add_argument('-e', '--epochs', type=int, default=20)
parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2)
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--gen_inter', action='store_true')
args = parser.parse_args()
names = ['float', 'binary']
accs = {name: [] for name in names}
mem = {name: [] for name in names}
for i in [1,2,3,4,5]:
acc = train_model(args, 'float', i)
print(acc)
accs['float'].append(acc*100)
mem['float'].append(i*32*32*9)
print("====")
binary_accs = []
binary_mem = []
for i in [1,3,5,10,20,40,80,160]:
acc = train_model(args, 'binary', i)
print(acc)
accs['binary'].append(acc*100)
mem['binary'].append(i*32*9)
#plot code
linewidth = 4
ms = 8
colors = {'binary': '#FF944D', 'float': '#FF8F80'}
styles = {'binary': '-o', 'float': '-.o'}
plt.figure(figsize=(8, 6.5))
for name in names:
plt.plot(mem[name], accs[name], styles[name],
linewidth=linewidth, ms=ms, color=colors[name],
label=name)
plt.xlabel('Memory (bits)')
plt.ylabel('Classification Accuracy (%)')
plt.legend(loc=0, prop={'size': 14})
plt.tight_layout()
plt.grid()
plt.savefig("comparison_2layer.png")
plt.clf()
|
utils/dcrf.py | loserbbb/1-stage-wseg | 364 | 12704866 | import numpy as np
import pydensecrf.densecrf as dcrf
from pydensecrf.utils import unary_from_softmax
def crf_inference(img, probs, t=10, scale_factor=1, labels=21):
h, w = img.shape[:2]
n_labels = labels
d = dcrf.DenseCRF2D(w, h, n_labels)
unary = unary_from_softmax(probs)
unary = np.ascontiguousarray(unary)
d.setUnaryEnergy(unary)
d.addPairwiseGaussian(sxy=3/scale_factor, compat=3)
d.addPairwiseBilateral(sxy=80/scale_factor, srgb=13, rgbim=np.copy(img), compat=10)
Q = d.inference(t)
return np.array(Q).reshape((n_labels, h, w))
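# Minimal usage sketch with synthetic inputs (illustrative only; shapes and values are
# assumptions, not data from the original project): `probs` is class-first softmax
# output of shape (labels, H, W) and `img` is the matching HxWx3 uint8 image.
if __name__ == "__main__":
    img = np.random.randint(0, 255, (120, 160, 3), dtype=np.uint8)
    probs = np.random.rand(21, 120, 160).astype(np.float32)
    probs /= probs.sum(axis=0, keepdims=True)  # make each pixel's class scores sum to 1
    refined = crf_inference(img, probs, t=5, labels=21)
    print(refined.shape)  # (21, 120, 160)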
|
courses/unstructured/01-dataprocML.py | laurenzberger/training-data-analyst | 6,140 | 12704888 | <gh_stars>1000+
#!/usr/bin/env python
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
This program takes a sample line of text, passes it to a Natural Language Processing
service (sentiment analysis), and processes the results in Python.
'''
import logging
import argparse
import json
import os
from googleapiclient.discovery import build
from pyspark import SparkContext
sc = SparkContext("local", "Simple App")
'''
You must set these values for the job to run.
'''
APIKEY="your-api-key" # CHANGE
print(APIKEY)
PROJECT_ID="your-project-id" # CHANGE
print(PROJECT_ID)
BUCKET="your-bucket" # CHANGE
## Wrappers around the NLP REST interface
def SentimentAnalysis(text):
from googleapiclient.discovery import build
lservice = build('language', 'v1beta1', developerKey=APIKEY)
response = lservice.documents().analyzeSentiment(
body={
'document': {
'type': 'PLAIN_TEXT',
'content': text
}
}).execute()
return response
## main
sampleline = 'There are places I remember, all my life though some have changed.'
#
# Calling the Natural Language Processing REST interface
#
results = SentimentAnalysis(sampleline)
#
# What is the service returning?
#
print("Function returns: ", type(results))
print(json.dumps(results, sort_keys=True, indent=4))
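# A sketch of pulling the overall sentiment out of the response dict
# (assumes the v1beta1 response shape, i.e. a 'documentSentiment' entry carrying
# 'polarity' and 'magnitude'; .get() keeps this safe if the shape differs).
sentiment = results.get('documentSentiment', {})
print("polarity:", sentiment.get('polarity'), "magnitude:", sentiment.get('magnitude'))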
|
mesh/node.py | eric-downes/mesh-networking | 368 | 12704912 | # -*- coding: utf-8 -*-
# MIT License: <NAME>
import random
import threading
import time
from collections import defaultdict
try:
from queue import Queue
except ImportError:
from Queue import Queue
from .filters import LoopbackFilter
# Physical Layer (copper, fiber, audio, wireless)
# Link Layer (ethernet, ARP, PPP): links.py
# Network Layer (IPv4, IPv6, ICMP, MeshP): scapy
# Transport Layer (TCP, UDP, SCTP): scapy
# Nodes connect to each other over links. The node has a runloop that pulls packets off the link's incoming packet Queue,
# runs them through its list of filters, then places them in the node's incoming packet queue for that interface, node.inq.
# The Node's Program has a separate runloop in a different thread that is constantly calling node.inq.get().
# The program does something with the packet (like print it to the screen, or reply with "ACK"), and sends any outgoing responses
# by calling the Node's send() method directly. The Node runs the packet through its outgoing packet filters in order, then
# if it wasn't dropped, calls the network interface's .send() method to push it over the network.
# --> incoming packet queue | -> pulls packets off link's inq -> filters -> node.inq | -> pulls packets off the node's inq
# [LINK] | [NODE] | [PROGRAM]
# <-- outgoing Link.send() | <---- outgoing filters <----- Node.send() <----- | <- sends responses by calling Node.send()
class Node(threading.Thread):
"""a Node represents a computer. node.interfaces contains the list of network links the node is connected to.
Nodes process incoming traffic through their filters, then place packets in their inq for their Program to handle.
Programs process packets off the node's incoming queue, then send responses out through node's outbound filters,
and finally out to the right network interface.
"""
def __init__(self, interfaces=None, name="n1", promiscuous=False, mac_addr=None, Filters=(), Program=None):
threading.Thread.__init__(self)
self.name = name
self.interfaces = interfaces or []
self.keep_listening = True
self.promiscuous = promiscuous
self.mac_addr = mac_addr or self._generate_MAC(6, 2)
self.inq = defaultdict(Queue) # TODO: convert to bounded ring-buffer
self.filters = [LoopbackFilter()] + [F() for F in Filters] # initialize the filters that shape incoming and outgoing traffic before it hits the program
self.program = Program(node=self) if Program else None # init the program that will be processing incoming packets
def __repr__(self):
return "[{0}]".format(self.name)
def __str__(self):
return self.__repr__()
@staticmethod
def _generate_MAC(segments=6, segment_length=2, delimiter=":", charset="0123456789abcdef"):
"""generate a non-guaranteed-unique mac address"""
addr = []
for _ in range(segments):
sub = ''.join(random.choice(charset) for _ in range(segment_length))
addr.append(sub)
return delimiter.join(addr)
def log(self, *args):
"""stdout and stderr for the node"""
print("%s %s" % (str(self).ljust(8), " ".join(str(x) for x in args)))
def stop(self):
self.keep_listening = False
if self.program:
self.program.stop()
self.join()
return True
### Runloop
def run(self):
"""runloop that gets triggered by node.start()
reads new packets off the link and feeds them to recv()
"""
if self.program:
self.program.start()
while self.keep_listening:
for interface in self.interfaces:
packet = interface.recv(self.mac_addr if not self.promiscuous else "00:00:00:00:00:00")
if packet:
self.recv(packet, interface)
time.sleep(0.01)
self.log("Stopped listening.")
### IO
def recv(self, packet, interface):
"""run incoming packet through the filters, then place it in its inq"""
# the packet is piped into the first filter, then the result of that into the second filter, etc.
for f in self.filters:
if not packet:
break
packet = f.tr(packet, interface)
if packet:
# if the packet wasn't dropped by a filter, log the recv and place it in the interface's inq
# self.log("IN ", str(interface).ljust(30), packet.decode())
self.inq[interface].put(packet)
def send(self, packet, interfaces=None):
"""write packet to given interfaces, default is broadcast to all interfaces"""
interfaces = interfaces or self.interfaces # default to all interfaces
interfaces = interfaces if hasattr(interfaces, '__iter__') else [interfaces]
for interface in interfaces:
for f in self.filters:
packet = f.tx(packet, interface) # run outgoing packet through the filters
if packet:
# if not dropped, log the transmit and pass it to the interface's send method
# self.log("OUT ", ("<"+",".join(i.name for i in interfaces)+">").ljust(30), packet.decode())
interface.send(packet)
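# Typical wiring, as a sketch only: `VirtualLink` and `Printer` are illustrative
# names; real link classes live in links.py and programs are passed via `Program=`.
#
#   link = VirtualLink("vlan1")
#   node = Node(interfaces=[link], name="n1", Program=Printer)
#   link.start(); node.start()      # the node starts pulling packets off the link
#   node.send(b"hello")             # outbound filters run, then link.send() is called
#   node.stop(); link.stop()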
|
h2o-py/tests/testdir_misc/pyunit_backend_errors.py | vishalbelsare/h2o-3 | 6,098 | 12704919 | <filename>h2o-py/tests/testdir_misc/pyunit_backend_errors.py
import sys
sys.path.insert(1,"../../")
import h2o
from h2o.estimators import H2OGradientBoostingEstimator
from h2o.exceptions import H2OResponseError
from h2o.schemas import H2OErrorV3, H2OModelBuilderErrorV3
from tests import pyunit_utils as pu
def test_backend_error():
try:
h2o.api("GET /3/Foo", data=dict(bar='baz'))
assert False, "API call should have failed"
except H2OResponseError as e:
backend_err = e.args[0]
assert isinstance(backend_err, H2OErrorV3)
assert backend_err.endpoint == "GET /3/Foo"
assert backend_err.payload == (None, None, None, dict(bar='baz')) # yeah! because on GET, data becomes params and turns into None, this is so confusing!
assert backend_err.http_status == 404
assert isinstance(backend_err.stacktrace, list)
assert len(backend_err.stacktrace) > 10
assert backend_err.stacktrace[0] == "water.exceptions.H2ONotFoundArgumentException: Resource /3/Foo not found"
assert backend_err.msg == "Resource /3/Foo not found"
assert backend_err.dev_msg == backend_err.msg
assert backend_err.exception_msg == backend_err.msg
assert backend_err.exception_type == "water.exceptions.H2ONotFoundArgumentException"
assert backend_err.error_url == "Resource /3/Foo"
assert backend_err.timestamp > 0
assert len(backend_err.values) == 0
def test_model_builds_error():
try:
df = h2o.import_file(path=pu.locate("smalldata/prostate/prostate.csv"))
gbm = H2OGradientBoostingEstimator()
gbm.train(y=-1, training_frame=df, offset_column="foo")
assert False, "model training should have failed"
except H2OResponseError as e:
mb_err = e.args[0]
assert isinstance(mb_err, H2OModelBuilderErrorV3)
assert mb_err.endpoint == "POST /3/ModelBuilders/gbm"
data = mb_err.payload[0]
assert data is not None
assert data['offset_column'] == 'foo'
assert mb_err.http_status == 412 # see H2OIllegalArgumentException
assert isinstance(mb_err.stacktrace, list)
assert len(mb_err.stacktrace) > 10
assert "water.exceptions.H2OModelBuilderIllegalArgumentException: Illegal argument(s) for GBM model" in mb_err.stacktrace[0]
assert "ERRR on field: _offset_column: Offset column 'foo' not found in the training frame" in mb_err.msg
assert mb_err.dev_msg == mb_err.msg
assert mb_err.exception_msg == mb_err.msg
assert mb_err.exception_type == "water.exceptions.H2OModelBuilderIllegalArgumentException"
assert mb_err.error_url == "/3/ModelBuilders/gbm"
assert mb_err.timestamp > 0
assert len(mb_err.values) == 4
assert {'algo', 'error_count', 'messages', 'parameters'} == set(mb_err.values.keys())
assert mb_err.values['algo'] == 'GBM'
assert mb_err.values['error_count'] == 4 # no idea why 4, but adding it to test as it's interesting
assert mb_err.values['parameters']['_offset_column'] == 'foo'
assert len(mb_err.values['messages']) > 1
msgs_lev_1 = [m for m in mb_err.values['messages'] if m['_log_level'] == 1]
assert len(msgs_lev_1) == 2
assert msgs_lev_1[0] == msgs_lev_1[1] # it is duplicated indeed!
assert msgs_lev_1[0]['_field_name'] == '_offset_column'
assert msgs_lev_1[0]['_message'] == "Offset column 'foo' not found in the training frame"
# specific to H2OModelBuilderErrorV3
assert mb_err.error_count == mb_err.values['error_count']
assert len(mb_err.messages) == len(mb_err.values['messages'])
assert len(mb_err.parameters) < len(mb_err.values['parameters']) # no idea what's the difference there, outside that on the left side, parameters are accessible with the full schema
pu.run_tests([
test_backend_error,
test_model_builds_error
])
|
test/game/test_tree.py | MAWUT0R/PokerRL | 247 | 12704974 | <filename>test/game/test_tree.py
# Copyright (c) 2019 <NAME>
import unittest
from unittest import TestCase
import numpy as np
from PokerRL.game._.tree.PublicTree import PublicTree
from PokerRL.game.games import StandardLeduc, DiscretizedNLLeduc
from PokerRL.game.wrappers import HistoryEnvBuilder
class TestGameTree(TestCase):
def test_building(self):
_get_leduc_tree()
_get_nl_leduc_tree()
def test_vs_env_obs(self):
for game in ["limit", "nl"]:
if game == "limit":
env, env_args = _get_new_leduc_env()
dummy_env, env_args = _get_new_leduc_env()
tree = _get_leduc_tree(env_args=env_args)
else:
env, env_args = _get_new_nl_leduc_env()
dummy_env, env_args = _get_new_nl_leduc_env()
tree = _get_nl_leduc_tree(env_args=env_args)
lut_holder = StandardLeduc.get_lut_holder()
env.reset()
dummy_env.reset()
node = tree.root
# RAISE .. stays preflop
legal = env.get_legal_actions()
a = 2
assert a in legal
o, r, d, i = env.step(a)
node = node.children[legal.index(a)]
dummy_env.load_state_dict(node.env_state)
tree_o = dummy_env.get_current_obs(is_terminal=False)
env.print_obs(o)
env.print_obs(tree_o)
assert np.array_equal(o, tree_o)
# CALL .. goes flop
legal = env.get_legal_actions()
a = 1
assert a in legal
o, r, d, i = env.step(a)
node = node.children[legal.index(1)]
card_that_came_in_env = lut_holder.get_1d_card(env.board[0])
node = node.children[card_that_came_in_env]
dummy_env.load_state_dict(node.env_state)
tree_o = dummy_env.get_current_obs(is_terminal=False)
assert np.array_equal(o, tree_o)
# RAISE .. stays flop
legal = env.get_legal_actions()
a = legal[-1]
assert a in legal
o, r, d, i = env.step(a)
node = node.children[legal.index(a)]
dummy_env.load_state_dict(node.env_state)
tree_o = dummy_env.get_current_obs(is_terminal=False)
assert np.array_equal(o, tree_o)
def _get_leduc_tree(env_args=None):
if env_args is None:
env_args = StandardLeduc.ARGS_CLS(n_seats=2,
)
env_bldr = HistoryEnvBuilder(env_cls=StandardLeduc, env_args=env_args)
_tree = PublicTree(
env_bldr=env_bldr,
stack_size=env_args.starting_stack_sizes_list,
stop_at_street=None
)
_tree.build_tree()
for p in range(env_bldr.N_SEATS):
_tree.fill_uniform_random()
_tree.compute_ev()
_tree.export_to_file()
print("Tree with stack size", _tree.stack_size, "has", _tree.n_nodes, "nodes out of which", _tree.n_nonterm,
"are non-terminal.")
print(np.mean(_tree.root.exploitability) * env_bldr.env_cls.EV_NORMALIZER)
return _tree
def _get_nl_leduc_tree(env_args=None):
if env_args is None:
env_args = DiscretizedNLLeduc.ARGS_CLS(n_seats=2,
starting_stack_sizes_list=[1000, 1000],
bet_sizes_list_as_frac_of_pot=[1.0]
)
env_bldr = HistoryEnvBuilder(env_cls=DiscretizedNLLeduc, env_args=env_args)
_tree = PublicTree(
env_bldr=env_bldr,
stack_size=env_args.starting_stack_sizes_list,
stop_at_street=None,
)
_tree.build_tree()
for p in range(env_bldr.N_SEATS):
_tree.fill_uniform_random()
_tree.compute_ev()
_tree.export_to_file()
print("Tree with stack size", _tree.stack_size, "has", _tree.n_nodes, "nodes out of which", _tree.n_nonterm,
"are non-terminal.")
print(np.mean(_tree.root.exploitability) * env_bldr.env_cls.EV_NORMALIZER)
return _tree
def _get_new_leduc_env(env_args=None):
if env_args is None:
env_args = StandardLeduc.ARGS_CLS(n_seats=2,
starting_stack_sizes_list=[150, 150],
)
return StandardLeduc(env_args=env_args, is_evaluating=True, lut_holder=StandardLeduc.get_lut_holder()), env_args
def _get_new_nl_leduc_env(env_args=None):
if env_args is None:
env_args = DiscretizedNLLeduc.ARGS_CLS(n_seats=2,
bet_sizes_list_as_frac_of_pot=[1.0, 1000.0]
)
return DiscretizedNLLeduc(env_args=env_args, is_evaluating=True,
lut_holder=DiscretizedNLLeduc.get_lut_holder()), env_args
if __name__ == '__main__':
unittest.main()
|
tractseg/experiments/pretrained_models/TractSeg_All_xtract_PeakRot4.py | inaccel/TractSeg | 148 | 12704985 | <gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from tractseg.data import dataset_specific_utils
from tractseg.experiments.tract_seg import Config as TractSegConfig
class Config(TractSegConfig):
EXP_NAME = os.path.basename(__file__).split(".")[0]
DATASET = "HCP_all"
DATASET_FOLDER = "HCP_preproc_all"
FEATURES_FILENAME = "32g90g270g_CSD_BX"
CLASSES = "xtract"
NR_OF_CLASSES = len(dataset_specific_utils.get_bundle_names(CLASSES)[1:])
RESOLUTION = "1.25mm"
LABELS_FILENAME = "bundle_masks_xtract_thr001"
NUM_EPOCHS = 300
EPOCH_MULTIPLIER = 0.5
DAUG_ROTATE = True
SPATIAL_TRANSFORM = "SpatialTransformPeaks"
    # rotation: 2*np.pi = 360 degrees (-> 0.8 ~ 45 degrees, 0.4 ~ 22 degrees)
DAUG_ROTATE_ANGLE = (-0.4, 0.4) |
xar/xar_util.py | backwardn/xar | 1,477 | 12704997 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import collections
import contextlib
import errno
import logging
import os
import shutil
import stat
import struct
import subprocess
import sys
import tempfile
import time
import uuid
logger = logging.getLogger("xar")
if os.path.exists("/etc/centos-release"):
NOGROUP = "nobody"
else:
# Works for debian and darwin for sure
NOGROUP = "nogroup"
def make_uuid():
# ugh line length limit; we need a small uuid
return str(uuid.uuid1()).split("-")[0]
def _align_offset(offset, align=4096):
"""Aligns the offset to the given alignment"""
mask = align - 1
assert (mask & align) == 0
return (offset + mask) & (~mask)
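# e.g. _align_offset(4096) == 4096 and _align_offset(5000) == 8192
# (rounds up to the next multiple of `align`, which must be a power of two).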
def find_mksquashfs():
# Prefer these paths, if none exist fall back to user's $PATH
paths = ["/usr/sbin/mksquashfs", "/sbin/mksquashfs"]
for path in paths:
if os.path.isfile(path) and os.access(path, os.X_OK):
return path
return "mksquashfs"
class SquashfsOptions(object):
def __init__(self):
self.mksquashfs = find_mksquashfs()
self.compression_algorithm = "zstd"
self.zstd_level = 16
self.block_size = 256 * 1024
class XarFactory(object):
"""A class for creating XAR files.
    Pretty straightforward: take an input directory, an output file, and some
    metadata, and produce a XAR file of the contents.
"""
def __init__(self, dirname, output, header_prefix):
self.dirname = dirname
self.output = output
self.header_prefix = header_prefix
self.xar_header = {}
self.uuid = None
self.version = None
self.sort_file = None
self.squashfs_options = SquashfsOptions()
def go(self):
"Make the XAR file."
logger.info("Squashing %s to %s" % (self.dirname, self.output))
if self.uuid is None:
self.uuid = make_uuid()
if self.version is None:
self.version = time.time()
tf = tempfile.NamedTemporaryFile(delete=False)
# Create!
sqopts = self.squashfs_options
cmd = [
sqopts.mksquashfs,
self.dirname,
tf.name,
"-noappend",
"-noI",
"-noX", # is this worth it? probably
"-force-uid",
"nobody",
"-force-gid",
NOGROUP,
"-b",
str(sqopts.block_size),
"-comp",
sqopts.compression_algorithm,
]
if sqopts.compression_algorithm == "zstd":
cmd.extend(("-Xcompression-level", str(sqopts.zstd_level)))
if self.sort_file:
cmd.extend(["-sort", self.sort_file])
if sys.stdout.isatty():
subprocess.check_call(cmd)
else:
with open("/dev/null", "wb") as f:
subprocess.check_call(cmd, stdout=f)
headers = [self.header_prefix]
# Take the squash file, create a header, and write it
with open(self.output, "wb") as of:
# Make a "safe" header that is easily parsed and also not
# going to explode if accidentally executed.
headers.append('OFFSET="$OFFSET"')
headers.append('UUID="$UUID"')
headers.append('VERSION="%d"' % self.version)
for key, val in self.xar_header.items():
headers.append('%s="%s"' % (key, str(val).replace('"', " ")))
headers.append("#xar_stop")
headers.append("echo This XAR file should not be executed by sh")
headers.append("exit 1")
headers.append("# Actual squashfs file begins at $OFFSET")
text_headers = "\n".join(headers) + "\n"
# 128 is to account for expansion of $OFFSET and $UUID;
# it's well over what they might reasonably be.
header_size = _align_offset(128 + len(text_headers))
text_headers = text_headers.replace("$OFFSET", "%d" % header_size)
text_headers = text_headers.replace("$UUID", self.uuid)
text_headers += "\n" * (header_size - len(text_headers))
of.write(text_headers.encode("UTF-8"))
# Now append the squashfs file to the header.
with open(tf.name, "rb") as rf:
while True:
data = rf.read(1024 * 1024)
if not data:
break
of.write(data)
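# Usage sketch (illustrative paths/values; header_prefix is whatever shebang the
# resulting XAR should start with, e.g. "#!/usr/bin/env xarexec_fuse"):
#
#   factory = XarFactory("/tmp/staging", "/tmp/out.xar", "#!/usr/bin/env xarexec_fuse")
#   factory.squashfs_options.zstd_level = 19
#   factory.go()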
def safe_mkdir(directory):
try:
os.makedirs(directory)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
def safe_remove(filename):
try:
os.unlink(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def safe_rmtree(directory):
if os.path.exists(directory):
shutil.rmtree(directory, True)
# Simplified version of Chroot from PEX
class StagingDirectory(object):
"""
Manages the staging directory.
"""
class Error(Exception):
pass
def __init__(self, staging_dir=None):
self._staging = os.path.normpath(staging_dir or tempfile.mkdtemp())
safe_mkdir(self._staging)
def __deepcopy__(self, memo):
other = StagingDirectory()
memo[id(self)] = other
other.copytree(self._staging)
return other
def _normalize(self, dst):
dst = os.path.normpath(dst)
if dst.startswith(os.sep) or dst.startswith(".."):
raise self.Error("Destination path '%s' is not a relative!" % dst)
return dst
def _ensure_parent(self, dst):
safe_mkdir(os.path.dirname(self.absolute(dst)))
def _ensure_not_dst(self, dst):
if self.exists(dst):
raise self.Error("Destination path '%s' already exists!" % dst)
def path(self):
"""Returns the root directory of the staging directory."""
return self._staging
def absolute(self, dst=None):
"""Returns absolute path for a path relative to staging directory."""
if dst is None:
return self._staging
dst = self._normalize(dst)
return os.path.normpath(os.path.join(self._staging, dst))
def delete(self):
"""Delete the staging directory."""
safe_rmtree(self._staging)
def copy(self, src, dst):
"""Copy src into dst under the staging directory."""
dst = self._normalize(dst)
self._ensure_parent(dst)
self._ensure_not_dst(dst)
shutil.copy2(src, self.absolute(dst))
def write(self, data, dst, mode, permissions):
"""Write data into dst."""
dst = self._normalize(dst)
self._ensure_parent(dst)
self._ensure_not_dst(dst)
with open(self.absolute(dst), mode) as f:
f.write(data)
os.chmod(self.absolute(dst), permissions)
@contextlib.contextmanager
def postprocess(self, src):
fpath = self.absolute(src)
st = os.stat(fpath)
old_times = (st.st_atime, st.st_mtime)
with tempfile.NamedTemporaryFile(
prefix=fpath + ".", mode="w", delete=False
) as outf:
with open(fpath) as inf:
yield inf, outf
outf.flush()
os.utime(outf.name, old_times)
shutil.copystat(fpath, outf.name)
os.rename(outf.name, fpath)
def _resolve_dst_dir(self, dst):
if dst is None:
# Replace the current staging directory
if os.listdir(self._staging) != []:
raise self.Error("Staging directory is not empty!")
# shutil requires that the destination directory does not exist
safe_rmtree(self._staging)
dst = "."
dst = self._normalize(dst)
self._ensure_not_dst(dst)
return dst
def copytree(self, src, dst=None):
"""Copy src dir into dst under the staging directory."""
dst = self._resolve_dst_dir(dst)
shutil.copytree(src, self.absolute(dst))
def symlink(self, link, dst):
"""Write symbolic link to dst under the staging directory."""
dst = self._normalize(dst)
self._ensure_parent(dst)
self._ensure_not_dst(dst)
os.symlink(link, self.absolute(dst))
def move(self, src, dst):
"""Move src into dst under the staging directory."""
dst = self._normalize(dst)
self._ensure_parent(dst)
self._ensure_not_dst(dst)
shutil.move(src, self.absolute(dst))
def exists(self, dst):
"""Checks if dst exists under the staging directory."""
dst = self._normalize(dst)
return os.path.exists(self.absolute(dst))
def extract(self, zf, dst=None):
"""Extracts the zipfile into dst under the staging directory."""
dst = self._resolve_dst_dir(dst)
abs_dst = os.path.join(self._staging, dst)
timestamps = {}
for zi in zf.infolist():
filename = os.path.join(dst, zi.filename)
destination = self.absolute(filename)
mode = zi.external_attr >> 16
if stat.S_ISLNK(mode):
target = zf.read(zi).decode("utf-8")
self.symlink(target, filename)
else:
self._ensure_parent(filename)
zf.extract(zi, path=abs_dst)
os.chmod(destination, stat.S_IMODE(mode))
                # Use the embedded timestamp from the pyc file for the
# pyc and py file; otherwise, use the timezone-less
# timestamp from the zipfile (sigh).
if filename.endswith(".pyc"):
new_time = extract_pyc_timestamp(destination)
timestamps[destination] = new_time # pyc file
timestamps[destination[:-1]] = new_time # py file too
else:
new_time = tuple((list(zi.date_time) + [0, 0, -1]))
timestamps[destination] = time.mktime(new_time)
# Set our timestamps.
for path, timestamp in timestamps.items():
try:
os.utime(path, (timestamp, timestamp))
except OSError as e:
# Sometimes we had a pyc file but no py file; the utime
# would fail.
if not path.endswith(".py"):
raise e
class TemporaryFile(object):
"""Wrapper around a temporary file that supports deepcopy()."""
def __init__(self):
with tempfile.NamedTemporaryFile(mode="w+", delete=False) as f:
self._filename = f.name
def open(self, mode=None):
return open(self._filename, mode)
def name(self):
return self._filename
def delete(self):
safe_remove(self._filename)
def __deepcopy__(self, memo):
other = TemporaryFile()
memo[id(self)] = other
with self.open("rb") as src, other.open("wb") as dst:
shutil.copyfileobj(src, dst)
return other
# Simple class to represent a partition destination. Each destination
# is a path and a uuid from which the contents come (ie, the uuid of
# the spar file that contains the file that is moved into the
# partition; used for symlink construction).
PartitionDestination = collections.namedtuple("PartitionDestination", "staging uuid")
def partition_files(staging, extension_destinations):
"""Partition source_dir into multiple output directories.
A partition is defined by extension_destinations which maps suffixes (such
as ".debuginfo") to a PartitionDestination instance.
dest_dir contains all files that aren't in a partition, and symlinks for
ones that are. symlinks are relative and of the form
"../../../uuid/path/to/file" so that the final symlinks are correct
relative to /mnt/xar/....
"""
source_dir = staging.path()
source_dir = source_dir.rstrip("/")
for dirpath, _dirnames, filenames in os.walk(staging.path()):
# path relative to source_dir; used for creating the right
# file inside the staging dir
relative_dirname = dirpath[len(source_dir) + 1 :]
# Special case; if a file is in the root of source_dir, then
# relative_dirname is empty, but that has the same number of
        # '/' as just 'bin', so we need to special-case the empty
# value.
if not relative_dirname:
relative_depth = 1
else:
relative_depth = 2 + relative_dirname.count("/")
for filename in filenames:
# Does this extension map to a separate output?
_, extension = os.path.splitext(filename)
dest_base = extension_destinations.get(extension, None)
# This path stays in the source staging directory
if dest_base is None:
continue
# This file is destined for another tree, make a
# relative symlink in source pointing to the
# sub-xar destination.
relative_path = os.path.join(relative_dirname, filename)
source_path = staging.absolute(relative_path)
dest_base.staging.move(source_path, relative_path)
dependency_mountpoint = dest_base.uuid
staging_symlink = os.path.join(
"../" * relative_depth, dependency_mountpoint, relative_path
)
logging.info("%s %s" % (staging_symlink, source_path))
staging.symlink(staging_symlink, relative_path)
def write_sort_file(staging_dir, extension_priorities, sort_file):
"""
Write a sort file for mksquashfs to colocate some files at the beginning.
Files are assigned priority by extension, with files earlier in the list
appearing first. The result is written to the file object sort_file.
mksquashfs takes the sort file with the option '-sort sort_filename'.
"""
for dirpath, _dirname, filenames in os.walk(staging_dir):
for filename in filenames:
fn = os.path.join(dirpath, filename)
for idx, suffix in enumerate(extension_priorities):
if fn.endswith(suffix):
# Default priority is 0; make ours all
# negative so we can not list files with
# spaces in the name, making them default
# to 0
priority = idx - len(extension_priorities) - 1
break
assert fn.startswith(staging_dir + "/")
fn = fn[len(staging_dir) + 1 :]
# Older versions of mksquashfs don't like spaces
# in filenames; let them have the default priority
# of 0.
if " " not in fn:
sort_file.write("%s %d\n" % (fn, priority))
def extract_pyc_timestamp(path):
"Extract the embedded timestamp from a pyc file"
# A PYC file has a four byte header then four byte timestamp. The
# timestamp must match the timestamp on the py file, otherwise the
# interpreter will attempt to re-compile the py file. We extract
# the timestamp to adulterate the py/pyc files before squashing
# them.
with open(path, "rb") as fh:
prefix = fh.read(8)
return struct.unpack(b"<I", prefix[4:])[0]
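# Layout parsed above (classic, pre-PEP-552 .pyc header): bytes 0-3 hold the import
# magic number and bytes 4-7 the little-endian mtime of the source .py file, which is
# why struct.unpack("<I", prefix[4:]) recovers the embedded timestamp.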
def file_in_zip(zf, filename):
"""Returns True if :filename: is present in the zipfile :zf:."""
try:
zf.getinfo(filename)
return True
except KeyError:
return False
def yield_prefixes_reverse(path):
"""
Yields all prefixes of :path: in reverse.
list(yield_prefixes_reverse("/a/b")) == ["/a/b", "/a", "/"]
list(yield_prefixes_reverse("a/b")) == ["a/b", "a", ""]
"""
old = None
while path != old:
yield path
old = path
path, _ = os.path.split(path)
|
preprocessors/abstract_preprocessor.py | slowy07/tensor2robot | 456 | 12705002 | # coding=utf-8
# Copyright 2021 The Tensor2Robot Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as python3
"""The abstract preprocessor, handling boilerplate validation."""
import abc
from typing import Any, Callable, Optional, Tuple
import six
from tensor2robot.utils import tensorspec_utils
import tensorflow.compat.v1 as tf
ModeKeys = tf.estimator.ModeKeys
class AbstractPreprocessor(six.with_metaclass(abc.ABCMeta, object)):
"""A per example preprocessing function executed prior to the model_fn.
Note, our preprocessor is invoked for a batch of features and labels.
  If the _preprocess_fn can only operate on a batch size of one, please use
  tf.map_fn as described in _preprocess_fn.
"""
def __init__(
self,
model_feature_specification_fn = None,
model_label_specification_fn = None,
is_model_device_tpu = False):
"""Initialize an instance.
The provided specifications are used both for the in and out specification.
The _preprocess_fn will not alter the provided tensors.
Args:
model_feature_specification_fn: (Optional) A function which takes mode as
an argument and returns a valid spec structure for the features,
        preferably a (hierarchical) namedtuple of TensorSpecs and
OptionalTensorSpecs.
model_label_specification_fn: (Optional) A function which takes mode as an
argument and returns a valid spec structure for the labels, preferably a
        (hierarchical) namedtuple of TensorSpecs and OptionalTensorSpecs.
is_model_device_tpu: True if the model is operating on TPU and otherwise
False. This information is useful to do type conversions and strip
unnecessary information from preprocessing since no summaries are
generated on TPUs.
"""
for spec_generator in [
model_feature_specification_fn, model_label_specification_fn
]:
for estimator_mode in [ModeKeys.TRAIN, ModeKeys.PREDICT, ModeKeys.EVAL]:
if spec_generator:
tensorspec_utils.assert_valid_spec_structure(
spec_generator(estimator_mode))
self._model_feature_specification_fn = model_feature_specification_fn
self._model_label_specification_fn = model_label_specification_fn
self._is_model_device_tpu = is_model_device_tpu
@property
def model_feature_specification_fn(self):
return self._model_feature_specification_fn
@model_feature_specification_fn.setter
def model_feature_specification_fn(self, model_feature_specification_fn):
self._model_feature_specification_fn = model_feature_specification_fn
@property
def model_label_specification_fn(self):
return self._model_label_specification_fn
@model_label_specification_fn.setter
def model_label_specification_fn(self, model_label_specification_fn):
self._model_label_specification_fn = model_label_specification_fn
@abc.abstractmethod
def get_in_feature_specification(
self, mode):
"""The specification for the input features for the preprocess_fn.
Arguments:
mode: mode key for this feature specification
Returns:
A TensorSpecStruct describing the required and optional tensors.
"""
@abc.abstractmethod
def get_in_label_specification(
self, mode):
"""The specification for the input labels for the preprocess_fn.
Arguments:
mode: mode key for this feature specification
Returns:
A TensorSpecStruct describing the required and optional tensors.
"""
@abc.abstractmethod
def get_out_feature_specification(
self, mode):
"""The specification for the output features after executing preprocess_fn.
Arguments:
mode: mode key for this feature specification
Returns:
A TensorSpecStruct describing the required and optional tensors.
"""
@abc.abstractmethod
def get_out_label_specification(
self, mode):
"""The specification for the output labels after executing preprocess_fn.
Arguments:
mode: mode key for this feature specification
Returns:
A TensorSpecStruct describing the required and optional tensors.
"""
@abc.abstractmethod
def _preprocess_fn(
self, features,
labels, mode
):
"""The preprocessing function which will be executed prior to the model_fn.
Note, _preprocess_fn is invoked for a batch of features and labels.
If the _preprocess_fn can only operate on batch_size one please use
the following pattern.
def _fn(features_single_batch, labels_single_batch):
# The actual implementation
    return tf.map_fn(
_fn, # The single batch implementation
(features, labels), # Our nested structure, the first dimension unpacked
dtype=(self.get_out_feature_specification(),
               self.get_out_label_specification()),
back_prop=False,
parallel_iterations=self._parallel_iterations)
Args:
features: The input features extracted from a single example in our
in_feature_specification format.
labels: (Optional None) The input labels extracted from a single example
in our in_label_specification format.
mode: (ModeKeys) Specifies if this is training, evaluation or prediction.
Returns:
features_preprocessed: The preprocessed features, potentially adding
additional tensors derived from the input features.
labels_preprocessed: (Optional) The preprocessed labels, potentially
adding additional tensors derived from the input features and labels.
"""
def preprocess(
self, features,
labels, mode
):
"""The function which preprocesses the features and labels per example.
Note, this function performs the boilerplate packing and flattening and
verification of the features and labels according to our spec. The actual
preprocessing is performed by _preprocess_fn.
Args:
features: The features of a single example.
labels: (Optional None) The labels of a single example.
mode: (ModeKeys) Specifies if this is training, evaluation or prediction.
Returns:
features_preprocessed: The preprocessed and flattened features
verified to fulfill our output specs.
labels_preprocessed: (Optional None) The preprocessed and flattened labels
verified to fulfill our output specs.
"""
# First, we verify that the input features and labels fulfill our spec.
# We further pack the flattened features and labels to our (hierarchical)
# specification.:
features = tensorspec_utils.validate_and_pack(
expected_spec=self.get_in_feature_specification(mode),
actual_tensors_or_spec=features,
ignore_batch=True)
if labels is not None:
labels = tensorspec_utils.validate_and_pack(
expected_spec=self.get_in_label_specification(mode),
actual_tensors_or_spec=labels,
ignore_batch=True)
features_preprocessed, labels_preprocessed = self._preprocess_fn(
features=features, labels=labels, mode=mode)
features_preprocessed = tensorspec_utils.validate_and_flatten(
expected_spec=self.get_out_feature_specification(mode),
actual_tensors_or_spec=features_preprocessed,
ignore_batch=True)
if labels_preprocessed:
labels_preprocessed = tensorspec_utils.validate_and_flatten(
expected_spec=self.get_out_label_specification(mode),
actual_tensors_or_spec=labels_preprocessed,
ignore_batch=True)
return features_preprocessed, labels_preprocessed
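# Sketch of a minimal concrete subclass (assumption-driven example: it reuses the model
# specs unchanged on both the "in" and "out" sides, matching the __init__ note that the
# provided specs serve as both the in and out specification):
#
#   class NoOpPreprocessor(AbstractPreprocessor):
#     def get_in_feature_specification(self, mode):
#       return self._model_feature_specification_fn(mode)
#     def get_in_label_specification(self, mode):
#       return self._model_label_specification_fn(mode)
#     get_out_feature_specification = get_in_feature_specification
#     get_out_label_specification = get_in_label_specification
#     def _preprocess_fn(self, features, labels, mode):
#       return features, labels  # pass-through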
|
{{cookiecutter.project_slug}}/pages/migrations/0013_videopage_videopagecarouselitem.py | lendlsmith/wagtail-cookiecutter-foundation | 182 | 12705003 | <filename>{{cookiecutter.project_slug}}/pages/migrations/0013_videopage_videopagecarouselitem.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-08-08 13:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0019_delete_filter'),
('wagtaildocs', '0007_merge'),
('wagtailcore', '0039_collectionviewrestriction'),
('pages', '0012_auto_20170606_1319'),
]
operations = [
migrations.CreateModel(
name='VideoPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('template_string', models.CharField(choices=[(b'pages/video_gallery_page.html', b'Videos Page')], default=b'pages/video_gallery_page.html', max_length=255)),
('feed_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='VideoPageCarouselItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('link_external', models.URLField(blank=True, verbose_name=b'External link')),
('embed_url', models.URLField(blank=True, verbose_name=b'Embed URL')),
('caption', wagtail.core.fields.RichTextField(blank=True)),
('image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('link_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtaildocs.Document')),
('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page')),
('page', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='carousel_items', to='pages.VideoPage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
]
|
tests/SampleApps/python/flask-setup-py-requirement-txt/app.py | samruddhikhandale/Oryx | 403 | 12705004 | from flask import Flask
from datetime import datetime
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World! " + str(datetime.now())
|
exps/stage3_root2/test.py | zju3dv/SMAP | 209 | 12705011 | <reponame>zju3dv/SMAP<filename>exps/stage3_root2/test.py
"""
@author: <NAME>
@contact: <EMAIL>
"""
import os
import argparse
import json
import cv2
from tqdm import tqdm
from torch.utils.data import DataLoader
from cvpack.utils.logger import get_logger
from model.smap import SMAP
from model.refinenet import RefineNet
from lib.utils.dataloader import get_test_loader
from lib.utils.comm import is_main_process
from exps.stage3_root2.test_util import *
from dataset.custom_dataset import CustomDataset
from config import cfg
import dapalib
def generate_3d_point_pairs(model, refine_model, data_loader, cfg, logger, device,
output_dir=''):
os.makedirs(output_dir, exist_ok=True)
model.eval()
if refine_model is not None:
refine_model.eval()
result = dict()
result['model_pattern'] = cfg.DATASET.NAME
result['3d_pairs'] = []
# 3d_pairs has items like{'pred_2d':[[x,y,detZ,score]...], 'gt_2d':[[x,y,Z,visual_type]...],
# 'pred_3d':[[X,Y,Z,score]...], 'gt_3d':[[X,Y,X]...],
# 'root_d': (abs depth of root (float value) pred by network),
# 'image_path': relative image path}
kpt_num = cfg.DATASET.KEYPOINT.NUM
data = tqdm(data_loader) if is_main_process() else data_loader
for idx, batch in enumerate(data):
if cfg.TEST_MODE == 'run_inference':
imgs, img_path, scales = batch
meta_data = None
else:
imgs, meta_data, img_path, scales = batch
imgs = imgs.to(device)
with torch.no_grad():
outputs_2d, outputs_3d, outputs_rd = model(imgs)
outputs_3d = outputs_3d.cpu()
outputs_rd = outputs_rd.cpu()
if cfg.DO_FLIP:
imgs_flip = torch.flip(imgs, [-1])
outputs_2d_flip, outputs_3d_flip, outputs_rd_flip = model(imgs_flip)
outputs_2d_flip = torch.flip(outputs_2d_flip, dims=[-1])
# outputs_3d_flip = torch.flip(outputs_3d_flip, dims=[-1])
# outputs_rd_flip = torch.flip(outputs_rd_flip, dims=[-1])
keypoint_pair = cfg.DATASET.KEYPOINT.FLIP_ORDER
paf_pair = cfg.DATASET.PAF.FLIP_CHANNEL
paf_abs_pair = [x+kpt_num for x in paf_pair]
pair = keypoint_pair + paf_abs_pair
for i in range(len(pair)):
if i >= kpt_num and (i - kpt_num) % 2 == 0:
outputs_2d[:, i] += outputs_2d_flip[:, pair[i]]*-1
else:
outputs_2d[:, i] += outputs_2d_flip[:, pair[i]]
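                # halve only the PAF channels (index kpt_num onward) so they hold the
                # average of the original and flipped passes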
outputs_2d[:, kpt_num:] *= 0.5
for i in range(len(imgs)):
if meta_data is not None:
# remove person who was blocked
new_gt_bodys = []
annotation = meta_data[i].numpy()
scale = scales[i]
for j in range(len(annotation)):
if annotation[j, cfg.DATASET.ROOT_IDX, 3] > 1:
new_gt_bodys.append(annotation[j])
gt_bodys = np.asarray(new_gt_bodys)
if len(gt_bodys) == 0:
continue
# groundtruth:[person..[keypoints..[x, y, Z, score(0:None, 1:invisible, 2:visible), X, Y, Z,
# f_x, f_y, cx, cy]]]
if len(gt_bodys[0][0]) < 11:
scale['f_x'] = gt_bodys[0, 0, 7]
scale['f_y'] = gt_bodys[0, 0, 7]
scale['cx'] = scale['img_width']/2
scale['cy'] = scale['img_height']/2
else:
scale['f_x'] = gt_bodys[0, 0, 7]
scale['f_y'] = gt_bodys[0, 0, 8]
scale['cx'] = gt_bodys[0, 0, 9]
scale['cy'] = gt_bodys[0, 0, 10]
else:
gt_bodys = None
# use default values
scale = {k: scales[k][i].numpy() for k in scales}
scale['f_x'] = scale['img_width']
scale['f_y'] = scale['img_width']
scale['cx'] = scale['img_width']/2
scale['cy'] = scale['img_height']/2
hmsIn = outputs_2d[i]
# if the first pair is [1, 0], uncomment the code below
# hmsIn[cfg.DATASET.KEYPOINT.NUM:cfg.DATASET.KEYPOINT.NUM+2] *= -1
# outputs_3d[i, 0] *= -1
hmsIn[:cfg.DATASET.KEYPOINT.NUM] /= 255
hmsIn[cfg.DATASET.KEYPOINT.NUM:] /= 127
rDepth = outputs_rd[i][0]
# no batch implementation yet
pred_bodys_2d = dapalib.connect(hmsIn, rDepth, cfg.DATASET.ROOT_IDX, distFlag=True)
if len(pred_bodys_2d) > 0:
                pred_bodys_2d[:, :, :2] *= cfg.DATASET.STRIDE  # resize poses to the input-net shape
pred_bodys_2d = pred_bodys_2d.numpy()
pafs_3d = outputs_3d[i].numpy().transpose(1, 2, 0)
root_d = outputs_rd[i][0].numpy()
paf_3d_upsamp = cv2.resize(
pafs_3d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
root_d_upsamp = cv2.resize(
root_d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
# generate 3d prediction bodys
pred_bodys_2d = register_pred(pred_bodys_2d, gt_bodys)
if len(pred_bodys_2d) == 0:
continue
pred_rdepths = generate_relZ(pred_bodys_2d, paf_3d_upsamp, root_d_upsamp, scale)
pred_bodys_3d = gen_3d_pose(pred_bodys_2d, pred_rdepths, scale)
if refine_model is not None:
new_pred_bodys_3d = lift_and_refine_3d_pose(pred_bodys_2d, pred_bodys_3d, refine_model,
device=device, root_n=cfg.DATASET.ROOT_IDX)
else:
new_pred_bodys_3d = pred_bodys_3d
if cfg.TEST_MODE == "generate_train":
save_result_for_train_refine(pred_bodys_2d, new_pred_bodys_3d, gt_bodys, pred_rdepths, result)
else:
save_result(pred_bodys_2d, new_pred_bodys_3d, gt_bodys, pred_rdepths, img_path[i], result)
dir_name = os.path.split(os.path.split(os.path.realpath(__file__))[0])[1]
pair_file_name = os.path.join(output_dir, '{}_{}_{}_{}.json'.format(dir_name, cfg.TEST_MODE,
cfg.DATA_MODE, cfg.JSON_SUFFIX_NAME))
with open(pair_file_name, 'w') as f:
json.dump(result, f)
logger.info("Pairs writed to {}".format(pair_file_name))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--test_mode", "-t", type=str, default="run_inference",
choices=['generate_train', 'generate_result', 'run_inference'],
help='Type of test. One of "generate_train": generate refineNet datasets, '
'"generate_result": save inference result and groundtruth, '
'"run_inference": save inference result for input images.')
parser.add_argument("--data_mode", "-d", type=str, default="test",
choices=['test', 'generation'],
help='Only used for "generate_train" test_mode, "generation" for refineNet train dataset,'
'"test" for refineNet test dataset.')
parser.add_argument("--SMAP_path", "-p", type=str, default='log/SMAP.pth',
help='Path to SMAP model')
parser.add_argument("--RefineNet_path", "-rp", type=str, default='',
help='Path to RefineNet model, empty means without RefineNet')
parser.add_argument("--batch_size", type=int, default=1,
help='Batch_size of test')
parser.add_argument("--do_flip", type=float, default=0,
                        help='Set to 1 to flip images during testing')
parser.add_argument("--dataset_path", type=str, default="",
help='Image dir path of "run_inference" test mode')
parser.add_argument("--json_name", type=str, default="",
help='Add a suffix to the result json.')
args = parser.parse_args()
cfg.TEST_MODE = args.test_mode
cfg.DATA_MODE = args.data_mode
cfg.REFINE = len(args.RefineNet_path) > 0
cfg.DO_FLIP = args.do_flip
cfg.JSON_SUFFIX_NAME = args.json_name
cfg.TEST.IMG_PER_GPU = args.batch_size
os.makedirs(cfg.TEST_DIR, exist_ok=True)
logger = get_logger(
cfg.DATASET.NAME, cfg.TEST_DIR, 0, 'test_log_{}.txt'.format(args.test_mode))
model = SMAP(cfg, run_efficient=cfg.RUN_EFFICIENT)
device = torch.device(cfg.MODEL.DEVICE)
model.to(device)
if args.test_mode == "run_inference":
test_dataset = CustomDataset(cfg, args.dataset_path)
data_loader = DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False)
else:
data_loader = get_test_loader(cfg, num_gpu=1, local_rank=0, stage=args.data_mode)
if cfg.REFINE:
refine_model = RefineNet()
refine_model.to(device)
refine_model_file = args.RefineNet_path
else:
refine_model = None
refine_model_file = ""
model_file = args.SMAP_path
if os.path.exists(model_file):
state_dict = torch.load(model_file, map_location=lambda storage, loc: storage)
state_dict = state_dict['model']
model.load_state_dict(state_dict)
if os.path.exists(refine_model_file):
refine_model.load_state_dict(torch.load(refine_model_file))
elif refine_model is not None:
logger.info("No such RefineNet checkpoint of {}".format(args.RefineNet_path))
return
generate_3d_point_pairs(model, refine_model, data_loader, cfg, logger, device,
output_dir=os.path.join(cfg.OUTPUT_DIR, "result"))
else:
logger.info("No such checkpoint of SMAP {}".format(args.SMAP_path))
if __name__ == '__main__':
main()
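# Example invocation (illustrative only; assumes this file is saved as test.py and
# that the checkpoint/image paths below exist on your machine):
#   python test.py -t run_inference -p log/SMAP.pth --dataset_path ./images --batch_size 1
#   python test.py -t generate_train -d generation -p log/SMAP.pth -rp log/RefineNet.pth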
|
tests/debug/__init__.py | int19h/ptvsd | 349 | 12705019 | <gh_stars>100-1000
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
# Expose Session directly.
def Session(*args, **kwargs):
from tests.debug import session
return session.Session(*args, **kwargs)
|
matrix-python-project/material_sync/sync_cronjob.py | hokaso/hocassian-media-matrix | 141 | 12705140 | import schedule, time, sys, os, traceback
sys.path.append(os.getcwd())
from material_sync.sync_to_baidu_cloud import Sync2Cloud
p = Sync2Cloud().main
schedule.every(1).days.at("03:00").do(p)
# schedule.every(1).minutes.do(p)
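# The job registered above runs Sync2Cloud().main once a day at 03:00; the loop
# below polls schedule.run_pending() every second and keeps the process alive
# even when a sync raises (the exception is printed and the loop continues).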
print("脚本已启动")
while True:
try:
schedule.run_pending()
time.sleep(1)
except Exception as e:
traceback.print_exc()
print(e)
|
corehq/apps/users/migrations/0001_add_location_permission.py | dimagilg/commcare-hq | 471 | 12705155 | <reponame>dimagilg/commcare-hq
# Generated by Django 1.9.12 on 2017-03-09 02:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = []
|
jarviscli/plugins/blackjack.py | WWFelina/Jarvis | 2,605 | 12705169 | import random
from plugin import plugin
from colorama import Fore
def delay(): # method to pause after a series of actions have been completed.
n = input("Press enter to continue")
def wiped_slate(player): # resets all hands and bets
player['hands'] = []
player['suits'] = []
player['bets'] = []
return player
def pprinthand(hand, suit, type='visible'): # returns hand as a string which may or may not be hidden.
temphand = hand[:]
for i in range(len(temphand)):
if temphand[i] == 1 or temphand[i] == 11:
temphand[i] = 'A' # 1 or 11 is value of ace.
temphand[i] = str(temphand[i]) + " of " + suit[i]
if type == 'visible':
return str(temphand)
elif type == 'partially-visible':
return '[' + str(temphand[0]) + ',hidden]'
def pprinthandlist(handlist, suitlist): # returns handlist as a string
newhandlist = []
for i in range(len(handlist)):
newhandlist.append(pprinthand(handlist[i], suitlist[i]))
return str(newhandlist)
def blackjacksum(orig_hand): # computes the sum by assuming appropriate value of Ace.
hand = orig_hand[:]
for i in range(len(hand)):
if str(hand[i]) in 'JQK': # converts face card to their value,that is,10.
hand[i] = 10
if sum(hand) <= 11: # of Ace card(either 1 or 11) acc. to the sum.
for i in range(len(hand)):
if hand[i] == 1:
hand[i] = 11
orig_hand[i] = 11
break
elif sum(hand) > 21:
for i in range(len(hand)):
if hand[i] == 11:
hand[i] = 1
orig_hand[i] = 1
break
return sum(hand), orig_hand
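# Worked examples of the ace handling above (illustrative, hand values only):
#   blackjacksum([1, 'K'])   -> (21, [11, 'K'])   # lone ace promoted to 11
#   blackjacksum([11, 9, 5]) -> (15, [1, 9, 5])   # ace demoted to 1 to avoid busting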
def move(hand, suit, cards, suits,
bet): # Here, hand is a nested list inside a list. It is a list of all hands of a player.
# Player can have multiple hands if he/she chooses to split.
sum_, hand[0] = blackjacksum(hand[0])
print("Your hand is", pprinthand(hand[0], suit[0]))
print("Your sum is", sum_)
print('---------------------------')
# checks for bust or blackjack.
if sum_ > 21:
print("You got busted!")
return hand, suit, bet
elif sum_ == 21 and len(hand) == 2:
print("Blackjack!")
return hand, suit, bet
while True:
choice = input("Press H to Hit, S to Stand, D to Double-Down, P to sPlit\n")
if choice in ['H', 'h']:
newcard = random.choice(cards)
newsuit = random.choice(suits)
print("Newcard is", str(newcard) + " of " + newsuit)
hand[0].append(newcard)
suit[0].append(newsuit)
print("Updated hand is", pprinthand(hand[0], suit[0]))
sum_, hand[0] = blackjacksum(hand[0])
hand, suit, bet = move(hand, suit, cards, suits, bet)
return hand, suit, bet
elif choice in ['S', 's']:
return hand, suit, bet
elif choice in ['D', 'd']:
newcard = random.choice(cards)
print("Newcard is", newcard)
newsuit = random.choice(suits)
hand[0].append(newcard)
suit[0].append(newsuit)
print("Updated hand is", pprinthand(hand[0], suit[0]))
sum_, hand[0] = blackjacksum(hand[0])
print("Your sum is", sum_)
if sum_ > 21:
print("You got busted!")
bet[0] = bet[0] * 2
print("Your new bet is", bet[0])
return hand, suit, bet
elif choice in ['P', 'p']:
if hand[0][0] == hand[0][1]:
if not hand[0][0] == 1:
splitHand1 = [[0, 0]]
splitHand2 = [[0, 0]]
splitSuit1 = [[0, 0]]
splitSuit2 = [[0, 0]]
newcard1 = random.choice(cards)
newsuit1 = random.choice(suits)
print("Newcard for first split is", str(newcard1) + " of " + newsuit1)
newcard2 = random.choice(cards)
newsuit2 = random.choice(suits)
print("Newcard for second split is", str(newcard2) + " of " + newsuit2)
splitHand1[0][0] = hand[0][0]
splitHand2[0][0] = hand[0][1]
splitHand1[0][1] = newcard1
splitHand2[0][1] = newcard2
splitSuit1[0][0] = suit[0][0]
splitSuit2[0][0] = suit[0][1]
splitSuit1[0][1] = newsuit1
splitSuit2[0][1] = newsuit2
print("Split hands are", pprinthand(splitHand1[0], splitSuit1[0]), ", ",
pprinthand(splitHand2[0], splitSuit2[0]))
sum1, splitHand1[0] = blackjacksum(splitHand1[0])
sum2, splitHand2[0] = blackjacksum(splitHand2[0])
print("Your sum for split 1 is", sum1)
print("Your sum for split 2 is", sum2)
bet1 = bet[:]
bet2 = bet[:]
splitHand1, splitSuit1, bet1 = move(splitHand1, splitSuit1, cards, suits, bet1)
splitHand2, splitSuit2, bet2 = move(splitHand2, splitSuit2, cards, suits, bet2)
splitHand1.extend(splitHand2) # converting both hands to a single list
splitSuit1.extend(splitSuit2)
bet1.extend(bet2) # converting both bets to a single list
return splitHand1, splitSuit1, bet1
else:
print("Sorry,you can't split aces")
hand, suit, bet = move(hand, suit, cards, suits, bet)
return hand, suit, bet
else:
print("Sorry, you can only split hands with identical cards")
hand, suit, bet = move(hand, suit, cards, suits, bet)
return hand, suit, bet
else:
print("Please try again with a valid choice.")
@plugin('blackjack')
def blackjack(jarvis, s):
jarvis.say("Welcome to the casino! Let's play blackjack!", Fore.GREEN)
player = {"hands": [], "suits": [], "bets": [], 'profit': []}
cards = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 'J', 'Q', 'K']
suits = ['spades', 'hearts', 'diamonds', 'clubs']
choice = 'y'
delay()
# Instructions
jarvis.say('How to play:', Fore.GREEN)
jarvis.say('-->The goal of blackjack is to beat the dealer\'s hand without going over 21.', Fore.CYAN)
jarvis.say('-->Face cards are worth 10. Aces are worth 1 or 11, whichever makes a better hand.', Fore.CYAN)
jarvis.say('-->Each player starts with two cards, one of the dealer\'s cards is hidden until the end.', Fore.CYAN)
jarvis.say('-->To \'Hit\' is to ask for another card. To \'Stand\' is to hold your total and end your turn.',
Fore.CYAN)
jarvis.say('-->If you go over 21 you bust, and the dealer wins regardless of the dealer\'s hand.', Fore.CYAN)
jarvis.say('-->If you are dealt 21 from the start (Ace & 10), you got a blackjack.', Fore.CYAN)
jarvis.say('-->Blackjack means you win 1.5 the amount of your bet.', Fore.CYAN)
jarvis.say('-->Dealer will hit until his/her cards total 17 or higher.', Fore.CYAN)
jarvis.say('-->Doubling is like a hit, only the bet is doubled and you only get one more card.', Fore.CYAN)
jarvis.say('-->Split can be done when you have two of the same card - the pair is split into two hands.', Fore.CYAN)
jarvis.say('-->Splitting also doubles the bet, because each new hand is worth the original bet.', Fore.CYAN)
jarvis.say('-->You cannot split two aces.', Fore.CYAN)
    jarvis.say('-->You can double on a hand resulting from a split, tripling or quadrupling your bet.', Fore.CYAN)
while choice in "Yy":
jarvis.say('Shuffling the cards....', Fore.BLUE)
jarvis.say("Let's start the game!", Fore.BLUE)
# Bets
jarvis.say("How much are you betting?", Fore.BLUE)
bet = jarvis.input_number()
player['bets'].append(bet)
delay()
jarvis.say('---------------------------')
# Cards
jarvis.say("Dealing the cards............", Fore.BLUE)
jarvis.say("Your cards....", Fore.BLUE)
hand = [random.choice(cards), random.choice(cards)]
suit = [random.choice(suits), random.choice(suits)]
player["hands"].append(hand)
player["suits"].append(suit)
jarvis.say(pprinthand(hand, suit))
delay()
jarvis.say('---------------------------')
# Dealer's cards
dealerhand = [random.choice(cards), random.choice(cards)]
dealersuit = [random.choice(suits), random.choice(suits)]
jarvis.say("Dealer hand: " + pprinthand(dealerhand, dealersuit, type='partially-visible'), Fore.MAGENTA)
delay()
jarvis.say('---------------------------')
# Players' moves
jarvis.say("It's your turn, make your choice!", Fore.BLUE)
player['hands'], player['suits'], player['bets'] = move(player['hands'], player['suits'], cards, suits,
player['bets'])
jarvis.say("Your hands and respective bets for this round are:", Fore.BLUE)
jarvis.say(pprinthandlist(player['hands'], player['suits']) + " " + str(player['bets']), Fore.BLUE)
delay()
jarvis.say('---------------------------')
# Dealer's moves
jarvis.say("Dealer hand: " + pprinthand(dealerhand, dealersuit), Fore.MAGENTA)
dealersum, dealerhand = blackjacksum(dealerhand)
jarvis.say("Dealer's sum is " + str(dealersum), Fore.MAGENTA)
while dealersum < 17 or (
dealersum == 17 and 11 in dealerhand): # condition which determines if dealer hits or not.
jarvis.say("Dealer draws another card", Fore.MAGENTA)
dealerhand.append(random.choice(cards))
dealersuit.append(random.choice(suits))
jarvis.say("Newcard is " + str(dealerhand[-1]) + " of " + str(dealersuit[-1]), Fore.MAGENTA)
dealersum, dealerhand = blackjacksum(dealerhand)
jarvis.say("Dealer's sum is " + str(dealersum), Fore.MAGENTA)
jarvis.say("Dealer's hand is " + pprinthand(dealerhand, dealersuit), Fore.MAGENTA)
delay()
jarvis.say('---------------------------')
# Profit Calculation
jarvis.say("Let's see your results ", Fore.BLUE)
for j in range(len(player['hands'])):
hand = player['hands'][j]
suit = player['suits'][j]
bet = player['bets'][j]
sum_, hand = blackjacksum(hand)
dealersum, dealerhand = blackjacksum(dealerhand)
jarvis.say("For the hand- " + pprinthand(hand, suit) + ' sum is-' + str(sum_), Fore.BLUE)
if len(hand) == 2 and sum_ == 21:
jarvis.say("Blackjack!", Fore.BLUE)
profit = bet * 1.5
player['profit'].append(bet * 1.5)
elif sum_ > 21:
jarvis.say("Busted", Fore.BLUE)
profit = bet * -1
player['profit'].append(bet * -1)
elif dealersum > 21:
jarvis.say("Dealer Busted", Fore.BLUE)
profit = bet * 1
player['profit'].append(bet * 1)
elif dealersum > sum_:
jarvis.say("You lost", Fore.BLUE)
profit = bet * -1
player['profit'].append(bet * -1)
elif sum_ > dealersum:
jarvis.say("You win", Fore.BLUE)
profit = bet * 1
player['profit'].append(bet * 1)
elif sum_ == 21 and dealersum == 21 and len(dealerhand) == 2 and len(hand) > 2:
jarvis.say("You lost", Fore.BLUE)
profit = bet * -1
player['profit'].append(bet * -1)
elif sum_ == dealersum:
jarvis.say("Push", Fore.BLUE)
profit = bet * 0
player['profit'].append(bet * 0)
jarvis.say("Profit is- " + str(profit), Fore.BLUE)
        player = wiped_slate(player)
        choice = jarvis.input("Do you wish to play another round? Y/n \n", Fore.GREEN)
jarvis.say("OK then, Let's see the results", Fore.GREEN)
jarvis.say('---------------------------')
profit = sum(player['profit'])
if profit >= 0:
jarvis.say("Your total profit is " + str(profit), Fore.GREEN)
else:
jarvis.say("Your total loss is " + str(profit * -1), Fore.GREEN)
jarvis.say("Goodbye, Let's play again sometime!", Fore.GREEN)
|
RecoEgamma/EgammaTools/python/gbrwrappermaker_cfi.py | ckamtsikis/cmssw | 852 | 12705172 | <gh_stars>100-1000
import FWCore.ParameterSet.Config as cms
gbrwrappermaker = cms.EDAnalyzer('GBRWrapperMaker'
)
|
test/question.py | guoweikuang/zhihu-api | 1,065 | 12705183 | <filename>test/question.py
# encoding: utf-8
import unittest
from zhihu import Question
class QuestionTestCase(unittest.TestCase):
def test_follow_question_with_id(self):
data = Question(id=32096743).follow_question()
self.assertEqual({"is_following": True}, data)
def test_unfollow_question_with_id(self):
data = Question(id=32096743).unfollow_question()
self.assertEqual({"is_following": False}, data)
def test_follow_question_with_url(self):
data = Question(url='https://www.zhihu.com/question/58684385').follow_question()
self.assertEqual({"is_following": True}, data)
def test_follow_question_with_answer_url(self):
"""
        An answer URL is also supported, because the question ID can be recovered from the answer URL as well.
:return:
"""
data = Question(url='https://www.zhihu.com/question/59001738/answer/160832685').follow_question()
self.assertEqual({"is_following": True}, data)
def test_unfollow_question_with_url(self):
data = Question(url='https://www.zhihu.com/question/58684385').unfollow_question()
self.assertEqual({"is_following": False}, data)
|
src/ansible_navigator/image_manager/__init__.py | ekmixon/ansible-navigator | 134 | 12705189 | """image manager"""
from .puller import ImagePuller
from .inspector import inspect_all
|
visualize.py | AnTao97/PointCloudDataset | 184 | 12705198 | <filename>visualize.py
import os
import numpy as np
def standardize_bbox(pcl, points_per_object):
pt_indices = np.random.choice(pcl.shape[0], points_per_object, replace=False)
np.random.shuffle(pt_indices)
pcl = pcl[pt_indices] # n by 3
mins = np.amin(pcl, axis=0)
maxs = np.amax(pcl, axis=0)
center = ( mins + maxs ) / 2.
scale = np.amax(maxs-mins)
print("Center: {}, Scale: {}".format(center, scale))
result = ((pcl - center)/scale).astype(np.float32) # [-0.5, 0.5]
return result
xml_head = \
"""
<scene version="0.5.0">
<integrator type="path">
<integer name="maxDepth" value="-1"/>
</integrator>
<sensor type="perspective">
<float name="farClip" value="100"/>
<float name="nearClip" value="0.1"/>
<transform name="toWorld">
<lookat origin="3,3,3" target="0,0,0" up="0,0,1"/>
</transform>
<float name="fov" value="25"/>
<sampler type="ldsampler">
<integer name="sampleCount" value="256"/>
</sampler>
<film type="ldrfilm">
<integer name="width" value="1600"/>
<integer name="height" value="1200"/>
<rfilter type="gaussian"/>
<boolean name="banner" value="false"/>
</film>
</sensor>
<bsdf type="roughplastic" id="surfaceMaterial">
<string name="distribution" value="ggx"/>
<float name="alpha" value="0.05"/>
<float name="intIOR" value="1.46"/>
<rgb name="diffuseReflectance" value="1,1,1"/> <!-- default 0.5 -->
</bsdf>
"""
xml_ball_segment = \
"""
<shape type="sphere">
<float name="radius" value="0.02"/>
<transform name="toWorld">
<translate x="{}" y="{}" z="{}"/>
<scale value="0.7"/>
</transform>
<bsdf type="diffuse">
<rgb name="reflectance" value="{},{},{}"/>
</bsdf>
</shape>
"""
xml_tail = \
"""
<shape type="rectangle">
<ref name="bsdf" id="surfaceMaterial"/>
<transform name="toWorld">
<scale x="10" y="10" z="10"/>
<translate x="0" y="0" z="-0.5"/>
</transform>
</shape>
<shape type="rectangle">
<transform name="toWorld">
<scale x="10" y="10" z="1"/>
<lookat origin="-4,4,20" target="0,0,0" up="0,0,1"/>
</transform>
<emitter type="area">
<rgb name="radiance" value="6,6,6"/>
</emitter>
</shape>
</scene>
"""
def colormap(x,y,z):
vec = np.array([x,y,z])
vec = np.clip(vec, 0.001,1.0)
norm = np.sqrt(np.sum(vec**2))
vec /= norm
return [vec[0], vec[1], vec[2]]
def mitsuba(pcl, path, clr=None):
xml_segments = [xml_head]
# pcl = standardize_bbox(pcl, 2048)
pcl = pcl[:,[2,0,1]]
pcl[:,0] *= -1
h = np.min(pcl[:,2])
for i in range(pcl.shape[0]):
        if clr is None:
color = colormap(pcl[i,0]+0.5,pcl[i,1]+0.5,pcl[i,2]+0.5)
else:
color = clr
if h < -0.25:
xml_segments.append(xml_ball_segment.format(pcl[i,0],pcl[i,1],pcl[i,2]-h-0.6875, *color))
else:
xml_segments.append(xml_ball_segment.format(pcl[i,0],pcl[i,1],pcl[i,2], *color))
xml_segments.append(xml_tail)
xml_content = str.join('', xml_segments)
with open(path, 'w') as f:
f.write(xml_content)
if __name__ == '__main__':
item = 0
split = 'train'
dataset_name = 'shapenetcorev2'
root = os.getcwd()
save_root = os.path.join("image", dataset_name)
if not os.path.exists(save_root):
os.makedirs(save_root)
from dataset import Dataset
d = Dataset(root=root, dataset_name=dataset_name,
num_points=2048, split=split, random_rotation=False, load_name=True)
print("datasize:", d.__len__())
pts, lb, n = d[item]
print(pts.size(), pts.type(), lb.size(), lb.type(), n)
path = os.path.join(save_root, dataset_name + '_' + split + str(item) + '_' + str(n) + '.xml')
mitsuba(pts.numpy(), path)
|
safedelete/tests/test_many2many.py | GustavoNagel/django-safedelete | 505 | 12705201 | <filename>safedelete/tests/test_many2many.py
import unittest
from django.db import models
from ..config import DELETED_VISIBLE
from ..models import SafeDeleteModel
from .testcase import SafeDeleteTestCase
class ManyToManyChild(models.Model):
pass
class ManyToManyOtherChild(models.Model):
pass
class ManyToManyOtherChildThrough(SafeDeleteModel):
other_child = models.ForeignKey(ManyToManyOtherChild, on_delete=models.CASCADE)
parent = models.ForeignKey('ManyToManyParent', on_delete=models.CASCADE)
class ManyToManyParent(SafeDeleteModel):
children = models.ManyToManyField(
ManyToManyChild,
blank=True,
related_name='parents'
)
other_children = models.ManyToManyField(
ManyToManyOtherChild,
blank=True,
related_name='parents',
through=ManyToManyOtherChildThrough,
)
class ManyToManyTestCase(SafeDeleteTestCase):
@unittest.expectedFailure
def test_many_to_many_through(self):
""" This is not supported yet! """
parent = ManyToManyParent.objects.create()
other_child = ManyToManyOtherChild.objects.create()
through = ManyToManyOtherChildThrough.objects.create(other_child=other_child, parent=parent)
self.assertEqual(parent.manytomanyotherchildthrough_set.all().count(), 1)
self.assertEqual(parent.other_children.all().count(), 1)
through.delete()
self.assertEqual(parent.manytomanyotherchildthrough_set.all().count(), 0)
self.assertEqual(parent.other_children.all().count(), 0)
def test_many_to_many(self):
"""Test whether related queries still works."""
parent1 = ManyToManyParent.objects.create()
parent2 = ManyToManyParent.objects.create()
child = ManyToManyChild.objects.create()
parent1.children.add(child)
parent2.children.add(child)
# The child should still have both parents
self.assertEqual(
child.parents.all().count(),
2
)
# Soft deleting one parent, should "hide" it from the related field
parent1.delete()
self.assertEqual(
child.parents.all().count(),
1
)
# But explicitly saying you want to "show" them, shouldn't hide them
self.assertEqual(
child.parents.all(force_visibility=DELETED_VISIBLE).count(),
2
)
|
contrib/od/test/test_live.py | backwardn/ccs-calendarserver | 462 | 12705203 | ##
# Copyright (c) 2014-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
OpenDirectory live service tests.
"""
from __future__ import print_function
from itertools import chain
from uuid import UUID
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks, returnValue
try:
from twext.who.opendirectory import DirectoryService
moduleImported = True
except:
moduleImported = False
print("Could not import OpenDirectory")
if moduleImported:
from twext.who.expression import (
CompoundExpression, Operand, MatchExpression, MatchType, MatchFlags
)
from txdav.who.directory import CalendarDirectoryServiceMixin
from txdav.who.opendirectory import DirectoryService as OpenDirectoryService
class CalOpenDirectoryService(OpenDirectoryService, CalendarDirectoryServiceMixin):
pass
LOCAL_SHORTNAMES = "odtestalbert odtestbill odtestcarl odtestdavid odtestsubgroupa".split()
NETWORK_SHORTNAMES = "odtestamanda odtestbetty odtestcarlene odtestdenise odtestsubgroupb odtestgrouptop".split()
def onlyIfPopulated(func):
"""
Only run the decorated test method if the "odtestamanda" record exists
"""
@inlineCallbacks
def checkThenRun(self):
record = yield self.service.recordWithShortName(self.service.recordType.user, u"odtestamanda")
if record is not None:
result = yield func(self)
returnValue(result)
else:
print("OD not populated, skipping {}".format(func.func_name))
return checkThenRun
class LiveOpenDirectoryServiceTestCase(unittest.TestCase):
"""
Live service tests for L{DirectoryService}.
"""
def setUp(self):
self.service = DirectoryService()
def tearDown(self):
self.service._deletePool()
def verifyResults(self, records, expected, unexpected):
shortNames = []
for record in records:
for shortName in record.shortNames:
shortNames.append(shortName)
for name in expected:
self.assertTrue(name in shortNames)
for name in unexpected:
self.assertFalse(name in shortNames)
@onlyIfPopulated
@inlineCallbacks
def test_shortNameStartsWith(self):
records = yield self.service.recordsFromExpression(
MatchExpression(
self.service.fieldName.shortNames, u"odtest",
matchType=MatchType.startsWith
)
)
self.verifyResults(
records,
chain(LOCAL_SHORTNAMES, NETWORK_SHORTNAMES),
["anotherodtestamanda", "anotherodtestalbert"]
)
@onlyIfPopulated
@inlineCallbacks
def test_uid(self):
for uid, name in (
(u"9DC04A71-E6DD-11DF-9492-0800200C9A66", u"odtestbetty"),
(u"9DC04A75-E6DD-11DF-9492-0800200C9A66", u"odtestbill"),
):
record = yield self.service.recordWithUID(uid)
self.assertTrue(record is not None)
self.assertEquals(record.shortNames[0], name)
@onlyIfPopulated
@inlineCallbacks
def test_guid(self):
for guid, name in (
(UUID("9DC04A71-E6DD-11DF-9492-0800200C9A66"), u"odtestbetty"),
(UUID("9DC04A75-E6DD-11DF-9492-0800200C9A66"), u"odtestbill"),
):
record = yield self.service.recordWithGUID(guid)
self.assertTrue(record is not None)
self.assertEquals(record.shortNames[0], name)
@onlyIfPopulated
@inlineCallbacks
def test_compoundWithoutRecordType(self):
expression = CompoundExpression(
[
CompoundExpression(
[
MatchExpression(
self.service.fieldName.fullNames, u"be",
matchType=MatchType.contains
),
MatchExpression(
self.service.fieldName.emailAddresses, u"be",
matchType=MatchType.startsWith
),
],
Operand.OR
),
CompoundExpression(
[
MatchExpression(
self.service.fieldName.fullNames, u"test",
matchType=MatchType.contains
),
MatchExpression(
self.service.fieldName.emailAddresses, u"test",
matchType=MatchType.startsWith
),
],
Operand.OR
),
],
Operand.AND
)
records = yield self.service.recordsFromExpression(expression)
# We should get back users and groups since we did not specify a type:
self.verifyResults(
records,
[
"odtestbetty", "odtestalbert", "anotherodtestalbert",
"odtestgroupbetty", "odtestgroupalbert"
],
["odtestamanda", "odtestbill", "odtestgroupa", "odtestgroupb"]
)
@onlyIfPopulated
@inlineCallbacks
def test_compoundWithExplicitRecordType(self):
expression = CompoundExpression(
[
CompoundExpression(
[
MatchExpression(
self.service.fieldName.fullNames, u"be",
matchType=MatchType.contains
),
MatchExpression(
self.service.fieldName.emailAddresses, u"be",
matchType=MatchType.startsWith
),
],
Operand.OR
),
CompoundExpression(
[
MatchExpression(
self.service.fieldName.fullNames, u"test",
matchType=MatchType.contains
),
MatchExpression(
self.service.fieldName.emailAddresses, u"test",
matchType=MatchType.startsWith
),
],
Operand.OR
),
],
Operand.AND
)
records = yield self.service.recordsFromExpression(
expression, recordTypes=[self.service.recordType.user]
)
# We should get back users but not groups:
self.verifyResults(
records,
["odtestbetty", "odtestalbert", "anotherodtestalbert"],
["odtestamanda", "odtestbill", "odtestgroupa", "odtestgroupb"]
)
@onlyIfPopulated
@inlineCallbacks
def test_compoundWithMultipleExplicitRecordTypes(self):
expression = CompoundExpression(
[
CompoundExpression(
[
MatchExpression(
self.service.fieldName.fullNames, u"be",
matchType=MatchType.contains
),
MatchExpression(
self.service.fieldName.emailAddresses, u"be",
matchType=MatchType.startsWith
),
],
Operand.OR
),
CompoundExpression(
[
MatchExpression(
self.service.fieldName.fullNames, u"test",
matchType=MatchType.contains
),
MatchExpression(
self.service.fieldName.emailAddresses, u"test",
matchType=MatchType.startsWith
),
],
Operand.OR
),
],
Operand.AND
)
records = yield self.service.recordsFromExpression(
expression, recordTypes=[
self.service.recordType.user,
self.service.recordType.group
]
)
# We should get back users and groups:
self.verifyResults(
records,
[
"odtestbetty", "odtestalbert", "anotherodtestalbert",
"odtestgroupbetty", "odtestgroupalbert"
],
["odtestamanda", "odtestbill", "odtestgroupa", "odtestgroupb"]
)
@onlyIfPopulated
@inlineCallbacks
def test_recordsMatchingTokens(self):
self.calService = CalOpenDirectoryService()
records = yield self.calService.recordsMatchingTokens([u"be", u"test"])
self.verifyResults(
records,
[
"odtestbetty", "odtestalbert", "anotherodtestalbert",
"odtestgroupbetty", "odtestgroupalbert"
],
["odtestamanda", "odtestbill", "odtestgroupa", "odtestgroupb"]
)
@onlyIfPopulated
@inlineCallbacks
def test_recordsMatchingTokensWithContextUser(self):
self.calService = CalOpenDirectoryService()
records = yield self.calService.recordsMatchingTokens(
[u"be", u"test"],
context=self.calService.searchContext_user
)
self.verifyResults(
records,
[
"odtestbetty", "odtestalbert", "anotherodtestalbert",
],
[
"odtestamanda", "odtestbill", "odtestgroupa", "odtestgroupb",
"odtestgroupbetty", "odtestgroupalbert"
]
)
@onlyIfPopulated
@inlineCallbacks
def test_recordsMatchingTokensWithContextGroup(self):
self.calService = CalOpenDirectoryService()
records = yield self.calService.recordsMatchingTokens(
[u"be", u"test"],
context=self.calService.searchContext_group
)
self.verifyResults(
records,
[
"odtestgroupbetty", "odtestgroupalbert"
],
[
"odtestamanda", "odtestbill", "odtestgroupa", "odtestgroupb",
"odtestbetty", "odtestalbert", "anotherodtestalbert"
]
)
@onlyIfPopulated
@inlineCallbacks
def test_recordsMatchingMultipleFieldsNoRecordType(self):
self.calService = CalOpenDirectoryService()
fields = (
(u"fullNames", u"be", MatchFlags.caseInsensitive, MatchType.contains),
(u"fullNames", u"test", MatchFlags.caseInsensitive, MatchType.contains),
)
records = (yield self.calService.recordsMatchingFields(
fields, operand=Operand.AND, recordType=None
))
self.verifyResults(
records,
[
"odtestgroupbetty", "odtestgroupalbert",
"odtestbetty", "odtestalbert", "anotherodtestalbert"
],
[
"odtestamanda",
]
)
@onlyIfPopulated
@inlineCallbacks
def test_recordsMatchingSingleFieldNoRecordType(self):
self.calService = CalOpenDirectoryService()
fields = (
(u"fullNames", u"test", MatchFlags.caseInsensitive, MatchType.contains),
)
records = (yield self.calService.recordsMatchingFields(
fields, operand=Operand.AND, recordType=None
))
self.verifyResults(
records,
[
"odtestgroupbetty", "odtestgroupalbert",
"odtestbetty", "odtestalbert", "anotherodtestalbert",
"odtestamanda",
],
[
"nobody",
]
)
@onlyIfPopulated
@inlineCallbacks
def test_recordsMatchingFieldsWithRecordType(self):
self.calService = CalOpenDirectoryService()
fields = (
(u"fullNames", u"be", MatchFlags.caseInsensitive, MatchType.contains),
(u"fullNames", u"test", MatchFlags.caseInsensitive, MatchType.contains),
)
records = (yield self.calService.recordsMatchingFields(
fields, operand=Operand.AND, recordType=self.calService.recordType.user
))
self.verifyResults(
records,
[
"odtestbetty", "odtestalbert", "anotherodtestalbert"
],
[
"odtestamanda", "odtestgroupalbert", "odtestgroupbetty",
]
)
|
venv/lib/python3.8/site-packages/statsmodels/tsa/arima_model.py | johncollinsai/post-high-frequency-data | 6,931 | 12705253 | """
See statsmodels.tsa.arima.model.ARIMA and statsmodels.tsa.SARIMAX.
"""
ARIMA_DEPRECATION_ERROR = """
statsmodels.tsa.arima_model.ARMA and statsmodels.tsa.arima_model.ARIMA have
been removed in favor of statsmodels.tsa.arima.model.ARIMA (note the .
between arima and model) and statsmodels.tsa.SARIMAX.
statsmodels.tsa.arima.model.ARIMA makes use of the statespace framework and
is both well tested and maintained. It also offers alternative specialized
parameter estimators.
"""
class ARMA:
"""
ARMA has been deprecated in favor of the new implementation
See Also
--------
statsmodels.tsa.arima.model.ARIMA
ARIMA models with a variety of parameter estimators
statsmodels.tsa.statespace.SARIMAX
SARIMAX models estimated using MLE
"""
def __init__(self, *args, **kwargs):
raise NotImplementedError(ARIMA_DEPRECATION_ERROR)
class ARIMA(ARMA):
"""
ARIMA has been deprecated in favor of the new implementation
See Also
--------
statsmodels.tsa.arima.model.ARIMA
ARIMA models with a variety of parameter estimators
statsmodels.tsa.statespace.SARIMAX
SARIMAX models estimated using MLE
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class ARMAResults:
"""
ARMA has been deprecated in favor of the new implementation
See Also
--------
statsmodels.tsa.arima.model.ARIMA
ARIMA models with a variety of parameter estimators
statsmodels.tsa.statespace.SARIMAX
SARIMAX models estimated using MLE
"""
def __init__(self, *args, **kwargs):
raise NotImplementedError(ARIMA_DEPRECATION_ERROR)
class ARIMAResults(ARMAResults):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
|
spacy_transformers/span_getters.py | thomashacker/spacy-transformers | 744 | 12705275 | <reponame>thomashacker/spacy-transformers
from typing import Callable, Iterable, List
from functools import partial
from spacy.tokens import Doc, Span
from .util import registry
SpannerT = Callable[[List[Doc]], List[List[Span]]]
def get_strided_spans(
docs: Iterable[Doc], window: int, stride: int
) -> List[List[Span]]:
spans = []
for doc in docs:
start = 0
spans.append([])
for i in range(len(doc) // stride):
spans[-1].append(doc[start : start + window])
if (start + window) >= len(doc):
break
start += stride
else:
if start < len(doc):
spans[-1].append(doc[start:])
return spans
@registry.span_getters("spacy-transformers.strided_spans.v1")
def configure_strided_spans(window: int, stride: int) -> SpannerT:
"""
Set the 'window' and 'stride' options for getting strided spans.
If you set the window and stride to the same value, the spans will cover
each token once. Setting 'stride' lower than 'window' will allow for an
overlap, so that some tokens are counted twice. This can be desirable,
because it allows all tokens to have both a left and right context.
"""
return partial(get_strided_spans, window=window, stride=stride)
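# Illustrative example (hypothetical numbers): with window=128 and stride=96, a
# 300-token Doc yields spans covering tokens [0:128], [96:224] and [192:300], so
# boundary tokens also get context from the neighbouring span.
#
#   get_spans = configure_strided_spans(window=128, stride=96)
#   spans = get_spans([doc])  # doc: a spaCy Doc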
def get_sent_spans(docs: Iterable[Doc]) -> List[List[Span]]:
return [list(doc.sents) for doc in docs]
@registry.span_getters("spacy-transformers.sent_spans.v1")
def configure_get_sent_spans() -> Callable:
"""
Create a `span_getter` that uses sentence boundary markers to extract
the spans. This requires sentence boundaries to be set, and may result
in somewhat uneven batches, depending on the sentence lengths. However,
it does provide the transformer with more meaningful windows to attend over.
"""
return get_sent_spans
def get_doc_spans(docs: Iterable[Doc]) -> List[List[Span]]:
return [[doc[:]] for doc in docs]
@registry.span_getters("spacy-transformers.doc_spans.v1")
def configure_get_doc_spans() -> Callable:
"""
Create a `span_getter` that uses the whole document as its spans. This is
the best approach if your `Doc` objects already refer to relatively short
texts.
"""
return get_doc_spans
get_sent_spans = configure_get_sent_spans()
get_doc_spans = configure_get_doc_spans()
__all__ = [
"get_sent_spans",
"get_doc_spans",
"configure_get_doc_spans",
"configure_get_sent_spans",
"configure_strided_spans",
]
|
homeassistant/components/tractive/device_tracker.py | MrDelik/core | 30,023 | 12705313 | <gh_stars>1000+
"""Support for Tractive device trackers."""
from __future__ import annotations
from typing import Any
from homeassistant.components.device_tracker import (
SOURCE_TYPE_BLUETOOTH,
SOURCE_TYPE_GPS,
)
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import Trackables
from .const import (
CLIENT,
DOMAIN,
SERVER_UNAVAILABLE,
TRACKABLES,
TRACKER_HARDWARE_STATUS_UPDATED,
TRACKER_POSITION_UPDATED,
)
from .entity import TractiveEntity
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up Tractive device trackers."""
client = hass.data[DOMAIN][entry.entry_id][CLIENT]
trackables = hass.data[DOMAIN][entry.entry_id][TRACKABLES]
entities = [TractiveDeviceTracker(client.user_id, item) for item in trackables]
async_add_entities(entities)
class TractiveDeviceTracker(TractiveEntity, TrackerEntity):
"""Tractive device tracker."""
_attr_icon = "mdi:paw"
def __init__(self, user_id: str, item: Trackables) -> None:
"""Initialize tracker entity."""
super().__init__(user_id, item.trackable, item.tracker_details)
self._battery_level: int = item.hw_info["battery_level"]
self._latitude: float = item.pos_report["latlong"][0]
self._longitude: float = item.pos_report["latlong"][1]
self._accuracy: int = item.pos_report["pos_uncertainty"]
self._source_type: str = item.pos_report["sensor_used"]
self._attr_name = f"{self._tracker_id} {item.trackable['details']['name']}"
self._attr_unique_id = item.trackable["_id"]
@property
def source_type(self) -> str:
"""Return the source type, eg gps or router, of the device."""
if self._source_type == "PHONE":
return SOURCE_TYPE_BLUETOOTH
return SOURCE_TYPE_GPS
@property
def latitude(self) -> float:
"""Return latitude value of the device."""
return self._latitude
@property
def longitude(self) -> float:
"""Return longitude value of the device."""
return self._longitude
@property
def location_accuracy(self) -> int:
"""Return the gps accuracy of the device."""
return self._accuracy
@property
def battery_level(self) -> int:
"""Return the battery level of the device."""
return self._battery_level
@callback
def _handle_hardware_status_update(self, event: dict[str, Any]) -> None:
self._battery_level = event["battery_level"]
self._attr_available = True
self.async_write_ha_state()
@callback
def _handle_position_update(self, event: dict[str, Any]) -> None:
self._latitude = event["latitude"]
self._longitude = event["longitude"]
self._accuracy = event["accuracy"]
self._source_type = event["sensor_used"]
self._attr_available = True
self.async_write_ha_state()
@callback
def _handle_server_unavailable(self) -> None:
self._attr_available = False
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{TRACKER_HARDWARE_STATUS_UPDATED}-{self._tracker_id}",
self._handle_hardware_status_update,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{TRACKER_POSITION_UPDATED}-{self._tracker_id}",
self._handle_position_update,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SERVER_UNAVAILABLE}-{self._user_id}",
self._handle_server_unavailable,
)
)
|
digits/dataset/__init__.py | wills2133/digits-ssd | 4,552 | 12705319 | <gh_stars>1000+
# Copyright (c) 2014-2017, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from .images import ImageClassificationDatasetJob, GenericImageDatasetJob
from .generic import GenericDatasetJob
from .job import DatasetJob
__all__ = [
'ImageClassificationDatasetJob',
'GenericImageDatasetJob',
'GenericDatasetJob',
'DatasetJob',
]
|
test/test_memcpy.py | dendisuhubdy/coriander | 644 | 12705348 | # Copyright <NAME> 2017
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
import pyopencl as cl
import os
import math
import pytest
from test import test_common
from test.test_common import offset_type
def test_memcpy(context, q, int_data, int_data_gpu):
ll_code = """
declare void @_Z6memcpyPvPKvm(i8*, i8*, i64)
define void @mykernel(i32* %data) {
%1 = bitcast i32* %data to i8*
%2 = getelementptr i32, i32* %data, i32 8
%3 = bitcast i32* %2 to i8*
call void @_Z6memcpyPvPKvm(i8 *%3, i8 *%1, i64 32)
ret void
}
"""
cl_code = test_common.ll_to_cl(ll_code, 'mykernel', num_clmems=1)
print('cl_code', cl_code)
for i in range(8):
int_data[i] = 3 + i
cl.enqueue_copy(q, int_data_gpu, int_data)
kernel = test_common.build_kernel(context, cl_code, 'mykernel')
kernel(q, (32,), (32,), int_data_gpu, offset_type(0), offset_type(0), cl.LocalMemory(32))
from_gpu = np.copy(int_data)
cl.enqueue_copy(q, from_gpu, int_data_gpu)
q.finish()
for i in range(8):
print(i, from_gpu[8 + i])
assert from_gpu[8 + i] == 3 + i
|
common.py | BerSerK/LianjiaSpider | 143 | 12705350 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
import pandas as pd
imgfilename = 'table.png'
import numpy as np
import os
import datetime
import six
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import gridspec
from pypinyin import pinyin
import numpy as np
from matplotlib.font_manager import FontProperties
font=FontProperties(fname='font/Songti.ttc',size=18)
matplotlib.font_manager.fontManager.addfont('font/Songti.ttc')
from matplotlib.offsetbox import TextArea, DrawingArea, OffsetImage, AnnotationBbox
import matplotlib.image as mpimg
def read(city):
dfs = []
for root, dirs, files in os.walk('data/chengjiao-%s/'%city):
#print(root, files, dirs)
files.sort()
for f in files:
fullPath = os.path.join(root, f)
#print(fullPath)
df = None
if f.endswith('.xls'):
df = pd.read_excel(fullPath, converters = {'成交价(元/平)':lambda x:float(x),
'链家编号':str, '产权年限':str})
elif f.endswith('.csv'):
df = pd.read_csv(fullPath, converters = {'成交价(元/平)':lambda x:float(x),
'链家编号':str})
else:
continue
if len(df) == 0:
print('No data in %s' % fullPath )
continue
if '单价(元/平米)' in df.columns:
df['单价(元/平米)'] = pd.to_numeric(df['单价(元/平米)'], errors="coerce")
df = df.rename(columns={'单价(元/平米)':'成交价(元/平)','所属小区':'小区','建筑面积:平米':'建筑面积',
'浏览(次)':'浏览(次)', '关注(人)':'关注(人)', '带看(次)':'带看(次)',
'所属下辖区':'下辖区', '房权所属':'产权所属', '房屋朝向':'朝向','调价(次)':'调价(次)',
'建成时间:年':'建成时间', '所属商圈':'商圈', '装修情况':'装修', '成交周期(天)':'成交周期(天)',
'房屋户型':'户型','产权年限':'土地年限', '楼层状态':'所在楼层', '挂牌价格(万)':'挂牌价格(万)',
'配备电梯':'电梯'})
            # strip the unit from the floor-area column
try:
mj = df['建筑面积']
mj_num = []
for m in mj:
m = str(m)
if '㎡' in m:
m = m[:m.find('㎡')]
try:
m = float(m)
except:
m = np.nan
mj_num.append(m)
df['建筑面积'] = mj_num
except:
pass
            # normalize the transaction-date format
try:
time = []
for t in df['成交时间']:
t = str(t)
if '/' in t:
t = '-'.join(t.split('/'))
if '成交' in t:
t = t.replace('成交', '').strip()
time.append(t)
df['成交时间'] = time
except Exception as e:
df.columns
print(fullPath)
print('成交时间错误', e)
pass
            # strip the unit from the listing-price column
try:
sj = df['售价(万)']
sj_num = []
for s in sj:
s = str(s)
if '万' in s:
s = s[:s.find('万')]
if '-' in s:
#print(s)
s = s.split('-')[-1]
s = float(s)
sj_num.append(s)
df['售价(万)'] = sj_num
except:
pass
try:
                df['成交价(元/平)'] = pd.to_numeric(df['成交价(元/平)'], errors='coerce')
except:
pass
if len(df) > 0:
dfs.append(df)
df = pd.concat(dfs)
print('raw count:', len(df))
df = df.drop_duplicates(subset=['链家编号'])
print('count after drop duplicates', len(df))
if city in ['北京', '上海', '深圳']:
df = df.loc[df['成交价(元/平)']> 10000]
elif city in [ '广州', '杭州']:
df = df.loc[df['成交价(元/平)']> 5000]
else:
df = df.loc[df['成交价(元/平)']> 1000]
df = df.loc[df['成交价(元/平)']< 200000]
print('count after drop less than 1000', len(df))
if city not in ['重庆', 'allcq', '南京']:
df = df.loc[~df['土地年限'].str.contains('40', na = False)]
df = df.loc[~df['土地年限'].str.contains('50', na = False)]
print('count after drop 40, 50', len(df))
df = df.set_index('链家编号')
#print(len(df))
return df
MA = True
#MA = False
ma_length = 30
start_date = '2017-01-01'
city = 'default'
def get_moving_average(res, ma_length, keep_all = False):
startDate = datetime.datetime.strptime(res.index[0],'%Y-%m-%d')
endDate = datetime.datetime.strptime(res.index[-1],'%Y-%m-%d')
#print(startDate, endDate)
date_range=[str(x.date()) for x in pd.date_range(startDate, endDate)]
volume_ma = []
median_ma = []
mean_ma = []
for i in range(len(date_range) - ma_length):
interval_data = res.loc[(res.index >= date_range[i]) & (res.index <= date_range[i+ma_length])]
volume_ele = sum(interval_data['volume'])
median_ele = 0
mean_ele = 0
for index, row in interval_data.iterrows():
median_ele += row['volume'] * row['median_price']
mean_ele += row['volume'] * row['mean_price']
volume_ma.append(volume_ele)
if volume_ele == 0:
median_ma.append(median_ma[-1])
mean_ma.append(mean_ma[-1])
else:
median_ma.append(median_ele/volume_ele)
mean_ma.append(mean_ele/volume_ele)
last_index = 0
if keep_all == False:
for i in range(len(volume_ma)):
if volume_ma[i] < ma_length / 6:
last_index = i
volume_ma = volume_ma[last_index+1:]
median_ma = median_ma[last_index+1:]
mean_ma = mean_ma[last_index+1:]
return pd.DataFrame({'volume':volume_ma, 'median_price':median_ma, 'mean_price':mean_ma},
index = date_range[ma_length+last_index + 1:])
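# Illustrative usage (hypothetical frame): given `res` indexed by 'YYYY-MM-DD'
# with volume / median_price / mean_price columns,
#   ma = get_moving_average(res, ma_length=30)
# returns a 30-day rolling series where each day's price is the volume-weighted
# average of its window; when keep_all is False, windows up to the last one with
# total volume below ma_length / 6 are dropped from the front.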
def resetXticks(ax, res):
labels = res.index
xticks = ax.get_xticks()
if len(xticks) < 366:
tick_month = ['%0.2d'%i for i in range(1, 13)]
else:
tick_month = ['%0.2d'%i for i in range(1, 13, 3)]
target_xticks = []
last_index = 0
month_mark = set()
for i in range(len(labels)):
label = labels[i]
tick = xticks[i]
(year, month, day) = label.split('-')
if month in tick_month and '-'.join([year, month]) not in month_mark:
month_mark.add('-'.join([year,month]))
last_index = i
target_xticks.append(tick)
if len(res) - last_index < 20:
target_xticks = target_xticks[:-1] + [xticks[-1]]
else:
target_xticks = target_xticks + [xticks[-1]]
ax.set_xticks(target_xticks)
def plot(res, city, title, MA, ma_length, start_date = None, force = False, keep_all = False):
if force == False and len(res)< 10 + ma_length:
return pd.DataFrame()
if MA == True:
res = get_moving_average(res, ma_length, keep_all)
if force == False and len(res) < 10:
return pd.DataFrame()
if start_date is not None:
res = res.loc[res.index >= start_date,:]
if len(res) > 0 and res.index[0] > start_date:
            date_range=[str(x.date()) for x in pd.date_range(start_date, res.index[0])]
padding = pd.DataFrame(columns = res.columns,index = date_range[:-1])
padding.volume = [0] * len(padding)
res = pd.concat([padding, res])
plt.rcParams['font.sans-serif']=['SimHei']
matplotlib.rc('font', size=18)
matplotlib.rcParams['figure.figsize'] = [15, 15]
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
ax0 = plt.subplot(gs[0])
ax0.plot(res['median_price'])
ax0.plot(res['mean_price'])
ax0.legend(['%d日中位数=%.0f'%(ma_length, res['median_price'][-1]),
'%d日均价=%.0f'%(ma_length, res['mean_price'][-1])], prop = font)
x1,x2,y1,y2 = ax0.axis()
ax0.axis((x1,x2,0,y2))
    # embed the WeChat QR code in the chart
qrcode = mpimg.imread('wechatqrcode.png')
imagebox = OffsetImage(qrcode, zoom=0.5)
ab = AnnotationBbox(imagebox, (0.2*x2, 0.2*y2))
ax0.add_artist(ab)
resetXticks(ax0, res)
plt.setp( ax0.get_xticklabels(), visible=False)
plt.grid(True)
plt.title(title+'--欢迎扫二维码关注公众号获取其他城市房价走势还有低佣金证券开户', fontproperties = font)
    # redraw the x-axis ticks
ax1 = plt.subplot(gs[1])
#ax1.bar(res.index, res['volume'])
ax1.fill_between(res.index, res['volume'])
ax1.legend(['30日成交量'], prop = font)
resetXticks(ax1, res)
plt.xticks(rotation=90)
dir_name = os.path.join('fig', city)
if not os.path.exists(dir_name):
os.makedirs(dir_name)
plt.tight_layout()
plt.savefig(os.path.join(dir_name, title +'.png'))
#plt.show()
plt.close()
res.to_excel('data/trend/%s-%s.xlsx'%(city, title))
return res
def plot_district(df, city, district ='朝阳', ma_length = -1, start_date = None):
if district == '静安':
gp = df.loc[df['下辖区'].isin(set(['静安','闸北']))].groupby(['成交时间'])
else:
gp = df.loc[df['下辖区']==district].groupby(['成交时间'])
res = pd.DataFrame({'volume':gp.size(),'mean_price':gp['成交价(元/平)'].mean(),
'median_price':gp['成交价(元/平)'].median()})
res = res.iloc[:len(res),:]
title = district
return plot(res, city, title, MA, ma_length, start_date, False, True)
def plot_df(df, city, title, MA, ma_length, start_date = None, force = False):
gp = df.groupby(['成交时间'])['成交价(元/平)']
res=pd.DataFrame({"volume":gp.size(),"median_price":gp.median(), "mean_price":gp.mean()})
res = res.iloc[:len(res),:]
plot(res, city, title, MA, ma_length, start_date, force)
def plot_dfs(dfs, title, legends, ma_length = 30, start_date = None):
ress = []
for df in dfs:
gp = df.groupby(['成交时间'])['成交价(元/平)']
res=pd.DataFrame({"volume":gp.size(),"median_price":gp.median(), "mean_price":gp.mean()})
res = res.iloc[:len(res),:]
if len(res)< 10 + ma_length:
return
if ma_length != -1:
res = get_moving_average(res, ma_length)
if start_date is not None:
res = res.loc[res.index >= start_date,:]
ress.append(res)
plt.rcParams['font.sans-serif']=['SimHei']
matplotlib.rc('font', size=18)
matplotlib.rcParams['figure.figsize'] = [15, 10]
index = ress[0].index
for res in ress:
res = res.loc[res.index.isin(index)]
plt.plot(res['mean_price']/res['mean_price'].iloc[0])
plt.legend(legends, prop = font)
plt.title(title, fontproperties = font)
ax0 = plt.gca()
xticks = ax0.xaxis.get_major_ticks()
interval = len(xticks)// 10
ax0.set_xticks(ax0.get_xticks()[::interval])
plt.xticks(rotation=30)
plt.grid(True)
dir_name = os.path.join('fig', city)
if not os.path.exists(dir_name):
os.makedirs(dir_name)
plt.savefig(os.path.join(dir_name, title +'.png'))
plt.show()
plt.close()
def render_mpl_table(data, filename, col_width=3.0, row_height=1, font_size=24,
header_color='#40466e', row_colors=['#f1f1f2', 'w'], edge_color='w',
bbox=[0, 0, 1, 1], header_columns=0,
ax=None, **kwargs):
matplotlib.rcParams['font.sans-serif'] = "Songti SC"
matplotlib.rcParams['font.family'] = "sans-serif"
if ax is None:
size = (np.array(data.shape[::-1]) + np.array([0, 6])) * np.array([col_width, row_height])
matplotlib.rcParams['figure.figsize'] = size
gs = gridspec.GridSpec(2, 1, height_ratios=[4, 1])
ax = plt.subplot(gs[0])
ax2 = plt.subplot(gs[1])
ax.axis('off')
mpl_table = ax.table(cellText=data.values, bbox=bbox, colLabels=data.columns, **kwargs)
mpl_table.auto_set_font_size(False)
mpl_table.set_fontsize(font_size)
for k, cell in six.iteritems(mpl_table._cells):
cell.set_edgecolor(edge_color)
if k[0] == 0 or k[1] < header_columns:
cell.set_text_props(weight='bold', color='w')
cell.set_facecolor(header_color)
else:
cell.set_facecolor(row_colors[k[0]%len(row_colors) ])
qrcode = mpimg.imread('wechatqrcode.png')
imagebox = OffsetImage(qrcode, zoom=0.8)
ab = AnnotationBbox(imagebox, (0.1, 0.5))
ax2.axis("off")
ax2.add_artist(ab)
ax2.text( 0.3, 0.5, "欢迎扫码关注微信公众号\"时炜观察\"\n获取房价走势图以及在量化投资行业的知识见识分享。\n更有多家低佣A股证券开户。", dict(size=30))
plt.tight_layout()
plt.savefig(filename)
return ax
def updateCityTable():
df = pd.read_excel('rank/城市排名.xlsx')
ax = render_mpl_table(df, 'fig/city_table.png', header_columns=0, col_width=2.0)
def updateAllTableImage():
df = pd.read_excel('rank/城市排名.xlsx')
render_mpl_table(df, 'fig/allcity/table.png', header_columns=0, col_width=2.0)
for city in df['城市']:
filename = 'rank/%s区域排名.xlsx'%city
imgfilename = 'fig/%s/table.png'%city
data = pd.read_excel(filename)
render_mpl_table(data, imgfilename, header_columns=0, col_width=2.0) |
pretty_xml/pretty_xml.py | DazEB2/SimplePyScripts | 117 | 12705353 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
def process_xml_string(xml_string):
"""
    Extracts the XML substring from the given text --
    it must start with < and end with >
"""
start = xml_string.index('<')
end = xml_string.rindex('>')
return xml_string[start:end + 1]
def pretty_xml_minidom(xml_string, ind=2):
"""Функция принимает строку xml и выводит xml с отступами."""
from xml.dom.minidom import parseString
xml_string = process_xml_string(xml_string)
xml_utf8 = parseString(xml_string).toprettyxml(indent=' ' * ind, encoding='utf-8')
return xml_utf8.decode('utf-8')
def pretty_xml_lxml(xml_string):
"""Функция принимает строку xml и выводит xml с отступами."""
from lxml import etree
xml_string = process_xml_string(xml_string)
root = etree.fromstring(xml_string)
return etree.tostring(root, pretty_print=True, encoding='utf-8').decode('utf-8')
if __name__ == '__main__':
xml = '<a><b/><c><z/><h/></c></a>'
print(pretty_xml_minidom(xml))
print(pretty_xml_lxml(xml))
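    # Expected output for the sample above (approximately) -- both functions print
    # an indented tree like:
    #   <a>
    #     <b/>
    #     <c>
    #       <z/>
    #       <h/>
    #     </c>
    #   </a>
    # pretty_xml_minidom additionally emits an <?xml ... ?> declaration line.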
|
lib/datasets/cityscapes/coco_to_cityscapes_id.py | jcjs/FPN-Pytorch | 271 | 12705397 | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
# mapping coco categories to cityscapes (our converted json) id
# cityscapes
# INFO roidb.py: 220: 1 bicycle: 7286
# INFO roidb.py: 220: 2 car: 53684
# INFO roidb.py: 220: 3 person: 35704
# INFO roidb.py: 220: 4 train: 336
# INFO roidb.py: 220: 5 truck: 964
# INFO roidb.py: 220: 6 motorcycle: 1468
# INFO roidb.py: 220: 7 bus: 758
# INFO roidb.py: 220: 8 rider: 3504
# coco (val5k)
# INFO roidb.py: 220: 1 person: 21296
# INFO roidb.py: 220: 2 bicycle: 628
# INFO roidb.py: 220: 3 car: 3818
# INFO roidb.py: 220: 4 motorcycle: 732
# INFO roidb.py: 220: 5 airplane: 286 <------ irrelevant
# INFO roidb.py: 220: 6 bus: 564
# INFO roidb.py: 220: 7 train: 380
# INFO roidb.py: 220: 8 truck: 828
def cityscapes_to_coco(cityscapes_id):
lookup = {
0: 0, # ... background
1: 2, # bicycle
2: 3, # car
3: 1, # person
4: 7, # train
5: 8, # truck
6: 4, # motorcycle
7: 6, # bus
8: -1, # rider (-1 means rand init)
}
return lookup[cityscapes_id]
def cityscapes_to_coco_with_rider(cityscapes_id):
lookup = {
0: 0, # ... background
1: 2, # bicycle
2: 3, # car
3: 1, # person
4: 7, # train
5: 8, # truck
6: 4, # motorcycle
7: 6, # bus
8: 1, # rider ("person", *rider has human right!*)
}
return lookup[cityscapes_id]
def cityscapes_to_coco_without_person_rider(cityscapes_id):
lookup = {
0: 0, # ... background
1: 2, # bicycle
2: 3, # car
3: -1, # person (ignore)
4: 7, # train
5: 8, # truck
6: 4, # motorcycle
7: 6, # bus
8: -1, # rider (ignore)
}
return lookup[cityscapes_id]
def cityscapes_to_coco_all_random(cityscapes_id):
lookup = {
0: -1, # ... background
1: -1, # bicycle
2: -1, # car
3: -1, # person (ignore)
4: -1, # train
5: -1, # truck
6: -1, # motorcycle
7: -1, # bus
8: -1, # rider (ignore)
}
return lookup[cityscapes_id]
|
core/polyaxon/polypod/init/custom.py | admariner/polyaxon | 3,200 | 12705400 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Optional
from polyaxon.containers.contexts import CONTEXT_MOUNT_ARTIFACTS
from polyaxon.containers.names import (
INIT_CUSTOM_CONTAINER_PREFIX,
generate_container_name,
)
from polyaxon.exceptions import PolypodException
from polyaxon.k8s import k8s_schemas
from polyaxon.polypod.common import constants
from polyaxon.polypod.common.containers import patch_container
from polyaxon.polypod.common.env_vars import (
get_connection_env_var,
get_env_from_config_map,
get_env_from_secret,
get_items_from_config_map,
get_items_from_secret,
)
from polyaxon.polypod.common.mounts import (
get_auth_context_mount,
get_connections_context_mount,
get_mount_from_resource,
)
from polyaxon.polypod.common.volumes import get_volume_name
from polyaxon.polypod.specs.contexts import PluginsContextsSpec
from polyaxon.schemas.types import V1ConnectionType
from polyaxon.utils.list_utils import to_list
def get_custom_init_container(
connection: V1ConnectionType,
contexts: PluginsContextsSpec,
container: Optional[k8s_schemas.V1Container],
env: List[k8s_schemas.V1EnvVar] = None,
mount_path: str = None,
) -> k8s_schemas.V1Container:
if not connection:
raise PolypodException("A connection is required to create a repo context.")
volume_name = (
get_volume_name(mount_path)
if mount_path
else constants.CONTEXT_VOLUME_ARTIFACTS
)
mount_path = mount_path or CONTEXT_MOUNT_ARTIFACTS
volume_mounts = [
get_connections_context_mount(name=volume_name, mount_path=mount_path)
]
if contexts and contexts.auth:
volume_mounts.append(get_auth_context_mount(read_only=True))
env = to_list(env, check_none=True)
env_from = []
secret = connection.get_secret()
if secret:
volume_mounts += to_list(
get_mount_from_resource(resource=secret), check_none=True
)
env += to_list(get_items_from_secret(secret=secret), check_none=True)
env_from = to_list(get_env_from_secret(secret=secret), check_none=True)
env += to_list(
get_connection_env_var(connection=connection, secret=secret), check_none=True
)
config_map = connection.get_config_map()
if config_map:
volume_mounts += to_list(
get_mount_from_resource(resource=config_map), check_none=True
)
env += to_list(
get_items_from_config_map(config_map=config_map), check_none=True
)
env_from = to_list(
get_env_from_config_map(config_map=config_map), check_none=True
)
container_name = container.name or generate_container_name(
INIT_CUSTOM_CONTAINER_PREFIX, connection.name
)
return patch_container(
container=container,
name=container_name,
env=env,
env_from=env_from,
volume_mounts=volume_mounts,
)
|
basicsr/metrics/psnr_ssim.py | DEMVSNet/HINet | 237 | 12705410 | <reponame>DEMVSNet/HINet
# ------------------------------------------------------------------------
# Copyright (c) 2021 megvii-model. All Rights Reserved.
# ------------------------------------------------------------------------
# Modified from BasicSR (https://github.com/xinntao/BasicSR)
# Copyright 2018-2020 BasicSR Authors
# ------------------------------------------------------------------------
import cv2
import numpy as np
from basicsr.metrics.metric_util import reorder_image, to_y_channel
import skimage.metrics
import torch
def calculate_psnr(img1,
img2,
crop_border,
input_order='HWC',
test_y_channel=False):
"""Calculate PSNR (Peak Signal-to-Noise Ratio).
Ref: https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio
Args:
img1 (ndarray/tensor): Images with range [0, 255]/[0, 1].
img2 (ndarray/tensor): Images with range [0, 255]/[0, 1].
crop_border (int): Cropped pixels in each edge of an image. These
pixels are not involved in the PSNR calculation.
input_order (str): Whether the input order is 'HWC' or 'CHW'.
Default: 'HWC'.
test_y_channel (bool): Test on Y channel of YCbCr. Default: False.
Returns:
float: psnr result.
"""
assert img1.shape == img2.shape, (
        f'Image shapes are different: {img1.shape}, {img2.shape}.')
if input_order not in ['HWC', 'CHW']:
raise ValueError(
f'Wrong input_order {input_order}. Supported input_orders are '
'"HWC" and "CHW"')
if type(img1) == torch.Tensor:
if len(img1.shape) == 4:
img1 = img1.squeeze(0)
img1 = img1.detach().cpu().numpy().transpose(1,2,0)
if type(img2) == torch.Tensor:
if len(img2.shape) == 4:
img2 = img2.squeeze(0)
img2 = img2.detach().cpu().numpy().transpose(1,2,0)
img1 = reorder_image(img1, input_order=input_order)
img2 = reorder_image(img2, input_order=input_order)
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
if crop_border != 0:
img1 = img1[crop_border:-crop_border, crop_border:-crop_border, ...]
img2 = img2[crop_border:-crop_border, crop_border:-crop_border, ...]
if test_y_channel:
img1 = to_y_channel(img1)
img2 = to_y_channel(img2)
mse = np.mean((img1 - img2)**2)
if mse == 0:
return float('inf')
max_value = 1. if img1.max() <= 1 else 255.
return 20. * np.log10(max_value / np.sqrt(mse))
def _ssim(img1, img2):
"""Calculate SSIM (structural similarity) for one channel images.
It is called by func:`calculate_ssim`.
Args:
img1 (ndarray): Images with range [0, 255] with order 'HWC'.
img2 (ndarray): Images with range [0, 255] with order 'HWC'.
Returns:
float: ssim result.
"""
C1 = (0.01 * 255)**2
C2 = (0.03 * 255)**2
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
kernel = cv2.getGaussianKernel(11, 1.5)
window = np.outer(kernel, kernel.transpose())
mu1 = cv2.filter2D(img1, -1, window)[5:-5, 5:-5]
mu2 = cv2.filter2D(img2, -1, window)[5:-5, 5:-5]
mu1_sq = mu1**2
mu2_sq = mu2**2
mu1_mu2 = mu1 * mu2
sigma1_sq = cv2.filter2D(img1**2, -1, window)[5:-5, 5:-5] - mu1_sq
sigma2_sq = cv2.filter2D(img2**2, -1, window)[5:-5, 5:-5] - mu2_sq
sigma12 = cv2.filter2D(img1 * img2, -1, window)[5:-5, 5:-5] - mu1_mu2
ssim_map = ((2 * mu1_mu2 + C1) *
(2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
(sigma1_sq + sigma2_sq + C2))
return ssim_map.mean()
def prepare_for_ssim(img, k):
import torch
with torch.no_grad():
img = torch.from_numpy(img).unsqueeze(0).unsqueeze(0).float()
conv = torch.nn.Conv2d(1, 1, k, stride=1, padding=k//2, padding_mode='reflect')
conv.weight.requires_grad = False
conv.weight[:, :, :, :] = 1. / (k * k)
img = conv(img)
img = img.squeeze(0).squeeze(0)
img = img[0::k, 0::k]
return img.detach().cpu().numpy()
def prepare_for_ssim_rgb(img, k):
import torch
with torch.no_grad():
img = torch.from_numpy(img).float() #HxWx3
conv = torch.nn.Conv2d(1, 1, k, stride=1, padding=k // 2, padding_mode='reflect')
conv.weight.requires_grad = False
conv.weight[:, :, :, :] = 1. / (k * k)
new_img = []
for i in range(3):
new_img.append(conv(img[:, :, i].unsqueeze(0).unsqueeze(0)).squeeze(0).squeeze(0)[0::k, 0::k])
return torch.stack(new_img, dim=2).detach().cpu().numpy()
def _3d_gaussian_calculator(img, conv3d):
out = conv3d(img.unsqueeze(0).unsqueeze(0)).squeeze(0).squeeze(0)
return out
def _generate_3d_gaussian_kernel():
kernel = cv2.getGaussianKernel(11, 1.5)
window = np.outer(kernel, kernel.transpose())
kernel_3 = cv2.getGaussianKernel(11, 1.5)
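    # Build a separable 11x11x11 Gaussian kernel: scaling the 2-D window by each tap of a
    # third 1-D Gaussian is equivalent to the outer product of three 1-D Gaussians.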
kernel = torch.tensor(np.stack([window * k for k in kernel_3], axis=0))
conv3d = torch.nn.Conv3d(1, 1, (11, 11, 11), stride=1, padding=(5, 5, 5), bias=False, padding_mode='replicate')
conv3d.weight.requires_grad = False
conv3d.weight[0, 0, :, :, :] = kernel
return conv3d
def _ssim_3d(img1, img2, max_value):
    """Calculate SSIM (structural similarity) over the full 3-D (HxWxC) volume.
    It is called by func:`calculate_ssim`.
    Args:
        img1 (ndarray): Images with range [0, 255]/[0, 1] with order 'HWC'.
        img2 (ndarray): Images with range [0, 255]/[0, 1] with order 'HWC'.
        max_value (float): Maximum of the value range (1. or 255.).
    Returns:
        float: ssim result.
    """
    assert len(img1.shape) == 3 and len(img2.shape) == 3
C1 = (0.01 * max_value) ** 2
C2 = (0.03 * max_value) ** 2
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
kernel = _generate_3d_gaussian_kernel().cuda()
img1 = torch.tensor(img1).float().cuda()
img2 = torch.tensor(img2).float().cuda()
mu1 = _3d_gaussian_calculator(img1, kernel)
mu2 = _3d_gaussian_calculator(img2, kernel)
mu1_sq = mu1 ** 2
mu2_sq = mu2 ** 2
mu1_mu2 = mu1 * mu2
sigma1_sq = _3d_gaussian_calculator(img1 ** 2, kernel) - mu1_sq
sigma2_sq = _3d_gaussian_calculator(img2 ** 2, kernel) - mu2_sq
sigma12 = _3d_gaussian_calculator(img1*img2, kernel) - mu1_mu2
ssim_map = ((2 * mu1_mu2 + C1) *
(2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
(sigma1_sq + sigma2_sq + C2))
return float(ssim_map.mean())
def _ssim_cly(img1, img2):
    """Calculate SSIM (structural similarity) for single-channel (e.g. Y-channel) images.
    It is called by func:`calculate_ssim` when `test_y_channel` is True.
    Args:
        img1 (ndarray): Single-channel image with range [0, 255].
        img2 (ndarray): Single-channel image with range [0, 255].
    Returns:
        float: ssim result.
    """
    assert len(img1.shape) == 2 and len(img2.shape) == 2
C1 = (0.01 * 255)**2
C2 = (0.03 * 255)**2
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
kernel = cv2.getGaussianKernel(11, 1.5)
# print(kernel)
window = np.outer(kernel, kernel.transpose())
bt = cv2.BORDER_REPLICATE
mu1 = cv2.filter2D(img1, -1, window, borderType=bt)
mu2 = cv2.filter2D(img2, -1, window,borderType=bt)
mu1_sq = mu1**2
mu2_sq = mu2**2
mu1_mu2 = mu1 * mu2
sigma1_sq = cv2.filter2D(img1**2, -1, window, borderType=bt) - mu1_sq
sigma2_sq = cv2.filter2D(img2**2, -1, window, borderType=bt) - mu2_sq
sigma12 = cv2.filter2D(img1 * img2, -1, window, borderType=bt) - mu1_mu2
ssim_map = ((2 * mu1_mu2 + C1) *
(2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
(sigma1_sq + sigma2_sq + C2))
return ssim_map.mean()
def calculate_ssim(img1,
img2,
crop_border,
input_order='HWC',
test_y_channel=False):
"""Calculate SSIM (structural similarity).
Ref:
Image quality assessment: From error visibility to structural similarity
The results are the same as that of the official released MATLAB code in
https://ece.uwaterloo.ca/~z70wang/research/ssim/.
For three-channel images, SSIM is calculated for each channel and then
averaged.
Args:
img1 (ndarray): Images with range [0, 255].
img2 (ndarray): Images with range [0, 255].
crop_border (int): Cropped pixels in each edge of an image. These
pixels are not involved in the SSIM calculation.
input_order (str): Whether the input order is 'HWC' or 'CHW'.
Default: 'HWC'.
test_y_channel (bool): Test on Y channel of YCbCr. Default: False.
Returns:
float: ssim result.
"""
assert img1.shape == img2.shape, (
        f'Image shapes are different: {img1.shape}, {img2.shape}.')
if input_order not in ['HWC', 'CHW']:
raise ValueError(
f'Wrong input_order {input_order}. Supported input_orders are '
'"HWC" and "CHW"')
if type(img1) == torch.Tensor:
if len(img1.shape) == 4:
img1 = img1.squeeze(0)
img1 = img1.detach().cpu().numpy().transpose(1,2,0)
if type(img2) == torch.Tensor:
if len(img2.shape) == 4:
img2 = img2.squeeze(0)
img2 = img2.detach().cpu().numpy().transpose(1,2,0)
img1 = reorder_image(img1, input_order=input_order)
img2 = reorder_image(img2, input_order=input_order)
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
if crop_border != 0:
img1 = img1[crop_border:-crop_border, crop_border:-crop_border, ...]
img2 = img2[crop_border:-crop_border, crop_border:-crop_border, ...]
if test_y_channel:
img1 = to_y_channel(img1)
img2 = to_y_channel(img2)
return _ssim_cly(img1[..., 0], img2[..., 0])
ssims = []
# ssims_before = []
# skimage_before = skimage.metrics.structural_similarity(img1, img2, data_range=255., multichannel=True)
# print('.._skimage',
# skimage.metrics.structural_similarity(img1, img2, data_range=255., multichannel=True))
max_value = 1 if img1.max() <= 1 else 255
with torch.no_grad():
final_ssim = _ssim_3d(img1, img2, max_value)
ssims.append(final_ssim)
# for i in range(img1.shape[2]):
# ssims_before.append(_ssim(img1, img2))
# print('..ssim mean , new {:.4f} and before {:.4f} .... skimage before {:.4f}'.format(np.array(ssims).mean(), np.array(ssims_before).mean(), skimage_before))
# ssims.append(skimage.metrics.structural_similarity(img1[..., i], img2[..., i], multichannel=False))
return np.array(ssims).mean()
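if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module). Only PSNR and the 2-D SSIM
    # helper are exercised here: calculate_ssim's default path goes through _ssim_3d, which
    # moves tensors to CUDA and therefore needs a GPU.
    rng = np.random.default_rng(0)
    img_a = rng.integers(0, 256, size=(64, 64, 3)).astype(np.float64)
    img_b = np.clip(img_a + rng.normal(0., 5., size=img_a.shape), 0., 255.)
    print(calculate_psnr(img_a, img_b, crop_border=0))         # finite PSNR in dB
    print(calculate_psnr(img_a, img_a.copy(), crop_border=0))  # inf for identical images
    print(_ssim(img_a[..., 0], img_b[..., 0]))                 # high SSIM for a mildly noisy copy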
|
tests/sockets/socket_relay.py | albertobarri/idk | 9,724 | 12705434 | # Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
"""Listens on 2 ports and relays between them.
Listens on ports A and B. When someone connects to port A and then
sends some data to port A, that data is sent to whoever is
connected to port B, and vice versa.
This is different from, say, socat, which listens on one port,
makes a connection to another port, and relays bidirectionally.
Here we need to actually listen on both ports.
"""
import sys
import socket
import time
import threading
ports = [int(sys.argv[1]), int(sys.argv[2])]
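# Example invocation (assumed, not part of the original script):
#   python socket_relay.py 8990 8991
# A client that connects to 127.0.0.1:8990 and writes bytes will have them relayed to
# whichever client is connected to 127.0.0.1:8991, and vice versa.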
class Listener(threading.Thread):
def run(self):
self.conn = None
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
global ports
port = ports[0]
ports = ports[1:]
print('listener binding to ', port)
s.bind(('127.0.0.1', port))
s.listen(1)
print('listener', port, 'waiting for connection')
conn, addr = s.accept()
self.conn = conn
while True:
time.sleep(0.5)
print('listener', port, 'waiting for data')
data = conn.recv(20 * 1024)
if not data:
continue
while not self.other.conn:
print('listener', port, 'waiting for other connection in order to send data')
time.sleep(1)
print('listener', port, 'sending data', len(data))
self.other.conn.send(data)
in_listener = Listener()
in_listener.daemon = True
in_listener.start()
out_listener = Listener()
out_listener.daemon = True
out_listener.start()
in_listener.other = out_listener
out_listener.other = in_listener
while True:
time.sleep(1)
|
seahub/api2/endpoints/ocm_repos.py | weimens/seahub | 101 | 12705467 | <reponame>weimens/seahub
import logging
import requests
import json
from rest_framework import status
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error
from seahub.ocm.models import OCMShareReceived
from seahub.ocm.settings import VIA_REPO_TOKEN_URL
from seahub.constants import PERMISSION_READ_WRITE
logger = logging.getLogger(__name__)
def send_get_request(url, params=None, headers=None):
    """Send a GET request to the provider and return the JSON-decoded response body."""
    response = requests.get(url, params=params, headers=headers)
    return json.loads(response.text)
class OCMReposDirView(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticated,)
throttle_classes = (UserRateThrottle,)
def get(self, request, provider_id, repo_id):
"""
Send request to Provider to get repo item list
"""
path = request.GET.get('path', '/')
with_thumbnail = request.GET.get('with_thumbnail', 'false')
if with_thumbnail not in ('true', 'false'):
error_msg = 'with_thumbnail invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
ocm_share_received = OCMShareReceived.objects.filter(provider_id=provider_id, repo_id=repo_id).first()
if not ocm_share_received:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if ocm_share_received.to_user != request.user.username:
error_msg = 'permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
url = ocm_share_received.from_server_url + VIA_REPO_TOKEN_URL['DIR']
params = {
'path': path,
'with_thumbnail': with_thumbnail,
}
headers = {'Authorization': 'token ' + ocm_share_received.shared_secret}
try:
resp = send_get_request(url, params=params, headers=headers)
except Exception as e:
            logger.error(e)
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
return Response(resp)
class OCMReposDownloadLinkView(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticated,)
throttle_classes = (UserRateThrottle,)
def get(self, request, provider_id, repo_id):
"""
Send request to Provider to get download link
"""
path = request.GET.get('path', '/')
ocm_share_received = OCMShareReceived.objects.filter(provider_id=provider_id, repo_id=repo_id).first()
if not ocm_share_received:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if ocm_share_received.to_user != request.user.username:
error_msg = 'permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
url = ocm_share_received.from_server_url + VIA_REPO_TOKEN_URL['DOWNLOAD_LINK']
params = {
'path': path,
}
headers = {'Authorization': 'token ' + ocm_share_received.shared_secret}
try:
resp = send_get_request(url, params=params, headers=headers)
except Exception as e:
            logger.error(e)
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
return Response(resp)
class OCMReposUploadLinkView(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticated,)
throttle_classes = (UserRateThrottle,)
def get(self, request, provider_id, repo_id):
"""
Send request to Provider to get upload link
"""
path = request.GET.get('path', '/')
ocm_share_received = OCMShareReceived.objects.filter(provider_id=provider_id, repo_id=repo_id).first()
if not ocm_share_received:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if ocm_share_received.to_user != request.user.username:
error_msg = 'permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if ocm_share_received.permission != PERMISSION_READ_WRITE:
error_msg = 'permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
url = ocm_share_received.from_server_url + VIA_REPO_TOKEN_URL['UPLOAD_LINK']
params = {
'path': path,
'from': 'web',
}
headers = {'Authorization': 'token ' + ocm_share_received.shared_secret}
try:
resp = send_get_request(url, params=params, headers=headers)
except Exception as e:
            logger.error(e)
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
return Response(resp)
|
setup.py | ZeekoZhu/viscm | 122 | 12705478 | <reponame>ZeekoZhu/viscm<filename>setup.py
from setuptools import setup, find_packages
import sys
import os.path
import numpy as np
# Must be one line or PyPI will cut it off
DESC = ("A colormap tool")
LONG_DESC = open("README.rst").read()
setup(
name="viscm",
version="0.9",
description=DESC,
long_description=LONG_DESC,
author="<NAME>, <NAME>",
author_email="<EMAIL>, <EMAIL>",
url="https://github.com/bids/viscm",
license="MIT",
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
    ],
packages=find_packages(),
install_requires=["numpy", "matplotlib", "colorspacious"],
package_data={'viscm': ['examples/*']},
)
|
pixiedust/__init__.py | elgalu/pixiedust | 598 | 12705495 | <reponame>elgalu/pixiedust<gh_stars>100-1000
# -------------------------------------------------------------------------------
# Copyright IBM Corp. 2017
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------
__all__=['packageManager','display','services','utils']
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
        # Check if we have a Python shell available; if not, use our ProxyShell
get_ipython()
except NameError:
from .proxyShell import ProxyInteractiveShell
ProxyInteractiveShell.instance()
#shortcut to logging
import pixiedust.utils.pdLogging as pdLogging
logger = pdLogging.getPixiedustLogger()
getLogger = pdLogging.getLogger
from pixiedust.utils.environment import Environment
if Environment.hasSpark:
#shortcut to packageManager
import pixiedust.packageManager as packageManager
printAllPackages=packageManager.printAllPackages
installPackage=packageManager.installPackage
uninstallPackage=packageManager.uninstallPackage
try:
from py4j.protocol import Py4JJavaError
#javaBridge and scalaBridge only work in the driver, not an executor
from pixiedust.utils.javaBridge import *
from pixiedust.utils.scalaBridge import *
#shortcut to Spark job monitoring
from pixiedust.utils.sparkJobProgressMonitor import enableSparkJobProgressMonitor
enableJobMonitor = enableSparkJobProgressMonitor
except (NameError, Py4JJavaError):
#IPython not available we must be in a spark executor
pass
#automated import into the user namespace
try:
from IPython.core.getipython import get_ipython
from pixiedust.display import display
import pixiedust.services
if "display" not in get_ipython().user_ns:
#be nice, only set the display variable on the user namespace if it's not already taken
get_ipython().user_ns["display"]=display
from pixiedust.utils.sampleData import sampleData
import pixiedust.apps.debugger
from pixiedust.utils import checkVersion
from pixiedust.utils.storage import optOut, optIn
checkVersion()
except (NameError):
#IPython not available we must be in a spark executor
pass
|
sealion/neural_networks/loss.py | anish-lakkapragada/sealion | 341 | 12705522 | """
@author : <NAME>
@date : 1 - 23 - 2021
The loss functions are really simple. You just need to understand whether it is a classification or regression task.
All losses will be set in the model.finalize() method.
"""
import numpy as np
import warnings
from scipy.special import softmax as sfmx_indiv
warnings.filterwarnings("ignore", category=RuntimeWarning)
class Loss:
"""Base loss class."""
def __init__(self):
self.SGD = False
def loss(self, y, y_pred):
pass
def grad(self, y, y_pred):
pass
class MSE(Loss):
"""
    MSE stands for mean-squared error, and it's the loss you'll want to use for regression. To set it in the model.finalize()
method just do:
>>> from sealion import neural_networks as nn
>>> model = nn.models.NeuralNetwork(layers_list)
>>> model.finalize(loss=nn.loss.MSE(), optimizer=...)
and you're all set!
"""
def __init__(self):
super().__init__()
self.type_regression = True
def loss(self, y, y_pred):
error = np.sum(np.power(y_pred - y, 2)) / (2 * len(y))
return error
def grad(self, y, y_pred):
return (y_pred - y) / len(y)
def softmax(x):
softmax_output = np.apply_along_axis(sfmx_indiv, 1, x)
return softmax_output
class CrossEntropy(Loss):
"""
This loss function is for classification problems. I know there's a binary log loss and then a multi-category cross entropy
loss function for classification, but they're essentially the same thing so I thought using one class would make it easier.
Remember to use one-hot encoded data for this to work (check out utils).
If you are using this loss function, make sure your last layer is Softmax and vice versa. Otherwise, annoying error
messages will occur.
To set this in the ``model.finalize()`` method do:
>>> from sealion import neural_networks as nn
>>> model = nn.models.NeuralNetwork()
>>> # ... add the layers ...
>>> model.add(nn.layers.Softmax()) # last layer has to be softmax
>>> model.finalize(loss=nn.loss.CrossEntropy(), optimizer=...)
and that's all there is to it.
"""
def __init__(self):
super().__init__()
self.type_regression = False
def loss(self, y, y_pred):
        return -np.sum(y * np.log(y_pred + 1e-20)) / len(y)  # cross-entropy is the negative log-likelihood, so lower is better
def grad(self, y, y_pred):
y_pred = softmax(y_pred)
        return (y_pred - y) / len(y)  # apply softmax, then return the combined softmax + cross-entropy gradient
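if __name__ == "__main__":
    # Minimal numeric sketch (not part of the original module): sanity-check the losses on
    # tiny hand-made arrays.
    y_true = np.array([[1.0], [2.0], [3.0]])
    y_hat = np.array([[1.5], [2.0], [2.5]])
    mse = MSE()
    print(mse.loss(y_true, y_hat))   # 0.5 * mean squared error = 0.0833...
    print(mse.grad(y_true, y_hat))   # (y_hat - y_true) / len(y_true)
    y_onehot = np.array([[1.0, 0.0], [0.0, 1.0]])
    probs = np.array([[0.9, 0.1], [0.2, 0.8]])
    print(CrossEntropy().loss(y_onehot, probs))  # cross-entropy of confident, correct predictions (≈ 0.16)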
|
python/part2/initialize.py | enjalot/adventures_in_opencl | 152 | 12705539 | from OpenGL.GL import *
import timing
timings = timing.Timing()
@timings
def fountain_np(num):
"""numpy way of initializing data using ufuncs instead of loops"""
import numpy
pos = numpy.ndarray((num, 4), dtype=numpy.float32)
col = numpy.ndarray((num, 4), dtype=numpy.float32)
vel = numpy.ndarray((num, 4), dtype=numpy.float32)
pos[:,0] = numpy.sin(numpy.arange(0., num) * 2.001 * numpy.pi / num)
pos[:,0] *= numpy.random.random_sample((num,)) / 3. + .2
pos[:,1] = numpy.cos(numpy.arange(0., num) * 2.001 * numpy.pi / num)
pos[:,1] *= numpy.random.random_sample((num,)) / 3. + .2
pos[:,2] = 0.
pos[:,3] = 1.
col[:,0] = 0.
col[:,1] = 1.
col[:,2] = 0.
col[:,3] = 1.
vel[:,0] = pos[:,0] * 2.
vel[:,1] = pos[:,1] * 2.
vel[:,2] = 3.
vel[:,3] = numpy.random.random_sample((num, ))
return pos, col, vel
@timings
def fountain_loopy(num):
"""This is a slower way of initializing the points (by 10x for large num)
    but more illustrative of what's going on"""
from math import sqrt, sin, cos
import numpy
pos = numpy.ndarray((num, 4), dtype=numpy.float32)
col = numpy.ndarray((num, 4), dtype=numpy.float32)
vel = numpy.ndarray((num, 4), dtype=numpy.float32)
import random
random.seed()
for i in xrange(0, num):
rad = random.uniform(.2, .5);
x = sin(2*3.14 * i/num)*rad
z = 0.
y = cos(2*3.14 * i/num)*rad
pos[i,0] = x
pos[i,1] = y
pos[i,2] = z
pos[i,3] = 1.
col[i,0] = 0.
col[i,1] = 1.
col[i,2] = 0.
col[i,3] = 1.
life = random.random()
vel[i,0] = x*2.
vel[i,1] = y*2.
vel[i,2] = 3.
vel[i,3] = life
return pos, col, vel
def fountain(num):
    """Initialize position, color and velocity arrays; we also make Vertex
    Buffer Objects for the position and color arrays."""
#pos, col, vel = fountain_loopy(num)
pos, col, vel = fountain_np(num)
print timings
#create the Vertex Buffer Objects
from OpenGL.arrays import vbo
pos_vbo = vbo.VBO(data=pos, usage=GL_DYNAMIC_DRAW, target=GL_ARRAY_BUFFER)
pos_vbo.bind()
col_vbo = vbo.VBO(data=col, usage=GL_DYNAMIC_DRAW, target=GL_ARRAY_BUFFER)
col_vbo.bind()
return (pos_vbo, col_vbo, vel)
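if __name__ == "__main__":
    # Small sanity check (not part of the original module): exercise the pure-numpy
    # initializer without an OpenGL context. fountain() itself is not called here because
    # creating VBOs requires a live GL context.
    pos, col, vel = fountain_np(256)
    assert pos.shape == (256, 4) and col.shape == (256, 4) and vel.shape == (256, 4)
    print pos.shape, col.shape, vel.shape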
|
maro/cli/grass/executors/grass_azure_executor.py | yangboz/maro | 598 | 12705566 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import collections
import json
import os
import secrets
import shutil
import string
import threading
import time
from multiprocessing.pool import ThreadPool
import yaml
from maro.cli.grass.executors.grass_executor import GrassExecutor
from maro.cli.grass.utils.file_synchronizer import FileSynchronizer
from maro.cli.grass.utils.master_api_client import MasterApiClientV1
from maro.cli.grass.utils.params import ContainerStatus, GrassParams, GrassPaths, NodeStatus
from maro.cli.utils.azure_controller import AzureController
from maro.cli.utils.deployment_validator import DeploymentValidator
from maro.cli.utils.details_reader import DetailsReader
from maro.cli.utils.details_writer import DetailsWriter
from maro.cli.utils.name_creator import NameCreator
from maro.cli.utils.params import GlobalParams, GlobalPaths
from maro.utils.exception.cli_exception import BadRequestError
from maro.utils.logger import CliLogger
logger = CliLogger(name=__name__)
class GrassAzureExecutor(GrassExecutor):
"""Executor for grass/azure mode.
See https://maro.readthedocs.io/en/latest/key_components/orchestration.html for reference.
"""
def __init__(self, cluster_name: str):
super().__init__(cluster_details=DetailsReader.load_cluster_details(cluster_name=cluster_name))
# Cloud configs
self.subscription = self.cluster_details["cloud"]["subscription"]
self.resource_group = self.cluster_details["cloud"]["resource_group"]
self.location = self.cluster_details["cloud"]["location"]
self.default_username = self.cluster_details["cloud"]["default_username"]
# Connection configs
self.ssh_port = self.cluster_details["connection"]["ssh"]["port"]
self.api_server_port = self.cluster_details["connection"]["api_server"]["port"]
# maro grass create
@staticmethod
def create(create_deployment: dict) -> None:
"""Create MARO Cluster with create_deployment.
Args:
create_deployment (dict): create_deployment of grass/azure. See lib/deployments/internal for reference.
Returns:
None.
"""
logger.info("Creating cluster")
# Get standardized cluster_details
cluster_details = GrassAzureExecutor._standardize_cluster_details(create_deployment=create_deployment)
cluster_name = cluster_details["name"]
if os.path.isdir(f"{GlobalPaths.ABS_MARO_CLUSTERS}/{cluster_name}"):
            raise BadRequestError(f"Cluster '{cluster_name}' already exists")
# Start creating
try:
GrassAzureExecutor._create_resource_group(cluster_details=cluster_details)
GrassAzureExecutor._create_vnet(cluster_details=cluster_details)
# Simultaneously capture image and init master
build_node_image_thread = threading.Thread(
target=GrassAzureExecutor._build_node_image,
args=(cluster_details,)
)
build_node_image_thread.start()
create_and_init_master_thread = threading.Thread(
target=GrassAzureExecutor._create_and_init_master,
args=(cluster_details,)
)
create_and_init_master_thread.start()
build_node_image_thread.join()
create_and_init_master_thread.join()
            # Save the cluster details locally after initialization
DetailsWriter.save_cluster_details(cluster_name=cluster_name, cluster_details=cluster_details)
except Exception as e:
# If failed, remove details folder, then raise
shutil.rmtree(path=f"{GlobalPaths.ABS_MARO_CLUSTERS}/{cluster_name}")
logger.error_red(f"Failed to create cluster '{cluster_name}'")
raise e
logger.info_green(f"Cluster '{cluster_name}' is created")
@staticmethod
def _standardize_cluster_details(create_deployment: dict) -> dict:
"""Standardize cluster_details from create_deployment.
We use create_deployment to build cluster_details (they share the same keys structure).
Args:
create_deployment (dict): create_deployment of grass/azure. See lib/deployments/internal for reference.
Returns:
dict: standardized cluster_details.
"""
samba_password = "".join(secrets.choice(string.ascii_letters + string.digits) for _ in range(20))
        optional_key_to_value = {
            "root['master']['redis']": {"port": GlobalParams.DEFAULT_REDIS_PORT},
            "root['master']['redis']['port']": GlobalParams.DEFAULT_REDIS_PORT,
            "root['master']['fluentd']": {"port": GlobalParams.DEFAULT_FLUENTD_PORT},
            "root['master']['fluentd']['port']": GlobalParams.DEFAULT_FLUENTD_PORT,
            "root['master']['samba']": {"password": samba_password},
            "root['master']['samba']['password']": samba_password,
"root['connection']": {
"ssh": {"port": GlobalParams.DEFAULT_SSH_PORT},
"api_server": {"port": GrassParams.DEFAULT_API_SERVER_PORT},
},
"root['connection']['ssh']": {"port": GlobalParams.DEFAULT_SSH_PORT},
"root['connection']['ssh']['port']": GlobalParams.DEFAULT_SSH_PORT,
"root['connection']['api_server']": {"port": GrassParams.DEFAULT_API_SERVER_PORT},
"root['connection']['api_server']['port']": GrassParams.DEFAULT_API_SERVER_PORT
}
with open(f"{GrassPaths.ABS_MARO_GRASS_LIB}/deployments/internal/grass_azure_create.yml") as fr:
create_deployment_template = yaml.safe_load(fr)
DeploymentValidator.validate_and_fill_dict(
template_dict=create_deployment_template,
actual_dict=create_deployment,
optional_key_to_value=optional_key_to_value
)
# Init runtime fields.
create_deployment["id"] = NameCreator.create_cluster_id()
return create_deployment
@staticmethod
def _create_resource_group(cluster_details: dict) -> None:
"""Create the resource group if it does not exist.
Args:
cluster_details (dict): details of the cluster.
Returns:
None.
"""
# Load params
subscription = cluster_details["cloud"]["subscription"]
resource_group = cluster_details["cloud"]["resource_group"]
# Check if Azure CLI is installed, and print version
version_details = AzureController.get_version()
logger.info_green(f"Your Azure CLI version: {version_details['azure-cli']}")
# Set subscription id
AzureController.set_subscription(subscription=subscription)
logger.info_green(f"Set subscription to '{subscription}'")
# Check and create resource group
resource_group_details = AzureController.get_resource_group(resource_group=resource_group)
if resource_group_details:
logger.warning_yellow(f"Azure resource group '{resource_group}' already exists")
else:
AzureController.create_resource_group(
resource_group=resource_group,
location=cluster_details["cloud"]["location"]
)
logger.info_green(f"Resource group '{resource_group}' is created")
@staticmethod
def _create_vnet(cluster_details: dict) -> None:
"""Create vnet for the MARO Cluster.
Args:
cluster_details (dict): details of the MARO Cluster.
Returns:
None.
"""
logger.info("Creating vnet")
# Create ARM parameters and start deployment
template_file_path = f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_vnet/template.json"
parameters_file_path = (
f"{GlobalPaths.ABS_MARO_CLUSTERS}/{cluster_details['name']}/vnet/arm_create_vnet_parameters.json"
)
ArmTemplateParameterBuilder.create_vnet(
cluster_details=cluster_details,
export_path=parameters_file_path
)
AzureController.start_deployment(
resource_group=cluster_details["cloud"]["resource_group"],
deployment_name="vnet",
template_file_path=template_file_path,
parameters_file_path=parameters_file_path
)
logger.info_green("Vnet is created")
@staticmethod
def _build_node_image(cluster_details: dict) -> None:
"""Build Azure Image for MARO Node.
The built image will contain required Node runtime environment including GPU support.
See https://docs.microsoft.com/en-us/azure/virtual-machines/linux/capture-image for reference.
Args:
cluster_details (dict): details of the MARO Cluster.
Returns:
None.
"""
logger.info("Building MARO Node image")
# Build params
resource_name = "build-node-image"
image_name = f"{cluster_details['id']}-node-image"
vm_name = f"{cluster_details['id']}-{resource_name}-vm"
# Create ARM parameters and start deployment.
# For simplicity, we use master_node_size as the size of build_node_image_vm here
template_file_path = f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_build_node_image_vm/template.json"
parameters_file_path = (
f"{GlobalPaths.ABS_MARO_CLUSTERS}/{cluster_details['name']}"
f"/build_node_image_vm/arm_create_build_node_image_vm_parameters.json"
)
ArmTemplateParameterBuilder.create_build_node_image_vm(
cluster_details=cluster_details,
node_size=cluster_details["master"]["node_size"],
export_path=parameters_file_path
)
AzureController.start_deployment(
resource_group=cluster_details["cloud"]["resource_group"],
deployment_name=resource_name,
template_file_path=template_file_path,
parameters_file_path=parameters_file_path
)
# Gracefully wait
time.sleep(10)
# Get public ip address
ip_addresses = AzureController.list_ip_addresses(
resource_group=cluster_details["cloud"]["resource_group"],
vm_name=vm_name
)
public_ip_address = ip_addresses[0]["virtualMachine"]["network"]["publicIpAddresses"][0]["ipAddress"]
# Make sure build_node_image_vm is able to connect
GrassAzureExecutor.retry_connection(
node_username=cluster_details["cloud"]["default_username"],
node_hostname=public_ip_address,
node_ssh_port=cluster_details["connection"]["ssh"]["port"]
)
# Run init image script
FileSynchronizer.copy_files_to_node(
local_path=f"{GrassPaths.MARO_GRASS_LIB}/scripts/build_node_image_vm/init_build_node_image_vm.py",
remote_dir="~/",
node_username=cluster_details["cloud"]["default_username"],
node_hostname=public_ip_address,
node_ssh_port=cluster_details["connection"]["ssh"]["port"]
)
GrassAzureExecutor.remote_init_build_node_image_vm(
node_username=cluster_details["cloud"]["default_username"],
node_hostname=public_ip_address,
node_ssh_port=cluster_details["connection"]["ssh"]["port"]
)
# Extract image
AzureController.deallocate_vm(resource_group=cluster_details["cloud"]["resource_group"], vm_name=vm_name)
AzureController.generalize_vm(resource_group=cluster_details["cloud"]["resource_group"], vm_name=vm_name)
AzureController.create_image_from_vm(
resource_group=cluster_details["cloud"]["resource_group"],
image_name=image_name,
vm_name=vm_name
)
# Delete resources
GrassAzureExecutor._delete_resources(
resource_group=cluster_details["cloud"]["resource_group"],
resource_name=resource_name,
cluster_id=cluster_details["id"]
)
logger.info_green("MARO Node Image is built")
@staticmethod
def _create_and_init_master(cluster_details: dict) -> None:
"""Create and init MARO Master.
Args:
cluster_details (dict): details of the MARO Cluster.
Returns:
None.
"""
logger.info("Creating MARO Master")
GrassAzureExecutor._create_master_vm(cluster_details=cluster_details)
GrassAzureExecutor._init_master(cluster_details=cluster_details)
GrassAzureExecutor._create_user(cluster_details=cluster_details)
        # Remotely create the master and cluster records after initialization
master_api_client = MasterApiClientV1(
master_hostname=cluster_details["master"]["public_ip_address"],
master_api_server_port=cluster_details["master"]["api_server"]["port"],
user_id=cluster_details["user"]["id"],
master_to_dev_encryption_private_key=cluster_details["user"]["master_to_dev_encryption_private_key"],
dev_to_master_encryption_public_key=cluster_details["user"]["dev_to_master_encryption_public_key"],
dev_to_master_signing_private_key=cluster_details["user"]["dev_to_master_signing_private_key"]
)
master_api_client.create_master(master_details=cluster_details["master"])
master_api_client.create_cluster(cluster_details=cluster_details)
logger.info_green("MARO Master is created")
@staticmethod
def _create_master_vm(cluster_details: dict) -> None:
"""Create MARO Master VM.
Args:
cluster_details (dict): details of the MARO Cluster.
Returns:
None.
"""
logger.info("Creating Master VM")
# Build params
vm_name = f"{cluster_details['id']}-master-vm"
# Create ARM parameters and start deployment
template_file_path = f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_master/template.json"
parameters_file_path = (
f"{GlobalPaths.ABS_MARO_CLUSTERS}/{cluster_details['name']}"
f"/master/arm_create_master_parameters.json"
)
ArmTemplateParameterBuilder.create_master(
cluster_details=cluster_details,
node_size=cluster_details["master"]["node_size"],
export_path=parameters_file_path
)
AzureController.start_deployment(
resource_group=cluster_details["cloud"]["resource_group"],
deployment_name="master",
template_file_path=template_file_path,
parameters_file_path=parameters_file_path
)
# Get master IP addresses
ip_addresses = AzureController.list_ip_addresses(
resource_group=cluster_details["cloud"]["resource_group"],
vm_name=vm_name
)
public_ip_address = ip_addresses[0]["virtualMachine"]["network"]["publicIpAddresses"][0]["ipAddress"]
private_ip_address = ip_addresses[0]["virtualMachine"]["network"]["privateIpAddresses"][0]
# Get other params and fill them to master_details
hostname = vm_name
username = cluster_details["cloud"]["default_username"]
cluster_details["master"]["hostname"] = hostname
cluster_details["master"]["username"] = username
cluster_details["master"]["public_ip_address"] = public_ip_address
cluster_details["master"]["private_ip_address"] = private_ip_address
cluster_details["master"]["resource_name"] = vm_name
cluster_details["master"]["ssh"] = {"port": cluster_details["connection"]["ssh"]["port"]}
cluster_details["master"]["api_server"] = {"port": cluster_details["connection"]["api_server"]["port"]}
logger.info_green(f"You can login to your master node with: {username}@{public_ip_address}")
logger.info_green("Master VM is created")
# maro grass delete
def delete(self) -> None:
"""Delete the MARO Cluster.
Returns:
None.
"""
logger.info(f"Deleting cluster '{self.cluster_name}'")
# Get resource list
resource_list = AzureController.list_resources(resource_group=self.resource_group)
# Filter resources
deletable_ids = []
for resource_info in resource_list:
if resource_info["name"].startswith(self.cluster_id):
deletable_ids.append(resource_info["id"])
# Delete resources
if len(deletable_ids) > 0:
AzureController.delete_resources(resource_ids=deletable_ids)
# Delete cluster folder
shutil.rmtree(f"{GlobalPaths.ABS_MARO_CLUSTERS}/{self.cluster_name}")
logger.info_green(f"Cluster '{self.cluster_name}' is deleted")
# maro grass node
def scale_node(self, replicas: int, node_size: str):
"""Scale up/down MARO Node using predefined Node Image.
Args:
replicas (int): desired number of MARO Node in specific node_size.
node_size (str): size of the MARO Node VM, see https://docs.microsoft.com/en-us/azure/virtual-machines/sizes
for reference.
Returns:
None.
"""
# Load details
nodes_details = self.master_api_client.list_nodes()
# Init node_size_to_count
node_size_to_count = collections.defaultdict(lambda: 0)
for node_details in nodes_details:
node_size_to_count[node_details["node_size"]] += 1
# Get node_size_to_spec
node_size_to_spec = self._get_node_size_to_spec()
if node_size not in node_size_to_spec:
raise BadRequestError(f"Invalid node_size '{node_size}'")
# Scale nodes
if node_size_to_count[node_size] > replicas:
self._delete_nodes(
num=node_size_to_count[node_size] - replicas,
node_size=node_size
)
elif node_size_to_count[node_size] < replicas:
self._create_nodes(
num=replicas - node_size_to_count[node_size],
node_size=node_size
)
else:
            logger.warning_yellow("Replica count already matches; nothing to create or delete")
def _create_nodes(self, num: int, node_size: str) -> None:
"""Create MARO Nodes in parallel.
Args:
num (int): number of MARO Nodes (with specific node_size) to create.
node_size (str): size of the MARO Node VM.
Returns:
None.
"""
logger.info(f"Scaling up {num}")
# Parallel create
with ThreadPool(GlobalParams.PARALLELS) as pool:
pool.starmap(
self._create_node,
[[node_size]] * num
)
def _create_node(self, node_size: str) -> None:
"""Create a MARO Node.
Args:
node_size (str): size of the MARO Node VM.
Returns:
None.
"""
# Generate node name
node_name = NameCreator.create_node_name()
logger.info(message=f"Creating node '{node_name}'")
# Create node
join_cluster_deployment = self._create_vm(
node_name=node_name,
node_size=node_size
)
# Start joining cluster
self._join_cluster(node_details=join_cluster_deployment["node"])
logger.info_green(message=f"Node '{node_name}' is created")
def _delete_nodes(self, num: int, node_size: str) -> None:
"""Delete MARO Nodes in parallel.
Args:
num (int): number of MARO Nodes (with specific node_size) to delete.
node_size (str): size of the MARO Node VM.
Returns:
None.
"""
# Load details
nodes_details = self.master_api_client.list_nodes()
        # Get deletable_nodes and check them. TODO: consider adding a force (-f) option
deletable_nodes = []
for node_details in nodes_details:
if node_details["node_size"] == node_size and len(node_details["containers"]) == 0:
deletable_nodes.append(node_details["name"])
if len(deletable_nodes) >= num:
logger.info(f"Scaling down {num}")
# Parallel delete
params = [[deletable_node] for deletable_node in deletable_nodes[:num]]
with ThreadPool(GlobalParams.PARALLELS) as pool:
pool.starmap(
self._delete_node,
params
)
else:
            logger.warning_yellow(
                "Unable to scale down.\n"
                f"Only {len(deletable_nodes)} nodes are deletable, but {num} must be deleted to meet the requested replica count"
            )
def _create_vm(self, node_name: str, node_size: str) -> dict:
"""Create MARO Node VM.
Args:
node_name (str): name of the MARO Node. Also the id of the MARO Node.
node_size (str): size of the MARO Node VM.
Returns:
dict: join_cluster_deployment that needed in "join cluster" operation.
See /lib/scripts/join_cluster.py for reference.
"""
logger.info(message=f"Creating VM '{node_name}'")
# Create ARM parameters and start deployment
os.makedirs(name=f"{GlobalPaths.ABS_MARO_CLUSTERS}/{self.cluster_name}/nodes/{node_name}", exist_ok=True)
template_file_path = f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_node/template.json"
parameters_file_path = (
f"{GlobalPaths.ABS_MARO_CLUSTERS}/{self.cluster_name}/nodes/{node_name}/arm_create_node_parameters.json"
)
ArmTemplateParameterBuilder.create_node(
node_name=node_name,
cluster_details=self.cluster_details,
node_size=node_size,
export_path=parameters_file_path
)
AzureController.start_deployment(
resource_group=self.resource_group,
deployment_name=node_name,
template_file_path=template_file_path,
parameters_file_path=parameters_file_path
)
# Get node IP addresses
ip_addresses = AzureController.list_ip_addresses(
resource_group=self.resource_group,
vm_name=f"{self.cluster_id}-{node_name}-vm"
)
logger.info_green(f"VM '{node_name}' is created")
# Build join_cluster_deployment.
join_cluster_deployment = {
"mode": "grass/azure",
"master": {
"private_ip_address": self.master_private_ip_address,
"api_server": {
"port": self.master_api_server_port
},
"redis": {
"port": self.master_redis_port
}
},
"node": {
"name": node_name,
"id": node_name,
"username": self.default_username,
"public_ip_address": ip_addresses[0]["virtualMachine"]["network"]["publicIpAddresses"][0]["ipAddress"],
"private_ip_address": ip_addresses[0]["virtualMachine"]["network"]["privateIpAddresses"][0],
"node_size": node_size,
"resource_name": f"{self.cluster_id}-{node_name}-vm",
"hostname": f"{self.cluster_id}-{node_name}-vm",
"resources": {
"cpu": "all",
"memory": "all",
"gpu": "all"
},
"api_server": {
"port": self.api_server_port
},
"ssh": {
"port": self.ssh_port
}
},
"configs": {
"install_node_runtime": False,
"install_node_gpu_support": False
}
}
with open(
file=f"{GlobalPaths.ABS_MARO_CLUSTERS}/{self.cluster_name}/nodes/{node_name}/join_cluster_deployment.yml",
mode="w"
) as fw:
yaml.safe_dump(data=join_cluster_deployment, stream=fw)
return join_cluster_deployment
def _delete_node(self, node_name: str) -> None:
"""Delete the MARO Node.
Args:
node_name (str): name of the MARO Node.
Returns:
None.
"""
logger.info(f"Deleting node '{node_name}'")
# Delete node
self.master_api_client.delete_node(node_name=node_name)
# Delete resources
self._delete_resources(
resource_group=self.resource_group,
cluster_id=self.cluster_id,
resource_name=node_name
)
# Delete azure deployment
AzureController.delete_deployment(
resource_group=self.resource_group,
deployment_name=node_name
)
# Delete node related files
shutil.rmtree(f"{GlobalPaths.ABS_MARO_CLUSTERS}/{self.cluster_name}/nodes/{node_name}")
logger.info_green(f"Node '{node_name}' is deleted")
def _join_cluster(self, node_details: dict) -> None:
"""Join the cluster using node_details.
Args:
node_details (str): details of the MARO Node.
Returns:
None.
"""
node_name = node_details["name"]
logger.info(f"Node '{node_name}' is joining the cluster '{self.cluster_name}'")
# Make sure the node is able to connect
self.retry_connection(
node_username=node_details["username"],
node_hostname=node_details["public_ip_address"],
node_ssh_port=node_details["ssh"]["port"]
)
# Copy required files
local_path_to_remote_dir = {
f"{GlobalPaths.ABS_MARO_CLUSTERS}/{self.cluster_name}/nodes/{node_name}/join_cluster_deployment.yml":
f"{GlobalPaths.MARO_LOCAL}/clusters/{self.cluster_name}/nodes/{node_name}"
}
for local_path, remote_dir in local_path_to_remote_dir.items():
FileSynchronizer.copy_files_to_node(
local_path=local_path,
remote_dir=remote_dir,
node_username=node_details["username"],
node_hostname=node_details["public_ip_address"],
node_ssh_port=node_details["ssh"]["port"]
)
# Remote join cluster
self.remote_join_cluster(
node_username=node_details["username"],
node_hostname=node_details["public_ip_address"],
node_ssh_port=node_details["ssh"]["port"],
master_private_ip_address=self.master_private_ip_address,
master_api_server_port=self.master_api_server_port,
deployment_path=(
f"{GlobalPaths.MARO_LOCAL}/clusters/{self.cluster_name}/nodes/{node_name}"
f"/join_cluster_deployment.yml"
)
)
logger.info_green(f"Node '{node_name}' is joined")
def start_node(self, replicas: int, node_size: str):
"""Start MARO Node VMs in parallel.
Args:
replicas (int): number of MARO Node in specific node_size to start.
node_size (str): size of the MARO Node VM, see https://docs.microsoft.com/en-us/azure/virtual-machines/sizes
for reference.
Returns:
None.
"""
# Get nodes details
nodes_details = self.master_api_client.list_nodes()
# Get startable nodes
startable_nodes = []
for node_details in nodes_details:
if node_details["node_size"] == node_size and node_details["state"]["status"] == NodeStatus.STOPPED:
startable_nodes.append(node_details["name"])
# Check replicas
if len(startable_nodes) < replicas:
            raise BadRequestError(
                f"Not enough '{node_size}' nodes can be started, only {len(startable_nodes)} are able to start"
            )
# Parallel start
params = [[startable_node] for startable_node in startable_nodes[:replicas]]
with ThreadPool(GlobalParams.PARALLELS) as pool:
pool.starmap(
self._start_node,
params
)
def _start_node(self, node_name: str):
"""Start the MARO Node VM.
Args:
node_name (str): name of the MARO Node.
Returns:
None.
"""
logger.info(f"Starting node '{node_name}'")
# Start node vm
AzureController.start_vm(
resource_group=self.resource_group,
vm_name=f"{self.cluster_id}-{node_name}-vm"
)
# Start node
self.master_api_client.start_node(node_name=node_name)
logger.info_green(f"Node '{node_name}' is started")
def stop_node(self, replicas: int, node_size: str):
"""Stop MARO Node VMs in parallel.
Args:
replicas (int): number of MARO Node in specific node_size to stop.
node_size (str): size of the MARO Node VM,
see https://docs.microsoft.com/en-us/azure/virtual-machines/sizes for reference.
Returns:
None.
"""
# Get nodes details
nodes_details = self.master_api_client.list_nodes()
# Get stoppable nodes
stoppable_nodes_details = []
for node_details in nodes_details:
if (
node_details["node_size"] == node_size and
node_details["state"]["status"] == NodeStatus.RUNNING and
self._count_running_containers(node_details) == 0
):
stoppable_nodes_details.append(node_details)
# Check replicas
if len(stoppable_nodes_details) < replicas:
            raise BadRequestError(
                f"Not enough '{node_size}' nodes can be stopped, only {len(stoppable_nodes_details)} are stoppable"
            )
# Parallel stop
params = [[node_details] for node_details in stoppable_nodes_details[:replicas]]
with ThreadPool(GlobalParams.PARALLELS) as pool:
pool.starmap(
self._stop_node,
params
)
def _stop_node(self, node_details: dict):
"""Stop MARO Node VM.
Args:
node_details (dict): details of the MARO Node.
Returns:
None.
"""
node_name = node_details["name"]
logger.info(f"Stopping node '{node_name}'")
# Stop node
self.master_api_client.stop_node(node_name=node_name)
# Stop node vm
AzureController.stop_vm(
resource_group=self.resource_group,
vm_name=f"{self.cluster_id}-{node_name}-vm"
)
logger.info_green(f"Node '{node_name}' is stopped")
def _get_node_size_to_spec(self) -> dict:
"""Get node_size to spec mapping of Azure VM.
Returns:
dict: node_size to spec mapping.
"""
# List available sizes for VMs
specs = AzureController.list_vm_sizes(location=self.location)
# Get node_size_to_spec
node_size_to_spec = {}
for spec in specs:
node_size_to_spec[spec["name"]] = spec
return node_size_to_spec
@staticmethod
def _count_running_containers(node_details: dict) -> int:
"""Count running containers based on field "Status".
Args:
node_details (dict): details of the MARO Node.
Returns:
int: num of running containers.
"""
# Extract details
containers_details = node_details["containers"]
# Do counting
count = 0
for container_details in containers_details:
if container_details["Status"] == ContainerStatus.RUNNING:
count += 1
return count
# maro grass clean
def clean(self) -> None:
"""Delete running jobs, schedule and containers of the MARO Cluster.
Returns:
None.
"""
# Remote clean jobs
self.master_api_client.clean_jobs()
# Utils
@staticmethod
def _delete_resources(resource_group: str, cluster_id: int, resource_name: str) -> None:
"""Delete resources in the resource group.
Args:
resource_group (str): name of the resource group.
cluster_id (id): id of the MARO Cluster.
resource_name (str): name of the MARO Resource. e.g. node_name
Returns:
None.
"""
# Get resource list
resource_list = AzureController.list_resources(resource_group=resource_group)
# Filter resources
deletable_ids = []
for resource_info in resource_list:
if resource_info["name"].startswith(f"{cluster_id}-{resource_name}"):
deletable_ids.append(resource_info["id"])
# Delete resources
if len(deletable_ids) > 0:
AzureController.delete_resources(resource_ids=deletable_ids)
class ArmTemplateParameterBuilder:
"""Builder for ARM Template Parameters.
See https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/ for reference.
"""
@staticmethod
def create_vnet(cluster_details: dict, export_path: str) -> dict:
"""Create parameters file for vnet.
Args:
cluster_details (dict): details of the MARO Cluster.
export_path (str): location to export the parameter file.
Returns:
dict: parameter dict, should be exported to json.
"""
# Load and update parameters
with open(file=f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_vnet/parameters.json", mode="r") as fr:
base_parameters = json.load(fr)
parameters = base_parameters["parameters"]
parameters["location"]["value"] = cluster_details["cloud"]["location"]
parameters["virtualNetworkName"]["value"] = f"{cluster_details['id']}-vnet"
# Export parameters if the path is set
if export_path:
os.makedirs(os.path.dirname(export_path), exist_ok=True)
with open(export_path, "w") as fw:
json.dump(base_parameters, fw, indent=4)
return base_parameters
@staticmethod
def create_master(cluster_details: dict, node_size: str, export_path: str) -> dict:
"""Create parameters file for MARO Master VM.
Args:
cluster_details (dict): details of the MARO Cluster.
node_size (str): node_size of the MARO Master VM.
export_path (str): path to export the parameter file.
Returns:
dict: parameter dict, should be exported to json.
"""
# Load and update parameters
with open(file=f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_master/parameters.json", mode="r") as fr:
base_parameters = json.load(fr)
parameters = base_parameters["parameters"]
parameters["adminPublicKey"]["value"] = cluster_details["cloud"]["default_public_key"]
parameters["adminUsername"]["value"] = cluster_details["cloud"]["default_username"]
parameters["apiServerDestinationPorts"]["value"] = [cluster_details["connection"]["api_server"]["port"]]
parameters["location"]["value"] = cluster_details["cloud"]["location"]
parameters["networkInterfaceName"]["value"] = f"{cluster_details['id']}-master-nic"
parameters["networkSecurityGroupName"]["value"] = f"{cluster_details['id']}-master-nsg"
parameters["publicIpAddressName"]["value"] = f"{cluster_details['id']}-master-pip"
parameters["sshDestinationPorts"]["value"] = [cluster_details["connection"]["ssh"]["port"]]
parameters["virtualMachineName"]["value"] = f"{cluster_details['id']}-master-vm"
parameters["virtualMachineSize"]["value"] = node_size
parameters["virtualNetworkName"]["value"] = f"{cluster_details['id']}-vnet"
# Export parameters if the path is set
if export_path:
os.makedirs(os.path.dirname(export_path), exist_ok=True)
with open(export_path, "w") as fw:
json.dump(base_parameters, fw, indent=4)
return base_parameters
@staticmethod
def create_build_node_image_vm(cluster_details: dict, node_size: str, export_path: str) -> dict:
"""Create parameters file for Build Image Node VM.
Args:
cluster_details (dict): details of the MARO Cluster.
node_size (str): node_size of the Build Image Node VM.
export_path (str): path to export the parameter file.
Returns:
dict: parameter dict, should be exported to json.
"""
# Load and update parameters
with open(
file=f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_build_node_image_vm/parameters.json",
mode="r"
) as fr:
base_parameters = json.load(fr)
parameters = base_parameters["parameters"]
parameters["adminPublicKey"]["value"] = cluster_details["cloud"]["default_public_key"]
parameters["adminUsername"]["value"] = cluster_details["cloud"]["default_username"]
parameters["location"]["value"] = cluster_details["cloud"]["location"]
parameters["networkInterfaceName"]["value"] = f"{cluster_details['id']}-build-node-image-nic"
parameters["networkSecurityGroupName"]["value"] = f"{cluster_details['id']}-build-node-image-nsg"
parameters["publicIpAddressName"]["value"] = f"{cluster_details['id']}-build-node-image-pip"
parameters["sshDestinationPorts"]["value"] = [cluster_details["connection"]["ssh"]["port"]]
parameters["virtualMachineName"]["value"] = f"{cluster_details['id']}-build-node-image-vm"
parameters["virtualMachineSize"]["value"] = node_size
parameters["virtualNetworkName"]["value"] = f"{cluster_details['id']}-vnet"
# Export parameters if the path is set
if export_path:
os.makedirs(os.path.dirname(export_path), exist_ok=True)
with open(export_path, "w") as fw:
json.dump(base_parameters, fw, indent=4)
return base_parameters
@staticmethod
def create_node(node_name: str, cluster_details: dict, node_size: str, export_path: str) -> dict:
"""Create parameters file for MARO Node VM.
Args:
cluster_details (dict): details of the MARO Cluster.
node_name (str): name of the MARO Node.
node_size (str): node_size of the MARO Node VM.
export_path (str): path to export the parameter file.
Returns:
dict: parameter dict, should be exported to json.
"""
# Load and update parameters
with open(file=f"{GrassPaths.ABS_MARO_GRASS_LIB}/modes/azure/create_node/parameters.json", mode="r") as fr:
base_parameters = json.load(fr)
parameters = base_parameters["parameters"]
parameters["adminPublicKey"]["value"] = cluster_details["cloud"]["default_public_key"]
parameters["adminUsername"]["value"] = cluster_details["cloud"]["default_username"]
parameters["imageResourceId"]["value"] = AzureController.get_image_resource_id(
resource_group=cluster_details["cloud"]["resource_group"],
image_name=f"{cluster_details['id']}-node-image"
)
parameters["location"]["value"] = cluster_details["cloud"]["location"]
parameters["networkInterfaceName"]["value"] = f"{cluster_details['id']}-{node_name}-nic"
parameters["networkSecurityGroupName"]["value"] = f"{cluster_details['id']}-{node_name}-nsg"
parameters["publicIpAddressName"]["value"] = f"{cluster_details['id']}-{node_name}-pip"
parameters["sshDestinationPorts"]["value"] = [cluster_details["connection"]["ssh"]["port"]]
parameters["virtualMachineName"]["value"] = f"{cluster_details['id']}-{node_name}-vm"
parameters["virtualMachineSize"]["value"] = node_size
parameters["virtualNetworkName"]["value"] = f"{cluster_details['id']}-vnet"
# Export parameters if the path is set
if export_path:
os.makedirs(os.path.dirname(export_path), exist_ok=True)
with open(export_path, "w") as fw:
json.dump(base_parameters, fw, indent=4)
return base_parameters
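if __name__ == "__main__":
    # Minimal sketch (not part of the original module): exercise the one pure helper in this
    # file with a hypothetical node_details dict; everything else needs Azure credentials.
    _sample_node_details = {
        "containers": [
            {"Status": ContainerStatus.RUNNING},
            {"Status": "Exited"},  # any non-RUNNING status is not counted
        ]
    }
    print(GrassAzureExecutor._count_running_containers(node_details=_sample_node_details))  # 1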
|