blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
14d7f7c19f940e56a97173a00a879ee87f1a14d5 | 555b9f764d9bca5232360979460bc35c2f5ad424 | /google/ads/google_ads/v2/proto/services/ad_parameter_service_pb2.py | b6d2e35e6d16290d0d6e5acefcaf8d6aa4771afc | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
]
| permissive | juanmacugat/google-ads-python | b50256163782bc0223bcd8b29f789d74f4cfad05 | 0fc8a7dbf31d9e8e2a4364df93bec5f6b7edd50a | refs/heads/master | 2021-02-18T17:00:22.067673 | 2020-03-05T16:13:57 | 2020-03-05T16:13:57 | 245,215,877 | 1 | 0 | Apache-2.0 | 2020-03-05T16:39:34 | 2020-03-05T16:39:33 | null | UTF-8 | Python | false | true | 19,008 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v2/proto/services/ad_parameter_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v2.proto.resources import ad_parameter_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_resources_dot_ad__parameter__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
from google.api import client_pb2 as google_dot_api_dot_client__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v2/proto/services/ad_parameter_service.proto',
package='google.ads.googleads.v2.services',
syntax='proto3',
serialized_options=_b('\n$com.google.ads.googleads.v2.servicesB\027AdParameterServiceProtoP\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v2/services;services\242\002\003GAA\252\002 Google.Ads.GoogleAds.V2.Services\312\002 Google\\Ads\\GoogleAds\\V2\\Services\352\002$Google::Ads::GoogleAds::V2::Services'),
serialized_pb=_b('\nAgoogle/ads/googleads_v2/proto/services/ad_parameter_service.proto\x12 google.ads.googleads.v2.services\x1a:google/ads/googleads_v2/proto/resources/ad_parameter.proto\x1a\x1cgoogle/api/annotations.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/rpc/status.proto\x1a\x17google/api/client.proto\".\n\x15GetAdParameterRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\"\xac\x01\n\x19MutateAdParametersRequest\x12\x13\n\x0b\x63ustomer_id\x18\x01 \x01(\t\x12J\n\noperations\x18\x02 \x03(\x0b\x32\x36.google.ads.googleads.v2.services.AdParameterOperation\x12\x17\n\x0fpartial_failure\x18\x03 \x01(\x08\x12\x15\n\rvalidate_only\x18\x04 \x01(\x08\"\xea\x01\n\x14\x41\x64ParameterOperation\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12@\n\x06\x63reate\x18\x01 \x01(\x0b\x32..google.ads.googleads.v2.resources.AdParameterH\x00\x12@\n\x06update\x18\x02 \x01(\x0b\x32..google.ads.googleads.v2.resources.AdParameterH\x00\x12\x10\n\x06remove\x18\x03 \x01(\tH\x00\x42\x0b\n\toperation\"\x9b\x01\n\x1aMutateAdParametersResponse\x12\x31\n\x15partial_failure_error\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12J\n\x07results\x18\x02 \x03(\x0b\x32\x39.google.ads.googleads.v2.services.MutateAdParameterResult\"0\n\x17MutateAdParameterResult\x12\x15\n\rresource_name\x18\x01 
\x01(\t2\xb5\x03\n\x12\x41\x64ParameterService\x12\xb1\x01\n\x0eGetAdParameter\x12\x37.google.ads.googleads.v2.services.GetAdParameterRequest\x1a..google.ads.googleads.v2.resources.AdParameter\"6\x82\xd3\xe4\x93\x02\x30\x12./v2/{resource_name=customers/*/adParameters/*}\x12\xcd\x01\n\x12MutateAdParameters\x12;.google.ads.googleads.v2.services.MutateAdParametersRequest\x1a<.google.ads.googleads.v2.services.MutateAdParametersResponse\"<\x82\xd3\xe4\x93\x02\x36\"1/v2/customers/{customer_id=*}/adParameters:mutate:\x01*\x1a\x1b\xca\x41\x18googleads.googleapis.comB\xfe\x01\n$com.google.ads.googleads.v2.servicesB\x17\x41\x64ParameterServiceProtoP\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v2/services;services\xa2\x02\x03GAA\xaa\x02 Google.Ads.GoogleAds.V2.Services\xca\x02 Google\\Ads\\GoogleAds\\V2\\Services\xea\x02$Google::Ads::GoogleAds::V2::Servicesb\x06proto3')
,
dependencies=[google_dot_ads_dot_googleads__v2_dot_proto_dot_resources_dot_ad__parameter__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,google_dot_api_dot_client__pb2.DESCRIPTOR,])
_GETADPARAMETERREQUEST = _descriptor.Descriptor(
name='GetAdParameterRequest',
full_name='google.ads.googleads.v2.services.GetAdParameterRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v2.services.GetAdParameterRequest.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=277,
serialized_end=323,
)
_MUTATEADPARAMETERSREQUEST = _descriptor.Descriptor(
name='MutateAdParametersRequest',
full_name='google.ads.googleads.v2.services.MutateAdParametersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='customer_id', full_name='google.ads.googleads.v2.services.MutateAdParametersRequest.customer_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operations', full_name='google.ads.googleads.v2.services.MutateAdParametersRequest.operations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='partial_failure', full_name='google.ads.googleads.v2.services.MutateAdParametersRequest.partial_failure', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='validate_only', full_name='google.ads.googleads.v2.services.MutateAdParametersRequest.validate_only', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=326,
serialized_end=498,
)
_ADPARAMETEROPERATION = _descriptor.Descriptor(
name='AdParameterOperation',
full_name='google.ads.googleads.v2.services.AdParameterOperation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='update_mask', full_name='google.ads.googleads.v2.services.AdParameterOperation.update_mask', index=0,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='create', full_name='google.ads.googleads.v2.services.AdParameterOperation.create', index=1,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='update', full_name='google.ads.googleads.v2.services.AdParameterOperation.update', index=2,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remove', full_name='google.ads.googleads.v2.services.AdParameterOperation.remove', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='operation', full_name='google.ads.googleads.v2.services.AdParameterOperation.operation',
index=0, containing_type=None, fields=[]),
],
serialized_start=501,
serialized_end=735,
)
_MUTATEADPARAMETERSRESPONSE = _descriptor.Descriptor(
name='MutateAdParametersResponse',
full_name='google.ads.googleads.v2.services.MutateAdParametersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='partial_failure_error', full_name='google.ads.googleads.v2.services.MutateAdParametersResponse.partial_failure_error', index=0,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='results', full_name='google.ads.googleads.v2.services.MutateAdParametersResponse.results', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=738,
serialized_end=893,
)
_MUTATEADPARAMETERRESULT = _descriptor.Descriptor(
name='MutateAdParameterResult',
full_name='google.ads.googleads.v2.services.MutateAdParameterResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v2.services.MutateAdParameterResult.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=895,
serialized_end=943,
)
_MUTATEADPARAMETERSREQUEST.fields_by_name['operations'].message_type = _ADPARAMETEROPERATION
_ADPARAMETEROPERATION.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_ADPARAMETEROPERATION.fields_by_name['create'].message_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_resources_dot_ad__parameter__pb2._ADPARAMETER
_ADPARAMETEROPERATION.fields_by_name['update'].message_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_resources_dot_ad__parameter__pb2._ADPARAMETER
_ADPARAMETEROPERATION.oneofs_by_name['operation'].fields.append(
_ADPARAMETEROPERATION.fields_by_name['create'])
_ADPARAMETEROPERATION.fields_by_name['create'].containing_oneof = _ADPARAMETEROPERATION.oneofs_by_name['operation']
_ADPARAMETEROPERATION.oneofs_by_name['operation'].fields.append(
_ADPARAMETEROPERATION.fields_by_name['update'])
_ADPARAMETEROPERATION.fields_by_name['update'].containing_oneof = _ADPARAMETEROPERATION.oneofs_by_name['operation']
_ADPARAMETEROPERATION.oneofs_by_name['operation'].fields.append(
_ADPARAMETEROPERATION.fields_by_name['remove'])
_ADPARAMETEROPERATION.fields_by_name['remove'].containing_oneof = _ADPARAMETEROPERATION.oneofs_by_name['operation']
_MUTATEADPARAMETERSRESPONSE.fields_by_name['partial_failure_error'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_MUTATEADPARAMETERSRESPONSE.fields_by_name['results'].message_type = _MUTATEADPARAMETERRESULT
DESCRIPTOR.message_types_by_name['GetAdParameterRequest'] = _GETADPARAMETERREQUEST
DESCRIPTOR.message_types_by_name['MutateAdParametersRequest'] = _MUTATEADPARAMETERSREQUEST
DESCRIPTOR.message_types_by_name['AdParameterOperation'] = _ADPARAMETEROPERATION
DESCRIPTOR.message_types_by_name['MutateAdParametersResponse'] = _MUTATEADPARAMETERSRESPONSE
DESCRIPTOR.message_types_by_name['MutateAdParameterResult'] = _MUTATEADPARAMETERRESULT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetAdParameterRequest = _reflection.GeneratedProtocolMessageType('GetAdParameterRequest', (_message.Message,), dict(
DESCRIPTOR = _GETADPARAMETERREQUEST,
__module__ = 'google.ads.googleads_v2.proto.services.ad_parameter_service_pb2'
,
__doc__ = """Request message for
[AdParameterService.GetAdParameter][google.ads.googleads.v2.services.AdParameterService.GetAdParameter]
Attributes:
resource_name:
The resource name of the ad parameter to fetch.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.services.GetAdParameterRequest)
))
_sym_db.RegisterMessage(GetAdParameterRequest)
MutateAdParametersRequest = _reflection.GeneratedProtocolMessageType('MutateAdParametersRequest', (_message.Message,), dict(
DESCRIPTOR = _MUTATEADPARAMETERSREQUEST,
__module__ = 'google.ads.googleads_v2.proto.services.ad_parameter_service_pb2'
,
__doc__ = """Request message for
[AdParameterService.MutateAdParameters][google.ads.googleads.v2.services.AdParameterService.MutateAdParameters]
Attributes:
customer_id:
The ID of the customer whose ad parameters are being modified.
operations:
The list of operations to perform on individual ad parameters.
partial_failure:
If true, successful operations will be carried out and invalid
operations will return errors. If false, all operations will
be carried out in one transaction if and only if they are all
valid. Default is false.
validate_only:
If true, the request is validated but not executed. Only
errors are returned, not results.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.services.MutateAdParametersRequest)
))
_sym_db.RegisterMessage(MutateAdParametersRequest)
AdParameterOperation = _reflection.GeneratedProtocolMessageType('AdParameterOperation', (_message.Message,), dict(
DESCRIPTOR = _ADPARAMETEROPERATION,
__module__ = 'google.ads.googleads_v2.proto.services.ad_parameter_service_pb2'
,
__doc__ = """A single operation (create, update, remove) on ad parameter.
Attributes:
update_mask:
FieldMask that determines which resource fields are modified
in an update.
operation:
The mutate operation.
create:
Create operation: No resource name is expected for the new ad
parameter.
update:
Update operation: The ad parameter is expected to have a valid
resource name.
remove:
Remove operation: A resource name for the ad parameter to
remove is expected in this format: ``customers/{customer_id}/
adParameters/{ad_group_id}~{criterion_id}~{parameter_index}``
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.services.AdParameterOperation)
))
_sym_db.RegisterMessage(AdParameterOperation)
MutateAdParametersResponse = _reflection.GeneratedProtocolMessageType('MutateAdParametersResponse', (_message.Message,), dict(
DESCRIPTOR = _MUTATEADPARAMETERSRESPONSE,
__module__ = 'google.ads.googleads_v2.proto.services.ad_parameter_service_pb2'
,
__doc__ = """Response message for an ad parameter mutate.
Attributes:
partial_failure_error:
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial\_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results:
All results for the mutate.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.services.MutateAdParametersResponse)
))
_sym_db.RegisterMessage(MutateAdParametersResponse)
MutateAdParameterResult = _reflection.GeneratedProtocolMessageType('MutateAdParameterResult', (_message.Message,), dict(
DESCRIPTOR = _MUTATEADPARAMETERRESULT,
__module__ = 'google.ads.googleads_v2.proto.services.ad_parameter_service_pb2'
,
__doc__ = """The result for the ad parameter mutate.
Attributes:
resource_name:
The resource name returned for successful operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.services.MutateAdParameterResult)
))
_sym_db.RegisterMessage(MutateAdParameterResult)
DESCRIPTOR._options = None
_ADPARAMETERSERVICE = _descriptor.ServiceDescriptor(
name='AdParameterService',
full_name='google.ads.googleads.v2.services.AdParameterService',
file=DESCRIPTOR,
index=0,
serialized_options=_b('\312A\030googleads.googleapis.com'),
serialized_start=946,
serialized_end=1383,
methods=[
_descriptor.MethodDescriptor(
name='GetAdParameter',
full_name='google.ads.googleads.v2.services.AdParameterService.GetAdParameter',
index=0,
containing_service=None,
input_type=_GETADPARAMETERREQUEST,
output_type=google_dot_ads_dot_googleads__v2_dot_proto_dot_resources_dot_ad__parameter__pb2._ADPARAMETER,
serialized_options=_b('\202\323\344\223\0020\022./v2/{resource_name=customers/*/adParameters/*}'),
),
_descriptor.MethodDescriptor(
name='MutateAdParameters',
full_name='google.ads.googleads.v2.services.AdParameterService.MutateAdParameters',
index=1,
containing_service=None,
input_type=_MUTATEADPARAMETERSREQUEST,
output_type=_MUTATEADPARAMETERSRESPONSE,
serialized_options=_b('\202\323\344\223\0026\"1/v2/customers/{customer_id=*}/adParameters:mutate:\001*'),
),
])
_sym_db.RegisterServiceDescriptor(_ADPARAMETERSERVICE)
DESCRIPTOR.services_by_name['AdParameterService'] = _ADPARAMETERSERVICE
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
]
| |
48cc96d967e3c3242cbb6e49bf663103aaea450c | 9ecf55bf2601e0d4f74e71f4903d2fd9e0871fd6 | /my_seg_tf/v10_segcap_128_128/config/config_res_segcap_3L_v3.py | 01c05a130a8d00c0547dfd1a8595652e1a11e9fd | []
| no_license | qq191513/mySeg | 02bc9803cde43907fc5d96dc6a6a6371f2bef6fe | 4337e6a0ca50b8ccbf6ed9b6254f2aec814b24db | refs/heads/master | 2020-04-10T09:57:37.811133 | 2019-06-26T08:21:23 | 2019-06-26T08:21:23 | 160,951,962 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,331 | py | import os
########################## Training set #######################################
project_root ='/home/mo/work/seg_caps/my_seg_tf/v9_segcap_128_128'
# NOTE(review): path and branch say v9 but this config lives under v10 -- confirm intended.
mask=True
train_data_number = 57   # number of training samples
num_classes = 1          # single foreground class (binary segmentation)
batch_size = 2
input_shape =[batch_size,128,128,3]    # NHWC input: 128x128 RGB
labels_shape =[batch_size,128,128,1]   # per-pixel mask
labels_shape_vec =[batch_size,128*128*1]  # flattened mask
epoch = 150 # total number of epochs to train
save_epoch_n = 5 # save a checkpoint every N epochs
lr_range=(1e-3,1e-7,0.96)  # presumably (initial lr, minimum lr, decay rate) -- confirm against usage
test_data_number =9
choose_loss = 'margin_focus'
######################## end ########################################
########################## Output paths #######################################
output_path = '/home/mo/work/output'
branch_name = 'v9_segcap_128_128'
model_name = 'res_segcap_my_final'
dataset_name = 'my_128'
######################## end ########################################
# Boilerplate: every output location is derived from the names above.
ckpt =os.path.join(output_path,branch_name,model_name + '_' + dataset_name)
save_mean_csv = os.path.join(ckpt,'eval_result_mean.csv')
save_list_csv = os.path.join(ckpt,'eval_result.csv')
save_plot_curve_dir = os.path.join(ckpt,'plot_curve')
train_print_log = os.path.join(ckpt)
logdir = os.path.join(ckpt,'logdir')
predict_pics_save = os.path.join(ckpt,'predict_pics')
predict_tensor_feature_map = os.path.join(ckpt,'predict_tensor_feature_map') | [
"[email protected]"
]
| |
20bc4a91d23ce3b4bf4fbc72b4d4261d464e3cab | 0d85720b3d3678b688d19d6bf4a102e6f92ea73b | /restsite/routertest/admin.py | 6c2b3e8c4d63b1e273842d741033525d8b35fa9e | []
| no_license | zzy0371/restframeworksite | 8d9d9c09500877fdf462c3f7224f66bf7942d67a | 38bed811878bb20e9391d51e58977f56e9383d46 | refs/heads/master | 2022-12-11T02:10:57.114808 | 2019-08-23T03:53:05 | 2019-08-23T03:53:05 | 197,371,759 | 2 | 0 | null | 2022-12-08T06:03:58 | 2019-07-17T10:57:31 | Python | UTF-8 | Python | false | false | 109 | py | from django.contrib import admin
from .models import *
# Register this app's models so they can be managed in the Django admin.
# Only Zoo is registered; other models (if any) stay hidden from the admin.
admin.site.register(Zoo)
| [
"[email protected]"
]
| |
0ad48321b180ab40c03d0c360ae53bd96eb13d58 | 87cacb90676e5e7d1d8f0e643f1ad6ed9e35acbf | /need to clean/codes_dl_old/split_train_test_bu.py | 99a5c178bbdbfdef7b08c55a8d37704e0befe3c4 | []
| no_license | vuhoangminh/Kaggle-TalkingData-AdTracking-Fraud-Detection-Challenge | 3b75d4a7c60574a4875c62e8843a01d945d792d3 | 56045f446f1a0c538d91ac65e536edc4b7b5a417 | refs/heads/master | 2020-03-13T12:56:42.309722 | 2018-05-08T10:50:35 | 2018-05-08T10:50:35 | 131,129,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,884 | py | """
Adding improvements inspired from:
Ravi Teja's fe script: https://www.kaggle.com/rteja1113/lightgbm-with-count-features?scriptVersionId=2815638
"""
import pandas as pd
import time
import numpy as np
from sklearn.cross_validation import train_test_split
import lightgbm as lgb
import gc
import pickle
from sklearn.preprocessing import LabelEncoder
path = '../input/'  # directory containing the Kaggle train.csv / test.csv
# Narrow per-column dtypes keep the very large CSVs loadable in limited RAM.
dtypes = {
        'ip'            : 'uint32',
        'app'           : 'uint16',
        'device'        : 'uint16',
        'os'            : 'uint16',
        'channel'       : 'uint16',
        'is_attributed' : 'uint8',
        'click_id'      : 'uint32'
        }
TRAINSAMPLE = 180000000  # presumably the full train.csv row count -- confirm
# TRAINSAMPLE = 1000
NROWS = 30000000         # rows read from the CSV per chunk
TRAINROWS = 90000000     # target size of the sampled training set
# NROWS = 300
num_split = int(TRAINSAMPLE/NROWS)  # number of NROWS-sized chunks (6 here)
print (num_split)
def load_write(iSplit):
    """Assemble one sampled train/test feature set and pickle both parts.

    The train CSV is read in three NROWS-sized chunks and each chunk is
    down-sampled so the combined sample approximates this split's share of
    TRAINROWS rows.  The full test CSV is appended so the engineered
    features (click-time parts and ip-based click counts) are computed
    consistently over both halves; the frame is then cut back apart and
    written to ``train_<iSplit>`` / ``test_<iSplit>`` pickle files in the
    working directory.

    Args:
        iSplit: index of the split being produced; used only to name the
            output pickle files.
    """
    train_cols = ['ip', 'app', 'device', 'os', 'channel', 'click_time', 'is_attributed']
    # Fraction that shrinks the 3 * NROWS rows read here down to this
    # split's share of the TRAINROWS-row target sample.
    sample_frac = TRAINROWS / NROWS / num_split

    print('loading train data...')
    chunks = []
    for iDivide in range(3):
        print('loading train data...', iDivide)
        skip_rows = iDivide * NROWS
        # range(1, 1) skips nothing, so the first chunk starts at row 1
        # (row 0 is the header, which pandas always keeps).
        chunk = pd.read_csv(path + "train.csv", skiprows=range(1, skip_rows),
                            nrows=NROWS, dtype=dtypes, usecols=train_cols)
        chunks.append(chunk.sample(frac=sample_frac))
        del chunk
        gc.collect()
    # pd.concat replaces the deprecated/removed DataFrame.append.
    train_df = pd.concat(chunks)
    del chunks
    gc.collect()
    print("len of train_df: ", len(train_df))

    print('loading test data...')
    test_df = pd.read_csv(path + "test.csv", dtype=dtypes,
                          usecols=['ip', 'app', 'device', 'os', 'channel', 'click_time', 'click_id'])
    print("len of test: ", len(test_df))
    gc.collect()

    # Stack test below train so features are engineered over both at once;
    # len_train remembers where to cut them apart again.
    len_train = len(train_df)
    train_df = pd.concat([train_df, test_df])
    del test_df
    gc.collect()

    print('Extracting new features...')
    # Parse the timestamp column once instead of four times.
    click_dt = pd.to_datetime(train_df.click_time)
    train_df['min'] = click_dt.dt.minute.astype('uint8')
    train_df['hour'] = click_dt.dt.hour.astype('uint8')
    train_df['day'] = click_dt.dt.day.astype('uint8')
    train_df['wday'] = click_dt.dt.dayofweek.astype('uint8')
    del click_dt
    gc.collect()
    print(train_df.head())

    def _add_count(df, group_cols, out_name):
        # Count clicks per group ('channel' is just an arbitrary non-null
        # column to count on) and left-join the counts back onto df.
        gp = (df[group_cols + ['channel']]
              .groupby(by=group_cols)[['channel']].count()
              .reset_index().rename(index=str, columns={'channel': out_name}))
        df = df.merge(gp, on=group_cols, how='left')
        del gp
        gc.collect()
        return df

    print('grouping by ip alone....')
    train_df = _add_count(train_df, ['ip'], 'ipcount')
    print('grouping by ip-day-hour combination....')
    train_df = _add_count(train_df, ['ip', 'day', 'hour'], 'qty')
    print('group by ip-app combination....')
    train_df = _add_count(train_df, ['ip', 'app'], 'ip_app_count')
    print('group by ip-app-os combination....')
    train_df = _add_count(train_df, ['ip', 'app', 'os'], 'ip_app_os_count')

    print("vars and data type....")
    # BUGFIX: the original read from 'qty' when downcasting 'ipcount',
    # silently overwriting the per-ip count with the ip-day-hour count.
    train_df['ipcount'] = train_df['ipcount'].astype('uint32')
    train_df['qty'] = train_df['qty'].astype('uint16')
    train_df['ip_app_count'] = train_df['ip_app_count'].astype('uint16')
    train_df['ip_app_os_count'] = train_df['ip_app_os_count'].astype('uint16')

    print("label encoding....")
    cat_cols = ['app', 'device', 'os', 'channel', 'hour', 'day', 'wday']
    # BUGFIX: DataFrame.apply returns a new frame; the original discarded
    # the result, so no encoding ever happened.  Assign it back.
    train_df[cat_cols] = train_df[cat_cols].apply(LabelEncoder().fit_transform)

    print ('final part of preparation....')
    train_df = train_df.drop(['click_time'], axis=1)
    print(train_df.head())
    print("train size: ", len(train_df))

    # Positional split: everything after len_train is the appended test set.
    test_df = train_df[len_train:]
    print ("len of test: ", len(test_df))
    train_df = train_df[:len_train]
    print ("len of train: ", len(train_df))

    save_name = 'train_' + str(iSplit)
    print("save to: ", save_name)
    train_df.to_pickle(save_name)
    del train_df
    gc.collect()

    save_name = 'test_' + str(iSplit)
    print("save to: ", save_name)
    test_df.to_pickle(save_name)
    del test_df
    gc.collect()
# Produce three independent sampled splits (train_<i> / test_<i> pickles).
for iSplit in range(3):
    print('Processing split', iSplit+1)
    skip_rows = iSplit*NROWS  # informational only; load_write computes its own offsets
    print (skip_rows)
load_write(iSplit) | [
"[email protected]"
]
| |
5c87709e79b598bd4b214aead30ccb6efaa34278 | 018a1d8d59c00f69b0489ce05567a2972c335ff7 | /2017_May23/threads/job_queue.py | f5a021c0ba152893e936037dcb4ee78ae934f673 | []
| no_license | singhujjwal/python | f0127b604e2204a02836c95d89ee4903f760d48c | 4fb4b34a318f093bd944cd70d7f0d69dd7dfef6e | refs/heads/master | 2021-09-20T15:35:13.389400 | 2021-09-03T06:39:58 | 2021-09-03T06:39:58 | 92,157,309 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,401 | py | from time import sleep
def square(x):
    """Demo job: pretend to work for 5 seconds, then return x squared."""
    sleep(5)
    return x*x
def factorial(x):
    """Demo job: pretend to work for 3 seconds, then return x!.

    The reduce is given an explicit initial value of 1 so that
    factorial(0) == 1; without it, reduce() over the empty range(1, 1)
    raises TypeError.  Behavior for x >= 1 is unchanged.
    """
    sleep(3)
    from functools import reduce
    return reduce(lambda a, b: a * b, range(1, x + 1), 1)
def sum_all(x):
    """Demo job: pretend to work for 2 seconds, then return sum(0..x-1)."""
    sleep(2)
    return sum(range(x))
class MyJobQueue:
    """A fixed-size pool of worker threads consuming jobs from a queue.

    Jobs are submitted with :meth:`submit` and executed concurrently by
    ``capacity`` worker threads; return values are collected in
    ``self.result`` keyed by a monotonically increasing job id (starting
    at 1, in submission order).
    """

    def __init__(self, capacity):
        """Start ``capacity`` worker threads draining a bounded queue."""
        from queue import Queue
        from threading import Lock, Thread
        self.capacity = capacity
        self.jobq = Queue(capacity)
        self.workers = {}
        self.result = {}        # job_id -> return value of the finished job
        self.job_id = 0         # last id handed out; guarded by job_id_mtx
        self.job_id_mtx = Lock()
        for i in range(capacity):
            # daemon=True fixes a hang in the original: the workers loop
            # forever, so non-daemon threads kept the interpreter alive
            # after the main thread finished.
            self.workers[i] = Thread(target=self.work, daemon=True)
            self.workers[i].start()

    def work(self):
        """Worker loop: pull (id, fn, args, kwargs) tuples and run them."""
        while True:
            job_id, fn, args, kwargs = self.jobq.get()
            try:
                self.result[job_id] = fn(*args, **kwargs)
            finally:
                # Mark the task done even if fn raised; otherwise join()
                # would block forever after a failing job.
                self.jobq.task_done()

    def submit(self, fn, *args, **kwargs):
        """Queue fn(*args, **kwargs); blocks while the queue is full."""
        with self.job_id_mtx:
            self.job_id += 1
            # Capture the id under the lock: the original re-read
            # self.job_id after releasing it, so two racing submits could
            # enqueue the same id and one result would be lost.
            job_id = self.job_id
        self.jobq.put((job_id, fn, args, kwargs))

    def join(self):
        """Block until every submitted job has finished; return results."""
        self.jobq.join()
        return self.result
if __name__ == "__main__":
    # Demo: run four slow jobs concurrently; total wall time is bounded by
    # the slowest job (square's 5 s sleep), not the sum of all sleeps.
    jobs = MyJobQueue(10) # Up to 10 workers can run concurrently!
    jobs.submit(square, 10)
    jobs.submit(factorial, 5)
    jobs.submit(sum_all, 10)
    jobs.submit(square, 2)
    result = jobs.join() # Wait for all submitted jobs to complete...
    print("Result = {}".format(str(result)))
| [
"[email protected]"
]
| |
de6558aa3fa726a49caaa8cdd14339487414a1c5 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/box/unselected/marker/_opacity.py | d86095f213c385378e349c836d4e4410faab53eb | [
"MIT"
]
| permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 501 | py | import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for ``box.unselected.marker.opacity``.

    Constrains the value to the [0, 1] range with a ``style`` edit type,
    while still letting callers override any of those settings via kwargs.
    """

    def __init__(
        self, plotly_name="opacity", parent_name="box.unselected.marker", **kwargs
    ):
        # Pop defaults out of kwargs first so explicit overrides win.
        edit_type = kwargs.pop("edit_type", "style")
        upper_bound = kwargs.pop("max", 1)
        lower_bound = kwargs.pop("min", 0)
        super(OpacityValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            max=upper_bound,
            min=lower_bound,
            **kwargs
        )
| [
"[email protected]"
]
| |
bb3782c433358b95062c37e3a033397bf5ba2564 | 93039551fbdef0a112a9c39181d30b0c170eb3a6 | /day33/__init__.py | 39d3c1d1e41daae2a914cfb0a558d14a25b0e768 | []
| no_license | wenzhe980406/PythonLearning | 8714de8a472c71e6d02b6de64efba970a77f6f4a | af0e85f0b11bf9d2f8e690bac480b92b971c01bb | refs/heads/master | 2020-07-14T20:46:45.146134 | 2020-05-28T12:16:21 | 2020-05-28T12:16:21 | 205,398,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | # _*_ coding : UTF-8 _*_
# 开发人员 : ChangYw
# 开发时间 : 2019/8/28 10:37
# 文件名称 : __init__.py.PY
# 开发工具 : PyCharm | [
"[email protected]"
]
| |
1af686590d178711452f4ed5b201e9029756d6e5 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part004559.py | 2ab65e23a64dd1382972f44fd77b836d6ac16441 | []
| no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,456 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher14982(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({}), [
(VariableWithCount('i2.2.1.4.1.1.0', 1, 1, None), Mul),
(VariableWithCount('i2.2.1.4.1.1.0_1', 1, 1, S(1)), Mul)
]),
1: (1, Multiset({}), [
(VariableWithCount('i2.2.1.4.1.1.0', 1, 1, S(1)), Mul),
(VariableWithCount('i2.2.3.1.1.0', 1, 1, None), Mul)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Mul
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher14982._instance is None:
CommutativeMatcher14982._instance = CommutativeMatcher14982()
return CommutativeMatcher14982._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 14981
return
yield
from collections import deque | [
"[email protected]"
]
| |
332448d7ef58c005a849960379afc675efeb75c3 | eeda8f12876b4193b8b32642b663c865f4ade39a | /player/migrations/0007_auto_20161210_2247.py | cf0accaf6c90db284a7cec3816fc1a10a76722e3 | []
| no_license | TheAvinashSingh/DirectMe | b893a29757ec0c147cc57d0d1fbd5917ce069958 | dc957d19c08666ae27adb5c25321a32ad5316a7b | refs/heads/master | 2021-01-20T05:26:49.504025 | 2016-12-11T00:28:53 | 2016-12-11T00:28:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-10 22:47
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('player', '0006_auto_20161210_2117'),
]
operations = [
migrations.AlterModelOptions(
name='inventory',
options={'verbose_name_plural': 'Inventory'},
),
]
| [
"[email protected]"
]
| |
bbc3043cb67e1318cd74d7424783ed50f9e0f638 | dc95dfb24f3cd12b823dfad2cca8607ab12e757b | /13-Built-in-Functions/lambda-functions.py | 290a032db496cccdb44a90abc4b42ebe13a6a8b5 | []
| no_license | RandyG3/Python | 06213a361deac2d653d4cd4734728838ed34e733 | 86068d81ae037beb6fd6114d93074a92c2f3108e | refs/heads/master | 2023-01-06T15:18:43.173886 | 2020-11-08T03:03:34 | 2020-11-08T03:03:34 | 236,549,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 554 | py | # Lambda Function
# An anonymous function
# (a function without a name)
#
# use once, then discard
metals = ["gold", "silver", "platinum", "palladium"]
# get words more than 5 char
print(filter(lambda metal: len(metal) > 5, metals))
print(list(filter(lambda metal: len(metal) > 5, metals)))
print(list(filter(lambda metal: "p" in metal, metals)))
# count the number of "l"s
print(list(map(lambda word: word.count("l"), metals)))
# return new list where lower "s" is replaced with a $
print(list(map(lambda val: val.replace("s", "$"), metals)))
| [
"[email protected]"
]
| |
48945ea707b79a5bb8bf524341cc5e465488f546 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/fbs_0033+376/sdB_FBS_0033+376_lc.py | 7a1e86b98c57d3ff1f979c0a7855edcfdd203c18 | []
| no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | from gPhoton.gAperture import gAperture
def main():
gAperture(band="NUV", skypos=[9.166167,37.931969], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_FBS_0033+376 /sdB_FBS_0033+376_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
da8f240c44afe29c7e5b234fa6b40b2e38fa1cbd | 87a26f06a60b98f7a0191e59e4111d3ba338aeeb | /biLSTM/Common.py | 7b3d38f45022ee7d9f20d5833d178ba1d5f6be93 | []
| no_license | zenRRan/Stance-Detection | be0c03d3cafe0c4390e39c931cb836f6eca1f156 | 62b6d6a092a956ccd31a7d47a9de9fbf72260f98 | refs/heads/master | 2021-09-08T01:18:51.392615 | 2018-03-05T02:14:51 | 2018-03-05T02:14:51 | 115,618,231 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 512 | py | # Version python3.6
# -*- coding: utf-8 -*-
# @Time : 2018/2/9 下午9:22
# @Author : zenRRan
# @Email : [email protected]
# @File : Common.py
# @Software: PyCharm Community Edition
unk_key = '-unk-'
padding_key = '-padding-'
English_topics = ['atheism', 'feminist movement', 'hillary clinton',
'legalization of abortion', 'climate change is a real concern']
Chinese_topics = ['春节 放鞭炮', 'iphonese', '俄罗斯 在 叙利亚 的 反恐 行动', '开放 二胎', '深圳 禁摩 限电'] | [
"[email protected]"
]
| |
2fa77c854e55396aad6687738cfc4adb903b8084 | 694d57c3e512ce916269411b51adef23532420cd | /leetcode_review/378kth_smallest_element_in_a_sorted_matrix.py | ea3b9532ad401931b61c1c7752f4a37732851542 | []
| no_license | clovery410/mycode | 5541c3a99962d7949832a0859f18819f118edfba | e12025e754547d18d5bb50a9dbe5e725fd03fd9c | refs/heads/master | 2021-05-16T02:46:47.996748 | 2017-05-10T23:43:50 | 2017-05-10T23:43:50 | 39,235,141 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 983 | py | from heapq import *
class Solution(object):
# solution1, using heap
def kthSmallest(self, matrix, k):
m, n = len(matrix), len(matrix[0]) if len(matrix) else 0
heap = []
for i in xrange(m):
heappush(heap, (matrix[i][0], i, 0))
for x in xrange(k):
cur_num, cur_i, cur_j = heappop(heap)
if cur_j + 1 < n:
heappush(heap, (matrix[cur_i][cur_j+1], cur_i, cur_j + 1))
return cur_num
# solution2, using binary search, faster than solution1
def kthSmallest2(self, matrix, k):
lo, hi = matrix[0][0], matrix[-1][-1]
while lo < hi:
mid = (hi - lo) / 2 + lo
if sum(bisect.bisect(row, mid) for row in matrix) >= k:
hi = mid
else:
lo = mid + 1
return lo
if __name__ == "__main__":
sol = Solution()
matrix = [[1,5,9],[10,11,13],[12,13,15]]
k = 9
print sol.kthSmallest(matrix, k)
| [
"[email protected]"
]
| |
69e6cab54865678d41fd6a8b033cb45b0e9a27a2 | d4c83df812c0c182bf444cc432deba03b79fb810 | /bin/gifmaker.py | 4150f99bbb22a423f9f1994f854015fa3f3fbb1e | [
"MIT"
]
| permissive | webbyfox/suade | 2c06bb3d59762bbc9c41bde7b062a9575b3ea9e2 | 52a93df0f4cb1f6442b6c7dd259c8350a7687082 | refs/heads/master | 2021-01-21T20:16:58.674107 | 2017-05-23T21:23:26 | 2017-05-23T21:23:26 | 92,213,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 662 | py | #!/Users/Rizwan/www/suade/bin/python3.6
#
# The Python Imaging Library
# $Id$
#
# convert sequence format to GIF animation
#
# history:
# 97-01-03 fl created
#
# Copyright (c) Secret Labs AB 1997. All rights reserved.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from __future__ import print_function
from PIL import Image
if __name__ == "__main__":
import sys
if len(sys.argv) < 3:
print("GIFMAKER -- create GIF animations")
print("Usage: gifmaker infile outfile")
sys.exit(1)
im = Image.open(sys.argv[1])
im.save(sys.argv[2], save_all=True)
| [
"[email protected]"
]
| |
7b212255dd798268635f44caac329f228d0e7cdb | 5c443eb3556d6d52717227008c29426ed7ca6a24 | /todo/test_views.py | 8242761704991a0157f03efcc6fbe053dc7aa31f | []
| no_license | ashur-k/ci_django_to_do_app | ef4dcfd1b6df529265f8ee4c9b1e4d5b05c27573 | f829d962a2e482830bc7c631230d2a30bef0245f | refs/heads/master | 2023-02-26T11:48:48.306451 | 2021-02-01T11:19:02 | 2021-02-01T11:19:02 | 286,980,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,861 | py | from django.test import TestCase
from .models import Item
# Create your tests here.
class TestViews(TestCase):
def test__get_todo_list(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'todo/todo_list.html')
def test_get_add_item_page(self):
response = self.client.get('/add')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'todo/add_item.html')
def test_get_edit_item_page(self):
item = Item.objects.create(name='Test Todo Item')
response = self.client.get(f'/edit/{item.id}')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'todo/edit_item.html')
def test_can_add_item(self):
response = self.client.post('/add', {'name': 'Test Added Item'})
self.assertRedirects(response, '/')
def test_can_delete_item(self):
item = Item.objects.create(name='Test Todo Item')
response = self.client.get(f'/delete/{item.id}')
self.assertRedirects(response, '/')
existing_items = Item.objects.filter(id=item.id)
self.assertEqual(len(existing_items), 0)
def test_can_toggle_item(self):
item = Item.objects.create(name='Test Todo Item', done=True)
response = self.client.get(f'/toggle/{item.id}')
self.assertRedirects(response, '/')
updated_item = Item.objects.get(id=item.id)
self.assertFalse(updated_item.done)
def test_can_edit_item(self):
item = Item.objects.create(name='Test Todo Item')
response = self.client.post(f'/edit/{item.id}', {'name': 'Updated Name'})
self.assertRedirects(response, '/')
updated_item = Item.objects.get(id=item.id)
self.assertEqual(updated_item.name, 'Updated Name')
| [
"[email protected]"
]
| |
d75e31c9ad75f7bcd75a6edb6e64dcb2f734a9f8 | 5b93930ce8280b3cbc7d6b955df0bfc5504ee99c | /nodes/Geron17Hands/C_PartII/A_Chapter9/C_ManagingGraphs/index.py | c0a3c947cdad986f3ad78522ec72a8e696105d0b | []
| no_license | nimra/module_gen | 8749c8d29beb700cac57132232861eba4eb82331 | 2e0a4452548af4fefd4cb30ab9d08d7662122cf4 | refs/heads/master | 2022-03-04T09:35:12.443651 | 2019-10-26T04:40:49 | 2019-10-26T04:40:49 | 213,980,247 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,823 | py | # Lawrence McAfee
# ~~~~~~~~ import ~~~~~~~~
from modules.node.HierNode import HierNode
from modules.node.LeafNode import LeafNode
from modules.node.Stage import Stage
from modules.node.block.CodeBlock import CodeBlock as cbk
from modules.node.block.HierBlock import HierBlock as hbk
from modules.node.block.ImageBlock import ImageBlock as ibk
from modules.node.block.ListBlock import ListBlock as lbk
from modules.node.block.MarkdownBlock import MarkdownBlock as mbk
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
blocks = [
# Download from finelybook www.finelybook.com
# with block (but you do need to close the session manually when you are done with
# it):
# >>> sess = tf.InteractiveSession()
# >>> init.run()
# >>> result = f.eval()
# >>> print(result)
# 42
# >>> sess.close()
# A TensorFlow program is typically split into two parts: the first part builds a compu‐
# tation graph (this is called the construction phase), and the second part runs it (this is
# the execution phase). The construction phase typically builds a computation graph
# representing the ML model and the computations required to train it. The execution
# phase generally runs a loop that evaluates a training step repeatedly (for example, one
# step per mini-batch), gradually improving the model parameters. We will go through
# an example shortly.
#
# Managing Graphs
# Any node you create is automatically added to the default graph:
# >>> x1 = tf.Variable(1)
# >>> x1.graph is tf.get_default_graph()
# True
# In most cases this is fine, but sometimes you may want to manage multiple independ‐
# ent graphs. You can do this by creating a new Graph and temporarily making it the
# default graph inside a with block, like so:
# >>> graph = tf.Graph()
# >>> with graph.as_default():
# ... x2 = tf.Variable(2)
# ...
# >>> x2.graph is graph
# True
# >>> x2.graph is tf.get_default_graph()
# False
#
# In Jupyter (or in a Python shell), it is common to run the same
# commands more than once while you are experimenting. As a
# result, you may end up with a default graph containing many
# duplicate nodes. One solution is to restart the Jupyter kernel (or
# the Python shell), but a more convenient solution is to just reset the
# default graph by running tf.reset_default_graph().
#
#
#
#
# 234 | Chapter 9: Up and Running with TensorFlow
#
# Download from finelybook www.finelybook.com
# Lifecycle of a Node Value
# When you evaluate a node, TensorFlow automatically determines the set of nodes
# that it depends on and it evaluates these nodes first. For example, consider the follow‐
# ing code:
# w = tf.constant(3)
# x = w + 2
# y = x + 5
# z = x * 3
#
# with tf.Session() as sess:
# print(y.eval()) # 10
# print(z.eval()) # 15
# First, this code defines a very simple graph. Then it starts a session and runs the
# graph to evaluate y: TensorFlow automatically detects that y depends on w, which
# depends on x, so it first evaluates w, then x, then y, and returns the value of y. Finally,
# the code runs the graph to evaluate z. Once again, TensorFlow detects that it must
# first evaluate w and x. It is important to note that it will not reuse the result of the
# previous evaluation of w and x. In short, the preceding code evaluates w and x twice.
# All node values are dropped between graph runs, except variable values, which are
# maintained by the session across graph runs (queues and readers also maintain some
# state, as we will see in Chapter 12). A variable starts its life when its initializer is run,
# and it ends when the session is closed.
# If you want to evaluate y and z efficiently, without evaluating w and x twice as in the
# previous code, you must ask TensorFlow to evaluate both y and z in just one graph
# run, as shown in the following code:
# with tf.Session() as sess:
# y_val, z_val = sess.run([y, z])
# print(y_val) # 10
# print(z_val) # 15
#
# In single-process TensorFlow, multiple sessions do not share any
# state, even if they reuse the same graph (each session would have its
# own copy of every variable). In distributed TensorFlow (see Chap‐
# ter 12), variable state is stored on the servers, not in the sessions, so
# multiple sessions can share the same variables.
#
#
# Linear Regression with TensorFlow
# TensorFlow operations (also called ops for short) can take any number of inputs and
# produce any number of outputs. For example, the addition and multiplication ops
# each take two inputs and produce one output. Constants and variables take no input
#
#
# Lifecycle of a Node Value | 235
#
]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Content(LeafNode):
def __init__(self):
super().__init__(
"Managing Graphs",
# Stage.REMOVE_EXTRANEOUS,
# Stage.ORIG_BLOCKS,
# Stage.CUSTOM_BLOCKS,
# Stage.ORIG_FIGURES,
# Stage.CUSTOM_FIGURES,
# Stage.CUSTOM_EXERCISES,
)
[self.add(a) for a in blocks]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ManagingGraphs(HierNode):
def __init__(self):
super().__init__("Managing Graphs")
self.add(Content(), "content")
# eof
| [
"[email protected]"
]
| |
7dc8acbac6368137168cd3244cb6acd73d01644b | 358dd2e27935215304ef5640b715de260d16aa2b | /lextract/keyed_db/repl.py | 90230021411875594959910e7f1981de5ea88e4d | []
| no_license | frankier/lextract | 7495c2053493eb50623b1cae4a8594cca6c8247e | ba38eb23188e074f7724e2ec08e5993fe98dcb6f | refs/heads/master | 2023-03-08T22:03:33.095959 | 2020-11-23T11:27:22 | 2020-11-23T11:27:22 | 188,093,991 | 1 | 0 | null | 2023-02-22T23:29:47 | 2019-05-22T18:35:44 | Python | UTF-8 | Python | false | false | 592 | py | import sys
import click
from pprint import pprint
from finntk import get_omorfi
from wikiparse.utils.db import get_session
from .extract import extract_toks
@click.command("extract-toks")
def extract_toks_cmd():
paragraph = sys.stdin.read()
omorfi = get_omorfi()
tokenised = omorfi.tokenise(paragraph)
starts = []
start = 0
for token in tokenised:
start = paragraph.index(token["surf"], start)
starts.append(start)
surfs = [tok["surf"] for tok in tokenised]
session = get_session().get_bind()
pprint(list(extract_toks(session, surfs)))
| [
"[email protected]"
]
| |
317e95c65c566b23be221e71ce486bf6889f931e | 86e8f0a13269d01bd3f9020dc2dc32e77fc7e30f | /tests/integration/test_misc.py | dd9e414a01d2216759622644551555ef39b9518d | [
"Apache-2.0"
]
| permissive | stardude900/stratis-cli | 92bae614f8998cf30727a3eb0899906fe2a67df6 | bf0beb60c7bc64b8762247da983436be7ec59d32 | refs/heads/master | 2021-01-19T06:15:37.907429 | 2017-08-07T21:46:04 | 2017-08-07T21:46:04 | 100,633,518 | 0 | 0 | null | 2017-08-17T18:28:51 | 2017-08-17T18:28:51 | null | UTF-8 | Python | false | false | 5,528 | py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test miscellaneous methods.
"""
import time
import unittest
from stratisd_client_dbus import get_object
from stratis_cli._actions._misc import GetObjectPath
from stratis_cli._constants import TOP_OBJECT
from stratis_cli._errors import StratisCliDbusLookupError
from ._misc import _device_list
from ._misc import RUNNER
from ._misc import Service
_DEVICE_STRATEGY = _device_list(1)
class GetPoolTestCase(unittest.TestCase):
"""
Test get_pool method when there is no pool.
It should raise an exception.
"""
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
def testNonExistingPool(self):
"""
An exception is raised if the pool does not exist.
"""
with self.assertRaises(StratisCliDbusLookupError):
GetObjectPath.get_pool(get_object(TOP_OBJECT), {'Name': 'notapool'})
class GetPool1TestCase(unittest.TestCase):
"""
Test get_pool method when there is a pool.
"""
_POOLNAME = 'deadpool'
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
command_line = \
['pool', 'create', self._POOLNAME] + \
_DEVICE_STRATEGY.example()
RUNNER(command_line)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
def testExistingPool(self):
"""
The pool should be gotten.
"""
self.assertIsNotNone(
GetObjectPath.get_pool(
get_object(TOP_OBJECT),
spec={'Name': self._POOLNAME}
)
)
def testNonExistingPool(self):
"""
An exception is raised if the pool does not exist.
"""
with self.assertRaises(StratisCliDbusLookupError):
GetObjectPath.get_pool(get_object(TOP_OBJECT), {'Name': 'notapool'})
class GetVolume1TestCase(unittest.TestCase):
"""
Test get_filesystem method when there is a pool but no volume.
"""
_POOLNAME = 'deadpool'
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
command_line = \
['pool', 'create', self._POOLNAME] + \
_DEVICE_STRATEGY.example()
RUNNER(command_line)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
def testNonExistingVolume(self):
"""
An exception is raised if the volume does not exist.
"""
proxy = get_object(TOP_OBJECT)
pool_object_path = \
GetObjectPath.get_pool(proxy, spec={'Name': self._POOLNAME})
with self.assertRaises(StratisCliDbusLookupError):
GetObjectPath.get_filesystem(
proxy,
{'Name': 'noname', 'Pool': pool_object_path}
)
class GetVolume2TestCase(unittest.TestCase):
"""
Test get_filesystem method when there is a pool and the volume is there.
"""
_POOLNAME = 'deadpool'
_VOLNAME = 'vol'
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
command_line = \
['pool', 'create', self._POOLNAME] + \
_DEVICE_STRATEGY.example()
RUNNER(command_line)
command_line = \
['filesystem', 'create', self._POOLNAME, self._VOLNAME]
RUNNER(command_line)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
def testExistingVolume(self):
"""
The volume should be discovered.
"""
proxy = get_object(TOP_OBJECT)
pool_object_path = \
GetObjectPath.get_pool(proxy, spec={'Name': self._POOLNAME})
self.assertIsNotNone(
GetObjectPath.get_filesystem(
proxy,
{'Name': self._VOLNAME, 'Pool': pool_object_path}
)
)
def testNonExistingVolume(self):
"""
An exception is raised if the volume does not exist.
"""
proxy = get_object(TOP_OBJECT)
pool_object_path = \
GetObjectPath.get_pool(proxy, spec={'Name': self._POOLNAME})
with self.assertRaises(StratisCliDbusLookupError):
GetObjectPath.get_filesystem(
proxy,
{'Name': 'noname', 'Pool': pool_object_path}
)
| [
"[email protected]"
]
| |
c2b66c7a38983f935b08a2197ce9cdb85019fda6 | 9bb16f8fbf9f562f1171a3bbff8318a47113823b | /abc132/abc132_d/main.py | 6b825f6fef88b9fcc0c84f1d7a6cd6ad0aa3c805 | []
| no_license | kyamashiro/atcoder | 83ab0a880e014c167b6e9fe9457e6972901353fc | 999a7852b70b0a022a4d64ba40d4048ee4cc0c9c | refs/heads/master | 2022-06-01T03:01:39.143632 | 2022-05-22T05:38:42 | 2022-05-22T05:38:42 | 464,391,209 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | #!/usr/bin/env python3
# from typing import *
MOD = 1000000007
# def solve(N: int, K: int) -> List[str]:
def solve(N, K):
pass # TODO: edit here
# generated by oj-template v4.8.1 (https://github.com/online-judge-tools/template-generator)
def main():
N, K = map(int, input().split())
a = solve(N, K)
for i in range(K):
print(a[i])
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
015dde9f3d18d2234d142749e7b2fdbe911482af | 7882860350c714e6c08368288dab721288b8d9db | /백트래킹/1182_부분수열의 합.py | bc2b31a9f2df3eabfb77b6ce66c3a3e67deac17a | []
| no_license | park-seonju/Algorithm | 682fca984813a54b92a3f2ab174e4f05a95921a8 | 30e5bcb756e9388693624e8880e57bc92bfda969 | refs/heads/master | 2023-08-11T18:23:49.644259 | 2021-09-27T10:07:49 | 2021-09-27T10:07:49 | 388,741,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | n,s=map(int,input().split())
arr=list(map(int,input().split()))
ans=0
def func(idx,total):
global ans
if idx>=n:
if s==total:
ans+=1
return
else:
func(idx+1,total+arr[idx])
func(idx+1,total)
func(0,0)
if s==0:
print(ans-1)
else:
print(ans) | [
"[email protected]"
]
| |
161708e25ea49fb32dcaf50a3134a77915485ee0 | 67ffddfd7e0ace7490c5d52325838b82644eb458 | /leetcode/greedy/lc_134.py | 5480eaf182de37f45b734ea31103412665b90700 | []
| no_license | ckdrjs96/algorithm | 326f353c5aa89a85ec86ce1aabb06cde341193ce | d5d09b047808b6fc2eeaabdbe7f32c83446b4a1b | refs/heads/main | 2023-08-20T05:12:50.671798 | 2021-10-23T04:20:05 | 2021-10-23T04:20:05 | 324,481,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,133 | py | # greedy o(N)
class Solution:
def canCompleteCircuit(self, gas: List[int], cost: List[int]) -> int:
if sum(gas) < sum(cost):
return -1
# 성립이 안되는 지점이 있다면 그앞은 모두 정답이 될수없다. 따라서 start=i+1
start, fuel = 0, 0
for i in range(len(gas)):
if gas[i] + fuel < cost[i]:
start = i + 1
fuel = 0
else:
fuel += gas[i] - cost[i]
# print(start,fuel)
return start
#brute force o(N^2) 통과는되지만 매우느리다
class Solution:
def canCompleteCircuit(self, gas: List[int], cost: List[int]) -> int:
n = len(gas)
for start in range(n):
fuel = 0
for i in range(n):
idx = (start + i) % n
fuel += gas[idx] - cost[idx]
# print(fuel)
if fuel < 0:
ans = -1
break
# 정답이 반드시 하나라했으므로 찾으면 바로종료
else:
return start
return ans | [
"[email protected]"
]
| |
76815cea1685820cd7c163aadc790a8960d954e4 | 753a70bc416e8dced2853f278b08ef60cdb3c768 | /include/tensorflow/lite/testing/op_tests/cos.py | 20b831dce9a7a16df7890f821473a43f2ad57af6 | [
"MIT"
]
| permissive | finnickniu/tensorflow_object_detection_tflite | ef94158e5350613590641880cb3c1062f7dd0efb | a115d918f6894a69586174653172be0b5d1de952 | refs/heads/master | 2023-04-06T04:59:24.985923 | 2022-09-20T16:29:08 | 2022-09-20T16:29:08 | 230,891,552 | 60 | 19 | MIT | 2023-03-25T00:31:18 | 2019-12-30T09:58:41 | C++ | UTF-8 | Python | false | false | 2,002 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for cos."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_cos_tests(options):
"""Make a set of tests to do cos."""
test_parameters = [{
"input_dtype": [tf.float32],
"input_shape": [[], [3], [1, 100], [4, 2, 3], [5, 224, 224, 3]],
}]
def build_graph(parameters):
"""Build the cos op testing graph."""
input_tensor = tf.compat.v1.placeholder(
dtype=parameters["input_dtype"],
name="input",
shape=parameters["input_shape"])
out = tf.cos(input_tensor)
return [input_tensor], [out]
def build_inputs(parameters, sess, inputs, outputs):
values = [
create_tensor_data(
parameters["input_dtype"],
parameters["input_shape"],
min_value=-np.pi,
max_value=np.pi)
]
return values, sess.run(outputs, feed_dict=dict(zip(inputs, values)))
make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| [
"[email protected]"
]
| |
2e15ac5d08dfab344b92280d6c14efe4945dc1f4 | bc6492a9a30ac7228caad91643d58653b49ab9e3 | /sympy/utilities/matchpy_connector.py | 1d12f13a8f09bbb88826a622781a13072f484722 | []
| no_license | cosmosZhou/sagemath | 2c54ea04868882340c7ef981b7f499fb205095c9 | 0608b946174e86182c6d35d126cd89d819d1d0b8 | refs/heads/master | 2023-01-06T07:31:37.546716 | 2020-11-12T06:39:22 | 2020-11-12T06:39:22 | 311,177,322 | 1 | 0 | null | 2020-11-12T06:09:11 | 2020-11-08T23:42:40 | Python | UTF-8 | Python | false | false | 4,168 | py | from sympy.external import import_module
from sympy.utilities.decorator import doctest_depends_on
from sympy.functions.elementary.integers import floor, frac
from sympy.functions import (log, sin, cos, tan, cot, csc, sec, sqrt, erf, gamma, uppergamma, polygamma, digamma,
loggamma, factorial, zeta, LambertW)
from sympy.functions.elementary.hyperbolic import acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch
from sympy.functions.elementary.trigonometric import atan, acsc, asin, acot, acos, asec, atan2
from sympy.polys.polytools import Poly, quo, rem, total_degree, degree
from sympy.simplify.simplify import fraction, simplify, cancel, powsimp
from sympy.core.sympify import sympify
from sympy.utilities.iterables import postorder_traversal
from sympy.functions.special.error_functions import fresnelc, fresnels, erfc, erfi, Ei, expint, li, Si, Ci, Shi, Chi
from sympy.functions.elementary.complexes import im, re, Abs
from sympy.core.exprtools import factor_terms
from sympy import (Basic, E, polylog, N, Wild, WildFunction, factor, gcd, Sum, S, I, Mul, Integer, Float, Dict, Symbol, Rational,
Add, hyper, symbols, sqf_list, sqf, Max, factorint, factorrat, Min, sign, E, Function, collect, FiniteSet, nsimplify,
expand_trig, expand, poly, apart, lcm, And, Pow, pi, zoo, oo, Integral, UnevaluatedExpr, PolynomialError, Dummy, exp,
powdenest, PolynomialDivisionFailed, discriminant, UnificationFailed, appellf1)
from sympy.functions.special.hyper import TupleArg
from sympy.functions.special.elliptic_integrals import elliptic_f, elliptic_e, elliptic_pi
from sympy.utilities.iterables import flatten
from random import randint
from sympy.logic.boolalg import Or
matchpy = import_module("matchpy")
if matchpy:
from matchpy import Arity, Operation, CommutativeOperation, AssociativeOperation, OneIdentityOperation, CustomConstraint, Pattern, ReplacementRule, ManyToOneReplacer
from matchpy.expressions.functions import op_iter, create_operation_expression, op_len
from sympy.integrals.rubi.symbol import WC
from matchpy import is_match, replace_all
Operation.register(Integral)
Operation.register(Pow)
OneIdentityOperation.register(Pow)
Operation.register(Add)
OneIdentityOperation.register(Add)
CommutativeOperation.register(Add)
AssociativeOperation.register(Add)
Operation.register(Mul)
OneIdentityOperation.register(Mul)
CommutativeOperation.register(Mul)
AssociativeOperation.register(Mul)
Operation.register(exp)
Operation.register(log)
Operation.register(gamma)
Operation.register(uppergamma)
Operation.register(fresnels)
Operation.register(fresnelc)
Operation.register(erf)
Operation.register(Ei)
Operation.register(erfc)
Operation.register(erfi)
Operation.register(sin)
Operation.register(cos)
Operation.register(tan)
Operation.register(cot)
Operation.register(csc)
Operation.register(sec)
Operation.register(sinh)
Operation.register(cosh)
Operation.register(tanh)
Operation.register(coth)
Operation.register(csch)
Operation.register(sech)
Operation.register(asin)
Operation.register(acos)
Operation.register(atan)
Operation.register(acot)
Operation.register(acsc)
Operation.register(asec)
Operation.register(asinh)
Operation.register(acosh)
Operation.register(atanh)
Operation.register(acoth)
Operation.register(acsch)
Operation.register(asech)
@op_iter.register(Integral)
def _(operation):
return iter((operation._args[0],) + operation._args[1])
@op_iter.register(Basic)
def _(operation):
return iter(operation._args)
@op_len.register(Integral)
def _(operation):
return 1 + len(operation._args[1])
@op_len.register(Basic)
def _(operation):
return len(operation._args)
@create_operation_expression.register(Basic)
def sympy_op_factory(old_operation, new_operands, variable_name=True):
return type(old_operation)(*new_operands)
| [
"[email protected]"
]
| |
5161e0559dc3a3fd010c60d8374844d07214aba5 | 543e4a93fd94a1ebcadb7ba9bd8b1f3afd3a12b8 | /maza/modules/creds/routers/mikrotik/telnet_default_creds.py | 540d36ba30ae985f5d22df9eb468af7ebc98ea40 | [
"MIT"
]
| permissive | ArturSpirin/maza | e3127f07b90034f08ff294cc4afcad239bb6a6c3 | 56ae6325c08bcedd22c57b9fe11b58f1b38314ca | refs/heads/master | 2020-04-10T16:24:47.245172 | 2018-12-11T07:13:15 | 2018-12-11T07:13:15 | 161,144,181 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | from maza.core.exploit import *
from maza.modules.creds.generic.telnet_default import Exploit as TelnetDefault
class Exploit(TelnetDefault):
__info__ = {
"name": "Mikrotik Router Default Telnet Creds",
"description": "Module performs dictionary attack against Mikrotik Router Telnet service."
"If valid credentials are found they are displayed to the user.",
"authors": (
"Marcin Bury <marcin[at]threat9.com>", # routersploit module
),
"devices": (
"Mikrotik Router",
),
}
target = OptIP("", "Target IPv4, IPv6 address or file with ip:port (file://)")
port = OptPort(23, "Target Telnet port")
threads = OptInteger(1, "Number of threads")
defaults = OptWordlist("admin:admin", "User:Pass or file with default credentials (file://)")
| [
"[email protected]"
]
| |
363743b18f38e10094f4b801a2b48ea117027291 | 0f8909782b5150783b738df3875c91509a92a33b | /scena/m9082.bin.py | 2cba9b7788446747346f2d8411dd2749f3f368cb | []
| no_license | uyjulian/ao_text | e40cd982bcdd5ea9ffd6f0f2e97ce9b92749b63a | 5cc5468aeb64fa97935f334a627357ec10e22307 | refs/heads/master | 2021-06-26T21:43:07.489898 | 2017-07-05T16:55:01 | 2017-07-05T16:55:01 | 96,562,461 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132,517 | py | from ScenarioHelper import *
def main():
CreateScenaFile(
"m9082.bin", # FileName
"m9082", # MapName
"m9082", # Location
0x00C3, # MapIndex
"ed7356",
0x00000000, # Flags
("", "", "", "", "", ""), # include
0x00, # PlaceNameNumber
0x2A, # PreInitFunctionIndex
b'\x00\xff\xff', # Unknown_51
# Information
[0, 0, -1000, 0, 0, 0, 24000, 500, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 195, 0, 0, 0, 1],
)
BuildStringList((
"m9082", # 0
"アリオス", # 1
"台詞表示用ダミーキャラ", # 2
"アリオスお供", # 3
"アリオスお供", # 4
"エフェクト表示用ダミーキャラ",# 5
"bm9069", # 6
))
ATBonus("ATBonus_1D8", 100, 5, 0, 5, 0, 5, 0, 2, 5, 0, 0, 0, 2, 0, 0, 0)
MonsterBattlePostion("MonsterBattlePostion_298", 8, 12, 180)
MonsterBattlePostion("MonsterBattlePostion_29C", 3, 14, 180)
MonsterBattlePostion("MonsterBattlePostion_2A0", 13, 14, 180)
MonsterBattlePostion("MonsterBattlePostion_2A4", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2A8", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2AC", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2B0", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2B4", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_278", 7, 4, 0)
MonsterBattlePostion("MonsterBattlePostion_27C", 10, 11, 225)
MonsterBattlePostion("MonsterBattlePostion_280", 4, 7, 90)
MonsterBattlePostion("MonsterBattlePostion_284", 12, 7, 270)
MonsterBattlePostion("MonsterBattlePostion_288", 4, 11, 135)
MonsterBattlePostion("MonsterBattlePostion_28C", 11, 4, 315)
MonsterBattlePostion("MonsterBattlePostion_290", 7, 12, 180)
MonsterBattlePostion("MonsterBattlePostion_294", 5, 5, 45)
# monster count: 0
# event battle count: 1
BattleInfo(
"BattleInfo_2B8", 0x0042, 255, 6, 45, 3, 3, 30, 0, "bm9069", 0x00000000, 100, 0, 0, 0,
(
("ms02401.dat", "ms85401.dat", "ms85501.dat", 0, 0, 0, 0, 0, "MonsterBattlePostion_298", "MonsterBattlePostion_278", "ed7527", "ed7453", "ATBonus_1D8"),
(),
(),
(),
)
)
AddCharChip((
"apl/ch51744.itc", # 00
))
DeclNpc(0, 12000, 211500, 180, 389, 0x0, 0, 0, 0, 255, 255, 0, 2, 255, 0)
DeclNpc(0, 0, 0, 0, 508, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 13100, 204699, 305, 508, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclEvent(0x0000, 0, 9, 0.0, 185.0, 11.0, 225.0, [0.20000000298023224, -0.0, 0.0, 0.0, -0.0, 0.1666666716337204, -0.0, 0.0, 0.0, -0.0, 0.20000000298023224, 0.0, -0.0, -30.83333396911621, -2.200000047683716, 1.0])
DeclActor(3500, 0, 155000, 1200, 3500, 1000, 155000, 0x007C, 0, 3, 0x0000)
ChipFrameInfo(940, 0) # 0
ScpFunction((
"Function_0_3AC", # 00, 0
"Function_1_3F8", # 01, 1
"Function_2_517", # 02, 2
"Function_3_78F", # 03, 3
"Function_4_88B", # 04, 4
"Function_5_B0F", # 05, 5
"Function_6_C62", # 06, 6
"Function_7_CC3", # 07, 7
"Function_8_D24", # 08, 8
"Function_9_D37", # 09, 9
"Function_10_575F", # 0A, 10
"Function_11_578A", # 0B, 11
"Function_12_57B5", # 0C, 12
"Function_13_57E0", # 0D, 13
"Function_14_580B", # 0E, 14
"Function_15_5836", # 0F, 15
"Function_16_585A", # 10, 16
"Function_17_586C", # 11, 17
"Function_18_587E", # 12, 18
"Function_19_5890", # 13, 19
"Function_20_589C", # 14, 20
"Function_21_58EA", # 15, 21
"Function_22_5938", # 16, 22
"Function_23_5961", # 17, 23
"Function_24_59AB", # 18, 24
"Function_25_59D8", # 19, 25
"Function_26_5A04", # 1A, 26
"Function_27_5A27", # 1B, 27
"Function_28_5A70", # 1C, 28
"Function_29_5AB9", # 1D, 29
"Function_30_5AD5", # 1E, 30
"Function_31_5B49", # 1F, 31
"Function_32_76AE", # 20, 32
"Function_33_76F7", # 21, 33
"Function_34_7761", # 22, 34
"Function_35_7FDC", # 23, 35
"Function_36_7FEC", # 24, 36
"Function_37_7FFB", # 25, 37
"Function_38_800D", # 26, 38
"Function_39_801F", # 27, 39
"Function_40_802B", # 28, 40
"Function_41_8079", # 29, 41
"Function_42_80C7", # 2A, 42
"Function_43_80EE", # 2B, 43
))
def Function_0_3AC(): pass
label("Function_0_3AC")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_PUSH_LONG, 0x65), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_3BD")
Event(0, 4)
label("loc_3BD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 0)), scpexpr(EXPR_END)), "loc_3D1")
ClearScenarioFlags(0x22, 0)
Event(0, 31)
Jump("loc_3F7")
label("loc_3D1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 1)), scpexpr(EXPR_END)), "loc_3E5")
ClearScenarioFlags(0x22, 1)
Event(0, 34)
Jump("loc_3F7")
label("loc_3E5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3F7")
Event(0, 8)
label("loc_3F7")
Return()
# Function_0_3AC end
def Function_1_3F8(): pass
label("Function_1_3F8")
OP_F0(0x1, 0x320)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_414")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_414")
OP_1B(0x1, 0x0, 0x5)
ModifyEventFlags(0, 0, 0x80)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 3)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_431")
ModifyEventFlags(1, 0, 0x80)
label("loc_431")
OP_52(0x8, 0x2D, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2E, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2F, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetMapObjFlags(0x2, 0x1000)
ClearMapObjFlags(0x2, 0x4)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_END)), "loc_4BE")
SetMapObjFlags(0x2, 0x4)
SetMapObjFrame(0xFF, "magi10_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "magi11_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "point_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "magi_04_add", 0x0, 0x1)
OP_70(0x1, 0x96)
Jump("loc_50A")
label("loc_4BE")
SetMapObjFrame(0xFF, "magi10_add", 0x0, 0x1)
SetMapObjFrame(0xFF, "magi11_add", 0x0, 0x1)
SetMapObjFrame(0xFF, "point_add", 0x0, 0x1)
SetMapObjFrame(0xFF, "magi_04_add", 0x1, 0x1)
OP_70(0x1, 0x3C)
label("loc_50A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_END)), "loc_516")
Call(0, 42)
label("loc_516")
Return()
# Function_1_3F8 end
def Function_2_517(): pass
label("Function_2_517")
SetChrFlags(0x8, 0x10)
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1CF, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_73E")
ChrTalk(
0x8,
"……………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00001F……完全に気を失ってる。\x01",
"命に別状もなさそうだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306Fさすがにとんでもねえ相手だったな……\x01",
"俺たち全員相手にあそこまでやるとは。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00208Fさすがは《風の剣聖》……ですね。\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_641")
ChrTalk(
0x10A,
"#00603Fフン……よく勝てたものだ。\x02",
)
CloseMessageWindow()
label("loc_641")
ChrTalk(
0x101,
(
"#00003Fシズクちゃんも心配しているだろうし、\x01",
"すぐにでもメルカバに運びたいけど……\x02\x03",
"#00001F……この先にはマリアベルさんと\x01",
"イアン先生が待ち構えている。\x02\x03",
"#00003F申し訳ないけど、\x01",
"今は後回しにさせてもらおう。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00100Fそうね……行きましょう。\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x1CF, 0)
Jump("loc_78B")
label("loc_73E")
ChrTalk(
0x8,
"……………………………………\x02",
)
CloseMessageWindow()
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"完全に気を失っているようだ。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
label("loc_78B")
TalkEnd(0x8)
Return()
# Function_2_517 end
def Function_3_78F(): pass
label("Function_3_78F")
OP_F4(0x2)
FadeToDark(300, 0, 100)
AnonymousTalk(
0x3E7,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"オーブメントを回復できる装置がある。\x07\x00\x02",
)
)
Menu(
0,
-1,
-1,
1,
(
"ここで休憩する\x01", # 0
"やめる\x01", # 1
)
)
MenuEnd(0x0)
OP_60(0x0)
OP_57(0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_87C")
FadeToBright(100, 0)
Sleep(500)
SoundLoad(13)
OP_74(0x0, 0x1E)
Sound(7, 0, 100, 0)
OP_70(0x0, 0x0)
OP_71(0x0, 0x0, 0x1E, 0x0, 0x0)
OP_79(0x0)
OP_71(0x0, 0x1F, 0x186, 0x0, 0x20)
Sleep(1000)
StopBGM(0xBB8)
FadeToDark(1000, 0, -1)
Sleep(700)
Sound(13, 0, 100, 0)
OP_0D()
OP_32(0xFF, 0xFE, 0x0)
OP_6A(0x0, 0x0)
OP_31(0x1)
Sleep(3500)
OP_70(0x0, 0x0)
OP_1F()
FadeToBright(1000, 0)
OP_57(0x0)
TalkEnd(0xFF)
Return()
label("loc_87C")
FadeToBright(300, 0)
TalkEnd(0xFF)
Return()
# Function_3_78F end
def Function_4_88B(): pass
label("Function_4_88B")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_E2(0x3)
LoadEffect(0x1, "event/ev202_00.eff")
OP_68(-340, 13500, 219060, 0)
MoveCamera(29, 41, 0, 0)
OP_6E(600, 0)
SetCameraDistance(12970, 0)
SetChrPos(0x0, 0, 12000, 222000, 180)
SetChrPos(0x1, 0, 12000, 222000, 180)
SetChrPos(0x2, 0, 12000, 222000, 180)
SetChrPos(0x3, 0, 12000, 222000, 180)
OP_A7(0x0, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x1, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x2, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x3, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
FadeToBright(500, 0)
OP_0D()
Sound(920, 0, 100, 0)
PlayEffect(0x1, 0xFF, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_99B():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x0, 2, lambda_99B)
def lambda_9AC():
OP_95(0xFE, -240, 12000, 218120, 2500, 0x0)
ExitThread()
QueueWorkItem(0x0, 1, lambda_9AC)
Sleep(500)
PlayEffect(0x1, 0xFF, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_A03():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x1, 2, lambda_A03)
def lambda_A14():
OP_95(0xFE, -1420, 12000, 218280, 2500, 0x0)
ExitThread()
QueueWorkItem(0x1, 1, lambda_A14)
Sleep(500)
Sound(920, 0, 100, 0)
PlayEffect(0x1, 0xFF, 0x2, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_A71():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x2, 2, lambda_A71)
def lambda_A82():
OP_95(0xFE, 1060, 12000, 218310, 2500, 0x0)
ExitThread()
QueueWorkItem(0x2, 1, lambda_A82)
Sleep(500)
PlayEffect(0x1, 0xFF, 0x3, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_AD9():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x3, 2, lambda_AD9)
def lambda_AEA():
OP_95(0xFE, -2780, 12000, 218680, 2500, 0x0)
ExitThread()
QueueWorkItem(0x3, 1, lambda_AEA)
WaitChrThread(0x3, 1)
Sleep(500)
OP_E2(0x2)
OP_69(0xFF, 0x0)
EventEnd(0x5)
Return()
# Function_4_88B end
def Function_5_B0F(): pass
label("Function_5_B0F")
EventBegin(0x0)
OP_E2(0x3)
LoadEffect(0x1, "event/evwarp.eff")
Sound(936, 0, 100, 0)
PlayEffect(0x1, 0xFF, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_B68():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x0, 2, lambda_B68)
Sleep(100)
PlayEffect(0x1, 0xFF, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_BB3():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x1, 2, lambda_BB3)
Sleep(100)
PlayEffect(0x1, 0xFF, 0x2, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_BFE():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x2, 2, lambda_BFE)
Sleep(100)
PlayEffect(0x1, 0xFF, 0x3, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_C49():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x3, 2, lambda_C49)
Sleep(1000)
NewScene("m9008", 102, 0, 0)
IdleLoop()
Return()
# Function_5_B0F end
def Function_6_C62(): pass
label("Function_6_C62")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C7A")
LoadChrToIndex("chr/ch03150.itc", 0x23)
label("loc_C7A")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C92")
LoadChrToIndex("chr/ch03250.itc", 0x23)
label("loc_C92")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_CAA")
LoadChrToIndex("chr/ch02950.itc", 0x23)
label("loc_CAA")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_CC2")
LoadChrToIndex("chr/ch00950.itc", 0x23)
label("loc_CC2")
Return()
# Function_6_C62 end
def Function_7_CC3(): pass
label("Function_7_CC3")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_CDB")
LoadChrToIndex("chr/ch03150.itc", 0x24)
label("loc_CDB")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_CF3")
LoadChrToIndex("chr/ch03250.itc", 0x24)
label("loc_CF3")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_D0B")
LoadChrToIndex("chr/ch02950.itc", 0x24)
label("loc_D0B")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_D23")
LoadChrToIndex("chr/ch00950.itc", 0x24)
label("loc_D23")
Return()
# Function_7_CC3 end
def Function_8_D24(): pass
label("Function_8_D24")
EventBegin(0x0)
StopBGM(0xFA0)
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
EventEnd(0x5)
Return()
# Function_8_D24 end
def Function_9_D37(): pass
label("Function_9_D37")
EventBegin(0x0)
FadeToDark(0, -1, 0)
FadeToDark(500, 0, -1)
OP_0D()
FadeToBright(0, -1)
CreatePortrait(0, 234, 0, 490, 256, 0, 0, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu01400.itp")
LoadChrToIndex("apl/ch51233.itc", 0x1E)
LoadChrToIndex("chr/ch00050.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00250.itc", 0x21)
LoadChrToIndex("chr/ch00350.itc", 0x22)
Call(0, 6)
Call(0, 7)
LoadChrToIndex("chr/ch02450.itc", 0x25)
LoadChrToIndex("monster/ch85450.itc", 0x26)
LoadChrToIndex("monster/ch60051.itc", 0x27)
LoadChrToIndex("monster/ch85550.itc", 0x28)
LoadChrToIndex("monster/ch60051.itc", 0x29)
LoadChrToIndex("apl/ch51743.itc", 0x2A)
LoadEffect(0x0, "event/ev602_01.eff")
LoadEffect(0x1, "event/eva06_02.eff")
LoadEffect(0x2, "event/eva06_01.eff")
LoadEffect(0x3, "event/ev17013.eff")
SoundLoad(128)
SoundLoad(825)
SoundLoad(832)
SoundLoad(881)
SoundLoad(833)
SoundLoad(4064)
SoundLoad(4077)
SoundLoad(4065)
SoundLoad(4066)
SoundLoad(4067)
SetChrPos(0x101, 0, 25000, 181800, 0)
SetChrPos(0x102, 1100, 25000, 181100, 0)
SetChrPos(0x103, 200, 25000, 180000, 0)
SetChrPos(0x104, -1100, 25000, 180750, 0)
SetChrPos(0xF4, -650, 25000, 179250, 0)
SetChrPos(0xF5, 850, 25000, 179000, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
SetChrChipByIndex(0x8, 0x1E)
SetChrSubChip(0x8, 0x0)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8000)
SetChrPos(0x8, 0, 12000, 210000, 180)
OP_52(0x8, 0x2D, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2E, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2F, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearChrFlags(0x9, 0x80)
OP_A7(0x9, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
SetChrPos(0x9, 0, 12000, 198500, 0)
SetChrChipByIndex(0xA, 0x26)
SetChrSubChip(0xA, 0x0)
SetChrFlags(0xA, 0x20)
OP_52(0xA, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xD8), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xA, 0x7, (scpexpr(EXPR_PUSH_LONG, 0x7D0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrPos(0xA, -2500, 12000, 211500, 180)
OP_A7(0xA, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0xA, 0x80)
ClearChrBattleFlags(0xA, 0x8000)
SetChrChipByIndex(0xB, 0x28)
SetChrSubChip(0xB, 0x0)
SetChrFlags(0xB, 0x20)
OP_52(0xB, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xD8), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x7, (scpexpr(EXPR_PUSH_LONG, 0x7D0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrPos(0xB, 2500, 12000, 211500, 180)
OP_A7(0xB, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0xB, 0x80)
ClearChrBattleFlags(0xB, 0x8000)
BeginChrThread(0xA, 2, 0, 29)
BeginChrThread(0xB, 2, 0, 29)
ClearChrFlags(0xC, 0x80)
OP_68(0, 13000, 180500, 0)
MoveCamera(0, 38, 0, 0)
OP_6E(600, 0)
SetCameraDistance(21000, 0)
Sleep(500)
OP_68(0, 13000, 188000, 4500)
MoveCamera(0, 38, 0, 4500)
OP_6E(600, 4500)
SetCameraDistance(24000, 4500)
FadeToBright(1000, 0)
def lambda_102A():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_102A)
Sleep(50)
def lambda_1042():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_1042)
Sleep(50)
def lambda_105A():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_105A)
Sleep(50)
def lambda_1072():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_1072)
Sleep(50)
def lambda_108A():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xF4, 1, lambda_108A)
Sleep(50)
def lambda_10A2():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xF5, 1, lambda_10A2)
OP_0D()
Sleep(2400)
OP_C9(0x0, 0x80000000)
NpcTalk(
0x8,
"男性の声",
"#4064V#6P#30W#16A──至ったか。\x02",
)
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
WaitChrThread(0xF5, 1)
OP_6F(0x79)
PlayBGM("ed7356", 0)
BeginChrThread(0x101, 0, 0, 10)
Sleep(50)
BeginChrThread(0x102, 0, 0, 11)
Sleep(50)
BeginChrThread(0x103, 0, 0, 12)
Sleep(50)
BeginChrThread(0x104, 0, 0, 13)
Sleep(50)
BeginChrThread(0xF4, 0, 0, 14)
Sleep(50)
BeginChrThread(0xF5, 0, 0, 15)
OP_68(-410, 13300, 205280, 4000)
MoveCamera(47, 16, 0, 4000)
OP_6E(600, 4000)
SetCameraDistance(16180, 4000)
OP_6F(0x79)
WaitChrThread(0x101, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x102, 0)
WaitChrThread(0xF4, 0)
WaitChrThread(0xF5, 0)
ChrTalk(
0x101,
"#00001F#12P……アリオスさん。\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00301F#12Pもう、あの長官の格好は\x01",
"してねぇんだな……?\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0xFFFFD8F0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
SetMessageWindowPos(14, 280, 35, 3)
Sleep(500)
AnonymousTalk(
0x8,
(
"クロイス氏の要請とはいえ、\x01",
"元々、無理のある人事だからな。\x02\x03",
"独立国の無効宣言があった以上、\x01",
"俺にあれを着る資格はない。\x02\x03",
"国防長官でも、遊撃士でもなく……\x02\x03",
"ただの無頼の剣士として\x01",
"ここに立っていると思うがいい。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1406")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_13DE")
OP_FC(0xFFF4)
Jump("loc_13E1")
label("loc_13DE")
OP_FC(0xC)
label("loc_13E1")
ChrTalk(
0x10A,
"#00600F#13Pマクレイン……\x02",
)
CloseMessageWindow()
OP_5A()
Jump("loc_1455")
label("loc_1406")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1455")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1430")
OP_FC(0xFFF4)
Jump("loc_1433")
label("loc_1430")
OP_FC(0xC)
label("loc_1433")
ChrTalk(
0x109,
"#10113F#13Pアリオスさん……\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_1455")
ChrTalk(
0x102,
"#00108F#12Pどうしてそこまで……\x02",
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00206F#12P……律儀すぎです。\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
ChrTalk(
0x101,
(
"#00004F#12P……はは、参ったな……\x02\x03",
"#00008F聞きたい事が色々ありすぎて\x01",
"整理できていないんですけど……\x02\x03",
"#00000Fまずは“答え合わせ”をしても\x01",
"構いませんか……?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01404Fああ#5P──元よりそのつもりだ。\x02\x03",
"#01400F聞くがいい……\x01",
"ただ1つのことを除いて\x01",
"全てに答えよう。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00003F#12Pそれでは……\x02",
)
CloseMessageWindow()
ClearScenarioFlags(0x0, 3)
ClearScenarioFlags(0x0, 0)
ClearScenarioFlags(0x0, 1)
ClearScenarioFlags(0x0, 2)
label("loc_15EA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3A20")
FadeToDark(300, 0, 100)
OP_0D()
MenuCmd(0, 0)
MenuCmd(1, 0, "5年前の“事故”について")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_END)), "loc_1647")
MenuCmd(1, 0, "イアン弁護士との関係について")
label("loc_1647")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_END)), "loc_1670")
MenuCmd(1, 0, "黒の競売会でのキーアについて")
label("loc_1670")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_END)), "loc_1697")
MenuCmd(1, 0, "ガイが亡くなった日について")
label("loc_1697")
MenuCmd(2, 0, -1, -1, 0)
MenuEnd(0x0)
OP_60(0x0)
FadeToBright(300, 0)
OP_0D()
Switch(
(scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_END)),
(0, "loc_16D1"),
(1, "loc_234B"),
(2, "loc_2D7F"),
(3, "loc_3A13"),
(SWITCH_DEFAULT, "loc_3A1B"),
)
label("loc_16D1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2208")
ChrTalk(
0x101,
(
"#00008F#12P……辛いことを聞くようで\x01",
"申し訳ありませんが……\x02\x03",
"#00001F5年前の“事故”について\x01",
"教えてもらえませんか……?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5Pああ……\x01",
"もはや隠す必要もあるまい。\x02\x03",
"#01400F5年前、表通りで起きた\x01",
"運搬車の爆発事故……\x02\x03",
"お前たちも気付いているように、\x01",
"あれは帝国と共和国の諜報戦の\x01",
"結果として起きたものだった。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00106F#12Pやはり……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1982")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_185D")
OP_FC(0xFFF4)
Jump("loc_1860")
label("loc_185D")
OP_FC(0xC)
label("loc_1860")
ChrTalk(
0x10A,
"#00608F#13P………………………………\x02",
)
CloseMessageWindow()
OP_5A()
ChrTalk(
0x8,
(
"#01402F#5Pフフ、当然一課では\x01",
"その事実を把握していた筈だな?\x02\x03",
"#01403Fそして帝国・共和国派に配慮した\x01",
"上層部の判断で、当然のように\x01",
"握りつぶされたわけだが……\x02\x03",
"#01400Fその事自体に失望はあっても\x01",
"今さら恨みはない。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10A,
"#00606F#13P……言葉も無い。\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_1982")
ChrTalk(
0x103,
(
"#00208F#12P……それでアリオスさんの\x01",
"奥さんとシズクちゃんは……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5Pああ……サヤの命は失われ、\x01",
"シズクの光は奪われた。\x02\x03",
"#01408Fあれから5年……\x01",
"両国の諜報機関が整備されたことで\x01",
"無為な破壊工作は無くなったが……\x02\x03",
"#01401F数十年に渡る暗闘の結果、\x01",
"サヤたちと同じような被害者は\x01",
"少なからず出ていた。\x02\x03",
"#01403Fロイド──お前の両親や\x01",
"イアン先生の家族を含めてな。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
ChrTalk(
0x101,
"#00005F#12P#4S!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
"#00307F#12Pなんだと……!?\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x17C, 6)), scpexpr(EXPR_END)), "loc_1CE2")
ChrTalk(
0x102,
(
"#00101F#12Pロ、ロイドのご両親って\x01",
"確か……!?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00208F#12P15年前の飛行船事故で……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00006F#12Pああ……前に話した通りだ。\x02\x03",
"#00008F俺は物心付いたばかりで\x01",
"ほとんど覚えていないけど……\x02\x03",
"#00013Fじゃあ、その時に……\x01",
"イアン先生の家族というのも?\x02",
)
)
CloseMessageWindow()
Jump("loc_1DFD")
label("loc_1CE2")
ChrTalk(
0x102,
"#00105F#12Pロ、ロイドのご両親が!?\x02",
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00206F#12P……初耳です……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00006F#12P俺の両親は……\x01",
"15年前、就航したばかりの\x01",
"飛行船の事故で亡くなっている……\x02\x03",
"#00008F俺は物心付いたばかりで\x01",
"ほとんど覚えていないけど……\x02\x03",
"#00013Fじゃあ、その時に……\x01",
"イアン先生の家族というのも?\x02",
)
)
CloseMessageWindow()
label("loc_1DFD")
ChrTalk(
0x8,
(
"#01403F#5Pああ、奥さんとお子さん2人が\x01",
"それに乗っていたと聞いている。\x02\x03",
"俺にはシズクが残されたが……\x01",
"全てを失った彼の嘆きと哀しみは\x01",
"想像も付かないくらいだろう。\x02\x03",
"#01400Fそしてその時、ガイとイアン先生も\x01",
"同じ遺族として知り合っている筈だ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00001F#12P……………………………………\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1F70")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1F45")
OP_FC(0xFFF4)
Jump("loc_1F48")
label("loc_1F45")
OP_FC(0xC)
label("loc_1F48")
ChrTalk(
0x109,
"#10106F#13P……そ、そんな事が……\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_1F70")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1FDA")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1F9A")
OP_FC(0xFFF4)
Jump("loc_1F9D")
label("loc_1F9A")
OP_FC(0xC)
label("loc_1F9D")
ChrTalk(
0x10A,
(
"#00606F#13Pその情報は一課でも\x01",
"把握されていなかった……\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_1FDA")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2044")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2004")
OP_FC(0xFFF4)
Jump("loc_2007")
label("loc_2004")
OP_FC(0xC)
label("loc_2007")
ChrTalk(
0x105,
(
"#10401F#13P……なるほど。\x01",
"そんな因縁があったとはね……\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_2044")
ChrTalk(
0x8,
(
"#01403F#5P……そして5年前の事件の後、\x01",
"俺は警察を辞め、\x01",
"遊撃士協会の門戸を叩いた。\x02\x03",
"警察への失望、シズクの入院費用の捻出、\x01",
"色々と理由はあったが……\x02\x03",
"#01408F単に、サヤを失った哀しみから\x01",
"逃れたかっただけかもしれない。\x02\x03",
"#01400Fその気になれば幾らでもある\x01",
"遊撃士の仕事に没頭することでな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00008F#12Pアリオスさん……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2200")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_21B0")
OP_FC(0xFFF4)
Jump("loc_21B3")
label("loc_21B0")
OP_FC(0xC)
label("loc_21B3")
ChrTalk(
0x106,
(
"#10706F#13P(……今まで《銀#2Rイン#》が殺めた\x01",
" 標的たちの家族も……)\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_2200")
SetScenarioFlags(0x0, 0)
Jump("loc_2346")
label("loc_2208")
ChrTalk(
0x8,
(
"#01403F#5P5年前、表通りで起きた\x01",
"サヤの命とシズクの光を奪った\x01",
"運搬車の爆発事故……\x02\x03",
"#01401Fあれは帝国と共和国の\x01",
"諜報戦の結果起きたものだった。\x02\x03",
"そして15年前の飛行船事故も\x01",
"同じ理由で起きている。\x02\x03",
"#01403Fその結果……ガイとお前の両親や\x01",
"イアン先生の家族の命が奪われた。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00008F#12P……………………………………\x02",
)
CloseMessageWindow()
label("loc_2346")
Jump("loc_3A1B")
label("loc_234B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2C91")
ChrTalk(
0x101,
(
"#00006F#12P……ずっと疑問に思っていた\x01",
"事があったんです。\x02\x03",
"#00001Fどうしてディーターさんたちと\x01",
"貴方の存在が結び付くのかと。\x02",
)
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x8,
"#01405F#5Pほう……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00106F#12P……確かにおじさまやベルは\x01",
"経済や金融、クロイス家に関係する\x01",
"教団の情報には詳しそうだけど……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00303F#12P帝国と共和国の水面下での暗闘……\x02\x03",
"#00301Fそのあたりの事情にまで\x01",
"通じているのは違和感があるな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00203F#12Pお互い接点が無い両名……\x02\x03",
"#00201Fなのに大統領になったディーターさんは\x01",
"アリオスさんを国防長官に指名した……\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2623")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2585")
OP_FC(0xFFF4)
Jump("loc_2588")
label("loc_2585")
OP_FC(0xC)
label("loc_2588")
ChrTalk(
0x10A,
(
"#00606F#13P……なるほど、そういう事か。\x02\x03",
"#00601Fその両者を結びつけたのが\x01",
"イアン先生だったという訳か。\x02",
)
)
CloseMessageWindow()
OP_5A()
ChrTalk(
0x101,
(
"#00001F#12Pええ……\x01",
"──違いますか?\x02",
)
)
CloseMessageWindow()
Jump("loc_2796")
label("loc_2623")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_26EB")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_264D")
OP_FC(0xFFF4)
Jump("loc_2650")
label("loc_264D")
OP_FC(0xC)
label("loc_2650")
ChrTalk(
0x105,
(
"#10406F#13P……なるほど、そういう事か。\x02\x03",
"#10401Fその両者を結びつけたのが\x01",
"あの熊ヒゲ先生だった訳だね?\x02",
)
)
CloseMessageWindow()
OP_5A()
ChrTalk(
0x101,
(
"#00001F#12Pああ……\x01",
"──違いますか?\x02",
)
)
CloseMessageWindow()
Jump("loc_2796")
label("loc_26EB")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2796")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2715")
OP_FC(0xFFF4)
Jump("loc_2718")
label("loc_2715")
OP_FC(0xC)
label("loc_2718")
ChrTalk(
0x109,
(
"#10108F#13Pひょっとして……\x02\x03",
"#10101Fその両者を結びつけたのが\x01",
"イアン先生……?\x02",
)
)
CloseMessageWindow()
OP_5A()
ChrTalk(
0x101,
(
"#00001F#12Pああ……\x01",
"──違いますか?\x02",
)
)
CloseMessageWindow()
label("loc_2796")
ChrTalk(
0x8,
"#01404F#5Pフフ……その通りだ。\x02",
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
CreatePortrait(0, 224, 0, 480, 256, 10, 16, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu02200.itp")
OP_CB(0x0, 0x3, 0xAAFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#01402F警察時代、お前たちと同じく、\x01",
"俺とガイもイアン先生の情報には\x01",
"随分と助けられたものだった。\x02\x03",
"教団のロッジ制圧作戦でも\x01",
"民間のアドバイザーとして\x01",
"協力していたくらいの情報通だ。\x02\x03",
"#01403Fそして遊撃士になった後も……\x01",
"彼とは頻繁に情報交換していた。\x02",
)
)
CloseMessageWindow()
CreatePortrait(1, 224, 0, 480, 256, 65296, 16, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu02800.itp")
OP_CB(0x1, 0x3, 0xAAFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#01403F#5P一方で先生は、IBCの法務を通じて\x01",
"クロイス父娘と昔から親交があった。\x02\x03",
"#01401Fそして──あらゆる情報と要素は\x01",
"先生のところに集約・統合され……\x02\x03",
"クロイス氏は彼に誘導されるまま、\x01",
"様々な政治工作と《至宝》の力による\x01",
"クロスベル独立を成し遂げた。\x02\x03",
"#01403Fその裏で、彼とマリアベル嬢によって\x01",
"真の計画が進められているとも知らずに。\x02",
)
)
CloseMessageWindow()
FadeToBright(800, 0)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x320, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
OP_0D()
Sleep(300)
ChrTalk(
0x101,
"#00013F#12P真の計画……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00108F#12P『碧き零の計画』ですか……\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5Pそう……サヤたちの事故についても\x01",
"先生はいち早く真相に気付いていた。\x02\x03",
"そして俺に事情を打ち明け……\x01",
"俺も計画に協力する事となった。\x02\x03",
"#01400Fこれが経緯#4Rいきさつ#の全てだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00008F#12P………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00206F#12P全てはイアン先生と\x01",
"マリアベルさんの掌#2Rてのひら#の上……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
"#00301F#12P……とんでもねぇ話だぜ。\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2D7A")
label("loc_2C91")
ChrTalk(
0x8,
(
"#01403F#5Pクロイス氏と俺……\x01",
"接点のない2人を結びつけたのが\x01",
"他ならぬイアン先生だった。\x02\x03",
"彼は、5年前の事故についても\x01",
"いち早く真相に気付いて\x01",
"俺を『碧#2Rあお#き零#2Rゼロ#の計画』へと誘い……\x02\x03",
"#01400Fそして俺もまた、それに応じた。\x02",
)
)
CloseMessageWindow()
label("loc_2D7A")
Jump("loc_3A1B")
label("loc_2D7F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_38B0")
ChrTalk(
0x101,
(
"#00006F#12P……これも同じく、\x01",
"疑問に思っていた事ですが……\x02\x03",
"#00013Fキーアを《太陽の砦》の地下から\x01",
"連れ出したのはアリオスさんですね?\x02\x03",
"そして《黒の競売会》に出品される\x01",
"ローゼンベルク人形と入れ替えたのも。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00105F#12Pそ、そういえば……\x02",
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00206F#12P確かにその問題も完全には\x01",
"明らかになっていませんね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01404F#5Pああ、その通りだ。\x02\x03",
"#01402Fそれに関しては先生ではなく、\x01",
"マリアベル嬢の主導だったがな。\x02",
)
)
CloseMessageWindow()
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis303.itp")
CreatePortrait(1, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis304.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#01403Fどうやら彼女はヨアヒムの動きを\x01",
"完全に把握していたようでな……\x02\x03",
"#01401F彼女の転位術で俺たちは容易く\x01",
"最下層の祭壇に辿りつき、\x01",
"あの子を揺籃#4Rゆりかご#から解放した。\x02",
)
)
CloseMessageWindow()
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#01403Fそして俺は、レミフェリア方面から\x01",
"運ばれてきたローゼンベルク人形と\x01",
"あの子をすり替えた。\x02\x03",
"#01400Fそのローゼンベルク人形自体も\x01",
"ルバーチェ側に気づかれないように\x01",
"マリアベル嬢が用意したものだがな。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x102,
"#00106F……そんな事まで……\x02",
)
CloseMessageWindow()
OP_CB(0x1, 0x3, 0xFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sleep(300)
ChrTalk(
0x101,
(
"#00006F#12Pしかし……\x01",
"そんな事をしてマリアベルさんに\x01",
"何の意味があったんですか?\x02\x03",
"#00013F計画にキーアが必要ならそのまま\x01",
"保護すれば良かったのに……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5P一つは《黒月#4Rヘイユエ#》も巻き込んで\x01",
"ルバーチェ側の面目を失墜させ、\x01",
"自滅の第一歩とすること……\x02\x03",
"#01400Fもし、競売会の場で\x01",
"彼女が目覚めることになったら\x01",
"マリアベル嬢が動いたはずだ。\x02\x03",
"動揺する客とマルコーニを前に\x01",
"IBCの名を出してあの子の保護を\x01",
"買って出るつもりだったのだろう。\x02\x03",
"#01404F《黒月》が動いたら\x01",
"別の展開もあっただろうが……\x02\x03",
"いずれにせよ、あの時は\x01",
"この俺も会場内に潜伏していた#28R噵 噵 噵 噵 噵 噵 噵 噵 噵 噵 噵 噵 噵 噵#。\x02\x03",
"#01402Fどんな展開になったとしても\x01",
"収拾できる態勢は整っていたわけだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306F#12Pなんつーか……\x01",
"ウルトラCすぎんだろ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00211F#12P用意周到すぎます……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_3540")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_34E2")
OP_FC(0xFFF4)
Jump("loc_34E5")
label("loc_34E2")
OP_FC(0xC)
label("loc_34E5")
ChrTalk(
0x106,
(
"#10708F#13P……確かにあの時、\x01",
"他にも何者かが潜んでいるような\x01",
"気配を感じましたけど……\x02",
)
)
CloseMessageWindow()
label("loc_3540")
ChrTalk(
0x8,
(
"#01403F#5Pそしてもう一つは……\x02\x03",
"#01401Fあのような特異な状況で\x01",
"《至宝》を目覚めさせることで\x01",
"潜在能力を見極めるという事だ。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0xF5, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x103,
"#00205F#12Pキーアの潜在能力を見極める?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00101F#12Pど、どういう事ですか……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5Pさて──マリアベル嬢が\x01",
"そう言っていたというだけだ。\x02\x03",
"#01408F恐らく、あの子を長き眠りから\x01",
"目覚めさせる条件の一つなのかも\x01",
"しれないが……\x02\x03",
"#01400Fいずれにせよ、女神の導きか、\x01",
"それとも単なる偶然か、\x01",
"彼女はお前たちの前で目覚めた。\x02\x03",
"マリアベル嬢にしたら\x01",
"完全に想定外だった筈だが……\x02\x03",
"#01403Fあの子がお前たちに引き取られ、\x01",
"一緒に暮らす事になったのも含めて\x01",
"歓迎しているかのようだった。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00008F#12P………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306F#12P……ダメだ。\x01",
"ワケが分からねぇ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00108F#12P……ベル……\x01",
"いったい何のつもりで……?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3A0E")
label("loc_38B0")
ChrTalk(
0x8,
(
"#01403F#5P《太陽の砦》からキーアを解放し、\x01",
"競売会の場に運び込んだのは\x01",
"マリアベル嬢の主導によるものだ。\x02\x03",
"#01408Fその狙いは、ルバーチェを崩壊に導き、\x01",
"状況をコントロールするためでも\x01",
"あったようだが……\x02\x03",
"#01401F特異な状況で彼女を目覚めさせ、\x01",
"潜在能力を見極めるという目的も\x01",
"一方ではあったようだ。\x02\x03",
"#01403Fそれ以上のことは\x01",
"残念ながら俺にも分からない。\x02",
)
)
CloseMessageWindow()
label("loc_3A0E")
Jump("loc_3A1B")
label("loc_3A13")
SetScenarioFlags(0x0, 3)
Jump("loc_3A1B")
label("loc_3A1B")
Jump("loc_15EA")
label("loc_3A20")
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
ChrTalk(
0x101,
(
"#00006F#12P#30W……それじゃあ……\x02\x03",
"#00008F兄貴が亡くなった日の事……\x01",
"……その真実を教えて貰えますか?\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_3AEE")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_3ACD")
OP_FC(0xFFF4)
Jump("loc_3AD0")
label("loc_3ACD")
OP_FC(0xC)
label("loc_3AD0")
ChrTalk(
0x10A,
"#00601F#13P……っ………\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_3AEE")
ChrTalk(
0x103,
"#00208F#12P……ぁ………\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
"#01403F#5P#30W………………………………\x02",
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x8)
ChrTalk(
0x8,
"#01400F#5P#30Wいいだろう──\x02",
)
CloseMessageWindow()
StopBGM(0xFA0)
FadeToDark(1000, 0, -1)
OP_0D()
WaitBGM()
Sound(883, 0, 60, 0)
Sleep(2300)
Sound(128, 2, 10, 0)
Sleep(150)
OP_25(0x80, 0x14)
Sleep(150)
OP_25(0x80, 0x1E)
Sleep(150)
OP_25(0x80, 0x28)
Sleep(150)
OP_25(0x80, 0x32)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#3C#30W……サヤを亡くして\x01",
"警察を離れてから2年……\x02\x03",
"俺はイアン先生たちの計画に協力し、\x01",
"幾つもの工作を成し遂げていた……\x02\x03",
"いずれも後ろ暗い……\x01",
"陰謀めいた工作ばかりだ。\x02\x03",
"だが、ギルド関係者を始め、\x01",
"それを誰かに感付かれることは\x01",
"遂になかった。\x02\x03",
"ガイ・バニングス……\x01",
"かつての俺の相棒を除いては。\x02",
)
)
CloseMessageWindow()
PlayBGM("ed7560", 0)
CreatePortrait(0, 224, 0, 480, 256, 0, 16, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu07800.itp")
OP_CB(0x0, 0x3, 0xEEFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#3C#30Wガイは……あいつは\x01",
"凄まじいほどの嗅覚と粘り強さで\x01",
"様々な陰謀と秘密に迫っていた。\x02\x03",
"帝国と共和国による暗闘……\x02\x03",
"ハルトマン議長とルバーチェ、\x01",
"そしてD∴G教団残党の動き……\x02\x03",
"その更に背後にある、\x01",
"クロイス家の計画にまで……\x02\x03",
"そして──\x02\x03",
"あの雨の日、ガイは俺を\x01",
"着工したばかりのオルキスタワーの\x01",
"建設現場に呼び出した……\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFF000000, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_25(0x80, 0x3C)
Sleep(200)
OP_25(0x80, 0x46)
Sleep(200)
OP_25(0x80, 0x50)
Sleep(200)
OP_25(0x80, 0x5A)
Sleep(200)
OP_25(0x80, 0x64)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFF000000, 0x0, "c_vis305.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#3C#30W無論、計画の詳細までは\x01",
"掴んでいなかったが……\x02\x03",
"ガイの推測は驚くほど的確で\x01",
"計画の全体像を捉えていた。\x02\x03",
"教団とマフィアを利用した\x01",
"クロイス氏の政界進出……\x02\x03",
"外国勢力の仕業に見せかけて\x01",
"クロスベル市を襲撃させることで\x01",
"独立の気運を煽ること……\x02\x03",
"更にはクロイス家の“何か”で\x01",
"大陸全土を威圧・主導する事……\x02\x03",
"信じ難いことに\x01",
"そんな事まで指摘してのけた。\x02\x03",
"そして──\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFF000000, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
CreatePortrait(0, 0, 0, 512, 512, 0, 65296, 512, 512, 0, 0, 512, 512, 0xFF000000, 0x0, "c_vis330.itp")
CreatePortrait(1, 0, 0, 512, 512, 0, 0, 512, 512, 0, 0, 512, 512, 0xFFFFFF, 0x0, "c_vis331.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x7D0, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x1, 0x0, 0x0, 0xFFFC5680, 0x7D0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x0)
OP_CB(0x1, 0x3, 0xFF000000, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sound(534, 0, 80, 0)
Sleep(100)
PlayEffect(0x3, 0x3, 0xC, 0x0, 0, 0, 0, 180, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(372, 0, 40, 0)
Sleep(200)
Sound(540, 0, 100, 0)
Sound(511, 0, 100, 0)
Sleep(400)
Sound(540, 0, 100, 0)
Sound(372, 0, 40, 0)
Sound(566, 0, 50, 0)
Sleep(200)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFF000000, 0x0, "c_vis306.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(50)
BeginChrThread(0x8, 0, 0, 30)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#3C#30W……手を引けという\x01",
"俺の言葉をガイは受け入れず……\x02\x03",
"俺たちは雨の中、死闘を始めた。\x02\x03",
"武術の腕は俺がやや上……\x01",
"だが、ガイには揺るぎない\x01",
"意志による力がみなぎっていた。\x02\x03",
"何十合と打ち合い、\x01",
"お互いの体力を奪い合いながら\x01",
"雨の中の死闘は続き……\x02\x03",
"そして──\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFF000000, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sleep(800)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFF000000, 0x0, "c_vis307.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x3E8, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(800)
SetMessageWindowPos(14, 280, -1, 3)
AnonymousTalk(
0x8,
(
"#3C#30Wそしてガイは……\x01",
"命を落とすこととなった。\x02\x03",
"当然、あいつのトンファーを\x01",
"現場から持ち去ったのは俺だ。\x02\x03",
"トンファーに無数に刻まれた刀傷から\x01",
"犯人を特定されたくなかったからだ。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFFFFFF, 0x3E8, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
StopSound(128, 1000, 100)
FadeToBright(1500, 0)
OP_0D()
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4514")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_44BD")
OP_FC(0xFFF4)
Jump("loc_44C0")
label("loc_44BD")
OP_FC(0xC)
label("loc_44C0")
ChrTalk(
0x109,
"#10106F#13P#30Wそんな事が……\x02",
)
CloseMessageWindow()
OP_5A()
ChrTalk(
0x103,
"#00208F#12P#30W……………………………\x02",
)
CloseMessageWindow()
Jump("loc_456A")
label("loc_4514")
ChrTalk(
0x101,
"#00008F#12P#30W……………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00206F#12P#30W……そんな事が…………\x02",
)
CloseMessageWindow()
label("loc_456A")
OP_63(0x8, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x8)
Sleep(500)
ChrTalk(
0x8,
(
"#01403F#5P#30W……これがあの雨の日、\x01",
"起こった事件のあらましだ。\x02\x03",
"#01408Fその後、マフィアの手下が現れ、\x01",
"ガイのバッジを持って行かれたのは\x01",
"さすがに想定外だったが……\x02\x03",
"#01400Fいずれにしても、これで大体の\x01",
"疑問には答えられただろう。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00003F#12P───いえ。\x02",
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x8,
"#01405F#5P………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00006F#12P当然知っているでしょうが、\x01",
"兄貴の死因は銃撃#4R噵 噵#によるものです。\x02\x03",
"#00001Fその事についての説明が\x01",
"無かったみたいですが……?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5P……警察にいた頃に\x01",
"拳銃の扱いは習得している。\x02\x03",
"#01401Fしつこく喰い下がってくる\x01",
"面倒な相手に業を煮やして\x01",
"使ったまでだが……?\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_48B0")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_480D")
OP_FC(0xFFF4)
Jump("loc_4810")
label("loc_480D")
OP_FC(0xC)
label("loc_4810")
ChrTalk(
0x10A,
(
"#00606F#13P嘘だな──マクレイン。\x02\x03",
"#00601Fそのような死闘の中で\x01",
"別の得物を構える余裕など\x01",
"あるものか。\x02\x03",
"ましてや相手の背中から\x01",
"止めを刺すなど不可能だ。\x02",
)
)
CloseMessageWindow()
Jump("loc_4956")
label("loc_48B0")
ChrTalk(
0x101,
(
"#00013F#12P──それは嘘だ。\x02\x03",
"#00006Fそんな死闘の中で\x01",
"別の得物を構える余裕など\x01",
"ある訳がないでしょう。\x02\x03",
"#00001Fましてや相手の背中から\x01",
"止めを刺すなんて不可能です。\x02",
)
)
CloseMessageWindow()
label("loc_4956")
StopBGM(0xFA0)
ChrTalk(
0x8,
"#01401F#5P………………………………\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
"#00303F#12Pまあ、道理ってヤツだな。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00108F#12Pいったい“誰”が\x01",
"ガイさんを撃ったのか……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00201F#12P……話してください。\x02",
)
CloseMessageWindow()
BeginChrThread(0x8, 0, 0, 22)
WaitChrThread(0x8, 0)
OP_63(0x8, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x8)
WaitBGM()
Sleep(10)
PlayBGM("ed7356", 0)
MoveCamera(43, 13, 0, 20000)
Sleep(500)
ChrTalk(
0x8,
(
"#01403F#5P#30W──何を言われようと\x01",
"ガイを死に追いやったのは\x01",
"俺以外の何者でもない。\x02\x03",
"#01400Fそして俺は……\x01",
"かつての相棒を犠牲にしてまで\x01",
"計画に協力する道を選んだ。\x02\x03",
"そして今もなお……\x01",
"いたいけな少女の想いを利用して\x01",
"計画を完了させようとしている。\x02\x03",
"#01403F全てはサヤのため……\x01",
"そしてシズクの未来のために。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00001F#12P……アリオスさん……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00108F#12Pシズクちゃんがこんな事して\x01",
"喜ぶとでも……?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5P#30W無論、喜ぶわけがない。\x02\x03",
"#01401Fだが──\x01",
"クロスベルという地の呪いは\x01",
"あの子から母と光を奪った。\x02\x03",
"そしてクロスベルが\x01",
"大陸のこの位置にある以上、\x01",
"呪いは決して消えることはない。\x02\x03",
"#01403F──人の世の理を超越した\x01",
"“奇蹟”でも起きない限りは。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00005F#12P……!?\x02",
)
CloseMessageWindow()
BeginChrThread(0x8, 0, 0, 23)
WaitChrThread(0x8, 0)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
ChrTalk(
0x8,
(
"#01400F#5P#30W3年前ガイは……\x01",
"俺のことを一言も責めず、\x01",
"死闘の果てに命を落とした。\x02\x03",
"そして《至宝》となった彼女#4Rキーア#は\x01",
"シズクの目を治してくれた。\x02\x03",
"#01403Fもはや──\x01",
"後戻りできる道理はない。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(250)
SetCameraDistance(15370, 800)
BeginChrThread(0x8, 0, 0, 24)
Sleep(500)
PlayEffect(0x1, 0x1, 0x8, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(250)
OP_82(0x64, 0x0, 0xBB8, 0x320)
Sound(825, 2, 50, 0)
Sound(832, 2, 100, 0)
Sound(881, 0, 50, 0)
Sound(833, 0, 50, 0)
Sleep(1000)
ChrTalk(
0x8,
(
"#01403F#5P#30W……承服できなければ\x01",
"力をもって止めてみるがいい。\x02\x03",
"#01401F兄の遺#2Rのこ#したそのトンファーで……\x02\x03",
"#01407F見事、兄の仇を討って\x01",
"大切なものを取り戻すための道を\x01",
"切り拓いてみせるがいい……!\x02",
)
)
CloseMessageWindow()
StopBGM(0xBB8)
ChrTalk(
0x101,
(
"#00006F#12P……分かりました。\x02\x03",
"#00001Fだが──兄貴の仇を\x01",
"取るつもりは毛頭ありません。\x02",
)
)
CloseMessageWindow()
Sleep(200)
BeginChrThread(0x101, 0, 0, 16)
WaitChrThread(0x101, 0)
PlayEffect(0x2, 0x0, 0x101, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(881, 0, 50, 0)
Sound(833, 0, 50, 0)
OP_25(0x339, 0x46)
WaitBGM()
Sleep(10)
PlayBGM("ed7527", 0)
SetCameraDistance(18370, 20000)
Sleep(1000)
ChrTalk(
0x101,
(
"#00003F#12Pガイ・バニングスの\x01",
"意志を継いで立ち上げられた\x01",
"ささやかな部署として……\x02\x03",
"#00001Fシズクちゃんを始め、\x01",
"大勢の人々の想いを託された\x01",
"《特務支援課》として……\x02\x03",
"#00007F貴方という《壁》を乗り越え、\x01",
"キーアを取り戻して……\x01",
"本当の意味で事件を解決してみせる!\x02",
)
)
CloseMessageWindow()
Sleep(150)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
ChrTalk(
0x8,
"#01405F#5P……!\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00302F#12Pハハ……\x01",
"さすがは俺らのリーダー!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00101F#12Pオルキスタワーで待っている\x01",
"シズクちゃんのためにも……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00201F#12P……絶対に退#2Rひ#けません……!\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_52A2")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5270")
OP_FC(0xFFF4)
Jump("loc_5273")
label("loc_5270")
OP_FC(0xC)
label("loc_5273")
ChrTalk(
0x10A,
(
"#00604F#13Pフッ……\x01",
"仕方のないヤツらだ。\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_52A2")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5302")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_52CC")
OP_FC(0xFFF4)
Jump("loc_52CF")
label("loc_52CC")
OP_FC(0xC)
label("loc_52CF")
ChrTalk(
0x105,
(
"#10402F#13Pフフ……\x01",
"このノリが支援課だよね。\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_5302")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5353")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_532C")
OP_FC(0xFFF4)
Jump("loc_532F")
label("loc_532C")
OP_FC(0xC)
label("loc_532F")
ChrTalk(
0x109,
"#10107F#13P全力で援護します!\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_5353")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_53E1")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_537D")
OP_FC(0xFFF4)
Jump("loc_5380")
label("loc_537D")
OP_FC(0xC)
label("loc_5380")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_53B9")
ChrTalk(
0x106,
"#10701F#13P私も……力の限り!\x02",
)
CloseMessageWindow()
OP_5A()
Jump("loc_53E1")
label("loc_53B9")
ChrTalk(
0x106,
"#10707F#13P全力でお手伝いします!\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_53E1")
BeginChrThread(0x102, 0, 0, 17)
BeginChrThread(0x103, 0, 0, 18)
BeginChrThread(0x104, 0, 0, 19)
BeginChrThread(0xF4, 0, 0, 20)
BeginChrThread(0xF5, 0, 0, 21)
WaitChrThread(0x102, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0xF4, 0)
WaitChrThread(0xF5, 0)
Sleep(500)
OP_C9(0x0, 0x80000000)
ChrTalk(
0x8,
"#01404F#4077V#5P#30W#25Aフフ──いいだろう。\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
BeginChrThread(0x8, 0, 0, 25)
WaitChrThread(0x8, 0)
Sound(817, 0, 100, 0)
BeginChrThread(0xA, 3, 0, 27)
BeginChrThread(0xB, 3, 0, 28)
WaitChrThread(0xA, 3)
WaitChrThread(0xB, 3)
OP_68(180, 13300, 207000, 20000)
MoveCamera(43, 13, 0, 20000)
SetCameraDistance(14120, 20000)
CreatePortrait(0, 65514, 0, 490, 256, 0, 0, 512, 256, 0, 0, 512, 256, 0xFFFFFF, 0x0, "bu01402.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0xFFFFD8F0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
SetMessageWindowPos(14, 280, 35, 3)
Sleep(500)
BeginChrThread(0x8, 0, 0, 26)
AnonymousTalk(
0x8,
(
"#4065V#40W#55A八葉一刀流、ニの型奥義皆伝、\x01",
"アリオス・マクレイン……\x02\x03",
"#4066V#60A一身上の都合により、義に背き、\x01",
"道を外れ、勝手を貫かせてもらう!\x02\x03",
"#4067V#30A来るがいい──特務支援課!\x02",
)
)
WaitChrThread(0x8, 0)
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
Sleep(300)
SetMessageWindowPos(330, 100, -1, -1)
SetChrName("ロイドたち")
OP_82(0xC8, 0x0, 0xBB8, 0x1F4)
AnonymousTalk(
0xFF,
"#4S#12Aおおっ!\x02",
)
Sound(2153, 255, 90, 0)
Sound(2343, 255, 100, 1)
Sound(2249, 255, 100, 2)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_568B")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5682")
OP_FC(0x4)
Sound(2478, 255, 100, 4)
Jump("loc_568B")
label("loc_5682")
OP_FC(0x3)
Sound(2478, 255, 100, 3)
label("loc_568B")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_56BE")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_56B8")
Sound(2417, 255, 100, 4)
Jump("loc_56BE")
label("loc_56B8")
Sound(2417, 255, 100, 3)
label("loc_56BE")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_56F1")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_56EB")
Sound(2544, 255, 100, 4)
Jump("loc_56F1")
label("loc_56EB")
Sound(2544, 255, 100, 3)
label("loc_56F1")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5724")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_571E")
Sound(3174, 255, 100, 4)
Jump("loc_5724")
label("loc_571E")
Sound(3174, 255, 100, 3)
label("loc_5724")
Sound(2055, 255, 100, 5)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Battle("BattleInfo_2B8", 0x0, 0x0, 0x100, 0x45, 0xFF)
FadeToDark(0, 0, -1)
SetChrFlags(0xA, 0x80)
SetChrFlags(0xB, 0x80)
Call(0, 31)
Return()
# Function_9_D37 end
# Per-character movement thread (one of the parallel set Function_10..15,
# each moving a different party member into formation). Runs on the invoking
# character (0xFE): OP_9B appears to configure movement parameters, OP_96
# walks the character to a fixed world coordinate, and OP_93 sets its final
# facing — opcode semantics inferred from the shared pattern; TODO confirm
# against an ED7 scenario-opcode reference.
def Function_10_575F(): pass
label("Function_10_575F")
OP_9B(0x0, 0xFE, 0x0, 0x1770, 0xFA0, 0x1)
OP_96(0xFE, 0x0, 0x2EE0, 0x318BC, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_10_575F end
# Movement thread for another party member; same OP_9B / OP_96 / OP_93
# pattern as Function_10 but with a different target coordinate, so the
# characters fan out into distinct formation positions.
def Function_11_578A(): pass
label("Function_11_578A")
OP_9B(0x0, 0xFE, 0x0, 0x157C, 0xFA0, 0x1)
OP_96(0xFE, 0x460, 0x2EE0, 0x313E4, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_11_578A end
# Movement thread: walk the invoking character (0xFE) to its formation
# spot and face forward. Same shape as Function_10/11 with its own target
# position.
def Function_12_57B5(): pass
label("Function_12_57B5")
OP_9B(0x0, 0xFE, 0x0, 0x157C, 0xFA0, 0x1)
OP_96(0xFE, 0x1A4, 0x2EE0, 0x30EB2, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_12_57B5 end
# Movement thread: same OP_9B / OP_96 / OP_93 pattern as Function_10..12;
# the 0xFFFFFB0A X coordinate is a negative offset (two's complement), i.e.
# this character takes a spot left of the formation center.
def Function_13_57E0(): pass
label("Function_13_57E0")
OP_9B(0x0, 0xFE, 0x0, 0x157C, 0xFA0, 0x1)
OP_96(0xFE, 0xFFFFFB0A, 0x2EE0, 0x3116E, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_13_57E0 end
# Movement thread: walk the invoking character to a formation spot further
# left (negative X, two's complement 0xFFFFF614) and face forward. Same
# pattern as Function_10..13.
def Function_14_580B(): pass
label("Function_14_580B")
OP_9B(0x0, 0xFE, 0x0, 0x1996, 0xFA0, 0x1)
OP_96(0xFE, 0xFFFFF614, 0x2EE0, 0x313DA, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_14_580B end
# Movement thread: last of the Function_10..15 set. Unlike the others it
# omits the final OP_93 facing call, so this character keeps whatever
# orientation the walk leaves it in.
def Function_15_5836(): pass
label("Function_15_5836")
OP_9B(0x0, 0xFE, 0x0, 0x1AC2, 0xFA0, 0x1)
OP_96(0xFE, 0x9EC, 0x2EE0, 0x31312, 0xFA0, 0x0)
Return()
# Function_15_5836 end
# Sprite-swap thread (one of the staggered set Function_16..21, delays
# 150/300/450/450/600/750 so the party readies weapons one after another).
# After the delay: play sound 805 and switch the invoking character (0xFE)
# to sprite chip 0x1F, sub-chip 0. Sleep units presumably milliseconds —
# TODO confirm.
def Function_16_585A(): pass
label("Function_16_585A")
Sleep(150)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x1F)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_16_585A end
# Sprite-swap thread: after a 300 delay, play sound 531 and switch the
# invoking character to sprite chip 0x20. Second member of the staggered
# Function_16..21 sequence.
def Function_17_586C(): pass
label("Function_17_586C")
Sleep(300)
Sound(531, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x20)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_17_586C end
# Sprite-swap thread: after a 450 delay, play sound 805 and switch the
# invoking character to sprite chip 0x21. Third member of the staggered
# Function_16..21 sequence.
def Function_18_587E(): pass
label("Function_18_587E")
Sleep(450)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_18_587E end
# Sprite-swap thread: after a 450 delay, switch the invoking character to
# sprite chip 0x22. Unlike Function_16..18 no sound is played here.
def Function_19_5890(): pass
label("Function_19_5890")
Sleep(450)
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_19_5890 end
# Sprite-swap thread for the character in party slot 0xF4 (the 5th, variable
# slot). After a 600 delay it branches on WHICH playable character occupies
# that slot (OP_FB tests 0x5 / 0x8 / 0x9 — presumably character IDs; TODO
# confirm Jc branch polarity against the decompiler docs) to pick the
# matching weapon-ready sound (540 vs 531, or none), then switches the
# invoking character to sprite chip 0x23.
def Function_20_589C(): pass
label("Function_20_589C")
Sleep(600)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_58BC")
Sound(540, 0, 50, 0)
Jump("loc_58E1")
label("loc_58BC")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_58E1")
Sound(531, 0, 100, 0)
label("loc_58E1")
SetChrChipByIndex(0xFE, 0x23)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_20_589C end
# Sprite-swap thread for the character in party slot 0xF5 — mirror of
# Function_20 but testing slot 0xF5 instead of 0xF4, with a longer (750)
# delay and sprite chip 0x24. Sound choice depends on which playable
# character (0x5 / 0x8 / 0x9) occupies the slot.
def Function_21_58EA(): pass
label("Function_21_58EA")
Sleep(750)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_590A")
Sound(540, 0, 50, 0)
Jump("loc_592F")
label("loc_590A")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_592F")
Sound(531, 0, 100, 0)
label("loc_592F")
SetChrChipByIndex(0xFE, 0x24)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_21_58EA end
# Frame-stepped animation for character 0x8 (Arios): load animation chip
# 0x2A, set pose flags, then advance sub-chips 0 -> 1 -> 2 at 125 per
# frame. First segment of the multi-part sequence continued by
# Function_23..26 (each picks up at the sub-chip the previous one ended on).
def Function_22_5938(): pass
label("Function_22_5938")
SetChrChipByIndex(0x8, 0x2A)
SetChrFlags(0xFE, 0x1000)
SetChrFlags(0xFE, 0x20)
SetChrFlags(0xFE, 0x2)
SetChrSubChip(0xFE, 0x0)
Sleep(125)
SetChrSubChip(0xFE, 0x1)
Sleep(125)
SetChrSubChip(0xFE, 0x2)
Sleep(500)
Return()
# Function_22_5938 end
# Frame-stepped animation for character 0x8: (re)load chip 0x2A and
# advance sub-chips 2 -> 7 with sound cues (932, 859) timed to specific
# frames. Continues the sequence started in Function_22.
def Function_23_5961(): pass
label("Function_23_5961")
SetChrChipByIndex(0x8, 0x2A)
SetChrFlags(0xFE, 0x1000)
SetChrFlags(0xFE, 0x20)
SetChrFlags(0xFE, 0x2)
SetChrSubChip(0xFE, 0x2)
Sleep(125)
Sound(932, 0, 60, 0)
SetChrSubChip(0xFE, 0x3)
Sleep(125)
SetChrSubChip(0xFE, 0x4)
Sleep(375)
SetChrSubChip(0xFE, 0x5)
Sleep(125)
Sound(859, 0, 100, 0)
SetChrSubChip(0xFE, 0x6)
Sleep(125)
SetChrSubChip(0xFE, 0x7)
Sleep(500)
Return()
# Function_23_5961 end
# Frame-stepped animation: advance character sub-chips 7 -> 0xB at 125 per
# frame, with sounds 812 and 531 on the final frame. Continues from the
# sub-chip Function_23 ended on.
def Function_24_59AB(): pass
label("Function_24_59AB")
SetChrSubChip(0xFE, 0x7)
Sleep(125)
SetChrSubChip(0xFE, 0x8)
Sleep(125)
SetChrSubChip(0xFE, 0x9)
Sleep(125)
SetChrSubChip(0xFE, 0xA)
Sleep(125)
Sound(812, 0, 100, 0)
Sound(531, 0, 50, 0)
SetChrSubChip(0xFE, 0xB)
Return()
# Function_24_59AB end
# Frame-stepped animation: advance character sub-chips 0xB -> 0xE with
# sound cues (531, 859). Continues from the frame Function_24 ended on.
def Function_25_59D8(): pass
label("Function_25_59D8")
SetChrSubChip(0xFE, 0xB)
Sleep(125)
SetChrSubChip(0xFE, 0xC)
Sleep(125)
SetChrSubChip(0xFE, 0xD)
Sleep(125)
Sound(531, 0, 50, 0)
SetChrSubChip(0xFE, 0xE)
Sleep(250)
Sound(859, 0, 60, 0)
Sleep(250)
Return()
# Function_25_59D8 end
# Frame-stepped animation: final segment, advancing sub-chips 0xE -> 0x11
# at a faster 91-per-frame rate with sound 540. Continues from the frame
# Function_25 ended on.
def Function_26_5A04(): pass
label("Function_26_5A04")
SetChrSubChip(0xFE, 0xE)
Sleep(91)
Sound(540, 0, 40, 0)
SetChrSubChip(0xFE, 0xF)
Sleep(91)
SetChrSubChip(0xFE, 0x10)
Sleep(91)
SetChrSubChip(0xFE, 0x11)
Sleep(364)
Return()
# Function_26_5A04 end
# Character-disappearance thread: attach effect slot 5 to the invoking
# character, wait 500, then OP_A7 (apparently a color/alpha fade on the
# character over 0x1F4 — TODO confirm) and stop the effect. Paired with
# Function_28 which does the same for a second character via effect slot 6.
def Function_27_5A27(): pass
label("Function_27_5A27")
PlayEffect(0x0, 0x5, 0xFE, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
StopEffect(0x5, 0x2)
Return()
# Function_27_5A27 end
# Mirror of Function_27 using effect slot 6: play the effect on the
# invoking character, wait 500, fade via OP_A7, stop the effect.
def Function_28_5A70(): pass
label("Function_28_5A70")
PlayEffect(0x0, 0x6, 0xFE, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
StopEffect(0x6, 0x2)
Return()
# Function_28_5A70 end
# Endless idle-animation loop: the Jc condition is the constant 0x1 and the
# body jumps back to the function label, so OP_A1 (frame sequence 0..4 at
# interval 0x4B0) repeats until the owning thread is ended externally
# (e.g. via EndChrThread by the caller).
def Function_29_5AB9(): pass
label("Function_29_5AB9")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5AD4")
OP_A1(0xFE, 0x4B0, 0x5, 0x0, 0x1, 0x2, 0x3, 0x4)
Jump("Function_29_5AB9")
label("loc_5AD4")
Return()
# Function_29_5AB9 end
# Portrait shake: moves portrait 0 (OP_CB op 0x0 appears to be a positional
# offset — TODO confirm) through alternating-sign offsets of decreasing
# magnitude (-8000, +6000, -4000, +2000, 0) with decreasing durations
# (110, 90, 70, 50, 30), i.e. a damped oscillation that settles back at
# the origin. Each move is awaited with OP_CC before the next starts.
def Function_30_5AD5(): pass
label("Function_30_5AD5")
OP_CB(0x0, 0x0, 0xFFFFE0C0, 0x0, 0x6E, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x1770, 0x0, 0x5A, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0xFFFFF060, 0x0, 0x46, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x7D0, 0x0, 0x32, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x1E, 0x0)
OP_CC(0x0, 0x0, 0x0)
Return()
# Function_30_5AD5 end
def Function_31_5B49(): pass
label("Function_31_5B49")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_E2(0x3)
LoadChrToIndex("chr/ch00050.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00250.itc", 0x21)
LoadChrToIndex("chr/ch00350.itc", 0x22)
Call(0, 6)
Call(0, 7)
LoadChrToIndex("apl/ch51744.itc", 0x26)
LoadEffect(0x0, "event/ev17084.eff")
LoadEffect(0x1, "event/ev17085.eff")
SoundLoad(128)
SoundLoad(4078)
SoundLoad(4079)
OP_68(-80, 13300, 209040, 0)
MoveCamera(358, 19, 0, 0)
OP_6E(600, 0)
SetCameraDistance(19000, 0)
SetCameraDistance(16500, 2500)
SetChrPos(0x101, 0, 12000, 207440, 0)
SetChrPos(0x102, 1120, 12000, 206200, 0)
SetChrPos(0x103, 420, 12000, 204870, 0)
SetChrPos(0x104, -1270, 12000, 205570, 0)
SetChrPos(0xF4, -2540, 11990, 206190, 0)
SetChrPos(0xF5, 2540, 12000, 205990, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
SetChrChipByIndex(0x101, 0x1F)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x21)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x22)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0xF4, 0x23)
SetChrSubChip(0xF4, 0x0)
SetChrChipByIndex(0xF5, 0x24)
SetChrSubChip(0xF5, 0x0)
SetChrChipByIndex(0x8, 0x0)
SetChrSubChip(0x8, 0x0)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8000)
SetChrFlags(0x8, 0x1000)
SetChrFlags(0x8, 0x2)
SetChrFlags(0x8, 0x800)
SetChrPos(0x8, 0, 12000, 211500, 180)
BeginChrThread(0x8, 0, 0, 32)
OP_68(0, 13000, 210000, 0)
MoveCamera(0, 15, 0, 0)
OP_6E(600, 0)
SetCameraDistance(17500, 0)
SetCameraDistance(15800, 12000)
FadeToBright(1000, 0)
OP_0D()
OP_C9(0x0, 0x80000000)
ChrTalk(
0x8,
(
"#01404F#4078V#5P#80W#30A………フフ…………\x02\x03",
"#4079V#45Aロイド……他の者たちも……\x01",
"……本当に強くなったな。\x02",
)
)
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(0, 13100, 208700, 0)
MoveCamera(46, 18, 0, 0)
OP_6E(600, 0)
SetCameraDistance(14680, 0)
EndChrThread(0x8, 0x0)
SetChrSubChip(0x8, 0x1)
OP_0D()
Sleep(500)
ChrTalk(
0x101,
(
"#00006F#12P#40W……はあっ……はあっ……\x02\x03",
"#00008Fだとしたら……アリオスさんが\x01",
"目標になってくれたからです……\x02",
)
)
OP_6F(0x79)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306F#12P#40W確かに……アンタがいなけりゃ\x01",
"ここまでは行けなかったかもな……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00206F#12P#40W……同感です……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00108F#12P#40Wいつも目指すべき《壁》として\x01",
"遥か先に居てくれましたから……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01402F#5P#40Wフフ……まったく……\x02\x03",
"#01404Fそのように言われる資格など\x01",
"そもそも無いというのに……\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
Sleep(500)
ChrTalk(
0x101,
(
"#00003F#12P#30W……アリオスさん。\x02\x03",
"#00001Fあの日、兄貴を撃ったのは\x01",
"イアン先生ですね……?\x02",
)
)
CloseMessageWindow()
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_60AC")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6088")
OP_FC(0xFFF4)
Jump("loc_608B")
label("loc_6088")
OP_FC(0xC)
label("loc_608B")
ChrTalk(
0x10A,
"#00605F#13P#30W……!\x02",
)
CloseMessageWindow()
OP_5A()
Jump("loc_60FB")
label("loc_60AC")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_60FB")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_60D6")
OP_FC(0xFFF4)
Jump("loc_60D9")
label("loc_60D6")
OP_FC(0xC)
label("loc_60D9")
ChrTalk(
0x109,
"#10105F#13P#30W……ぁ……!\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_60FB")
ChrTalk(
0x104,
"#00301F#12P#30Wそいつは……\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5P#40W………………………………\x02\x03",
"#01400F……何故、そう思う……?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00006F#12P#30W単なる消去法です……\x02\x03",
"#00008F……事件の背景を考えると……\x01",
"先生以外に容疑者がいるとしたら\x01",
"ディーターさんかマリアベルさん……\x02\x03",
"#00001Fただ、ディーターさんには\x01",
"計画の全ては伝えられていないようだし、\x01",
"マリアベルさんも兄貴とは接点がない……\x02\x03",
"#00006Fだが……イアン先生は\x01",
"兄貴ともかなり親しいようでした……\x02\x03",
"そして……国外の出張も多く、\x01",
"自衛の必要がある先生ならば\x01",
"拳銃に慣れていてもおかしくない……\x02\x03",
"#00013F……どうですか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01403F#5P#40W……60点だな……\x02\x03",
"#01402Fだが……及第点は\x01",
"付けざるを得ないようだ……\x02",
)
)
CloseMessageWindow()
StopBGM(0xFA0)
FadeToDark(1000, 0, -1)
OP_0D()
Sleep(1000)
Sound(128, 2, 30, 0)
Sleep(150)
OP_25(0x80, 0x28)
Sleep(150)
OP_25(0x80, 0x32)
Sleep(150)
OP_25(0x80, 0x3C)
Sleep(150)
OP_25(0x80, 0x46)
Sleep(150)
OP_25(0x80, 0x50)
Sleep(150)
OP_25(0x80, 0x5A)
Sleep(150)
OP_25(0x80, 0x64)
Sleep(300)
Sound(884, 0, 100, 0)
Sleep(3000)
WaitBGM()
Sleep(10)
PlayBGM("ed7534", 0)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis308.itp")
CreatePortrait(1, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis317.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x3E8, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40Wはあっ、はあっ……\x02\x03",
"……なあ、アリオス……\x02\x03",
"お互い限界みたいだし……\x01",
"今日のところは休戦にしねぇか?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(230, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W……何を馬鹿な……\x02\x03",
"知られた以上、お前をここから\x01",
"帰すわけにはいかん……\x02\x03",
"来月の式を無事迎えたくば\x01",
"殺す気でかかって来るがいい……!\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40Wんなの出来るワケねえだろ……\x02\x03",
"そしたらお前やシズクちゃんを\x01",
"式に呼べねえだろうが……?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(330, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"……!\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W安心しろ……お前らの計画は\x01",
"誰にも話しちゃいない……\x02\x03",
"ダドリーあたりに\x01",
"協力してもらおうかと思ったが……\x01",
"アイツも融通効かねぇからな。\x02\x03",
"セルゲイさんにだって\x01",
"まだ相談してないんだぜ……?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(250, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40Wお前……\x02\x03",
"……それを聞いて\x01",
"俺が好都合と判断するとは\x01",
"思わないのか……?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wいや……?\x01",
"だってお前、不器用だし。\x02\x03",
"じゃなかったらこんな場所に\x01",
"ノコノコ一人では来ねぇだろ。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(330, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wくっ……\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wとにかく……このあたりにして\x01",
"今から呑みにでも行こうぜ?\x02\x03",
"そうでなくてもここ2年、\x01",
"ロクに話も出来なかったし……\x02\x03",
"弟と彼女の自慢話くらい、\x01",
"させろっつーの。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(240, 150, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wフッ……相変わらずだな。\x02\x03",
"弟はたしか……\x01",
"もう15になるんだったか?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wああ、俺に似ずに結構な秀才でな。\x02\x03",
"どこかの高等学校あたりに\x01",
"行かせたいと思ってるんだが……\x02\x03",
"……まあいいや。\x01",
"雨だし《ガランテ》にでも──\x02",
)
)
CloseMessageWindow()
Sound(567, 0, 100, 0)
Sleep(200)
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(500)
SetMessageWindowPos(80, 160, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#60Wあ───\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(330, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W!?\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis309.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x2EE, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(300)
SetMessageWindowPos(330, 160, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W先生……!?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(250, 180, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#50W……ハハ……\x02\x03",
"……なるほど……\x01",
"黒幕はアンタだったか……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sound(885, 0, 80, 0)
Sound(811, 0, 80, 0)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis310.itp")
CreatePortrait(1, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis318.itp")
CreatePortrait(2, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis319.itp")
CreatePortrait(3, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis320.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(1000)
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(1000)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CB(0x2, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x2, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(1000)
OP_CB(0x1, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CB(0x3, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x3, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
SetMessageWindowPos(30, 160, -1, -1)
SetChrName("イアン弁護士")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W……悪いな、ガイ君。\x02\x03",
"ご両親のことを考えたら\x01",
"君も誘うべきかと思ったが……\x02\x03",
"多分、君は絶対#4R噵 噵#に賛同しないと\x01",
"確信できてしまったのでね。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(350, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W……先生………\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(200, 200, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#50Wハハ……当たりですよ……\x02\x03",
"……先生が付いてるなら\x01",
"多分……その計画ってのも\x01",
"上手く運ぶでしょう……\x02\x03",
"でも……きっと……\x01",
"俺の代わりは現れますよ……?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(40, 160, -1, -1)
SetChrName("イアン弁護士")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wああ……そうだろうな。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(350, 140, -1, -1)
SetChrName("アリオス")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30Wガイ……!\x01",
"……しっかりしろ……!\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(210, 200, -1, -1)
SetChrName("ガイ")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#60Wゲホッ……あぁ……\x01",
"……しまったなァ……\x02\x03",
"#80Wこんな事になるなら……\x01",
"ロイドと……セシルに……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_CB(0x3, 0x3, 0xFFFFFF, 0x3E8, 0x0, 0x0)
OP_CB(0x2, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CC(0x0, 0x3, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sound(885, 0, 90, 0)
Sleep(100)
Sound(811, 0, 90, 0)
Sound(862, 0, 40, 0)
StopBGM(0x1770)
Sleep(2000)
StopSound(128, 2000, 100)
WaitBGM()
SetCameraDistance(16180, 3000)
FadeToBright(1500, 0)
OP_0D()
OP_6F(0x79)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6FC3")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6F94")
OP_FC(0xFFF4)
Jump("loc_6F97")
label("loc_6F94")
OP_FC(0xC)
label("loc_6F97")
ChrTalk(
0x10A,
"#00608F#13P#30W……………………………\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_6FC3")
ChrTalk(
0x103,
"#00213F#12P#30W………ガイさん………\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00108F#12P#30W……そんな事が……\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
"#00308F#12P#30W因果な話だぜ……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00006F#12P#30W……ありがとう。\x01",
"兄貴の最期を教えてくれて。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#01404F#5P#40W……礼を言うな……\x02\x03",
"#01400Fイアン先生は……\x01",
"多分、揺らがないだろう……\x02\x03",
"そして……\x01",
"キーアの決意も固いようだ……\x02\x03",
"#01404F#50W2人を崩せるかどうか……\x01",
"お前たちの全てをぶつけてみろ……\x02",
)
)
CloseMessageWindow()
Sleep(200)
Sound(898, 0, 100, 0)
def lambda_714F():
OP_A6(0xFE, 0x0, 0x23, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x8, 2, lambda_714F)
WaitChrThread(0x8, 2)
BeginChrThread(0x8, 0, 0, 33)
WaitChrThread(0x8, 0)
Sleep(250)
PlayBGM("ed7356", 0)
OP_50(0x4C, (scpexpr(EXPR_PUSH_LONG, 0x164), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(700)
Sound(202, 0, 100, 0)
Sound(181, 0, 80, 0)
PlayEffect(0x0, 0xFF, 0xFF, 0x0, 0, 12050, 208000, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(200)
SetMapObjFrame(0xFF, "magi_04_add", 0x0, 0x1)
Sleep(2000)
OP_68(0, 13400, 208000, 0)
MoveCamera(30, 20, 0, 0)
OP_6E(700, 0)
SetCameraDistance(42000, 0)
Fade(500)
SetCameraDistance(44000, 5000)
OP_0D()
Sound(223, 0, 50, 0)
Sound(293, 0, 60, 0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -8810, 11000, 195890, 250, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -18740, 11000, 207720, 240, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -8810, 11000, 219990, 277, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 2340, 11000, 226130, 26, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 14640, 11000, 216150, 34, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 14650, 11000, 200000, 64, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(150)
OP_75(0x2, 0x1, 0x7D0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -14190, 4900, 215730, 314, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 3670, 8000, 223960, 85, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 16510, 5300, 208790, 89, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(150)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -14840, 11000, 200070, 295, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -14870, 11000, 216410, 326, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -2400, 11000, 226170, 334, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 8990, 11000, 220020, 80, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 18340, 11000, 208220, 120, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 8860, 11000, 195800, 110, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(150)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -1580, 4700, 224260, 271, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 16370, 8500, 219180, 44, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 6440, 1300, 196290, 113, -13, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 23440, -900, 210080, 119, -13, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
Sleep(100)
Sleep(100)
Sleep(100)
Sleep(100)
Sleep(100)
Sleep(100)
FadeToDark(1500, 0, -1)
OP_24(0x80)
OP_0D()
SetScenarioFlags(0x22, 0)
SetScenarioFlags(0x22, 2)
NewScene("m9008", 0, 0, 0)
IdleLoop()
Return()
# Function_31_5B49 end
def Function_32_76AE(): pass

label("Function_32_76AE")

# Idle animation loop for the character this thread is attached to (0xFE):
# steps the sub-chip (pose frame) 0x8 -> 0xC and back down at 150 ms per
# frame.  The Jc condition is the constant 1, so control always jumps back
# to the top; the loop only ends when the owning thread is killed.
# NOTE(review): opcode names are decompiler output; frame semantics assumed.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_76F6")
SetChrSubChip(0xFE, 0x8)
Sleep(150)
SetChrSubChip(0xFE, 0x9)
Sleep(150)
SetChrSubChip(0xFE, 0xA)
Sleep(150)
SetChrSubChip(0xFE, 0xB)
Sleep(150)
SetChrSubChip(0xFE, 0xC)
Sleep(150)
SetChrSubChip(0xFE, 0xB)
Sleep(150)
SetChrSubChip(0xFE, 0xA)
Sleep(150)
SetChrSubChip(0xFE, 0x9)
Sleep(150)
Jump("Function_32_76AE")

# Unreachable fall-through target of the constant-true Jc above.
label("loc_76F6")

Return()

# Function_32_76AE end
def Function_33_76F7(): pass

label("Function_33_76F7")

# One-shot animation for character 0xFE: plays sub-chips 0x1..0x7 at 100 ms
# per frame, holds on frame 0x7, then finishes with frames 0xD..0xF.
# Sounds 811 and 862 are triggered at fixed points in the sequence --
# presumably impact/foley effects synced to the motion (TODO confirm ids).
SetChrSubChip(0xFE, 0x1)
Sleep(100)
SetChrSubChip(0xFE, 0x2)
Sleep(100)
SetChrSubChip(0xFE, 0x3)
Sleep(100)
SetChrSubChip(0xFE, 0x4)
Sleep(100)
SetChrSubChip(0xFE, 0x5)
Sleep(100)
SetChrSubChip(0xFE, 0x6)
Sleep(100)
SetChrSubChip(0xFE, 0x7)
Sound(811, 0, 40, 0)
Sleep(100)
SetChrSubChip(0xFE, 0x7)
Sleep(300)
SetChrSubChip(0xFE, 0x7)
Sleep(100)
SetChrSubChip(0xFE, 0xD)
Sleep(100)
SetChrSubChip(0xFE, 0xE)
Sleep(100)
Sound(811, 0, 80, 0)
SetChrSubChip(0xFE, 0xF)
Sleep(100)
Sound(862, 0, 30, 0)
Sleep(300)
Return()

# Function_33_76F7 end
def Function_34_7761(): pass

label("Function_34_7761")

# Post-boss cutscene: the party releases the last "domain" seal, exchanges
# dialogue (speaker branches depend on party composition), then the event
# flags are committed and control returns to the player.
# NOTE(review): OP_xx opcodes are decompiler output; comments describe
# apparent intent only and should be verified against the engine docs.
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_E2(0x3)
# Call(0, 43) runs Function_43 below, which selects the "sixth member"
# index into expression register 0x2 for the dialogue branches later on.
Call(0, 43)
LoadChrToIndex("chr/ch00050.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00250.itc", 0x21)
LoadChrToIndex("chr/ch00350.itc", 0x22)
Call(0, 6)
Call(0, 7)
LoadChrToIndex("chr/ch00056.itc", 0x25)
LoadEffect(0x0, "event/ev17012.eff")
# Stage the six party members in formation around (0, 12000, ~206000).
SetChrPos(0x101, -430, 12000, 207440, 0)
SetChrPos(0x102, 470, 12000, 206000, 0)
SetChrPos(0x103, -1370, 12000, 204870, 0)
SetChrPos(0x104, 1370, 12000, 204570, 0)
SetChrPos(0xF4, -2540, 12000, 205690, 0)
SetChrPos(0xF5, 2400, 12000, 205790, 315)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
# Assign the battle-pose chips loaded above to each slot.
SetChrChipByIndex(0x101, 0x1F)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x21)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x22)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0xF4, 0x23)
SetChrSubChip(0xF4, 0x0)
SetChrChipByIndex(0xF5, 0x24)
SetChrSubChip(0xF5, 0x0)
# NPC 0x8 (Arios, per the surrounding scenes) lies defeated at the center.
SetChrChipByIndex(0x8, 0x0)
SetChrSubChip(0x8, 0xF)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8000)
SetChrFlags(0x8, 0x1000)
SetChrFlags(0x8, 0x20)
SetChrFlags(0x8, 0x2)
SetChrFlags(0x8, 0x800)
SetChrPos(0x8, 0, 12000, 211500, 180)
SetMapObjFrame(0xFF, "magi_04_add", 0x0, 0x1)
SetMapObjFlags(0x2, 0x4)
# Camera: pull in on the device while it activates.
OP_68(0, 13050, 222000, 0)
MoveCamera(16, 28, 0, 0)
OP_6E(600, 0)
SetCameraDistance(31500, 0)
SetCameraDistance(29500, 2600)
FadeToBright(1000, 0)
OP_0D()
Sleep(300)
PlayEffect(0x0, 0x0, 0xFF, 0x0, 0, 12000, 222000, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(935, 0, 80, 0)
SetMapObjFrame(0xFF, "magi10_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "magi11_add", 0x1, 0x1)
Sleep(200)
SetMapObjFrame(0xFF, "point_add", 0x1, 0x1)
Sleep(600)
StopEffect(0x0, 0x2)
OP_6F(0x79)
# Cut to the party; each member runs their "stand down" thread (35-41).
OP_68(0, 12800, 208700, 0)
MoveCamera(46, 18, 0, 0)
OP_6E(600, 0)
SetCameraDistance(18410, 0)
Fade(500)
OP_0D()
Sleep(300)
BeginChrThread(0x101, 0, 0, 36)
BeginChrThread(0x102, 0, 0, 37)
BeginChrThread(0x103, 0, 0, 38)
BeginChrThread(0x104, 0, 0, 39)
BeginChrThread(0xF4, 0, 0, 40)
BeginChrThread(0xF5, 0, 0, 41)
WaitChrThread(0x101, 0)
WaitChrThread(0x102, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0xF4, 0)
WaitChrThread(0xF5, 0)
Sleep(100)
OP_68(160, 12800, 209170, 2000)
MoveCamera(44, 18, 0, 2000)
OP_6E(600, 2000)
SetCameraDistance(17490, 2000)
BeginChrThread(0x101, 0, 0, 35)
WaitChrThread(0x101, 0)
Sleep(500)
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
Sleep(500)
OP_6F(0x79)
# Dialogue branch on register 0x2 (set by Call(0, 43)): whichever extra
# party member is present addresses Lloyd first.
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7A62")
ChrTalk(
    0x102,
    "#00108F#12Pロイド……\x02",
)
CloseMessageWindow()
Jump("loc_7B5E")

label("loc_7A62")

Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7A99")
ChrTalk(
    0x103,
    "#00208F#12P……ロイドさん……\x02",
)
CloseMessageWindow()
Jump("loc_7B5E")

label("loc_7A99")

Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7AC8")
ChrTalk(
    0x104,
    "#00308F#12Pロイド……\x02",
)
CloseMessageWindow()
Jump("loc_7B5E")

label("loc_7AC8")

Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7AFB")
ChrTalk(
    0x105,
    "#10408F#12P……ロイド……\x02",
)
CloseMessageWindow()
Jump("loc_7B5E")

label("loc_7AFB")

Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7B2E")
ChrTalk(
    0x109,
    "#10108F#12Pロイドさん……\x02",
)
CloseMessageWindow()
Jump("loc_7B5E")

label("loc_7B2E")

Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7B5E")
ChrTalk(
    0x106,
    "#10708F#12P……ロイドさん。\x02",
)
CloseMessageWindow()

label("loc_7B5E")

ChrTalk(
    0x101,
    (
        "#00004F#11P#30Wはは……\x02\x03",
        "#00008F……これでやっと……\x01",
        "兄貴に届けた気がする。\x02\x03",
        "#00002Fありがとう……\x01",
        "みんなのおかげだよ。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x104,
    (
        "#00304F#12Pはは……\x01",
        "何言ってんだっつーの。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x103,
    (
        "#00204F#12P……ロイドさんの意志が\x01",
        "アリオスさんという《壁》を\x01",
        "突き崩したんだと思います。\x02\x03",
        "#00208Fそしてガイさんの死という\x01",
        "過去の暗闇に光を当てた……\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0x102,
    (
        "#00104F#12Pええ……私たちは\x01",
        "その手伝いをしただけだわ。\x02\x03",
        "#00108F次ばかりは手伝いなんて\x01",
        "言っていられないけど……\x02",
    )
)
CloseMessageWindow()
# Agreement line: spoken by whichever of Rixia (0x5), Noel (0x8) or
# Wazy (0x4) is in the party -- GetPartyIndex() >= 0 means "present".
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7D50")
ChrTalk(
    0x106,
    "#10703F#12P……そうですね……\x02",
)
CloseMessageWindow()
Jump("loc_7DB7")

label("loc_7D50")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7D88")
ChrTalk(
    0x109,
    "#10106F#12P……そうですね……\x02",
)
CloseMessageWindow()
Jump("loc_7DB7")

label("loc_7D88")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7DB7")
ChrTalk(
    0x105,
    "#10406F#12P……そうだね。\x02",
)
CloseMessageWindow()

label("loc_7DB7")

# "Remaining opponents" line, again varying with party composition.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7E0B")
ChrTalk(
    0x104,
    (
        "#00308F#12Pベルお嬢さんにイアン先生、\x01",
        "それにキー坊か……\x02",
    )
)
CloseMessageWindow()
OP_5A()
Jump("loc_7EB8")

label("loc_7E0B")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7E5E")
ChrTalk(
    0x105,
    (
        "#10408F#12Pマリアベル嬢に熊ヒゲ先生、\x01",
        "それにキーアか……\x02",
    )
)
CloseMessageWindow()
Jump("loc_7EB8")

label("loc_7E5E")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7EB8")
ChrTalk(
    0x109,
    (
        "#10108F#12Pマリアベルさんにイアン先生、\x01",
        "それにキーアちゃんですか……\x02",
    )
)
CloseMessageWindow()

label("loc_7EB8")

ChrTalk(
    0x101,
    "#00006F#11P#30W……ああ……\x02",
)
CloseMessageWindow()
# Lloyd turns to face the party; slot 0xF4 turns toward him in parallel.
OP_68(350, 12800, 208640, 1000)
MoveCamera(37, 17, 0, 1000)

def lambda_7EFA():
    OP_93(0xFE, 0xB4, 0x190)
    ExitThread()

QueueWorkItem(0x101, 2, lambda_7EFA)
Sleep(300)

def lambda_7F0A():
    TurnDirection(0xFE, 0x101, 500)
    ExitThread()

QueueWorkItem(0xF4, 2, lambda_7F0A)
OP_6F(0x79)
ChrTalk(
    0x101,
    (
        "#00003F#5P最後の《領域》も解放した。\x02\x03",
        "#00000Fとりあえず……\x01",
        "《神域》の終点に戻ろう。\x02",
    )
)
CloseMessageWindow()
SetCameraDistance(17740, 1000)
FadeToDark(1000, 0, -1)
OP_0D()
# Cleanup: restore flags/positions, commit progress flags and quest state,
# then hand control back to the field.
ClearChrFlags(0x8, 0x8000)
SetChrPos(0x0, 0, 12000, 202500, 0)
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
OP_69(0xFF, 0x0)
OP_37()
SetScenarioFlags(0x1A9, 4)
OP_29(0xB2, 0x1, 0x8)
ModifyEventFlags(0, 0, 0x80)
OP_50(0x4C, (scpexpr(EXPR_PUSH_LONG, 0xFFFF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x164), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearScenarioFlags(0x22, 2)
OP_E2(0x2)
Sleep(500)
EventEnd(0x5)
Return()

# Function_34_7761 end
def Function_35_7FDC(): pass

label("Function_35_7FDC")

# Single movement opcode for the owning character -- appears to walk the
# character forward 0x162 units at the given speed/accel parameters
# (decompiled OP_9B; exact parameter meaning unverified).
OP_9B(0x0, 0xFE, 0x162, 0x320, 0x3E8, 0x0)
Return()

# Function_35_7FDC end
def Function_36_7FEC(): pass

label("Function_36_7FEC")

# Play sound 805 and reset the character's chip to the default (0xFF/frame 0)
# -- i.e. drop out of the battle pose back to the normal standing sprite.
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()

# Function_36_7FEC end
def Function_37_7FFB(): pass

label("Function_37_7FFB")

# Same stand-down as Function_36 but delayed 200 ms and with sound 531,
# staggering the party members' animations.
Sleep(200)
Sound(531, 0, 100, 0)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()

# Function_37_7FFB end
def Function_38_800D(): pass

label("Function_38_800D")

# Stand-down with a 300 ms stagger and sound 805.
Sleep(300)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()

# Function_38_800D end
def Function_39_801F(): pass

label("Function_39_801F")

# Silent stand-down after a 100 ms stagger (no sound for this slot).
Sleep(100)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()

# Function_39_801F end
def Function_40_802B(): pass

label("Function_40_802B")

# Stand-down for the first flexible party slot (0xF4).  The sound effect
# depends on which character occupies the slot (OP_FB tests membership):
# id 0x5 -> Sound 540; id 0x8 or 0x9 -> Sound 531; anyone else is silent.
Sleep(400)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_804B")
Sound(540, 0, 50, 0)
Jump("loc_8070")

label("loc_804B")

Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_8070")
Sound(531, 0, 100, 0)

label("loc_8070")

SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()

# Function_40_802B end
def Function_41_8079(): pass

label("Function_41_8079")

# Mirror of Function_40 for the second flexible slot (0xF5), with a 500 ms
# stagger: id 0x5 -> Sound 540; id 0x8 or 0x9 -> Sound 531; else silent.
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8099")
Sound(540, 0, 50, 0)
Jump("loc_80BE")

label("loc_8099")

Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_80BE")
Sound(531, 0, 100, 0)

label("loc_80BE")

SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()

# Function_41_8079 end
def Function_42_80C7(): pass

label("Function_42_80C7")

# Stage NPC 0x8 for a scene: assign chip 0x0 / sub-chip 0x10, make the
# character visible (clear 0x80) and set a batch of display/behavior flags.
# Note flag 0x800 is set twice in the original -- preserved as-is.
SetChrChipByIndex(0x8, 0x0)
SetChrSubChip(0x8, 0x10)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x800)
SetChrFlags(0x8, 0x8000)
SetChrFlags(0x8, 0x2)
SetChrFlags(0x8, 0x1000)
SetChrFlags(0x8, 0x800)
Return()

# Function_42_80C7 end
def Function_43_80EE(): pass

label("Function_43_80EE")

# Select which "extra member" dialogue branch to use and store the choice
# (0..5) in expression register 0x2.  First pass: scenario flags 0x1AB.1-6
# (combined with party-membership checks for the optional members); second
# pass: fall back on OP_DC(0x1) -- presumably the active support member id.
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 1)), scpexpr(EXPR_END)), "loc_8110")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8110")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 2)), scpexpr(EXPR_END)), "loc_8128")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8128")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 3)), scpexpr(EXPR_END)), "loc_8140")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8140")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 4)), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8163")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8163")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 5)), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8186")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8186")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 6)), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_81A9")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_81A9")

# Fallback chain keyed on OP_DC(0x1) instead of scenario flags.
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_81C7")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_81C7")

Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_81E5")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_81E5")

Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_8203")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8203")

Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_EQU), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_822C")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_822C")

Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8255")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_8279")

label("loc_8255")

Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_EQU), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8279")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))

label("loc_8279")

Return()

# Function_43_80EE end
# Decompiler boilerplate: serialize the assembled scenario and run the
# module's entry point under the framework's Try() wrapper.
SaveToFile()

Try(main)
| [
"[email protected]"
]
| |
1e461f00ab09e4de26a84fd68a6fd672c1864ff8 | 261ff2a577650185ff00f5d26dee3189283f28ad | /problems/er-cha-sou-suo-shu-de-hou-xu-bian-li-xu-lie-lcof/solution_2.py | 4910e5585f6a7b750a56f850a1b51e46193f26b5 | [
"MIT"
]
| permissive | YunYouJun/LeetCode | a60dd3e719a199f09f47656ba21af66bb5c02641 | 9dbd55acc82cafd7b1eb3cc81b20563f9bb1ce04 | refs/heads/master | 2023-08-25T05:58:28.444830 | 2023-07-21T17:24:45 | 2023-07-21T17:24:45 | 146,094,429 | 5 | 2 | MIT | 2021-08-28T17:49:40 | 2018-08-25T12:43:30 | Python | UTF-8 | Python | false | false | 597 | py | from typing import List
class Solution:
    """LCOF 33: validate a postorder traversal sequence of a BST."""

    def verifyPostorder(self, postorder: List[int]) -> bool:
        """Return True if *postorder* could be the postorder walk of some BST.

        Scans right-to-left (root, then right subtree, then left subtree)
        with a monotonically increasing stack.  Popping while the current
        value is smaller records the tightest ancestor whose left subtree
        we have entered; every later (leftward) value must stay below it.
        """
        ancestor_bound = float('+inf')
        increasing = []
        for value in reversed(postorder):
            # A value in a left subtree may never exceed its recorded root.
            if value > ancestor_bound:
                return False
            # Descending into a left subtree: the last popped element is
            # the subtree's root and becomes the new upper bound.
            while increasing and increasing[-1] > value:
                ancestor_bound = increasing.pop()
            increasing.append(value)
        return True
if __name__ == '__main__':
    # Ad-hoc smoke test: print the verdict for a few postorder sequences
    # (expected: False, True, True).
    test_cases = [[1, 6, 3, 2, 5], [1, 3, 2, 6, 5], [1, 3, 2, 5]]
    for case in test_cases:
        ans = Solution().verifyPostorder(case)
        print(ans)
| [
"[email protected]"
]
| |
f5e547d3d457bb61682b752e448c9ac190e7749f | 9b87a520e85566a66f729d2b4cafd00c67ea5db0 | /Builder/build_utils.py | b71a2e74f1b31500fe22c85e188b543577ea5471 | [
"Apache-2.0"
]
| permissive | barry-scott/scm-workbench | c694720acd316789821b7f8ebf32f7b941913d94 | 6ae79141ae54424d81a94a76690b2ab12df9d901 | refs/heads/master | 2023-03-15T21:13:28.394412 | 2022-07-10T11:47:30 | 2022-07-10T11:47:30 | 50,042,812 | 28 | 12 | null | null | null | null | UTF-8 | Python | false | false | 2,725 | py | #
# Needs to run under python2 or python3
#
from __future__ import print_function
import sys
import os
import subprocess
import shutil
# Module-level logger.  Left as None here; the importing build script is
# expected to assign a logger-like object before calling run(), which
# invokes log.info() unconditionally.
log = None

# Uniform name for the text type: ``unicode`` on Python 2, ``str`` on
# Python 3.  run() compares against it to decide whether *cmd* is a shell
# string or an argv sequence.
if sys.version_info[0] == 2:
    unicode_type = unicode
else:
    unicode_type = str
class BuildError(Exception):
    """Raised when a build step or an external command fails.

    Carries a single human-readable message, available via ``str(err)``.
    The former ``__init__`` merely forwarded its one argument to
    ``Exception.__init__`` and has been dropped as redundant -- the base
    class already stores the message, on Python 2 and 3 alike.
    """
    pass
# use a python3 compatible subprocess.run() function
class CompletedProcess(object):
    """Minimal stand-in for Python 3's ``subprocess.CompletedProcess``.

    Holds the exit status plus the captured output streams.  Captured
    stdout/stderr arrive as bytes and are decoded as UTF-8; a stream that
    was not captured stays ``None``.
    """

    def __init__(self, returncode, stdout=None, stderr=None):
        self.returncode = returncode
        # Decode only when the stream was actually captured.
        self.stdout = stdout if stdout is None else stdout.decode( 'utf-8' )
        self.stderr = stderr if stderr is None else stderr.decode( 'utf-8' )
class Popen(subprocess.Popen):
    """``subprocess.Popen`` that works as a context manager on Python 2.

    Python 2's Popen has no ``__enter__``/``__exit__``; this backport
    closes any captured pipes and waits for the child on exit so no
    zombie process is left behind (mirrors the Python 3 behavior).
    """
    def __init__( self, *args, **kwargs ):
        super(Popen, self).__init__( *args, **kwargs )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, value, traceback):
        # Close captured pipes first so the child sees EOF/broken pipe.
        if self.stdout:
            self.stdout.close()
        if self.stderr:
            self.stderr.close()
        # Wait for the process to terminate, to avoid zombies.
        self.wait()
def run( cmd, check=True, output=False, cwd=None ):
    """Execute *cmd*, logging it via the module-level ``log``.

    cmd    -- a text string (run through the shell) or a sequence of
              argument strings (run directly); distinguished via
              ``unicode_type``.
    check  -- raise BuildError when the exit status is non-zero.
    output -- capture stdout/stderr (decoded text on the result) instead
              of inheriting the parent's streams.
    cwd    -- working directory; defaults to the current directory.

    Returns a CompletedProcess.  Requires ``log`` to have been assigned
    by the importing script before the first call.
    """
    kwargs = {}
    if cwd is None:
        cwd = os.getcwd()

    kwargs['cwd'] = cwd

    if type(cmd) is unicode_type:
        log.info( 'Running %s in %s' % (cmd, cwd) )
        kwargs['shell'] = True

    else:
        log.info( 'Running %s in %s' % (' '.join( cmd ), cwd) )

    if output:
        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

    with Popen(cmd, **kwargs) as process:
        try:
            stdout, stderr = process.communicate( input=None )

        except: # Including KeyboardInterrupt, communicate handled that.
            process.kill()
            # We don't call process.wait() as .__exit__ does that for us.
            raise

        retcode = process.poll()

    r = CompletedProcess( retcode, stdout, stderr )
    if check and retcode != 0:
        raise BuildError( 'Cmd failed %s - %r' % (retcode, cmd) )

    return r
def rmdirAndContents( folder ):
    """Delete *folder* and everything beneath it; no-op if it is absent."""
    if not os.path.exists( folder ):
        return
    shutil.rmtree( folder )
def mkdirAndParents( folder ):
    """Create *folder* and any missing parents (mode 0o755); no-op if present."""
    if os.path.exists( folder ):
        return
    os.makedirs( folder, 0o755 )
def copyFile( src, dst, mode ):
    """Copy *src* to *dst* and set the destination's permissions to *mode*.

    If *dst* is a directory, the file keeps its basename inside it.  An
    existing destination is first made writable (0o600) and removed, so a
    read-only target cannot make the copy fail.
    """
    target = os.path.join( dst, os.path.basename( src ) ) if os.path.isdir( dst ) else dst
    if os.path.exists( target ):
        os.chmod( target, 0o600 )
        os.remove( target )
    shutil.copyfile( src, target )
    os.chmod( target, mode )
def numCpus():
    """Return the number of processors currently online (POSIX sysconf)."""
    key = os.sysconf_names['SC_NPROCESSORS_ONLN']
    return os.sysconf( key )
| [
"[email protected]"
]
| |
168e9a9c6acb9bf3b4a1fbaac845daddd794d7b0 | a46fc5187245f7ac79758ae475d4d865e24f482b | /permutation_in_string/str_permutation.py | 02a895fffa3bf8acdd261aea5803250d08ef67fd | []
| no_license | narnat/leetcode | ae31f9321ac9a087244dddd64706780ea57ded91 | 20a48021be5e5348d681e910c843e734df98b596 | refs/heads/master | 2022-12-08T00:58:12.547227 | 2020-08-26T21:04:53 | 2020-08-26T21:04:53 | 257,167,879 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | #!/usr/bin/env python3
class Solution:
def checkInclusion(self, s1: str, s2: str) -> bool:
if len(s1) > len(s2):
return False
pattern = [0] * 26
s = [0] * 26
for i in range(len(s1)):
pattern[ord(s1[i]) - ord('a')] += 1
s[ord(s2[i]) - ord('a')] += 1
window = len(s1) - 1
if s == pattern:
return True
i = 1
while i + window < len(s2):
s[ord(s2[i - 1]) - ord('a')] -= 1
s[ord(s2[i + window]) - ord('a')] += 1
if s == pattern:
return True
i += 1
return False
def checkInclusion_2(self, s1: str, s2: str) -> bool:
''' Optimized count checking'''
if len(s1) > len(s2):
return False
pattern = [0] * 26
s = [0] * 26
for i in range(len(s1)):
pattern[ord(s1[i]) - ord('a')] += 1
s[ord(s2[i]) - ord('a')] += 1
matches = 0
window = len(s1)
for i in range(len(pattern)):
if pattern[i] == s[i]:
matches += 1
for i in range(len(s2) - len(s1)):
if matches == 26:
return True
left = ord(s2[i]) - ord('a')
right = ord(s2[i + window]) - ord('a')
s[left] -= 1
if s[left] == pattern[left]:
matches += 1
elif s[left] == pattern[left] - 1:
matches -= 1
s[right] += 1
if s[right] == pattern[right]:
matches += 1
elif s[right] == pattern[right] + 1:
matches -= 1
return matches == 26
| [
"[email protected]"
]
| |
046d95f1bdeabff8b72c1d0183cafd768c0b0544 | 15581a76b36eab6062e71d4e5641cdfaf768b697 | /Topics/Binary Search/Sqrt(x).py | 1021b5b6f1d4524ae476e8448226947f4e415112 | []
| no_license | MarianDanaila/Competitive-Programming | dd61298cc02ca3556ebc3394e8d635b57f58b4d2 | 3c5a662e931a5aa1934fba74b249bce65a5d75e2 | refs/heads/master | 2023-05-25T20:03:18.468713 | 2023-05-16T21:45:08 | 2023-05-16T21:45:08 | 254,296,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | class Solution:
def mySqrt(self, x: int) -> int:
low = 0
high = x + 1
while low <= high:
mid = low + (high - low) // 2
if mid * mid == x:
return mid
elif mid * mid < x:
ans = mid
low = mid + 1
else:
high = mid - 1
return ans
| [
"[email protected]"
]
| |
425aa18798c9113ad41a1766ea7429d70cc9bebe | a7058080e41af37eb77c146fc09a5e4db57f7ec6 | /Solved/05361/05361.py | e15000503140aedadcd13f71e1a12b36e94d0d31 | []
| no_license | Jinmin-Goh/BOJ_PS | bec0922c01fbf6e440589cc684d0cd736e775066 | 09a285bd1369bd0d73f86386b343d271dc08a67d | refs/heads/master | 2022-09-24T02:24:50.823834 | 2022-09-21T02:16:22 | 2022-09-21T02:16:22 | 223,768,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | # Problem No.: 5361
# Solver: Jinmin Goh
# Date: 20220710
# URL: https://www.acmicpc.net/problem/5361
import sys
def main():
    """Read T orders of five item quantities and print each order's total cost.

    Prices per item follow the fixed BOJ 5361 price list; output is formatted
    as a dollar amount with two decimals.
    """
    unit_prices = (350.34, 230.90, 190.55, 125.30, 180.90)
    case_count = int(input())
    for _ in range(case_count):
        a, b, c, d, e = map(int, sys.stdin.readline().split())
        total = sum(qty * price for qty, price in zip((a, b, c, d, e), unit_prices))
        print(f'${total:.2f}')
if __name__ == "__main__":
main() | [
"[email protected]"
]
| |
68cbc7c18c20b28c14e110eea1c6641da8ea30fd | 2ff7e53d5e512cd762217ca54317982e07a2bb0c | /pathtools/__init__.py | d48128e5f3c99b6ca2e9208be81a75f562cf8baa | []
| no_license | nanxijw/Clara-Pretty-One-Dick | 66d3d69426642b79e8fd4cc8e0bec23adeeca6d6 | 50de3488a2140343c364efc2615cf6e67f152be0 | refs/heads/master | 2021-01-19T09:25:07.555284 | 2015-02-17T21:49:33 | 2015-02-17T21:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | #Embedded file name: pathtools\__init__.py
pass  # no package-level initialization to perform in this __init__ module
| [
"[email protected]"
]
| |
f052899444f57fe22175d48f07fc1b910f3d54f6 | 5f8487a7efb97d90ec0393b0db046b7ca908378b | /wk2/examples/albert.py | 2077581b5d51754c3eded2211afbcb38ee8bacc3 | []
| no_license | sphilmoon/karel | 444fe686c083b3a6b101141c3b16e807ef54a4ba | f6e8a1801509d49d188cf579856cec5d7033bbde | refs/heads/main | 2023-08-25T10:23:19.010270 | 2021-10-20T12:26:35 | 2021-10-20T12:26:35 | 359,027,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | C = 299792458
E = "e = m * C^2.."  # banner string printed by main() before the computed values
def main():
    """Prompt for a mass in kilograms and report its rest energy E = m * C^2."""
    kilograms = float(input("Enter kilos of mass: "))
    print(E)
    print("m =", kilograms, "kg")
    print("C =", f"{C:,d}", "m/s")
    joules = kilograms * (C ** 2)
    print(joules, "joules of energy!")
if __name__ == "__main__":
main() | [
"[email protected]"
]
| |
c038f897a86b44cea4235b3a9c3b0b1234e25ddd | 20c20938e201a0834ccf8b5f2eb5d570d407ad15 | /abc112/abc112_c/8543451.py | 9e0a5e189ca893892719e48af7fabf0ab05acb17 | []
| no_license | kouhei-k/atcoder_submissions | 8e1a1fb30c38e0d443b585a27c6d134bf1af610a | 584b4fd842ccfabb16200998fe6652f018edbfc5 | refs/heads/master | 2021-07-02T21:20:05.379886 | 2021-03-01T12:52:26 | 2021-03-01T12:52:26 | 227,364,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 618 | py | N=int(input())
# AtCoder ABC112 C style problem: the altitude at (x, y) is
# max(H - |x - CX| - |y - CY|, 0) for an unknown center (CX, CY) and height H.
# Brute-force every candidate center on the 0..100 grid and test it against
# all N observations.
xyh=[list(map(int,input().split()))for i in range(N)]
# Sort observations tallest-first so the first one examined is non-zero
# whenever any non-zero observation exists; that pins the candidate H before
# the ambiguous zero-altitude observations are processed.
xyh.sort(reverse=True,key=lambda x:x[2])
for cx in range(101):
    for cy in range(101):
        H=0        # candidate height implied so far (0 = not fixed yet)
        flag=False # set only when the LAST observation is also consistent
        for i,tmp in enumerate(xyh):
            x,y,h=tmp
            if h != 0:
                # A non-zero reading forces the height: H must equal h + distance.
                tmp= h+abs(x-cx)+abs(y-cy)
            else:
                # A zero reading only requires the pyramid to be at or below
                # ground level here, i.e. H - distance <= 0.
                # If that fails, execution falls through with tmp still being
                # the [x, y, h] list, so H==tmp below is False and the center
                # is rejected via break.
                if H-abs(x-cx)-abs(y-cy) <=0:
                    if i == N-1:
                        flag=True
                    continue
            if H!=0:
                if H==tmp:
                    if i==N-1:
                        flag=True
                    continue
                else:
                    break  # contradictory implied height: this center is wrong
            else:
                # First non-zero observation fixes the candidate height.
                # NOTE(review): if that observation is also the last one
                # (i == N-1), flag is never set -- presumably the problem
                # constraints rule this case out; confirm before reuse.
                H=tmp
        if flag:
            print(cx,cy,H)
            exit(0)
| [
"[email protected]"
]
| |
e82a60a7a6247bc2fab33bf2628b009d9ac38eaa | 141b42d9d72636c869ff2ce7a2a9f7b9b24f508b | /myvenv/Lib/site-packages/phonenumbers/data/region_GH.py | d654cc750d3bbb37816ed1112c6c777b6334e675 | [
"BSD-3-Clause"
]
| permissive | Fa67/saleor-shop | 105e1147e60396ddab6f006337436dcbf18e8fe1 | 76110349162c54c8bfcae61983bb59ba8fb0f778 | refs/heads/master | 2021-06-08T23:51:12.251457 | 2018-07-24T08:14:33 | 2018-07-24T08:14:33 | 168,561,915 | 1 | 0 | BSD-3-Clause | 2021-04-18T07:59:12 | 2019-01-31T17:00:39 | Python | UTF-8 | Python | false | false | 1,483 | py | """Auto-generated file, do not edit by hand. GH metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Phone-number metadata for Ghana (region "GH", country calling code +233)
# consumed by the phonenumbers library.  Per the module docstring this file is
# auto-generated: regenerate it upstream rather than hand-editing the patterns.
PHONE_METADATA_GH = PhoneMetadata(id='GH', country_code=233, international_prefix='00',
    general_desc=PhoneNumberDesc(national_number_pattern='[235]\\d{8}|8\\d{7}', possible_length=(8, 9), possible_length_local_only=(7,)),
    fixed_line=PhoneNumberDesc(national_number_pattern='3(?:0(?:[237]\\d|80)|[167](?:2[0-6]|7\\d|80)|2(?:2[0-5]|7\\d|80)|3(?:2[0-3]|7\\d|80)|4(?:2[013-9]|3[01]|7\\d|80)|5(?:2[0-7]|7\\d|80)|8(?:2[0-2]|7\\d|80)|9(?:[28]0|7\\d))\\d{5}', example_number='302345678', possible_length=(9,), possible_length_local_only=(7,)),
    mobile=PhoneNumberDesc(national_number_pattern='(?:2[034678]\\d|5(?:[0457]\\d|6[01]))\\d{6}', example_number='231234567', possible_length=(9,)),
    toll_free=PhoneNumberDesc(national_number_pattern='800\\d{5}', example_number='80012345', possible_length=(8,)),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='800\\d{5}', example_number='80012345', possible_length=(8,)),
    national_prefix='0',
    national_prefix_for_parsing='0',
    number_format=[NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[235]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(\\d{3})(\\d{5})', format='\\1 \\2', leading_digits_pattern=['8'], national_prefix_formatting_rule='0\\1')],
    mobile_number_portable_region=True)
| [
"[email protected]"
]
| |
e4ad0a1cd2b6f6d8ae19810a9781b6eb59e564d0 | 882c865cf0a4b94fdd117affbb5748bdf4e056d0 | /python/SWexpert/level1/2068_최대수 구하기.py | 6908a815bc4146840b9367f8be966728a03e0f3a | []
| no_license | minhee0327/Algorithm | ebae861e90069e2d9cf0680159e14c833b2f0da3 | fb0d3763b1b75d310de4c19c77014e8fb86dad0d | refs/heads/master | 2023-08-15T14:55:49.769179 | 2021-09-14T04:05:11 | 2021-09-14T04:05:11 | 331,007,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | import sys
# SW Expert Academy 2068: for each test case print "#<case> <max of the list>".
# Case numbers in SWEA output are 1-based, so enumerate from 1 -- the original
# printed the 0-based loop index, which does not match the required "#1 ..."
# output format.
for case_no in range(1, int(sys.stdin.readline()) + 1):
    numbers = list(map(int, input().split()))
    print("#{} {}".format(case_no, max(numbers)))
| [
"[email protected]"
]
| |
0a78e495b80ab9f88ed61331eaa680936024d356 | 1a6c2be5ff1a8364c97a1ede23c824b2579ecf79 | /tfx/dsl/components/base/executor_spec.py | 502c4c21c1cc3f66f1766c0916cf2b2132a0d303 | [
"Apache-2.0"
]
| permissive | 418sec/tfx | fa1a4690df2178e9c6bd24f97df0bbde7436df95 | df1529c91e52d442443eca5968ff33cf0a38dffa | refs/heads/master | 2023-04-18T12:25:38.098958 | 2021-04-28T16:11:00 | 2021-04-28T16:11:00 | 333,769,030 | 2 | 1 | Apache-2.0 | 2021-04-28T16:11:01 | 2021-01-28T13:35:14 | null | UTF-8 | Python | false | false | 6,429 | py | # Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Executor specifications for defining what to to execute."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import copy
from typing import cast, Iterable, List, Optional, Text, Type
from six import with_metaclass
from tfx import types
from tfx.dsl.components.base import base_executor
from tfx.proto.orchestration import executable_spec_pb2
from tfx.utils import import_utils
from tfx.utils import json_utils
from google.protobuf import message
class ExecutorSpec(with_metaclass(abc.ABCMeta, json_utils.Jsonable)):
  """Base specification describing how a component is executed.

  Concrete subclasses describe a particular execution mechanism, e.g. a
  Python executor class or a container image.
  """

  def encode(
      self,
      component_spec: Optional[types.ComponentSpec] = None) -> message.Message:
    """Encodes ExecutorSpec into an IR proto for compiling.

    Used by the DSL compiler to generate the corresponding IR; subclasses that
    support compilation override this, while the base class rejects encoding.

    Args:
      component_spec: Optional. The ComponentSpec to help with the encoding.

    Returns:
      An executor spec proto.
    """
    # TODO(b/158712976, b/161286496): Serialize executor specs for different
    # platforms.
    raise NotImplementedError('{}.{} does not support encoding into IR.'.format(
        self.__module__, self.__class__.__name__))

  def copy(self) -> 'ExecutorSpec':
    """Returns a copy of this ExecutorSpec.

    Subclasses may override. Deepcopy is preferred; shallow-copying a field is
    acceptable only when that field can be considered globally immutable.

    Returns:
      A copy of ExecutorSpec.
    """
    duplicate = copy.deepcopy(self)
    return cast('ExecutorSpec', duplicate)
class ExecutorClassSpec(ExecutorSpec):
  """Specifies a component executor by its Python executor class.

  Attributes:
    executor_class: a subclass of base_executor.BaseExecutor used to execute
      this component (required).
    extra_flags: extra flags to be set in the Python base executor.
  """

  def __init__(self, executor_class: Type[base_executor.BaseExecutor]):
    if not executor_class:
      raise ValueError('executor_class is required')
    self.executor_class = executor_class
    self.extra_flags = []
    super(ExecutorClassSpec, self).__init__()

  def __reduce__(self):
    # Pickling (done with "dill" when executing on the Beam DAG runner) must
    # not serialize the executor code itself; only the import path is saved,
    # and the worker re-imports the class via
    # _reconstruct_from_executor_class_path.
    #
    # See https://docs.python.org/3/library/pickle.html#object.__reduce__ for
    # more details.
    reconstructor = ExecutorClassSpec._reconstruct_from_executor_class_path
    return (reconstructor, (self.class_path,))

  @property
  def class_path(self):
    """Fully qualified class name for the executor class.

    <executor_class_module>.<executor_class_name>

    Returns:
      Fully qualified class name for the executor class.
    """
    module_name = self.executor_class.__module__
    class_name = self.executor_class.__name__
    return '{}.{}'.format(module_name, class_name)

  @staticmethod
  def _reconstruct_from_executor_class_path(executor_class_path):
    reloaded_class = import_utils.import_class_by_path(executor_class_path)
    return ExecutorClassSpec(reloaded_class)

  def encode(
      self,
      component_spec: Optional[types.ComponentSpec] = None) -> message.Message:
    spec = executable_spec_pb2.PythonClassExecutableSpec()
    spec.class_path = self.class_path
    spec.extra_flags.extend(self.extra_flags)
    return spec

  def add_extra_flags(self, extra_flags: Iterable[str]) -> None:
    self.extra_flags.extend(extra_flags)

  def copy(self) -> 'ExecutorClassSpec':
    # __reduce__ above is customized around import_class_by_path(), which
    # cannot resolve nested classes (very common in tests), so a plain
    # copy.deepcopy(self) does not work.  Instead build a blank instance and
    # deepcopy every attribute into it individually.
    klass = self.__class__
    duplicate = klass.__new__(klass)
    for name, value in self.__dict__.items():
      setattr(duplicate, name, copy.deepcopy(value))
    return duplicate
class ExecutorContainerSpec(ExecutorSpec):
  """Specifies a component executor as a container invocation.

  The spec bundles a container image together with the command-line entrypoint
  and its arguments, for example:

    spec = ExecutorContainerSpec(
        image='docker/whalesay',
        command=['cowsay'],
        args=['hello world'])

  Attributes:
    image: Container image holding the executor application.  The image is
      assumed to be release-managed and tagged/versioned separately.
    command: Container entrypoint array, not executed within a shell.  When
      omitted, the docker image's ENTRYPOINT applies.  Jinja templating is
      used to construct a user-specified command-line invocation from input
      and output metadata at runtime.
    args: Arguments to the container entrypoint.  When omitted, the docker
      image's CMD applies.  Jinja templating is used to construct a
      user-specified command-line invocation from input and output metadata
      at runtime.
  """

  def __init__(self,
               image: Text,
               command: List[Text] = None,
               args: List[Text] = None):
    # A container spec without an image is meaningless; fail fast.
    if not image:
      raise ValueError('image cannot be None or empty.')
    self.image = image
    self.command = command
    self.args = args
    super(ExecutorContainerSpec, self).__init__()
| [
"[email protected]"
]
| |
856943bf9166e753fbe9519c080fd81a56fb9ed6 | 7abc12b1742af6d2cd4aa05a417551aebbbb11fd | /tensorflow/contrib/tpu/python/tpu/tpu_context.py | 344ff9a37fc79ab1360fae8d3d2f9ec73e24f2b3 | [
"Apache-2.0"
]
| permissive | Raksh-y/tensorflow | a28517d0fa14ce0077c761046b0526a5691c2fd4 | fbb6fcde866a430e8238a80991e3f61d3836af41 | refs/heads/master | 2021-04-29T17:59:16.151921 | 2018-02-15T20:25:15 | 2018-02-15T20:25:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,003 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================
"""TPU system metdata and associated tooling."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from contextlib import contextmanager
import copy
import numpy as np
from tensorflow.contrib.tpu.python.tpu import device_assignment as tpu_device_assignment
from tensorflow.contrib.tpu.python.tpu import tpu_system_metadata as tpu_system_metadata_lib
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.platform import tf_logging as logging
_DEFAULT_JOB_NAME = 'tpu_worker'
_DEFAULT_COORDINATOR_JOB_NAME = 'coordinator'
_LOCAL_MASTERS = ('', 'local')
class _TPUContext(object):
  """A context holds immutable states of TPU computation.
  This immutable object holds TPUEstimator config, train/eval batch size, and
  `TPUEstimator.use_tpu`, which is expected to be passed around. It also
  provides utility functions, based on the current state, to determine other
  information commonly required by TPU computation, such as TPU device names,
  TPU hosts, shard batch size, etc.
  N.B. As `mode` is not immutable state in Estimator, but essential to
  distinguish between TPU training and evaluation, a common usage for
  _TPUContext with `mode` is as follows:
  ```
  with _ctx.with_mode(mode) as ctx:
    if ctx.is_running_on_cpu():
      ...
  ```
  """
  def __init__(self, config, train_batch_size, eval_batch_size,
               predict_batch_size, use_tpu):
    self._config = config
    self._train_batch_size = train_batch_size
    self._eval_batch_size = eval_batch_size
    self._predict_batch_size = predict_batch_size
    self._use_tpu = use_tpu
    # Model parallelism is enabled only when running on TPU with an explicit
    # TPUConfig.computation_shape.
    self._model_parallelism_enabled = (
        use_tpu and config.tpu_config.computation_shape)
    self._mode = None
    # Lazily populated caches; shared between copies created by with_mode().
    self._lazy_tpu_system_metadata_dict = {}  # key by master address
    self._lazy_device_assignment_dict = {}  # key by master address
    self._lazy_validation_dict = {}  # key by ModeKeys
  def _assert_mode(self):
    # Guard for APIs that only make sense inside a with_mode() scope.
    if self._mode is None:
      raise RuntimeError(
          '`mode` needs to be set via contextmanager `with_mode`.')
    return self._mode
  @contextmanager
  def with_mode(self, mode):
    # NOTE(xiejw): Shallow copy is enough. It will share the lazy dictionaries,
    # such as _lazy_tpu_system_metadata_dict between new copy and the original
    # one. Note that all lazy states stored in properties _lazy_foo are sort of
    # immutable as they should be same for the process lifetime.
    new_ctx = copy.copy(self)
    new_ctx._mode = mode  # pylint: disable=protected-access
    yield new_ctx
  @property
  def mode(self):
    # Current mode; raises unless set via with_mode().
    return self._assert_mode()
  def _get_master_address(self):
    # EVAL uses the dedicated evaluation master; all other modes use master.
    mode = self._assert_mode()
    config = self._config
    master = (
        config.master
        if mode != model_fn_lib.ModeKeys.EVAL else config.evaluation_master)
    return master
  def _get_tpu_system_metadata(self):
    """Gets the (maybe cached) TPU system metadata."""
    master = self._get_master_address()
    tpu_system_metadata = self._lazy_tpu_system_metadata_dict.get(master)
    if tpu_system_metadata is not None:
      return tpu_system_metadata
    # pylint: disable=protected-access
    tpu_system_metadata = (
        tpu_system_metadata_lib._query_tpu_system_metadata(
            master,
            run_config=self._config,
            query_topology=self.model_parallelism_enabled))
    self._lazy_tpu_system_metadata_dict[master] = tpu_system_metadata
    return tpu_system_metadata
  def _get_device_assignment(self):
    """Gets the (maybe cached) TPU device assignment."""
    master = self._get_master_address()
    device_assignment = self._lazy_device_assignment_dict.get(master)
    if device_assignment is not None:
      return device_assignment
    tpu_system_metadata = self._get_tpu_system_metadata()
    device_assignment = tpu_device_assignment.device_assignment(
        tpu_system_metadata.topology,
        computation_shape=self._config.tpu_config.computation_shape,
        num_replicas=self.num_replicas)
    logging.info('computation_shape: %s',
                 str(self._config.tpu_config.computation_shape))
    logging.info('num_replicas: %d', self.num_replicas)
    logging.info('device_assignment.topology.device_coordinates: %s',
                 str(device_assignment.topology.device_coordinates))
    logging.info('device_assignment.core_assignment: %s',
                 str(device_assignment.core_assignment))
    self._lazy_device_assignment_dict[master] = device_assignment
    return device_assignment
  @property
  def model_parallelism_enabled(self):
    # True-ish iff use_tpu and TPUConfig.computation_shape were both set.
    return self._model_parallelism_enabled
  @property
  def device_assignment(self):
    # Device assignment only exists in the model-parallelism case.
    return (self._get_device_assignment()
            if self._model_parallelism_enabled else None)
  @property
  def num_of_cores_per_host(self):
    metadata = self._get_tpu_system_metadata()
    return metadata.num_of_cores_per_host
  @property
  def num_cores(self):
    metadata = self._get_tpu_system_metadata()
    return metadata.num_cores
  @property
  def num_of_replicas_per_host(self):
    # Without model parallelism, each core hosts exactly one replica.
    if self.model_parallelism_enabled:
      return self.num_replicas // self.num_hosts
    else:
      return self.num_of_cores_per_host
  @property
  def num_replicas(self):
    # With model parallelism each replica spans product(computation_shape)
    # cores; otherwise one replica per core.
    num_cores_in_system = self.num_cores
    if self.model_parallelism_enabled:
      computation_shape_array = np.asarray(
          self._config.tpu_config.computation_shape, dtype=np.int32)
      num_cores_per_replica = np.prod(computation_shape_array)
      if num_cores_per_replica > num_cores_in_system:
        raise ValueError(
            'The num of cores required by the model parallelism, specified by '
            'TPUConfig.computation_shape, is larger than the total num of '
            'TPU cores in the system. computation_shape: {}, num cores '
            'in the system: {}'.format(
                self._config.tpu_config.computation_shape,
                num_cores_in_system))
      if num_cores_in_system % num_cores_per_replica != 0:
        raise RuntimeError(
            'The num of cores in the system ({}) is not divisible by the num '
            'of cores ({}) required by the model parallelism, specified by '
            'TPUConfig.computation_shape. This should never happen!'.format(
                num_cores_in_system, num_cores_per_replica))
      return num_cores_in_system // num_cores_per_replica
    else:
      return num_cores_in_system
  @property
  def num_hosts(self):
    metadata = self._get_tpu_system_metadata()
    return metadata.num_hosts
  @property
  def config(self):
    return self._config
  def is_input_sharded_per_core(self):
    """Return true if input_fn is invoked per-core (other than per-host)."""
    mode = self._assert_mode()
    return (mode == model_fn_lib.ModeKeys.TRAIN and
            not self._config.tpu_config.per_host_input_for_training)
  def is_running_on_cpu(self, is_export_mode=False):
    """Determines whether the input_fn and model_fn should be invoked on CPU.
    This API also validates user provided configuration, such as batch size,
    according to the lazily initialized TPU system metadata.
    Args:
      is_export_mode: Indicates whether the current mode is for exporting the
        model, when mode == PREDICT. Only with this bool, we could
        tell whether user is calling the Estimator.predict or
        Estimator.export_savedmodel, which are running on TPU and CPU
        respectively. Parent class Estimator does not distinguish these two.
    Returns:
      bool, whether current input_fn or model_fn should be running on CPU.
    Raises:
      ValueError: any configuration is invalid.
    """
    is_running_on_cpu = self._is_running_on_cpu(is_export_mode)
    if not is_running_on_cpu:
      # Only validate when work is actually going to run on the TPU.
      self._validate_tpu_configuration()
    return is_running_on_cpu
  def _is_running_on_cpu(self, is_export_mode):
    """Determines whether the input_fn and model_fn should be invoked on CPU."""
    mode = self._assert_mode()
    if not self._use_tpu:
      return True
    if mode != model_fn_lib.ModeKeys.PREDICT:
      return False
    # There are actually 2 use cases when running with mode.PREDICT: prediction
    # and saving the model. We run actual predictions on the TPU, but
    # model export is run on the CPU.
    if is_export_mode:
      return True
    return False
  @property
  def global_batch_size(self):
    # Batch size for the current mode; None for unknown modes.
    mode = self._assert_mode()
    if mode == model_fn_lib.ModeKeys.TRAIN:
      return self._train_batch_size
    elif mode == model_fn_lib.ModeKeys.EVAL:
      return self._eval_batch_size
    elif mode == model_fn_lib.ModeKeys.PREDICT:
      return self._predict_batch_size
    else:
      return None
  @property
  def batch_size_for_input_fn(self):
    """Returns the shard batch size for `input_fn`."""
    global_batch_size = self.global_batch_size
    if self.is_running_on_cpu():
      return global_batch_size
    # On TPU
    if self.is_input_sharded_per_core():
      # We prohibit per core input sharding for the model parallelism case,
      # therefore it is safe to use num_cores here.
      return global_batch_size // self.num_cores
    else:
      return global_batch_size // self.num_hosts
  @property
  def batch_size_for_model_fn(self):
    """Returns the shard batch size for `model_fn`."""
    global_batch_size = self.global_batch_size
    if self.is_running_on_cpu():
      return global_batch_size
    # On TPU. always sharded per shard.
    return global_batch_size // self.num_replicas
  @property
  def master_job(self):
    """Returns the job name to use to place TPU computations on.
    Returns:
      A string containing the job name, or None if no job should be specified.
    Raises:
      ValueError: If the user needs to specify a tpu_job_name, because we are
        unable to infer the job name automatically, or if the user-specified job
        names are inappropriate.
    """
    run_config = self._config
    # If the user specifies the tpu_job_name, use that.
    if run_config.tpu_config.tpu_job_name:
      return run_config.tpu_config.tpu_job_name
    # The tpu job is determined by the run_config. Right now, this method is
    # required as tpu_config is not part of the RunConfig.
    mode = self._assert_mode()
    master = (
        run_config.evaluation_master
        if mode == model_fn_lib.ModeKeys.EVAL else run_config.master)
    if master in _LOCAL_MASTERS:
      return None
    if (not run_config.session_config or
        not run_config.session_config.cluster_def.job):
      return _DEFAULT_JOB_NAME
    cluster_def = run_config.session_config.cluster_def
    job_names = set([job.name for job in cluster_def.job])
    if _DEFAULT_JOB_NAME in job_names:
      # b/37868888 tracks allowing ClusterSpec propagation to reuse job names.
      raise ValueError('Currently, tpu_worker is not an allowed job name.')
    if len(job_names) == 1:
      return cluster_def.job[0].name
    if len(job_names) == 2:
      if _DEFAULT_COORDINATOR_JOB_NAME in job_names:
        job_names.remove(_DEFAULT_COORDINATOR_JOB_NAME)
        return job_names.pop()
    # TODO(b/67716447): Include more sophisticated heuristics.
    raise ValueError(
        'Could not infer TPU job name. Please specify a tpu_job_name as part '
        'of your TPUConfig.')
  @property
  def tpu_host_placement_function(self):
    """Returns the TPU host place function."""
    master = self.master_job
    def _placement_function(_sentinal=None, core_id=None, host_id=None):  # pylint: disable=invalid-name
      # _sentinal forces callers to pass core_id/host_id by keyword only.
      assert _sentinal is None
      if core_id is not None and host_id is not None:
        raise RuntimeError(
            'core_id and host_id can have only one non-None value.')
      if master is None:
        return '/replica:0/task:0/device:CPU:0'
      else:
        if core_id is not None:
          # NOTE(review): `from __future__ import division` is in effect, so
          # this is true division and host_id becomes a float; the '%d'
          # formatting below truncates it. Integer division (//) is presumably
          # intended -- confirm.
          host_id = core_id / self.num_of_cores_per_host
        return '/job:%s/task:%d/device:CPU:0' % (master, host_id)
    return _placement_function
  @property
  def tpu_device_placement_function(self):
    """Returns a TPU device placement Fn."""
    master = self.master_job
    job_device = '' if master is None else ('/job:%s' % master)
    def _placement_function(i):
      if self.model_parallelism_enabled:
        return self.device_assignment.tpu_device(replica=i, job=master)
      else:
        num_of_cores_per_host = self.num_of_cores_per_host
        # NOTE(review): true division here as well (see
        # tpu_host_placement_function); '%d' truncates the float host_id.
        host_id = i / num_of_cores_per_host
        ordinal_id = i % num_of_cores_per_host
        return '%s/task:%d/device:TPU:%d' % (job_device, host_id, ordinal_id)
    return _placement_function
  @property
  def tpu_ordinal_function(self):
    """Returns the TPU ordinal fn."""
    def _tpu_ordinal_function(index):
      """Return the TPU ordinal associated with a shard.
      Required because the enqueue ops are placed on CPU.
      Args:
        index: the shard index
      Returns:
        The ordinal of the TPU device the shard's infeed should be placed on.
      """
      if self.model_parallelism_enabled:
        return self.device_assignment.tpu_ordinal(replica=index)
      else:
        return index % self.num_of_cores_per_host
    return _tpu_ordinal_function
  def _validate_tpu_configuration(self):
    """Validates the configuration based on the TPU system metadata."""
    mode = self._assert_mode()
    if self._lazy_validation_dict.get(mode):
      return
    # All following information is obtained from TPU system metadata.
    num_cores = self.num_cores
    num_replicas = self.num_replicas
    num_hosts = self.num_hosts
    if not num_cores:
      tpu_system_metadata = self._get_tpu_system_metadata()
      raise RuntimeError(
          'Cannot find any TPU cores in the system. Please double check '
          'Tensorflow master address and TPU worker(s). Available devices '
          'are {}.'.format(tpu_system_metadata.devices))
    if self._config.tpu_config.num_shards:
      user_provided_num_replicas = self._config.tpu_config.num_shards
      if user_provided_num_replicas != num_replicas:
        # NOTE(review): the local name `message` shadows the module-level
        # `google.protobuf.message` import inside this function (harmless
        # here, since the module is not referenced below).
        message = (
            'TPUConfig.num_shards is not set correctly. According to TPU '
            'system metadata for Tensorflow master ({}): num_replicas should '
            'be ({}), got ({}). For non-model-parallelism, num_replicas should '
            'be the total num of TPU cores in the system. For '
            'model-parallelism, the total number of TPU cores should be '
            'product(computation_shape) * num_replicas. Please set it '
            'accordingly or leave it as `None`'.format(
                self._get_master_address(), num_replicas,
                user_provided_num_replicas))
        if self.model_parallelism_enabled:
          raise ValueError(message)
        else:
          logging.warning(message)
          logging.warning(
              'For non-model-parallelism, TPUEstimator currently '
              'automatically queries the TPU system information so ignores '
              'this field.')
    if mode == model_fn_lib.ModeKeys.TRAIN:
      if self._train_batch_size % num_replicas != 0:
        raise ValueError(
            'train batch size {} must be divisible by number of replicas {}'
            .format(self._train_batch_size, num_replicas))
    elif mode == model_fn_lib.ModeKeys.EVAL:
      if self._eval_batch_size is None:
        raise ValueError(
            'eval_batch_size in TPUEstimator constructor cannot be `None`'
            'if .evaluate is running on TPU.')
      if self._eval_batch_size % num_replicas != 0:
        raise ValueError(
            'eval batch size {} must be divisible by number of replicas {}'
            .format(self._eval_batch_size, num_replicas))
      if num_hosts > 1:
        raise ValueError(
            'TPUEstimator.evaluate should be running on single TPU worker. '
            'got {}.'.format(num_hosts))
    else:
      assert mode == model_fn_lib.ModeKeys.PREDICT
      if self._predict_batch_size is None:
        raise ValueError(
            'predict_batch_size in TPUEstimator constructor should not be '
            '`None` if .predict is running on TPU.')
      if self._predict_batch_size % num_replicas != 0:
        raise ValueError(
            'predict batch size {} must be divisible by number of replicas {}'
            .format(self._predict_batch_size, num_replicas))
      if num_hosts > 1:
        raise ValueError(
            'TPUEstimator.predict should be running on single TPU worker. '
            'got {}.'.format(num_hosts))
    # Record the state "validated" into lazy dictionary.
    self._lazy_validation_dict[mode] = True
class _OneCoreTPUContext(_TPUContext):
  """_TPUContext variant that always reports a single-core TPU system.

  Instead of querying the TPU system, a fixed one-core/one-host metadata
  record is fabricated and cached per master address.
  """

  def __init__(self, config, train_batch_size, eval_batch_size,
               predict_batch_size, use_tpu):
    super(_OneCoreTPUContext, self).__init__(
        config, train_batch_size, eval_batch_size,
        predict_batch_size, use_tpu)

  def _get_tpu_system_metadata(self):
    """Returns cached metadata, fabricating a one-core record on first use."""
    master = self._get_master_address()
    cached = self._lazy_tpu_system_metadata_dict.get(master)
    if cached is not None:
      return cached
    one_core = tpu_system_metadata_lib._TPUSystemMetadata(  # pylint: disable=protected-access
        num_cores=1,
        num_hosts=1,
        num_of_cores_per_host=1,
        topology=None,
        devices=[])
    self._lazy_tpu_system_metadata_dict[master] = one_core
    return one_core
def _get_tpu_context(config, train_batch_size, eval_batch_size,
                     predict_batch_size, use_tpu):
  """Returns an instance of `_TPUContext`."""
  tpu_config = config.tpu_config
  # num_shards == 1 without a computation shape gets the special one-core
  # context (and a warning, as this configuration is unsupported).
  if tpu_config.num_shards == 1 and tpu_config.computation_shape is None:
    logging.warning(
        'Setting TPUConfig.num_shards==1 is an unsupported behavior. '
        'Please fix as soon as possible (leaving num_shards as None.')
    return _OneCoreTPUContext(config, train_batch_size, eval_batch_size,
                              predict_batch_size, use_tpu)
  return _TPUContext(config, train_batch_size, eval_batch_size,
                     predict_batch_size, use_tpu)
| [
"[email protected]"
]
| |
4c5707cd2ba9d17efea44451ba77c6004b84104a | ac4b9385b7ad2063ea51237fbd8d1b74baffd016 | /.history/google/drive_files_download_prepare_20210214184811.py | 86688cc510192a730867099d50d8918aaacbc36e | []
| no_license | preethanpa/ssoemprep | 76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f | ce37127845253c768d01aeae85e5d0d1ade64516 | refs/heads/main | 2023-03-09T00:15:55.130818 | 2021-02-20T06:54:58 | 2021-02-20T06:54:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,610 | py | from __future__ import print_function
import pickle
import os.path
import io
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.http import MediaIoBaseDownload
from oauth2client.service_account import ServiceAccountCredentials
from google.oauth2 import service_account
import googleapiclient.discovery
import inspect
import sys
import json
# OAuth scopes requested for the service account: Docs plus the full set of
# Drive scopes so the same credentials can both export Docs content and manage
# Drive files.  (A duplicated 'documents.readonly' entry was removed.)
SCOPES = ['https://www.googleapis.com/auth/documents',
          'https://www.googleapis.com/auth/documents.readonly',
          'https://www.googleapis.com/auth/drive',
          'https://www.googleapis.com/auth/drive.file',
          'https://www.googleapis.com/auth/drive.metadata.readonly',
          'https://www.googleapis.com/auth/drive.readonly',
          ]
# The ID of a sample document.
# DOCUMENT_ID = '1bQkFcQrWFHGlte8oTVtq_zyKGIgpFlWAS5_5fi8OzjY'
DOCUMENT_ID = '1sXQie19gQBRHODebxBZv4xUCJy-9rGpnlpM7_SUFor4'
# Service-account key file used for credential creation in get_resource().
# SERVICE_ACCOUNT_FILE = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/google/domain-wide-credentials-gdrive.json'
SERVICE_ACCOUNT_FILE = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/google/app-automation-service-account-thirdrayai-1612747564720-415d6ebd6001.json'
UPLOAD_FILE_LOCATION = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/pdf/'
# Map of Google Workspace MIME types to the short names used by this script.
doc_types = {
    "application/vnd.google-apps.document": "gdoc",
    # "application/vnd.google-apps.folder": "folder",
    "application/vnd.google-apps.spreadsheet": "gsheet",
    "application/vnd.google-apps.presentation": "gslide"
}
# First CLI argument is a JSON payload describing the files to download.
# Guard with len(sys.argv): the original tested sys.argv[1] directly, which
# raised IndexError when the script was started without arguments; now that
# case falls back to an empty list.
drive_files_list = [] if len(sys.argv) < 2 else json.loads(sys.argv[1])
# google_file_type = 'gdoc' if (sys.argv is None or sys.argv[1] is None or sys.argv[1].google_file_type is None) else sys.argv[1].google_file_type
# target_file_type = 'pdf' if (sys.argv is None or sys.argv[1] is None or sys.argv[1].target_file_type is None) else sys.argv[1].target_file_type
# location = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/drive_documents/'+drive_files_list.get('job_id')+'/pdf/'
# document_id = None if (sys.argv[1] is None or sys.argv[1].file_location is None) else sys.argv[1].document_id
document_id = ''
def get_resource(domain_wide_delegate=False, user_to_impersonate=None):
    """Prepare a Google Drive v3 service object from the service-account file.

    Args:
        domain_wide_delegate: if True and user_to_impersonate is given, load
            the credentials with a delegated subject.
        user_to_impersonate: email address to impersonate (domain-wide
            delegation only).

    Returns:
        A googleapiclient Drive service, or None if no credentials could be
        loaded.

    Bug fixes vs. the original:
    - Credentials were only loaded when domain_wide_delegate was True, so the
      script's actual call path (domain_wide_delegate=False) always returned
      None and every download failed.
    - `build` was referenced without being imported; the already-imported
      googleapiclient.discovery module is used explicitly instead.
    """
    if domain_wide_delegate and user_to_impersonate is not None:
        credentials = service_account.Credentials.from_service_account_file(
            SERVICE_ACCOUNT_FILE, scopes=SCOPES, subject=user_to_impersonate)
    else:
        credentials = service_account.Credentials.from_service_account_file(
            SERVICE_ACCOUNT_FILE, scopes=SCOPES)
    if credentials is None:
        return None
    return googleapiclient.discovery.build('drive', 'v3', credentials=credentials)
def download_drive_file(resource=None, document_id=None, google_file_type='gdoc', target_type=None, target_location=None):
    """Export a Google Drive file using the provided resource.

    If google_file_type is passed as None, then 'gdoc' / Google Doc is default.
    If target_type is passed as None, then 'application/pdf' is default.
    If target_location is None, the UPLOAD_FILE_LOCATION default is used.

    Returns a small status dict describing the written file.

    Bug fixes vs. the original:
    - A stray bare `t` statement raised NameError on every call.
    - The documented target_location default was never actually applied.
    - Typo in the error message ("addredd").
    """
    if resource is None:
        raise Exception('Invalid credentials. Provide subject email address for Drive-wide delegation')
    if target_location is None:
        target_location = UPLOAD_FILE_LOCATION
    extension, mimeType = extension_mime_type(google_file_type, target_type)
    # files().export converts the native Google document to the requested MIME type.
    content = resource.files().export(fileId=document_id, mimeType=mimeType).execute()
    out_name = target_type + '-' + document_id + extension
    with open(target_location + out_name, "wb") as file:
        file.write(content)
    return {"status": "OK", "message": out_name + " has been added to data lake."}
def extension_mime_type(google_file_ext=None, format=None):
    """Map a (Google document type, export format) pair to an extension and MIME type.

    Args:
        google_file_ext: one of 'gdoc', 'gsheet', 'gslide'.
        format: desired export format, e.g. 'pdf', 'docx', 'xlsx'.

    Returns:
        Tuple of ('.<format>', export MIME type).

    Raises:
        Exception: if the document type or the format is not supported
            (messages kept identical to the original elif chains).
    """
    # Table form of the original if/elif chains: outer key is the Google
    # document type, inner key the requested export format.
    export_mime_types = {
        'gdoc': {
            'docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
            'epub': 'application/epub+zip',
            'html': 'text/html',
            'odt': 'application/vnd.oasis.opendocument.text',
            'pdf': 'application/pdf',
            'rtf': 'application/rtf',
            'tex': 'application/zip',
            'txt': 'text/plain',
            'html.zip': 'application/zip',
        },
        'gsheet': {
            'csv': 'text/csv',
            'html.zip': 'application/zip',
            'ods': 'application/x-vnd.oasis.opendocument.spreadsheet',
            'pdf': 'application/pdf',
            'tsv': 'text/tab-separated-values',
            'xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        },
        'gslide': {
            'odp': 'application/vnd.oasis.opendocument.presentation',
            'pdf': 'application/pdf',
            'pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
            'txt': 'text/plain',
        },
    }
    formats = export_mime_types.get(google_file_ext)
    if formats is None:
        raise Exception('Unknown Google document extension "{}"'.format(google_file_ext))
    export_type = formats.get(format)
    if export_type is None:
        raise Exception('Unknown format "{}"'.format(format))
    return '.' + format, export_type
if drive_files_list == []:
    # Nothing to process: echo the empty list back to the caller as JSON.
    print(json.dumps(drive_files_list))
else:
    location = os.path.join('/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/drive_documents/',
                            drive_files_list.get('job_id') + '/pdf/')
    # exist_ok avoids a FileExistsError crash when the job directory was
    # already created by a previous run (the original os.makedirs raised).
    os.makedirs(location, exist_ok=True)
    response_message = {
        "status": "OK",
        "processed_files": []
    }
    for index, item in enumerate(drive_files_list.get('files')):
        try:
            # KeyError here means a MIME type we do not export (e.g. folders).
            google_file_type = doc_types[item.get('mimeType')]
            drive_document_id = item.get('id')
            target_file_type = "pdf"
            dl_response = download_drive_file(resource=get_resource(domain_wide_delegate=False),
                                              document_id=drive_document_id,
                                              google_file_type=google_file_type,
                                              target_type=target_file_type,
                                              target_location=location)
            response_message["processed_files"].append(dl_response)
        except KeyError:
            # Deliberate best-effort: skip non-exportable entries silently.
            pass
    print(json.dumps(response_message))
# print(download_drive_file(resource=get_resource(domain_wide_delegate=False)), google_file_type=google_file_type, target_type=target_file_type, target_location=location) | [
"{[email protected]}"
]
| |
fd12868d93d2bfa398026083bb2783875080d1af | b156c2f5ee7417dfa1f6cdcf14e9773a25397544 | /GeneVisualization/venv2/Lib/site-packages/itk/itkPyImageFilterPython.py | 5ba7ffc47ba7a921f6601bd4ee6149cc65a6e415 | []
| no_license | PinarTurkyilmaz/Vis | 1115d9426e9c8eeb5d07949241713d6f58a7721b | 4dd4426a70c0bd0a6e405ffe923afee29630aa67 | refs/heads/master | 2022-11-18T13:16:18.668065 | 2020-07-06T21:04:10 | 2020-07-06T21:04:10 | 226,217,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,843 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.8
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG shim: on Python 3, wrap extension functions with SWIG_PyInstanceMethod_New
# so they behave as instance methods; Python 2 used new.instancemethod instead.
if version_info >= (3, 0, 0):
    new_instancemethod = lambda func, inst, cls: _itkPyImageFilterPython.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
if version_info >= (2, 6, 0):
    def swig_import_helper():
        # Locate the compiled _itkPyImageFilterPython extension next to this
        # module; fall back to a regular import if imp cannot find it.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_itkPyImageFilterPython', [dirname(__file__)])
        except ImportError:
            import _itkPyImageFilterPython
            return _itkPyImageFilterPython
        if fp is not None:
            try:
                _mod = imp.load_module('_itkPyImageFilterPython', fp, pathname, description)
            finally:
                # Always close the file handle opened by imp.find_module.
                fp.close()
            return _mod
    _itkPyImageFilterPython = swig_import_helper()
    del swig_import_helper
else:
    import _itkPyImageFilterPython
del version_info
# Alias the builtin property; kept behind try/except for ancient interpreters.
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
object.__setattr__(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Dynamic variant: static=0 lets unknown names become ordinary instance
    # attributes instead of raising AttributeError.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    # static=0: unregistered names fall back to normal attribute lookup.
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# New-style-class detection: on any modern Python 'object' exists and
# _newclass is 1; the fallback dummy class is for pre-2.2 interpreters.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
def set_attr(self, name, value):
if (name == "thisown"):
return self.this.own(value)
if hasattr(self, name) or (name == "this"):
set(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import ITKCommonBasePython
import pyBasePython
import itkImageToImageFilterAPython
import itkImageSourcePython
import itkImageSourceCommonPython
import itkImagePython
import itkSizePython
import itkPointPython
import vnl_vector_refPython
import vnl_vectorPython
import vnl_matrixPython
import stdcomplexPython
import itkVectorPython
import itkFixedArrayPython
import itkIndexPython
import itkOffsetPython
import itkRGBAPixelPython
import itkCovariantVectorPython
import itkRGBPixelPython
import itkMatrixPython
import vnl_matrix_fixedPython
import itkSymmetricSecondRankTensorPython
import itkImageRegionPython
import itkVectorImagePython
import itkVariableLengthVectorPython
import itkImageToImageFilterCommonPython
# Convenience constructors mirroring <Class>.New() for each wrapped template
# instantiation (unsigned short / unsigned char images, 2-D and 3-D).
def itkPyImageFilterIUS3IUS3_New():
    return itkPyImageFilterIUS3IUS3.New()

def itkPyImageFilterIUS2IUS2_New():
    return itkPyImageFilterIUS2IUS2.New()

def itkPyImageFilterIUC3IUC3_New():
    return itkPyImageFilterIUC3IUC3.New()

def itkPyImageFilterIUC2IUC2_New():
    return itkPyImageFilterIUC2IUC2.New()
# SWIG-generated proxy for the C++ itkPyImageFilter instantiation on
# 2-D unsigned char images.  Do not edit by hand; regenerate via SWIG.
class itkPyImageFilterIUC2IUC2(itkImageToImageFilterAPython.itkImageToImageFilterIUC2IUC2):
    """Proxy of C++ itkPyImageFilterIUC2IUC2 class."""

    # 'thisown' proxies ownership of the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Instances must be created through New(); direct construction is disabled.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkPyImageFilterIUC2IUC2_Pointer":
        """__New_orig__() -> itkPyImageFilterIUC2IUC2_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkPyImageFilterIUC2IUC2_Pointer":
        """Clone(itkPyImageFilterIUC2IUC2 self) -> itkPyImageFilterIUC2IUC2_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_Clone(self)

    def SetPyGenerateData(self, obj: 'PyObject *') -> "void":
        """SetPyGenerateData(itkPyImageFilterIUC2IUC2 self, PyObject * obj)"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_SetPyGenerateData(self, obj)

    __swig_destroy__ = _itkPyImageFilterPython.delete_itkPyImageFilterIUC2IUC2

    def cast(obj: 'itkLightObject') -> "itkPyImageFilterIUC2IUC2 *":
        """cast(itkLightObject obj) -> itkPyImageFilterIUC2IUC2"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkPyImageFilterIUC2IUC2

        Create a new object of the class itkPyImageFilterIUC2IUC2 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkPyImageFilterIUC2IUC2.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkPyImageFilterIUC2IUC2.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkPyImageFilterIUC2IUC2.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)

# Bind the C-extension implementations as instance methods and register the
# proxy type with the SWIG runtime.
itkPyImageFilterIUC2IUC2.Clone = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_Clone, None, itkPyImageFilterIUC2IUC2)
itkPyImageFilterIUC2IUC2.SetPyGenerateData = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_SetPyGenerateData, None, itkPyImageFilterIUC2IUC2)
itkPyImageFilterIUC2IUC2_swigregister = _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_swigregister
itkPyImageFilterIUC2IUC2_swigregister(itkPyImageFilterIUC2IUC2)

def itkPyImageFilterIUC2IUC2___New_orig__() -> "itkPyImageFilterIUC2IUC2_Pointer":
    """itkPyImageFilterIUC2IUC2___New_orig__() -> itkPyImageFilterIUC2IUC2_Pointer"""
    return _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2___New_orig__()

def itkPyImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> "itkPyImageFilterIUC2IUC2 *":
    """itkPyImageFilterIUC2IUC2_cast(itkLightObject obj) -> itkPyImageFilterIUC2IUC2"""
    return _itkPyImageFilterPython.itkPyImageFilterIUC2IUC2_cast(obj)
# SWIG-generated proxy for the C++ itkPyImageFilter instantiation on
# 3-D unsigned char images.  Do not edit by hand; regenerate via SWIG.
class itkPyImageFilterIUC3IUC3(itkImageToImageFilterAPython.itkImageToImageFilterIUC3IUC3):
    """Proxy of C++ itkPyImageFilterIUC3IUC3 class."""

    # 'thisown' proxies ownership of the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Instances must be created through New(); direct construction is disabled.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkPyImageFilterIUC3IUC3_Pointer":
        """__New_orig__() -> itkPyImageFilterIUC3IUC3_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkPyImageFilterIUC3IUC3_Pointer":
        """Clone(itkPyImageFilterIUC3IUC3 self) -> itkPyImageFilterIUC3IUC3_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_Clone(self)

    def SetPyGenerateData(self, obj: 'PyObject *') -> "void":
        """SetPyGenerateData(itkPyImageFilterIUC3IUC3 self, PyObject * obj)"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_SetPyGenerateData(self, obj)

    __swig_destroy__ = _itkPyImageFilterPython.delete_itkPyImageFilterIUC3IUC3

    def cast(obj: 'itkLightObject') -> "itkPyImageFilterIUC3IUC3 *":
        """cast(itkLightObject obj) -> itkPyImageFilterIUC3IUC3"""
        return _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkPyImageFilterIUC3IUC3

        Create a new object of the class itkPyImageFilterIUC3IUC3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkPyImageFilterIUC3IUC3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkPyImageFilterIUC3IUC3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkPyImageFilterIUC3IUC3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)

# Bind the C-extension implementations as instance methods and register the
# proxy type with the SWIG runtime.
itkPyImageFilterIUC3IUC3.Clone = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_Clone, None, itkPyImageFilterIUC3IUC3)
itkPyImageFilterIUC3IUC3.SetPyGenerateData = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_SetPyGenerateData, None, itkPyImageFilterIUC3IUC3)
itkPyImageFilterIUC3IUC3_swigregister = _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_swigregister
itkPyImageFilterIUC3IUC3_swigregister(itkPyImageFilterIUC3IUC3)

def itkPyImageFilterIUC3IUC3___New_orig__() -> "itkPyImageFilterIUC3IUC3_Pointer":
    """itkPyImageFilterIUC3IUC3___New_orig__() -> itkPyImageFilterIUC3IUC3_Pointer"""
    return _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3___New_orig__()

def itkPyImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> "itkPyImageFilterIUC3IUC3 *":
    """itkPyImageFilterIUC3IUC3_cast(itkLightObject obj) -> itkPyImageFilterIUC3IUC3"""
    return _itkPyImageFilterPython.itkPyImageFilterIUC3IUC3_cast(obj)
# SWIG-generated proxy for the C++ itkPyImageFilter instantiation on
# 2-D unsigned short images.  Do not edit by hand; regenerate via SWIG.
class itkPyImageFilterIUS2IUS2(itkImageToImageFilterAPython.itkImageToImageFilterIUS2IUS2):
    """Proxy of C++ itkPyImageFilterIUS2IUS2 class."""

    # 'thisown' proxies ownership of the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Instances must be created through New(); direct construction is disabled.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkPyImageFilterIUS2IUS2_Pointer":
        """__New_orig__() -> itkPyImageFilterIUS2IUS2_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkPyImageFilterIUS2IUS2_Pointer":
        """Clone(itkPyImageFilterIUS2IUS2 self) -> itkPyImageFilterIUS2IUS2_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_Clone(self)

    def SetPyGenerateData(self, obj: 'PyObject *') -> "void":
        """SetPyGenerateData(itkPyImageFilterIUS2IUS2 self, PyObject * obj)"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_SetPyGenerateData(self, obj)

    __swig_destroy__ = _itkPyImageFilterPython.delete_itkPyImageFilterIUS2IUS2

    def cast(obj: 'itkLightObject') -> "itkPyImageFilterIUS2IUS2 *":
        """cast(itkLightObject obj) -> itkPyImageFilterIUS2IUS2"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkPyImageFilterIUS2IUS2

        Create a new object of the class itkPyImageFilterIUS2IUS2 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkPyImageFilterIUS2IUS2.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkPyImageFilterIUS2IUS2.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkPyImageFilterIUS2IUS2.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)

# Bind the C-extension implementations as instance methods and register the
# proxy type with the SWIG runtime.
itkPyImageFilterIUS2IUS2.Clone = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_Clone, None, itkPyImageFilterIUS2IUS2)
itkPyImageFilterIUS2IUS2.SetPyGenerateData = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_SetPyGenerateData, None, itkPyImageFilterIUS2IUS2)
itkPyImageFilterIUS2IUS2_swigregister = _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_swigregister
itkPyImageFilterIUS2IUS2_swigregister(itkPyImageFilterIUS2IUS2)

def itkPyImageFilterIUS2IUS2___New_orig__() -> "itkPyImageFilterIUS2IUS2_Pointer":
    """itkPyImageFilterIUS2IUS2___New_orig__() -> itkPyImageFilterIUS2IUS2_Pointer"""
    return _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2___New_orig__()

def itkPyImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> "itkPyImageFilterIUS2IUS2 *":
    """itkPyImageFilterIUS2IUS2_cast(itkLightObject obj) -> itkPyImageFilterIUS2IUS2"""
    return _itkPyImageFilterPython.itkPyImageFilterIUS2IUS2_cast(obj)
# SWIG-generated proxy for the C++ itkPyImageFilter instantiation on
# 3-D unsigned short images.  Do not edit by hand; regenerate via SWIG.
class itkPyImageFilterIUS3IUS3(itkImageToImageFilterAPython.itkImageToImageFilterIUS3IUS3):
    """Proxy of C++ itkPyImageFilterIUS3IUS3 class."""

    # 'thisown' proxies ownership of the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Instances must be created through New(); direct construction is disabled.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkPyImageFilterIUS3IUS3_Pointer":
        """__New_orig__() -> itkPyImageFilterIUS3IUS3_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkPyImageFilterIUS3IUS3_Pointer":
        """Clone(itkPyImageFilterIUS3IUS3 self) -> itkPyImageFilterIUS3IUS3_Pointer"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_Clone(self)

    def SetPyGenerateData(self, obj: 'PyObject *') -> "void":
        """SetPyGenerateData(itkPyImageFilterIUS3IUS3 self, PyObject * obj)"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_SetPyGenerateData(self, obj)

    __swig_destroy__ = _itkPyImageFilterPython.delete_itkPyImageFilterIUS3IUS3

    def cast(obj: 'itkLightObject') -> "itkPyImageFilterIUS3IUS3 *":
        """cast(itkLightObject obj) -> itkPyImageFilterIUS3IUS3"""
        return _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkPyImageFilterIUS3IUS3

        Create a new object of the class itkPyImageFilterIUS3IUS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkPyImageFilterIUS3IUS3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkPyImageFilterIUS3IUS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkPyImageFilterIUS3IUS3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)

# Bind the C-extension implementations as instance methods and register the
# proxy type with the SWIG runtime.
itkPyImageFilterIUS3IUS3.Clone = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_Clone, None, itkPyImageFilterIUS3IUS3)
itkPyImageFilterIUS3IUS3.SetPyGenerateData = new_instancemethod(_itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_SetPyGenerateData, None, itkPyImageFilterIUS3IUS3)
itkPyImageFilterIUS3IUS3_swigregister = _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_swigregister
itkPyImageFilterIUS3IUS3_swigregister(itkPyImageFilterIUS3IUS3)

def itkPyImageFilterIUS3IUS3___New_orig__() -> "itkPyImageFilterIUS3IUS3_Pointer":
    """itkPyImageFilterIUS3IUS3___New_orig__() -> itkPyImageFilterIUS3IUS3_Pointer"""
    return _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3___New_orig__()

def itkPyImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> "itkPyImageFilterIUS3IUS3 *":
    """itkPyImageFilterIUS3IUS3_cast(itkLightObject obj) -> itkPyImageFilterIUS3IUS3"""
    return _itkPyImageFilterPython.itkPyImageFilterIUS3IUS3_cast(obj)
def py_image_filter(*args, **kwargs):
    """Procedural interface for PyImageFilter"""
    # Deferred import: itk is only needed when the procedural form is called.
    import itk
    return itk.PyImageFilter.New(*args, **kwargs).__internal_call__()
def py_image_filter_init_docstring():
    """Copy the wrapped class docstring onto the procedural py_image_filter."""
    import itk
    import itkTemplate
    filter_obj = itk.PyImageFilter
    if isinstance(filter_obj, itkTemplate.itkTemplate):
        # Templated wrapper: borrow the docstring of the first instantiation.
        doc = filter_obj.values()[0].__doc__
    else:
        doc = filter_obj.__doc__
    py_image_filter.__doc__ = doc
| [
"[email protected]"
]
| |
7391c196678a5851d5df375ccb31973f5f4308d5 | faa390890e17219fd763bd66e66bb6753c692b14 | /jacinle/comm/__init__.py | 31090e96df5a3363aa5d28bc73146df1406c9cf6 | [
"MIT"
]
| permissive | vacancy/Jacinle | 7170b1c798e4a903186abe74d28e4a7e034ec766 | 20021790fd32ef1ad40c67fba7582c6db54235da | refs/heads/master | 2023-07-20T03:54:46.693649 | 2023-07-12T21:00:10 | 2023-07-12T21:00:10 | 117,910,172 | 135 | 275 | MIT | 2023-01-18T17:41:33 | 2018-01-18T00:35:55 | Python | UTF-8 | Python | false | false | 228 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File : __init__.py
# Author : Jiayuan Mao
# Email : [email protected]
# Date : 01/22/2018
#
# This file is part of Jacinle.
# Distributed under terms of the MIT license.
| [
"[email protected]"
]
| |
bc044afaa2aa7c550a0e4c305793d3073561ab60 | 444a9480bce2035565332d4d4654244c0b5cd47b | /official/cv/OCRNet/convert_from_torch.py | 4d18c05e48df3f7270f9673f7922b6241774d711 | [
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 4,078 | py | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""HRNet conversion from torch."""
import argparse
import pickle
import torch
from mindspore import Parameter, load_param_into_net, save_checkpoint
from src.config import config_hrnetv2_w48 as config
from src.seg_hrnet import HighResolutionNet
def parse_args():
    """Get arguments from command-line."""
    parser = argparse.ArgumentParser(
        description="Convert HRNetW48_seg weights from torch to mindspore.")
    # All three paths are optional strings defaulting to None.
    arg_specs = (
        ("--torch_path", "Path to input torch model."),
        ("--numpy_path", "Path to save/load intermediate numpy representation."),
        ("--mindspore_path", "Path to save result mindspore model."),
    )
    for flag, help_text in arg_specs:
        parser.add_argument(flag, type=str, default=None, help=help_text)
    return parser.parse_args()
def torch2numpy(input_torch, out_numpy=None):
    """
    Convert torch model to numpy
    Args:
        input_torch: path to .pth model
        out_numpy: path to save .npy model (if None will not save)
    Returns:
        dict of numpy weights
    """
    # map_location='cpu' lets checkpoints saved on a GPU machine load on
    # CPU-only hosts (the original torch.load failed without CUDA).
    weights = torch.load(input_torch, map_location='cpu')
    weights_numpy = {k: v.detach().cpu().numpy() for k, v in weights.items()}
    if out_numpy:
        with open(out_numpy, 'wb') as fp:
            pickle.dump(weights_numpy, fp)
    return weights_numpy
def numpy2mindspore(input_numpy, out_mindspore):
    """
    Convert numpy model weights to mindspore
    Args:
        input_numpy: path to .npy weights or dict of numpy arrays
        out_mindspore: path to output mindspore model
    """
    # Accept either a pickle file path or an in-memory dict of arrays.
    if isinstance(input_numpy, str):
        with open(input_numpy, 'rb') as fp:
            numpy_weights = pickle.load(fp)
    else:
        numpy_weights = input_numpy

    network = HighResolutionNet(config.model, 19)
    expected_params = network.parameters_dict()

    def translate(key):
        # BatchNorm tensors can be recognised by a '.running_mean' sibling
        # sharing the same prefix; those use MindSpore's gamma/beta naming.
        renamed = key
        if key.rsplit('.', 1)[0] + '.running_mean' in numpy_weights:
            renamed = renamed.replace('weight', 'gamma').replace('bias', 'beta')
        return renamed.replace('running_mean', 'moving_mean').replace('running_var', 'moving_variance')

    converted = {}
    miss_in_ms = set()
    for key in numpy_weights:
        # num_batches_tracked has no MindSpore counterpart.
        if key.endswith('.num_batches_tracked'):
            continue
        new_key = translate(key)
        if new_key in expected_params:
            converted[new_key] = Parameter(numpy_weights[key], name=new_key)
        else:
            miss_in_ms.add(key)

    print('Missed in mindspore:\n', miss_in_ms)
    print('Missed from mindspore:\n', set(expected_params.keys()) - set(converted.keys()))
    load_param_into_net(network, converted)
    save_checkpoint(network, out_mindspore)
def convert():
    """ Full convert pipeline """
    args = parse_args()
    # Input is either a torch checkpoint or a previously saved numpy dump.
    if not (args.torch_path or args.numpy_path):
        raise ValueError('torch_path or numpy_path must be defined as input')
    # Output is either a mindspore checkpoint or the numpy dump itself.
    if not (args.mindspore_path or args.numpy_path):
        raise ValueError('mindspore_path or numpy_path must be defined as output')

    numpy_weights = None
    if args.torch_path:
        numpy_weights = torch2numpy(input_torch=args.torch_path, out_numpy=args.numpy_path)
        print('Converted to numpy!')
    if args.mindspore_path:
        # Prefer the freshly converted in-memory weights over re-reading disk.
        source = numpy_weights if numpy_weights else args.numpy_path
        numpy2mindspore(input_numpy=source, out_mindspore=args.mindspore_path)
        print('Converted to mindspore!')
convert()
| [
"[email protected]"
]
| |
8374d54bebd6681df61f6a480be5b6a013cc4aaa | 1b34447fff2b0c08d5b43257b441b82f3faa263a | /bloogle-bot/blooglebot/spiders/theverge_spider.py | 050967b104b0a3840bb4caf49601b88b3c68bd97 | []
| no_license | Folch/bloogle | ded7a4986139e50ffc1b372e1c6a348b9524f58c | fd573c5948fd14945411e75c22f71ce45e9747b9 | refs/heads/master | 2020-05-03T16:55:11.349542 | 2019-03-31T19:42:43 | 2019-03-31T19:42:43 | 178,734,386 | 1 | 0 | null | 2019-03-31T19:45:28 | 2019-03-31T19:45:28 | null | UTF-8 | Python | false | false | 180 | py | from .simple_base_spider import SimpleBaseSpider
class TheVergeSpider(SimpleBaseSpider):
    """Crawler for The Verge, built on the shared SimpleBaseSpider logic."""

    # Spider identifier; presumably used by the crawling framework to select
    # this spider by name — confirm against SimpleBaseSpider's usage.
    name = 'theverge'

    def get_domain(self):
        # Base URL the parent spider starts crawling from.
        return 'https://www.theverge.com'
"[email protected]"
]
| |
269b95c4e3715ced6971c77957a30e620a5f8b6b | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_47913.py | 110c6944e50cfa7e456ea3a324c71dcbb4357bfe | []
| no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,838 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((480.133, 574.617, 353.855), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((471.941, 506.375, 366.128), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((460.179, 426.327, 371.678), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((463.5, 488.736, 247.655), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((445.537, 242.226, 430.618), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((466.957, 531.167, 363.335), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((466.775, 532.627, 363.235), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((440.776, 522.815, 367.059), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((417.38, 511.573, 376.693), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((391.338, 512.737, 387.109), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((375.819, 530.843, 372.422), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((377.201, 556.273, 360.545), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((483.905, 553.219, 369.612), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((271.035, 559.086, 359.439), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((314.991, 374.058, 427.888), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((314.991, 374.058, 427.888), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((335.008, 394.82, 423.374), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((356.31, 414.199, 419.015), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((378.07, 432.899, 414.409), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((399.46, 450.616, 407.516), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((422.608, 464.135, 398.878), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((433.657, 489.378, 403.593), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((189.494, 422.769, 348.469), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((682.681, 550.891, 440.684), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((457.67, 451.265, 412.51), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((457.67, 451.265, 412.51), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((478.502, 444.731, 392.83), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((490.376, 438.288, 366.685), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((475.783, 433.432, 341.959), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((497.788, 549.921, 303.974), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((447.684, 314.825, 371.61), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((481.892, 514.219, 340.56), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((481.92, 514.226, 340.504), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((501.039, 522.457, 360.081), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((497.568, 522.893, 388.481), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((471.936, 532.357, 396.801), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((445.535, 543.918, 395.014), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((417.417, 550.694, 391.886), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((389.12, 557.708, 391.324), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((453.421, 527.079, 436.448), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((320.645, 588.786, 344.08), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((491.899, 503.154, 427.54), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((490.639, 492.523, 403.415), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((485.336, 468.399, 351.493), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((479.585, 444.441, 299.381), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((532.383, 502.052, 278.523), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((432.292, 368.905, 244.95), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((523.853, 509.451, 408.886), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((543.261, 494.389, 423.425), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((557.069, 470.323, 428.028), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((546.398, 448.215, 414.149), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((531.16, 430.742, 397.692), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((515.513, 413.859, 380.938), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((497.001, 492.839, 378.454), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((531.46, 333.91, 382.133), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
]
| |
2f058e998fff357b00f55662b8586e74272472e7 | 2fb8fa04d520a0b6fde6f737467e4ff5f8b1c8d2 | /add.py | bedba07af9cfb067b2441297e4063ea8f7741216 | []
| no_license | Dhiraj4016/batch89 | 3bd11ae67d39cc43cd816d3c1ccca425833b2e3d | 8cdc01853c00eaf009b20024b9e25ddcc29b7bda | refs/heads/master | 2020-06-07T12:57:11.581541 | 2019-06-21T03:56:06 | 2019-06-21T03:56:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 312 | py | '''a=int(input("enter Any number"))
b=int (input("enter Any number"))
c = a + b
print("sum=",c)'''
name = input("enter your name")
print(name)
data= input("Are you a student if yes press y else n")
print(data[0])
print(data)
data1= input("Are you a student if yes press y else n")[0]
print(data1)
print(data1)
| [
"[email protected]"
]
| |
68fd786f367e1dd95ac1d5c0cf2a28069117e1e6 | 276dd5dd778adefd039e6f6a71dc574386729401 | /demo2/webapp/app.py | dbe50b9132d718d8d8a8d8ec04892046463991de | [
"MIT"
]
| permissive | amitsaha/python-grpc-demo | 4880e64b4b993df4b7eb96f2946b6607fb2dfa82 | 48546bfda83062a3fcb015d352fecb46346e8c92 | refs/heads/master | 2023-01-12T10:01:36.396783 | 2022-10-08T05:10:39 | 2022-10-08T05:10:39 | 101,063,881 | 145 | 52 | MIT | 2022-12-27T17:26:21 | 2017-08-22T13:07:17 | Python | UTF-8 | Python | false | false | 757 | py | from flask import Flask, Response
import sys
from google.protobuf.json_format import MessageToJson
from client_wrapper import ServiceClient
import users_pb2_grpc as users_service
import users_types_pb2 as users_messages
app = Flask(__name__)
# Shared gRPC stub for the Users service, reachable at host "users", port 50051.
app.config['users'] = ServiceClient(users_service, 'UsersStub', 'users', 50051)
@app.route('/users/')
def users_get():
    """Stream the users returned by the gRPC Users service as JSON chunks."""
    demo_users = [
        users_messages.User(username="alexa", user_id=1),
        users_messages.User(username="christie", user_id=1),
    ]
    request = users_messages.GetUsersRequest(user=demo_users)

    def stream_users():
        # Lazily iterate the server-streaming RPC, serialising each message.
        for message in app.config['users'].GetUsers(request):
            yield MessageToJson(message)

    return Response(stream_users(), content_type='application/json')
"[email protected]"
]
| |
e6b276124f6f834603cf0301a93b00fed97d9445 | 275b16af98827504d4de75c5d45afa09d0a84b8c | /tests/messages/server/test_result_message_parser.py | fb0346ed890136b496c853af3c2b95768ec9b05d | [
"Apache-2.0"
]
| permissive | foxdog-studios/pyddp | e63bec12fffc5f87e9a44b0bf9de7bedae71d517 | a4ac0bd5d8a2f350e012fd65d79e0034a89d8e67 | refs/heads/dev | 2021-01-02T08:45:41.081693 | 2015-05-31T15:10:29 | 2015-05-31T15:10:29 | 13,887,437 | 10 | 5 | Apache-2.0 | 2018-03-05T17:42:18 | 2013-10-26T17:12:39 | Python | UTF-8 | Python | false | false | 1,968 | py | # -*- coding: utf-8 -*-
# Copyright 2014 Foxdog Studios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from ddp.messages.server.result_message import ResultMessage
from ddp.messages.server.result_message_parser import ResultMessageParser
class ResultMessageParserTestCase(unittest.TestCase):
    """Unit tests for ResultMessageParser.parse."""

    def setUp(self):
        self.parser = ResultMessageParser()
        self.id = 'id'
        self.error = 'error'
        self.result = {'result': [True, 1.0]}

    def _parse(self, **fields):
        # Build a minimal 'result' payload and merge in the extra fields.
        payload = dict({'msg': 'result', 'id': self.id}, **fields)
        return self.parser.parse(payload)

    def test_with_error(self):
        self.assertEqual(self._parse(error=self.error),
                         ResultMessage(self.id, error=self.error))

    def test_with_result(self):
        self.assertEqual(self._parse(result=self.result),
                         ResultMessage(self.id, result=self.result))

    def test_with_error_with_result(self):
        # Supplying both fields is ambiguous and must be rejected.
        with self.assertRaises(ValueError):
            self._parse(error=self.error, result=self.result)

    def test_without_error_without_reuslt(self):
        self.assertEqual(self._parse(), ResultMessage(self.id))
| [
"[email protected]"
]
| |
6324c47b3a02c8328fa9a73edda997ad76230713 | 518bf342bc4138982af3e2724e75f1d9ca3ba56c | /solutions/2370. Longest Ideal Subsequence/2370.py | 1b387f7c2ce92430d9791134800f76c0ce754e38 | [
"MIT"
]
| permissive | walkccc/LeetCode | dae85af7cc689882a84ee5011f0a13a19ad97f18 | a27be41c174565d365cbfe785f0633f634a01b2a | refs/heads/main | 2023-08-28T01:32:43.384999 | 2023-08-20T19:00:45 | 2023-08-20T19:00:45 | 172,231,974 | 692 | 302 | MIT | 2023-08-13T14:48:42 | 2019-02-23T15:46:23 | C++ | UTF-8 | Python | false | false | 506 | py | class Solution:
def longestIdealString(self, s: str, k: int) -> int:
# dp[i] := longest subseq that ends at ('a' + i)
dp = [0] * 26
for c in s:
i = ord(c) - ord('a')
dp[i] = 1 + self._getMaxReachable(dp, i, k)
return max(dp)
def _getMaxReachable(self, dp: List[int], i: int, k: int) -> int:
first = max(0, i - k)
last = min(25, i + k)
maxReachable = 0
for j in range(first, last + 1):
maxReachable = max(maxReachable, dp[j])
return maxReachable
| [
"[email protected]"
]
| |
9a4edd570cf19c80e4d21640897a0de5b4bfc863 | d214b72b3ae340d288c683afe356de6846a9b09d | /括号类/使括号有效的最少添加_921.py | d19e8d350a25ab41893dfe2a1a6db26563fa58bb | []
| no_license | Xiaoctw/LeetCode1_python | 540af6402e82b3221dad8648bbdcce44954a9832 | b2228230c90d7c91b0a40399fa631520c290b61d | refs/heads/master | 2021-08-29T15:02:37.786181 | 2021-08-22T11:12:07 | 2021-08-22T11:12:07 | 168,444,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | from typing import *
class Solution:
    def minAddToMakeValid(self, S: str) -> int:
        """Minimum number of parentheses to insert so that S becomes valid.

        Counters replace the original stack: since the stack only ever held
        '(' characters, its length is all that matters.
        """
        open_count = 0   # '(' still waiting for a matching ')'
        insertions = 0   # ')' seen with no '(' available -> one '(' must be inserted
        for ch in S:
            if ch == '(':
                open_count += 1
            elif open_count:
                open_count -= 1
            else:
                insertions += 1
        # Every unmatched '(' also needs one inserted ')'.
        return insertions + open_count
# Ad-hoc manual check; the sample below needs 4 insertions.
if __name__ == '__main__':
    sol=Solution()
    s='()))(('
print(sol.minAddToMakeValid(s)) | [
"[email protected]"
]
| |
e19f03b66bd26f3d7fc8dd5e3735dd2b75cd2ef8 | 702f22704e5485aff356b4d1b6f1dea51bd95fa4 | /grd_web_edit/tools/handler/modify_handler.py | 6dab3c01e736e30f972f6e3ed148329bbdf56ef1 | []
| no_license | 510908220/chromium_grd_tool | 9b20986e6c5a825eeff4447a9ec91416b3188087 | 4b649c74f49d8515a5bf362828b48cae2f8ce6d0 | refs/heads/master | 2021-03-12T23:56:51.201622 | 2015-08-21T06:06:45 | 2015-08-21T06:06:45 | 41,099,166 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,320 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#encoding:utf-8
__author__ = 'Administrator'
import xml_node
import conf
import util
import history_handler
import innerdb
def get_message_info(message_name):
    """Look up a message node in the grd file.

    Returns whatever Grd.get_message_node_string yields for the name — per
    the callers in this module, a (message_string, message_text) pair, or
    None when no such node exists.
    """
    grd_doc = xml_node.Grd(conf.grd_path)
    return grd_doc.get_message_node_string(message_name)
def get_translation_text(id_):
    """Return the translation text for *id_* from the xtb file, or None."""
    node = xml_node.XTB(conf.xtb_path).get_translation_node(id_)
    return node.text if node is not None else None
def search(message_name):
    """Look up a grd message and its translation.

    Returns {} when the message node is missing, otherwise a dict with
    "message_str" plus, when a translation exists, "translation_text"
    (utf-8 encoded).

    Fix: removed leftover debug `print` statements (which also used
    Python-2-only print syntax).
    """
    message_info = get_message_info(message_name)
    if message_info is None:
        return {}
    # The translation id is derived from the stripped message text.
    id_ = util.generate_message_id(message_info[1].strip())
    message_str = message_info[0]
    translation_text = get_translation_text(id_)
    result = {"message_str": message_str}
    if translation_text is not None:
        result["translation_text"] = translation_text.encode("utf-8")
    return result
def delete(message_name):
    """Remove a message node from the grd file and its translation from the xtb.

    Both files are saved only when both in-memory removals succeed.
    Returns {"ret": 'true'/'false', "info": error detail}.
    """
    message_info = get_message_info(message_name)
    result = {"ret": 'false', "info": "no such message node"}
    if message_info is None:
        return result
    # NOTE(review): search() strips the text before hashing but delete() does
    # not — confirm which form generate_message_id expects.
    id_ = util.generate_message_id(message_info[1])
    grd = xml_node.Grd(conf.grd_path)
    removed_from_grd = grd.remove(message_name)
    xtb = xml_node.XTB(conf.xtb_path)
    removed_from_xtb = xtb.delete(id_)
    if removed_from_grd and removed_from_xtb:
        # Persist only after both deletions succeeded in memory.
        grd.save()
        xtb.save()
        result["ret"] = 'true'
        result["info"] = ""
        history_handler.delete_history(message_name)
    else:
        result["info"] = grd.error_info + "/" + xtb.error_info
    return result
def update(message_name, translation_desc):
    """Replace the translation text for *message_name* in the xtb file.

    On success the xtb file is saved and the change is recorded in the
    inner db.  Returns {"ret": 'true'/'false', "info": error detail}.

    Fix: corrected the "update faile" typo in the default error message.
    """
    message_info = get_message_info(message_name)
    ret_info = {"ret": 'false', "info": "update failed"}
    if message_info is None:
        return ret_info
    id_ = util.generate_message_id(message_info[1])
    xtb = xml_node.XTB(conf.xtb_path)
    if xtb.update(id_, translation_desc):
        xtb.save()
        ret_info["ret"] = "true"
        ret_info["info"] = ""
        # Record the latest translation so it can be shown later.
        with innerdb.InnerDb() as db:
            db.write(str(message_name), {"id": id_, "translation_desc": translation_desc})
    else:
        ret_info["info"] = xtb.error_info
    return ret_info
| [
"[email protected]"
]
| |
58b5c70bbc6fd1570d661922bbc81850e967a9d4 | e2f400d159ca0abb82e35af7eeedc4eebc3333e7 | /desktop/core/src/desktop/lib/rest/http_client.py | e0b90f9979300b77e4e731021e47046aecbc5742 | [
"Apache-2.0"
]
| permissive | optimistdk/hue | a40ad47b33fc395fadc02840c5f707d8890e5bdc | 417d7e18b87faae1157c60f949da61cd3075ed98 | refs/heads/master | 2021-01-17T21:43:52.399457 | 2012-07-30T17:17:07 | 2012-07-30T17:17:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,954 | py | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cookielib
import logging
import posixpath
import types
import urllib
import urllib2
from urllib2_kerberos import HTTPKerberosAuthHandler
__docformat__ = "epytext"
LOG = logging.getLogger(__name__)
class RestException(Exception):
  """
  Wraps any error result coming back from the Rest API.
  For urllib2.HTTPError-like errors the HTTP status code and response body
  are captured; for anything else only str(error) is kept.
  """
  def __init__(self, error):
    Exception.__init__(self, error)
    self._error = error
    self._code = None
    self._message = str(error)
    try:
      # urllib2.HTTPError exposes .code and a readable body.
      self._code = error.code
      self._message = error.read()
    except AttributeError:
      pass  # not an HTTPError; keep the plain string form

  def __str__(self):
    text = self._message or ""
    return text if self._code is None else text + " (error %s)" % (self._code,)

  def get_parent_ex(self):
    """Return the wrapped exception, or None if it was not an Exception."""
    return self._error if isinstance(self._error, Exception) else None

  @property
  def code(self):
    """HTTP status code, or None for non-HTTP errors."""
    return self._code

  @property
  def message(self):
    """Response body for HTTP errors, else str(error)."""
    return self._message
class HttpClient(object):
  """
  Basic HTTP client tailored for rest APIs.
  Keeps a urllib2 opener with a cookie jar, so server-set cookies persist
  across requests made through the same client instance.  (Python 2 only:
  uses urllib2/cookielib.)
  """
  def __init__(self, base_url, exc_class=None, logger=None):
    """
    @param base_url: The base url to the API.
    @param exc_class: An exception class to handle non-200 results.
                      Defaults to RestException.
    @param logger: Logger to use; defaults to this module's LOG.
    Creates an HTTP(S) client to connect to the Cloudera Manager API.
    """
    self._base_url = base_url.rstrip('/')
    self._exc_class = exc_class or RestException
    self._logger = logger or LOG
    self._headers = { }
    # Make a cookie processor
    cookiejar = cookielib.CookieJar()
    # HTTPErrorProcessor (defined below) treats every 2xx status as success.
    self._opener = urllib2.build_opener(
        HTTPErrorProcessor(),
        urllib2.HTTPCookieProcessor(cookiejar))
  def set_basic_auth(self, username, password, realm):
    """
    Set up basic auth for the client
    @param username: Login name.
    @param password: Login password.
    @param realm: The authentication realm.
    @return: The current object
    """
    # Make a basic auth handler that does nothing. Set credentials later.
    passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passmgr.add_password(realm, self._base_url, username, password)
    authhandler = urllib2.HTTPBasicAuthHandler(passmgr)
    self._opener.add_handler(authhandler)
    return self
  def set_kerberos_auth(self):
    """Set up kerberos auth for the client, based on the current ticket."""
    authhandler = HTTPKerberosAuthHandler()
    self._opener.add_handler(authhandler)
    return self
  def set_headers(self, headers):
    """
    Add headers to the request
    @param headers: A dictionary with the key value pairs for the headers
    @return: The current object
    """
    # NOTE: replaces (does not merge with) any previously set default headers.
    self._headers = headers
    return self
  @property
  def base_url(self):
    # Base URL with any trailing '/' removed.
    return self._base_url
  @property
  def logger(self):
    return self._logger
  def _get_headers(self, headers):
    # Per-request headers override the client-wide defaults.
    res = self._headers.copy()
    if headers:
      res.update(headers)
    return res
  def execute(self, http_method, path, params=None, data=None, headers=None):
    """
    Submit an HTTP request.
    @param http_method: GET, POST, PUT, DELETE
    @param path: The path of the resource. Unsafe characters will be quoted.
    @param params: Key-value parameter data.
    @param data: The data to attach to the body of the request.
    @param headers: The headers to set for this request.
    @return: The result of urllib2.urlopen()
    Raises self._exc_class wrapping any urllib2.HTTPError/URLError.
    """
    # Prepare URL and params
    path = urllib.quote(smart_str(path))
    url = self._make_url(path, params)
    if http_method in ("GET", "DELETE"):
      # Bodies are silently dropped for methods that do not carry one.
      if data is not None:
        self.logger.warn(
            "GET method does not pass any data. Path '%s'" % (path,))
        data = None
    # Setup the request
    request = urllib2.Request(url, data)
    # Hack/workaround because urllib2 only does GET and POST
    request.get_method = lambda: http_method
    headers = self._get_headers(headers)
    for k, v in headers.items():
      request.add_header(k, v)
    # Call it
    self.logger.debug("%s %s" % (http_method, url))
    try:
      return self._opener.open(request)
    except (urllib2.HTTPError, urllib2.URLError), ex:
      # Wrap transport errors in the configured exception type.
      raise self._exc_class(ex)
  def _make_url(self, path, params):
    # Join base + normalized path, append the urlencoded query string, then
    # make the whole thing URI-safe via iri_to_uri.
    res = self._base_url
    if path:
      res += posixpath.normpath('/' + path.lstrip('/'))
    if params:
      param_str = urllib.urlencode(params)
      res += '?' + param_str
    return iri_to_uri(res)
class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
  """
  Python 2.4 only recognize 200 and 206 as success. It's broken. So we install
  the following processor to catch the bug.
  """
  def http_response(self, request, response):
    # Treat every 2xx status as success; defer to the stock processor
    # (which raises HTTPError) for anything else.
    if 200 <= response.code < 300:
      return response
    return urllib2.HTTPErrorProcessor.http_response(self, request, response)
  # HTTPS responses get the identical treatment.
  https_response = http_response
#
# Method copied from Django
#
def iri_to_uri(iri):
  """
  Convert an Internationalized Resource Identifier (IRI) portion to a URI
  portion that is suitable for inclusion in a URL.
  This is the algorithm from section 3.1 of RFC 3987. However, since we are
  assuming input is either UTF-8 or unicode already, we can simplify things a
  little from the full method.
  Returns an ASCII string containing the encoded result.
  """
  # The list of safe characters here is constructed from the "reserved" and
  # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
  # reserved = gen-delims / sub-delims
  # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
  # sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
  # / "*" / "+" / "," / ";" / "="
  # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
  # Of the unreserved characters, urllib.quote already considers all but
  # the ~ safe.
  # The % character is also added to the list of safe characters here, as the
  # end of section 3.1 of RFC 3987 specifically mentions that % must not be
  # converted.
  if iri is None:
    return iri
  # Byte-encode first (smart_str), then percent-quote.  Python 2 only:
  # urllib.quote moved to urllib.parse.quote in Python 3.
  return urllib.quote(smart_str(iri), safe="/#%[]=:;$&()+,!?*@'~")
#
# Method copied from Django
#
def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
  """
  Returns a bytestring version of 's', encoded as specified in 'encoding'.
  If strings_only is True, don't convert (some) non-string-like objects.
  Python 2 only: relies on basestring/unicode and types.NoneType.
  """
  if strings_only and isinstance(s, (types.NoneType, int)):
    return s
  elif not isinstance(s, basestring):
    try:
      return str(s)
    except UnicodeEncodeError:
      if isinstance(s, Exception):
        # An Exception subclass containing non-ASCII data that doesn't
        # know how to print itself properly. We shouldn't raise a
        # further exception.
        # NOTE(review): iterating an Exception (`for arg in s`) relies on
        # Python 2 exception iterability over its args.
        return ' '.join([smart_str(arg, encoding, strings_only,
            errors) for arg in s])
      return unicode(s).encode(encoding, errors)
  elif isinstance(s, unicode):
    return s.encode(encoding, errors)
  elif s and encoding != 'utf-8':
    # Non-empty byte string in a different target encoding: transcode via
    # UTF-8 (input bytes are assumed UTF-8, per the module docstring above).
    return s.decode('utf-8', errors).encode(encoding, errors)
  else:
    return s
| [
"[email protected]"
]
| |
ec8fab1581b91290cbc80f32f5189bf5668fbae2 | 2b7efe276d1dfdc70a4b5cd59ae863b7b7a1bd58 | /euler121.py | 596e9fa74a1af624297a392aad4c6bdc67a2e48c | []
| no_license | mckkcm001/euler | 550bbd126e8d9bb5bc7cb854147399060f865cfc | 8cf1db345b05867d47921b01e8c7e4c2df4ee98d | refs/heads/master | 2021-01-01T17:43:28.799946 | 2017-11-07T02:17:34 | 2017-11-07T02:17:34 | 18,375,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | from fractions import Fraction
# Project Euler 121: on turn t the bag holds 1 blue and t red discs; the
# player wins by drawing strictly more blue than red over all turns.
# The original script was broken (it referenced `blue`/`red` before definition
# and mutated a global `odds` inside the function, raising UnboundLocalError).


def chances(rounds):
    """Probability (exact Fraction) of drawing strictly more blue than red.

    On turn t (1-based) the bag holds 1 blue and t red discs, so the chance
    of drawing blue is 1 / (t + 1).
    """
    # probs[b] = probability of having drawn exactly b blue discs so far
    probs = [Fraction(1)]
    for turn in range(1, rounds + 1):
        p_blue = Fraction(1, turn + 1)
        nxt = [Fraction(0)] * (len(probs) + 1)
        for blues, p in enumerate(probs):
            nxt[blues] += p * (1 - p_blue)   # drew red
            nxt[blues + 1] += p * p_blue     # drew blue
        probs = nxt
    return sum((p for blues, p in enumerate(probs) if blues > rounds - blues),
               Fraction(0))


def max_prize_fund(rounds):
    """Largest whole-pound prize the banker can offer without an expected loss.

    The player pays 1 per game, so a fund f is safe while f * P(win) <= 1,
    i.e. f = floor(1 / P(win)).
    """
    p_win = chances(rounds)
    return p_win.denominator // p_win.numerator


if __name__ == '__main__':
    rounds = 15
    print(max_prize_fund(rounds))
| [
"[email protected]"
]
| |
daf5e135c93d1a2cd279020eb00f833972745212 | d7d53826ab804a3d0f229b0a189f2626d4ebe99b | /payment/models.py | a029297c35becab4fbd5c326718b5fa46a280bd2 | []
| no_license | zbcbcbc/xiaomaifeng | 6e299e7f1d13dbca95af7a1e46d66dd0d1c86b08 | 91b7da9404678227d3c2c4a446777be6dacdedb7 | refs/heads/master | 2020-12-02T16:58:26.661967 | 2016-09-04T17:53:51 | 2016-09-04T17:53:51 | 67,359,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,254 | py | # -*- coding: utf-8 -*-
#
# Copyright 2013 XiaoMaiFeng
__author__ = "Bicheng Zhang"
__copyright__ = "Copyright 2013, XiaoMaiFeng"
from django.db import models
__all__ = ['PartnerTradeBaseModel','DirectPayBaseModel', 'PartnerTradeBaseManager', 'DirectPayBaseManager']
class PaymentBaseManager(models.Manager):
    # Abstract manager interface shared by the payment managers below.
    def verify_payment(self, params):
        """Verify a payment from gateway callback params. Subclasses must implement."""
        raise NotImplementedError
    def update_payment(self, params):
        """Update stored payment state from gateway callback params. Subclasses must implement."""
        raise NotImplementedError
class PaymentBaseModel(models.Model):
    """
    Abstract base for payment records: links a payment one-to-one to an
    order and tracks whether the payment has been verified.
    """
    # One payment per order; the order is also the primary key.
    xmf_order = models.OneToOneField('orders.Order', primary_key=True, null=False, on_delete=models.CASCADE,
                            blank=False,
                            related_name="%(class)s",
                            editable=False, )
    # Set once the payment notification has been verified.
    is_verified = models.BooleanField(null=False, default=False, editable=True)
    class Meta:
        abstract = True
    def __repr__(self):
        return u"%s:(%s)" % (self, self.pk)
    def _verify_success(self, **kwargs):
        """
        Successful-verification logic:
        on success, emit the verification-success signal.
        """
        raise NotImplementedError
    def _verify_fail(self, reason, re_verify=True, **kwargs):
        """
        Failed-verification logic:
        emit the verification-failure signal.
        """
        raise NotImplementedError
    def build_verify_url(self, write_to_db=False, **kwargs):
        """
        Build the payment-verification URL.
        """
        raise NotImplementedError
class PartnerTradeBaseManager(PaymentBaseManager):
    # Manager for payments that involve an item and a quantity.
    def create_payment(self, payer, receiver, comment, social_platform, body, item, quantity):
        """Create a partner-trade payment for *quantity* of *item*. Subclasses must implement."""
        raise NotImplementedError
class PartnerTradeBaseModel(PaymentBaseModel):
    """Abstract payment model for trades that require a shipping confirmation step."""
    class Meta(PaymentBaseModel.Meta):
        abstract = True
    def shippment_confirm_success(self, **kwargs):
        """
        Goods shipped successfully:
        emit the shipment-success signal.
        """
        raise NotImplementedError
    def shippment_confirm_fail(self, reason, re_verify=True, **kwargs):
        """
        Shipping confirmation failed:
        emit the corresponding signal.
        (The original docstring here duplicated the success case.)
        """
        raise NotImplementedError
class DirectPayBaseManager(PaymentBaseManager):
    # Manager for direct payments (a fund amount, no item/shipping).
    def create_payment(self, payer, receiver, comment, social_platform, body, fund):
        """Create a direct payment of *fund*. Subclasses must implement."""
        raise NotImplementedError
class DirectPayBaseModel(PaymentBaseModel):
    """Abstract payment model for direct payments; adds no fields or hooks."""
    class Meta(PaymentBaseModel.Meta):
        abstract = True
| [
"[email protected]"
]
| |
293b781a84a06c23dcb33282f1187f182d64d46e | 853d4cec42071b76a80be38c58ffe0fbf9b9dc34 | /venv/Lib/site-packages/pip/_internal/operations/build/wheel.py | 5a43e43f72d487fc34b5e42c4afc6f2d625df89e | []
| no_license | msainTesting/TwitterAnalysis | 5e1646dbf40badf887a86e125ef30a9edaa622a4 | b1204346508ba3e3922a52380ead5a8f7079726b | refs/heads/main | 2023-08-28T08:29:28.924620 | 2021-11-04T12:36:30 | 2021-11-04T12:36:30 | 424,242,582 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | import logging
import os
from typing import Optional
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._internal.utils.subprocess import runner_with_spinner_message
logger = logging.getLogger(__name__)
def build_wheel_pep517(
    name: str,
    backend: Pep517HookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)
        spinner_message = f"Building wheel for {name} (pyproject.toml)"
        runner = runner_with_spinner_message(spinner_message)
        with backend.subprocess_runner(runner):
            built_name = backend.build_wheel(
                tempd, metadata_directory=metadata_directory
            )
    except Exception:
        # Any hook failure is reported as "no wheel" rather than propagated.
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, built_name)
| [
"[email protected]"
]
| |
ea41eabc699599cea850f3c5a4d00ad43a74addf | 90419da201cd4948a27d3612f0b482c68026c96f | /sdk/python/pulumi_azure_nextgen/devices/v20190322preview/iot_hub_resource_event_hub_consumer_group.py | 0828b9ec378e286ea61cfdf6bf519c011c308a72 | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | test-wiz-sec/pulumi-azure-nextgen | cd4bee5d70cb0d332c04f16bb54e17d016d2adaf | 20a695af0d020b34b0f1c336e1b69702755174cc | refs/heads/master | 2023-06-08T02:35:52.639773 | 2020-11-06T22:39:06 | 2020-11-06T22:39:06 | 312,993,761 | 0 | 0 | Apache-2.0 | 2023-06-02T06:47:28 | 2020-11-15T09:04:00 | null | UTF-8 | Python | false | false | 6,716 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['IotHubResourceEventHubConsumerGroup']
class IotHubResourceEventHubConsumerGroup(pulumi.CustomResource):
    # Auto-generated Pulumi resource for the Azure IoT Hub
    # "Event Hub-compatible consumer group" API, version 2019-03-22-preview.
    # NOTE: generated code — the alias list and property order below must be
    # preserved exactly.
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 event_hub_endpoint_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 resource_name_: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        The properties of the EventHubConsumerGroupInfo object.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
        :param pulumi.Input[str] name: The name of the consumer group to add.
        :param pulumi.Input[str] resource_group_name: The name of the resource group that contains the IoT hub.
        :param pulumi.Input[str] resource_name_: The name of the IoT hub.
        """
        # Legacy __name__/__opts__ positional arguments are still accepted but
        # deprecated in favour of resource_name/opts.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs and build the
            # property bag sent to the provider.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            if event_hub_endpoint_name is None:
                raise TypeError("Missing required property 'event_hub_endpoint_name'")
            __props__['event_hub_endpoint_name'] = event_hub_endpoint_name
            if name is None:
                raise TypeError("Missing required property 'name'")
            __props__['name'] = name
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if resource_name_ is None:
                raise TypeError("Missing required property 'resource_name_'")
            __props__['resource_name'] = resource_name_
            # Output-only properties start as None and are populated by the engine.
            __props__['etag'] = None
            __props__['properties'] = None
            __props__['type'] = None
        # Aliases let the engine treat other API versions of this type as the
        # same resource during state migration.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:devices/latest:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20160203:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20170119:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20170701:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20180122:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20180401:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20181201preview:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20190322:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20190701preview:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20191104:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20200301:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20200401:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20200615:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20200710preview:IotHubResourceEventHubConsumerGroup"), pulumi.Alias(type_="azure-nextgen:devices/v20200801:IotHubResourceEventHubConsumerGroup")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(IotHubResourceEventHubConsumerGroup, __self__).__init__(
            'azure-nextgen:devices/v20190322preview:IotHubResourceEventHubConsumerGroup',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'IotHubResourceEventHubConsumerGroup':
        """
        Get an existing IotHubResourceEventHubConsumerGroup resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Properties are populated from provider state, not supplied here.
        __props__ = dict()

        return IotHubResourceEventHubConsumerGroup(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        The etag.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The Event Hub-compatible consumer group name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output[Mapping[str, str]]:
        """
        The tags.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        the resource type.
        """
        return pulumi.get(self, "type")

    # Map provider (camelCase) property names to Python (snake_case) and back.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
]
| |
9182965cf5743dc890b618905174e89ec27a0f10 | 534e9eb7da6450a58876b284272d515ff8595730 | /base/factories.py | bee9c8e09ec3c02bf5ab2d091d098737a64a1bd8 | []
| no_license | Asingjr2/todos_dj | a188946c45e435faf7e57f912676af23f03356f6 | a1e08f27a81f9bfa3336dfcee036cbb325f7b63e | refs/heads/master | 2020-03-24T09:57:34.460300 | 2018-09-13T00:57:09 | 2018-09-13T00:57:09 | 142,643,266 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | import factory
from .models import BaseModel
class BaseModelFactory(factory.django.DjangoModelFactory):
    """Abstract factory for :class:`BaseModel`.

    Concrete model factories subclass this; ``abstract = True`` keeps
    factory_boy from instantiating it directly.
    """

    class Meta:
        abstract = True
        model = BaseModel
"[email protected]"
]
| |
6f28abf08eb082e5601825a559cb2c2981c18ca4 | 71764665e27f4b96bab44f38a4a591ffc2171c24 | /hhplt/productsuite/OBU_tc56/mock_suite_3.py | 2e1584d638d34317a197e435489c2eba9d8dd1cb | []
| no_license | kingdomjc/RSU_production_VAT | 693f8c504acc0cc88af92942734ccb85f7e7d7c0 | 9a3d6d3f5a5edfaf30afdff725661630aafe434c | refs/heads/master | 2020-07-31T05:03:46.699606 | 2019-09-24T02:09:53 | 2019-09-24T02:09:53 | 210,491,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,981 | py | #encoding:utf-8
u'''这项测试又是虚拟测试项,用于调试'''
import time
from threading import RLock
# Suite display name read by the test engine (operator-facing, kept in Chinese).
suiteName = u'''并行虚拟测试项3'''
version = "1.0"
failWeightSum = 10  # overall fail threshold: the suite fails once the summed weights of failed items exceed this
from hhplt.testengine.exceptions import TestItemFailException,AbortTestException
from hhplt.parameters import SESSION
from hhplt.testengine.parallelTestSynAnnotation import syntest,serialSuite
def finalFun(product):
    # Suite teardown hook invoked by the test engine; this mock has nothing to clean up.
    pass
def setup(product):
    # Suite setup hook invoked by the test engine before the items run; no-op for this mock.
    pass
serialIdCode = 1000  # module-global counter used to fabricate unique product ID codes across items
def T_01_initFactorySetting_A(product):
    u'''Factory-data write: writes factory information (including MAC address and wake-up sensitivity parameters)'''
    global serialIdCode
    serialIdCode += 1  # next unique serial for this mock run
    product.setTestingProductIdCode("%.5d"%serialIdCode)
    # raise AbortTestException(message=u"终止了啊")  # debug toggle: uncomment to simulate an aborted test
    time.sleep(0.5)  # simulate instrument latency
#@syntest  # synchronization decorator intentionally disabled for this item
def T_03_soundLight_M(product):
    u'''Sound/light indicator test: LEDs and buzzer operate normally; they stop only after manual confirmation'''
    global serialIdCode
    # Binding-code labels below are runtime strings consumed by the engine;
    # they must stay exactly as written (including the Chinese label).
    product.addBindingCode(u"PID","%.5d"%(serialIdCode+10))
    product.addBindingCode(u"中文","%.5d"%(serialIdCode+10))
    time.sleep(0.5)  # simulate instrument latency
def T_04_BatteryVoltage_A(product):
    u'''Battery-circuit voltage test: returns the battery circuit voltage; the backend judges it against configured limits'''
    global serialIdCode
    product.addBindingCode(u"EPC","%.5d"%(serialIdCode+100))
    time.sleep(0.5)  # simulate measurement time
    # The returned mapping is displayed by the engine; the (Chinese) key is an
    # operator-facing label and must stay as-is.
    return {u"槽思密达":product.productSlot}
@syntest
def T_05_anotherSoundLight_M(product):
    u'''Another sound/light indicator test: LEDs and buzzer operate normally; they stop only after manual confirmation'''
    time.sleep(1)  # simulate the indicator running before asking the operator
    # Imported lazily to avoid pulling the manual-check UI in at module load.
    from hhplt.testengine.manul import manulCheck
    # Prompt strings are operator-facing runtime text (Chinese) — do not alter.
    if manulCheck(u"声光指示测试", u"请确认槽位【%s】破玩意是正常亮了吗?"%product.productSlot):
        return {"随便写的返回值":300}
    else:
        # failWeight 10 equals failWeightSum, so a failure here fails the suite.
        raise TestItemFailException(failWeight = 10,message = u'声光测试失败')
| [
"[email protected]"
]
| |
05c4564a86b21d10499757b085d1b233d309d25b | 4e60e8a46354bef6e851e77d8df4964d35f5e53f | /share/Tornado/databases_share.py | 62c25ddacece769d8b5b612f0a4203f260ca0128 | []
| no_license | cq146637/DockerManagerPlatform | cbae4154ad66eac01772ddd902d7f70b62a2d856 | 9c509fb8dca6633ed3afdc92d4e6491b5d13e322 | refs/heads/master | 2021-04-09T13:58:14.117752 | 2018-03-19T13:41:04 | 2018-03-19T13:41:04 | 125,712,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,764 | py | # -*- coding: utf-8 -*-
__author__ = 'CQ'
import pymysql
import logging
logger = logging.getLogger(__name__)
class MysqlServer(object):
    """General-purpose MySQL connection class for the Tornado app.

    Uses pymysql directly instead of Tornado's database helpers so that
    database access can be customised more flexibly.
    """

    def __init__(self, db_config):
        """Open a connection and cursor using *db_config*.

        :param db_config: mapping with keys HOST, PORT, USERNAME, PASSWORD,
                          DB_NAME and CHAR_SET.
        """
        # Initialise attributes before connecting so close() is safe even if
        # the connection attempt below fails part-way through (the original
        # crashed with AttributeError inside close() on connection failure).
        self._db_config = db_config
        self._conn = None
        self._cursor = None
        try:
            self._conn = self.__get_conn()
            # BUG FIX: the original called the non-existent ``curson()``,
            # which raised AttributeError on every construction.
            self._cursor = self._conn.cursor()
        except Exception:
            self.close()
            logger.exception(u"数据库连接失败")

    def __get_conn(self):
        """Create and return a fresh pymysql connection."""
        connection = pymysql.connect(host=self._db_config['HOST'],
                                     # pymysql requires an int port; tolerate
                                     # string values in the config.
                                     port=int(self._db_config['PORT']),
                                     user=self._db_config['USERNAME'],
                                     password=self._db_config['PASSWORD'],
                                     db=self._db_config['DB_NAME'],
                                     charset=self._db_config['CHAR_SET'],
                                     )
        # ping(True) reconnects automatically if the server dropped the link.
        connection.ping(True)
        return connection

    def ensure_cursor(self):
        """(Re)create the connection and cursor if either is missing."""
        if not self._cursor:
            if not self._conn:
                self._conn = self.__get_conn()
            self._cursor = self._conn.cursor()

    def run_sql(self, sql):
        """Execute *sql* and return all result rows.

        :param sql: SQL statement to execute.
        :return: all rows, as returned by ``cursor.fetchall()``.
        """
        self.ensure_cursor()
        self._cursor.execute(sql)
        # commit is only meaningful for InnoDB tables; without it, writes to
        # InnoDB would not take effect.  MyISAM tables ignore it.
        self._conn.commit()
        return self._cursor.fetchall()

    def execute_sql(self, sql):
        """Execute *sql* without returning a result.

        :param sql: SQL statement to execute.
        """
        self.ensure_cursor()
        self._cursor.execute(sql)
        self._conn.commit()

    def run_sql_fetchone(self, sql):
        """Execute *sql* and return a single result row.

        :param sql: SQL statement to execute.
        :return: one row, as returned by ``cursor.fetchone()``.
        """
        self.ensure_cursor()
        self._cursor.execute(sql)
        return self._cursor.fetchone()

    def close(self):
        """Release the cursor and connection; safe to call more than once."""
        if self._cursor:
            self._cursor.close()
            self._cursor = None
        if self._conn:
            self._conn.close()
            self._conn = None
            logger.info(u"关闭数据库连接")
def test():
    """Smoke test: run a simple query against a local MySQL instance.

    :return: the distinct ``node_name`` rows from ``tb_node``.
    """
    settings = {
        'HOST': "127.0.0.1",
        # BUG FIX: pymysql.connect requires an int port; the original passed
        # the string "3306", which fails inside the socket layer.
        'PORT': 3306,
        'USERNAME': "root",
        'PASSWORD': "123456",
        'DB_NAME': "test",
        'CHAR_SET': "utf8",
    }
    db = MysqlServer(settings)
    sql = "select distinct `node_name` from tb_node"
    try:
        return db.run_sql(sql)
    finally:
        # Always release the connection, even if the query raises.
        db.close()
if __name__ == "__main__":
    # Manual smoke-test entry point; requires a reachable local MySQL server.
    print(test())
| [
"[email protected]"
]
| |
9d7c8ef7c5eb615add594ecc1e219baf83885afc | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/OMNI-gx2RSW200-MIB.py | 54100b88f1632183441c3e86f0cda6ade47ef2cf | [
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 41,787 | py | #
# PySNMP MIB module OMNI-gx2RSW200-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/OMNI-gx2RSW200-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:24:25 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection")
gx2Rsw200, = mibBuilder.importSymbols("GX2HFC-MIB", "gx2Rsw200")
trapText, trapNetworkElemAvailStatus, trapNETrapLastTrapTimeStamp, trapNetworkElemModelNumber, trapIdentifier, trapChangedValueInteger, trapChangedObjectId, trapNetworkElemAlarmStatus, trapPerceivedSeverity, trapNetworkElemOperState, trapNetworkElemAdminState, trapChangedValueDisplayString, trapNetworkElemSerialNum = mibBuilder.importSymbols("NLSBBN-TRAPS-MIB", "trapText", "trapNetworkElemAvailStatus", "trapNETrapLastTrapTimeStamp", "trapNetworkElemModelNumber", "trapIdentifier", "trapChangedValueInteger", "trapChangedObjectId", "trapNetworkElemAlarmStatus", "trapPerceivedSeverity", "trapNetworkElemOperState", "trapNetworkElemAdminState", "trapChangedValueDisplayString", "trapNetworkElemSerialNum")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, Gauge32, IpAddress, Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Counter32, Unsigned32, NotificationType, ObjectIdentity, TimeTicks, ModuleIdentity, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Gauge32", "IpAddress", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Counter32", "Unsigned32", "NotificationType", "ObjectIdentity", "TimeTicks", "ModuleIdentity", "Counter64")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
class Float(Counter32):
    # Placeholder for the MIB's FLOAT type, modelled as a bare Counter32
    # subclass (presumably the agent carries float readings in a 32-bit
    # integer encoding — confirm against the OMNI MIB definition).
    pass
gx2Rsw200Descriptor = MibIdentifier((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 1))
gx2Rsw200AnalogTable = MibTable((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2), )
if mibBuilder.loadTexts: gx2Rsw200AnalogTable.setStatus('mandatory')
gx2Rsw200AnalogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1), ).setIndexNames((0, "OMNI-gx2RSW200-MIB", "gx2Rsw200AnalogTableIndex"))
if mibBuilder.loadTexts: gx2Rsw200AnalogEntry.setStatus('mandatory')
gx2Rsw200DigitalTable = MibTable((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3), )
if mibBuilder.loadTexts: gx2Rsw200DigitalTable.setStatus('mandatory')
gx2Rsw200DigitalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2), ).setIndexNames((0, "OMNI-gx2RSW200-MIB", "gx2Rsw200DigitalTableIndex"))
if mibBuilder.loadTexts: gx2Rsw200DigitalEntry.setStatus('mandatory')
gx2Rsw200StatusTable = MibTable((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4), )
if mibBuilder.loadTexts: gx2Rsw200StatusTable.setStatus('mandatory')
gx2Rsw200StatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3), ).setIndexNames((0, "OMNI-gx2RSW200-MIB", "gx2Rsw200StatusTableIndex"))
if mibBuilder.loadTexts: gx2Rsw200StatusEntry.setStatus('mandatory')
gx2Rsw200FactoryTable = MibTable((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5), )
if mibBuilder.loadTexts: gx2Rsw200FactoryTable.setStatus('mandatory')
gx2Rsw200FactoryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4), ).setIndexNames((0, "OMNI-gx2RSW200-MIB", "gx2Rsw200FactoryTableIndex"))
if mibBuilder.loadTexts: gx2Rsw200FactoryEntry.setStatus('mandatory')
gx2Rsw200bHoldTimeTable = MibTable((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 6), )
if mibBuilder.loadTexts: gx2Rsw200bHoldTimeTable.setStatus('mandatory')
gx2Rsw200bHoldTimeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 6, 5), ).setIndexNames((0, "OMNI-gx2RSW200-MIB", "rxgx2Rsw200bHoldTimeTableIndex"), (0, "OMNI-gx2RSW200-MIB", "rxgx2Rsw200bHoldTimeSpecIndex"))
if mibBuilder.loadTexts: gx2Rsw200bHoldTimeEntry.setStatus('mandatory')
gx2Rsw200AnalogTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gx2Rsw200AnalogTableIndex.setStatus('mandatory')
rsw200labelModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelModTemp.setStatus('optional')
rsw200uomModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200uomModTemp.setStatus('optional')
rsw200majorHighModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 4), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200majorHighModTemp.setStatus('mandatory')
rsw200majorLowModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 5), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200majorLowModTemp.setStatus('mandatory')
rsw200minorHighModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 6), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200minorHighModTemp.setStatus('mandatory')
rsw200minorLowModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 7), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200minorLowModTemp.setStatus('mandatory')
rsw200currentValueModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 8), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200currentValueModTemp.setStatus('mandatory')
rsw200stateFlagModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateFlagModTemp.setStatus('mandatory')
rsw200minValueModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 10), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200minValueModTemp.setStatus('mandatory')
rsw200maxValueModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 11), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200maxValueModTemp.setStatus('mandatory')
rsw200alarmStateModTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("noAlarm", 1), ("majorLowAlarm", 2), ("minorLowAlarm", 3), ("minorHighAlarm", 4), ("majorHighAlarm", 5), ("informational", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200alarmStateModTemp.setStatus('mandatory')
rsw200labelFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelFanCurrent.setStatus('optional')
rsw200uomFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200uomFanCurrent.setStatus('optional')
rsw200majorHighFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 15), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200majorHighFanCurrent.setStatus('mandatory')
rsw200majorLowFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 16), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200majorLowFanCurrent.setStatus('mandatory')
rsw200minorHighFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 17), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200minorHighFanCurrent.setStatus('mandatory')
rsw200minorLowFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 18), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200minorLowFanCurrent.setStatus('mandatory')
rsw200currentValueFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 19), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200currentValueFanCurrent.setStatus('mandatory')
rsw200stateFlagFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateFlagFanCurrent.setStatus('mandatory')
rsw200minValueFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 21), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200minValueFanCurrent.setStatus('mandatory')
rsw200maxValueFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 22), Float()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200maxValueFanCurrent.setStatus('mandatory')
rsw200alarmStateFanCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 2, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("noAlarm", 1), ("majorLowAlarm", 2), ("minorLowAlarm", 3), ("minorHighAlarm", 4), ("majorHighAlarm", 5), ("informational", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200alarmStateFanCurrent.setStatus('mandatory')
gx2Rsw200DigitalTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gx2Rsw200DigitalTableIndex.setStatus('mandatory')
rsw200labelSwitchControl = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelSwitchControl.setStatus('optional')
rsw200enumSwitchControl = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumSwitchControl.setStatus('optional')
rsw200valueSwitchControl = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("secondary", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsw200valueSwitchControl.setStatus('mandatory')
rsw200stateflagSwitchControl = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagSwitchControl.setStatus('mandatory')
rsw200labelRevertMode = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelRevertMode.setStatus('optional')
rsw200enumRevertMode = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumRevertMode.setStatus('optional')
rsw200valueRevertMode = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("autorevert", 2), ("nonrevert", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsw200valueRevertMode.setStatus('mandatory')
rsw200stateflagRevertMode = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagRevertMode.setStatus('mandatory')
rsw200labelRevertTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelRevertTime.setStatus('optional')
rsw200enumRevertTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumRevertTime.setStatus('optional')
rsw200valueRevertTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("tensec", 1), ("sixtysec", 2), ("sixhundredsec", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsw200valueRevertTime.setStatus('mandatory')
rsw200stateflagRevertTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagRevertTime.setStatus('mandatory')
rsw200labelSwitchState = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelSwitchState.setStatus('optional')
rsw200enumSwitchState = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 15), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumSwitchState.setStatus('optional')
rsw200valueSwitchState = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("secondary", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueSwitchState.setStatus('mandatory')
rsw200stateflagSwitchState = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagSwitchState.setStatus('mandatory')
rsw200labelPriStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 18), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelPriStatus.setStatus('optional')
rsw200enumPriStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 19), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumPriStatus.setStatus('optional')
rsw200valuePriStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("short", 1), ("valid", 2), ("invalid", 3), ("open", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valuePriStatus.setStatus('mandatory')
rsw200stateflagPriStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagPriStatus.setStatus('mandatory')
rsw200labelSecStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 22), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelSecStatus.setStatus('optional')
rsw200enumSecStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 23), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumSecStatus.setStatus('optional')
rsw200valueSecStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("short", 1), ("valid", 2), ("invalid", 3), ("open", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueSecStatus.setStatus('mandatory')
rsw200stateflagSecStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagSecStatus.setStatus('mandatory')
rsw200labelFactoryDefault = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 26), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelFactoryDefault.setStatus('optional')
rsw200enumFactoryDefault = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 27), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200enumFactoryDefault.setStatus('optional')
rsw200valueFactoryDefault = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("off", 1), ("on", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsw200valueFactoryDefault.setStatus('mandatory')
rsw200stateflagFactoryDefault = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 3, 2, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagFactoryDefault.setStatus('mandatory')
gx2Rsw200StatusTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gx2Rsw200StatusTableIndex.setStatus('mandatory')
rsw200labelBoot = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelBoot.setStatus('optional')
rsw200valueBoot = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ok", 1), ("undetermined", 2), ("warning", 3), ("minor", 4), ("major", 5), ("critical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueBoot.setStatus('mandatory')
rsw200stateflagBoot = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagBoot.setStatus('mandatory')
rsw200labelFlash = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelFlash.setStatus('optional')
rsw200valueFlash = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ok", 1), ("undetermined", 2), ("warning", 3), ("minor", 4), ("major", 5), ("critical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueFlash.setStatus('mandatory')
rsw200stateflagFlash = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagFlash.setStatus('mandatory')
rsw200labelFactoryDataCRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelFactoryDataCRC.setStatus('optional')
rsw200valueFactoryDataCRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ok", 1), ("undetermined", 2), ("warning", 3), ("minor", 4), ("major", 5), ("critical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueFactoryDataCRC.setStatus('mandatory')
rsw200stateflagFactoryDataCRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagFactoryDataCRC.setStatus('mandatory')
rsw200labelAlarmDataCrc = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelAlarmDataCrc.setStatus('optional')
rsw200valueAlarmDataCrc = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ok", 1), ("undetermined", 2), ("warning", 3), ("minor", 4), ("major", 5), ("critical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueAlarmDataCrc.setStatus('mandatory')
rsw200stateflagAlarmDataCrc = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagAlarmDataCrc.setStatus('mandatory')
rsw200labelRFSignalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelRFSignalStatus.setStatus('optional')
rsw200valueRFSignalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ok", 1), ("undetermined", 2), ("warning", 3), ("minor", 4), ("major", 5), ("critical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valueRFSignalStatus.setStatus('mandatory')
rsw200stateflagRFSignalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagRFSignalStatus.setStatus('mandatory')
rsw200labelPriActiveStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 17), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200labelPriActiveStatus.setStatus('optional')
rsw200valuePriActiveStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ok", 1), ("undetermined", 2), ("warning", 3), ("minor", 4), ("major", 5), ("critical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200valuePriActiveStatus.setStatus('mandatory')
rsw200stateflagPriActiveStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 4, 3, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("hidden", 1), ("read-only", 2), ("updateable", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200stateflagPriActiveStatus.setStatus('mandatory')
gx2Rsw200FactoryTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gx2Rsw200FactoryTableIndex.setStatus('mandatory')
rsw200bootControlByte = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200bootControlByte.setStatus('mandatory')
rsw200bootStatusByte = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200bootStatusByte.setStatus('mandatory')
rsw200bank1CRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200bank1CRC.setStatus('mandatory')
rsw200bank2CRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200bank2CRC.setStatus('mandatory')
rsw200prgEEPROMByte = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200prgEEPROMByte.setStatus('mandatory')
rsw200factoryCRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200factoryCRC.setStatus('mandatory')
rsw200calculateCRC = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("factory", 1), ("alarm", 2), ("na", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200calculateCRC.setStatus('mandatory')
rsw200hourMeter = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200hourMeter.setStatus('mandatory')
rsw200flashPrgCntA = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200flashPrgCntA.setStatus('mandatory')
rsw200flashPrgCntB = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200flashPrgCntB.setStatus('mandatory')
rsw200flashBankARev = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200flashBankARev.setStatus('mandatory')
rsw200flashBankBRev = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 5, 4, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsw200flashBankBRev.setStatus('mandatory')
rxgx2Rsw200bHoldTimeTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 6, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rxgx2Rsw200bHoldTimeTableIndex.setStatus('mandatory')
rxgx2Rsw200bHoldTimeSpecIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 6, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rxgx2Rsw200bHoldTimeSpecIndex.setStatus('mandatory')
rxgx2Rsw200bHoldTimeData = MibTableColumn((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8, 6, 5, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rxgx2Rsw200bHoldTimeData.setStatus('mandatory')
trapRSW200ConfigChangeInteger = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,1)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200ConfigChangeDisplayString = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,2)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueDisplayString"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200RFInputAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,3)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200FanCurrentAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,4)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200ModuleTempAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,5)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200FlashAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,6)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200BankBootAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,7)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200AlarmDataCRCAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,8)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200FactoryDataCRCAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,9)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200InputSwitchedAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,10)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200ResetFactoryDefaultAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,11)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
trapRSW200SecondaryInputActiveAlarm = NotificationType((1, 3, 6, 1, 4, 1, 1166, 6, 1, 2, 8) + (0,12)).setObjects(("NLSBBN-TRAPS-MIB", "trapIdentifier"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemModelNumber"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemSerialNum"), ("NLSBBN-TRAPS-MIB", "trapPerceivedSeverity"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemOperState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAlarmStatus"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAdminState"), ("NLSBBN-TRAPS-MIB", "trapNetworkElemAvailStatus"), ("NLSBBN-TRAPS-MIB", "trapText"), ("NLSBBN-TRAPS-MIB", "trapChangedObjectId"), ("NLSBBN-TRAPS-MIB", "trapChangedValueInteger"), ("NLSBBN-TRAPS-MIB", "trapNETrapLastTrapTimeStamp"))
mibBuilder.exportSymbols("OMNI-gx2RSW200-MIB", gx2Rsw200bHoldTimeTable=gx2Rsw200bHoldTimeTable, rsw200labelFactoryDefault=rsw200labelFactoryDefault, gx2Rsw200AnalogEntry=gx2Rsw200AnalogEntry, rsw200flashPrgCntA=rsw200flashPrgCntA, rsw200stateflagRevertMode=rsw200stateflagRevertMode, rsw200stateflagSwitchState=rsw200stateflagSwitchState, trapRSW200FlashAlarm=trapRSW200FlashAlarm, rsw200labelAlarmDataCrc=rsw200labelAlarmDataCrc, rsw200hourMeter=rsw200hourMeter, rsw200minorHighModTemp=rsw200minorHighModTemp, rsw200labelPriActiveStatus=rsw200labelPriActiveStatus, trapRSW200FactoryDataCRCAlarm=trapRSW200FactoryDataCRCAlarm, rsw200flashBankARev=rsw200flashBankARev, rsw200bank1CRC=rsw200bank1CRC, rsw200alarmStateFanCurrent=rsw200alarmStateFanCurrent, trapRSW200SecondaryInputActiveAlarm=trapRSW200SecondaryInputActiveAlarm, rxgx2Rsw200bHoldTimeTableIndex=rxgx2Rsw200bHoldTimeTableIndex, rsw200enumRevertMode=rsw200enumRevertMode, rsw200alarmStateModTemp=rsw200alarmStateModTemp, rsw200minorLowModTemp=rsw200minorLowModTemp, rsw200stateflagBoot=rsw200stateflagBoot, Float=Float, rxgx2Rsw200bHoldTimeSpecIndex=rxgx2Rsw200bHoldTimeSpecIndex, trapRSW200ConfigChangeDisplayString=trapRSW200ConfigChangeDisplayString, rsw200valueAlarmDataCrc=rsw200valueAlarmDataCrc, rsw200enumSwitchControl=rsw200enumSwitchControl, rsw200maxValueModTemp=rsw200maxValueModTemp, rsw200factoryCRC=rsw200factoryCRC, gx2Rsw200StatusTableIndex=gx2Rsw200StatusTableIndex, rsw200majorLowModTemp=rsw200majorLowModTemp, gx2Rsw200FactoryTableIndex=gx2Rsw200FactoryTableIndex, rsw200valueSwitchControl=rsw200valueSwitchControl, rsw200enumFactoryDefault=rsw200enumFactoryDefault, rsw200flashPrgCntB=rsw200flashPrgCntB, rsw200majorHighModTemp=rsw200majorHighModTemp, trapRSW200ResetFactoryDefaultAlarm=trapRSW200ResetFactoryDefaultAlarm, gx2Rsw200FactoryEntry=gx2Rsw200FactoryEntry, rsw200maxValueFanCurrent=rsw200maxValueFanCurrent, rsw200bootStatusByte=rsw200bootStatusByte, rsw200uomModTemp=rsw200uomModTemp, 
rsw200labelRevertTime=rsw200labelRevertTime, rsw200enumSwitchState=rsw200enumSwitchState, rsw200valueSwitchState=rsw200valueSwitchState, rsw200labelFlash=rsw200labelFlash, rsw200labelRevertMode=rsw200labelRevertMode, rsw200stateflagFactoryDefault=rsw200stateflagFactoryDefault, rsw200valueRevertTime=rsw200valueRevertTime, rsw200stateflagSwitchControl=rsw200stateflagSwitchControl, rsw200labelModTemp=rsw200labelModTemp, rsw200minorLowFanCurrent=rsw200minorLowFanCurrent, rsw200valueFlash=rsw200valueFlash, gx2Rsw200AnalogTableIndex=gx2Rsw200AnalogTableIndex, gx2Rsw200StatusTable=gx2Rsw200StatusTable, rsw200stateflagSecStatus=rsw200stateflagSecStatus, gx2Rsw200bHoldTimeEntry=gx2Rsw200bHoldTimeEntry, rsw200labelBoot=rsw200labelBoot, gx2Rsw200StatusEntry=gx2Rsw200StatusEntry, gx2Rsw200DigitalTable=gx2Rsw200DigitalTable, gx2Rsw200DigitalTableIndex=gx2Rsw200DigitalTableIndex, rsw200stateflagRevertTime=rsw200stateflagRevertTime, rsw200stateFlagModTemp=rsw200stateFlagModTemp, rsw200bootControlByte=rsw200bootControlByte, rsw200labelSwitchControl=rsw200labelSwitchControl, rsw200flashBankBRev=rsw200flashBankBRev, rsw200calculateCRC=rsw200calculateCRC, trapRSW200ModuleTempAlarm=trapRSW200ModuleTempAlarm, rsw200minorHighFanCurrent=rsw200minorHighFanCurrent, trapRSW200ConfigChangeInteger=trapRSW200ConfigChangeInteger, rsw200majorLowFanCurrent=rsw200majorLowFanCurrent, rsw200stateflagAlarmDataCrc=rsw200stateflagAlarmDataCrc, rsw200labelRFSignalStatus=rsw200labelRFSignalStatus, rsw200labelFactoryDataCRC=rsw200labelFactoryDataCRC, rxgx2Rsw200bHoldTimeData=rxgx2Rsw200bHoldTimeData, rsw200valueRFSignalStatus=rsw200valueRFSignalStatus, rsw200bank2CRC=rsw200bank2CRC, rsw200enumRevertTime=rsw200enumRevertTime, rsw200uomFanCurrent=rsw200uomFanCurrent, rsw200stateflagRFSignalStatus=rsw200stateflagRFSignalStatus, rsw200valueSecStatus=rsw200valueSecStatus, gx2Rsw200DigitalEntry=gx2Rsw200DigitalEntry, rsw200labelSwitchState=rsw200labelSwitchState, rsw200enumSecStatus=rsw200enumSecStatus, 
gx2Rsw200AnalogTable=gx2Rsw200AnalogTable, rsw200valueFactoryDefault=rsw200valueFactoryDefault, rsw200valueBoot=rsw200valueBoot, rsw200valueRevertMode=rsw200valueRevertMode, rsw200currentValueFanCurrent=rsw200currentValueFanCurrent, rsw200majorHighFanCurrent=rsw200majorHighFanCurrent, rsw200labelSecStatus=rsw200labelSecStatus, rsw200currentValueModTemp=rsw200currentValueModTemp, rsw200labelPriStatus=rsw200labelPriStatus, rsw200minValueModTemp=rsw200minValueModTemp, trapRSW200AlarmDataCRCAlarm=trapRSW200AlarmDataCRCAlarm, rsw200stateflagFlash=rsw200stateflagFlash, trapRSW200InputSwitchedAlarm=trapRSW200InputSwitchedAlarm, rsw200minValueFanCurrent=rsw200minValueFanCurrent, trapRSW200BankBootAlarm=trapRSW200BankBootAlarm, rsw200enumPriStatus=rsw200enumPriStatus, rsw200valueFactoryDataCRC=rsw200valueFactoryDataCRC, gx2Rsw200FactoryTable=gx2Rsw200FactoryTable, rsw200stateflagFactoryDataCRC=rsw200stateflagFactoryDataCRC, rsw200prgEEPROMByte=rsw200prgEEPROMByte, rsw200labelFanCurrent=rsw200labelFanCurrent, rsw200valuePriStatus=rsw200valuePriStatus, rsw200stateflagPriStatus=rsw200stateflagPriStatus, rsw200valuePriActiveStatus=rsw200valuePriActiveStatus, rsw200stateflagPriActiveStatus=rsw200stateflagPriActiveStatus, rsw200stateFlagFanCurrent=rsw200stateFlagFanCurrent, gx2Rsw200Descriptor=gx2Rsw200Descriptor, trapRSW200FanCurrentAlarm=trapRSW200FanCurrentAlarm, trapRSW200RFInputAlarm=trapRSW200RFInputAlarm)
| [
"[email protected]"
]
| |
8a6e89aefd0aab863690b3f259ff2bcd29002021 | 9b54e3d58447e917a238b85891020c392c4ac601 | /acmicpc/14470/14470.py | 67e4bb02871ebb7925dcbc02a4a5f31a47ccf24e | [
"MIT"
]
| permissive | love-adela/algorithm-ps | ea0ebcd641a4c309348b389b8618daa83973f4b2 | c92d105d8ad344def001160367115ecf99d81c0d | refs/heads/master | 2023-05-11T03:37:11.750692 | 2023-04-30T17:31:30 | 2023-04-30T17:31:30 | 174,651,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 162 | py | a = int(input())
b = int(input())
c = int(input())
d = int(input())
e = int(input())

# When the sample starts below zero it is first warmed to 0 at c per degree,
# pays a one-off cost d (presumably a phase change at 0 -- per the problem
# statement), then warms to b at e per degree; otherwise only a -> b at e.
time = (-a) * c + d + b * e if a < 0 else (b - a) * e
print(time)
| [
"[email protected]"
]
| |
95d4af7b8cfb565d8293b6691bd035ddde66ad43 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /Z8REdTE5P57f4q7dK_19.py | 9bdc112052430aebd5d4e5509f2a4327f341d3b4 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,271 | py | """
A Collatz sequence is generated like this. Start with a positive number. If
it's even, halve it. If it's odd, multiply it by three and add one. Repeat the
process with the resulting number. The Collatz Conjecture is that every
sequence eventually reaches 1 (continuing past 1 just results in an endless
repeat of the sequence `4, 2, 1`).
The length of the sequence from starting number to 1 varies widely.
Create a function that takes a number as an argument and returns a tuple of
two elements — the number of steps in the Collatz sequence of the number, and
the highest number reached.
### Examples
collatz(2) ➞ (2, 2)
# seq = [2, 1]
collatz(3) ➞ (8, 16)
# seq = [3, 10, 5, 16, 8, 4, 2, 1]
collatz(7) ➞ (17, 52)
# seq = [7, 22, 11, 34, 17, 52, 26, 13, 40, 20, 10, 5, 16, 8, 4, 2, 1]
collatz(8) ➞ (4, 8)
# seq = [8, 4, 2, 1]
### Notes
(Improbable) Bonus: Find a positive starting number that doesn't reach 1, and
score a place in Math history plus a cash prize.
"""
def collatz(n):
    """Return (length, peak) of the Collatz sequence starting at ``n``.

    The sequence begins at ``n`` and repeatedly halves even values or maps
    odd values ``k -> 3*k + 1``, stopping once 1 is reached.

    Args:
        n: Positive starting integer.

    Returns:
        Tuple of (number of terms, including ``n`` and the final 1,
        highest value reached).
    """
    seq = [n]
    k = n
    # Check the termination condition *before* stepping so collatz(1)
    # correctly yields (1, 1) instead of walking 1 -> 4 -> 2 -> 1.
    while k != 1:
        # Integer arithmetic throughout: ``k // 2`` avoids the float drift
        # that ``k /= 2`` would introduce (which forced a round() on max).
        k = k // 2 if k % 2 == 0 else 3 * k + 1
        seq.append(k)
    return len(seq), max(seq)
| [
"[email protected]"
]
| |
2c7378d5804ce5a23049db468925321034b3b0f8 | c23b4c6253ca5a0d42822dd0d28ffa752c11ebf5 | /exercises/55ad104b-7e6b-4f79-85f7-99ab6d43946e/skeletons/6376fb9a-a7ba-4f6b-b910-33ae0f34729e/skeleton.py3 | 1dc8678357cebc450bba6ab2504ef1e7caf277c6 | []
| no_license | josepaiva94/e57d8867-6234-41a6-b239-2cd978ad1e70 | 803e2eb1e2db23c64409bc72ff00c4463875a82f | aa270941dd8cf7b2e1ec8ac89445b1ab3a47f89d | refs/heads/master | 2023-01-07T10:49:56.871378 | 2020-11-16T11:28:14 | 2020-11-16T11:28:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py3 | mac = input()
def checkMAC(mac):
    """Return True when *mac* is a valid MAC address, otherwise False.

    Exercise skeleton: every ``{{gap_N}}`` marker is a fill-in placeholder,
    so this function is intentionally not runnable Python until the gaps
    are completed by the student.
    """
    # Presumably the address split into its separator-delimited groups
    # -- TODO confirm which separator the exercise expects.
    mac_split = {{gap_1}}
    # Characters permitted in a hexadecimal MAC group (uppercase only).
    hex_arr = "ABCDEF0123456789"
    # NOTE(review): ``is 6`` compares object identity, not value; the
    # conventional, reliable comparison here is ``== 6``.
    if {{gap_2}} is 6:
        for x in {{gap_2}}:
            # NOTE(review): the parentheses on the next line are unbalanced
            # ("not({{gap_4}}:"); the completed solution must close them.
            if not({{gap_3}}) or not({{gap_4}}:
                return False
        return True
    else:
        return False
print(checkMAC(mac)) | [
"[email protected]"
]
| |
cb9a9d19afb8978fb0f05ed8e8cc82e86052e9ce | 469325b1fd3a6b88710bbbb4f5baa0a26404b37a | /Proj/.history/trafficapp/aicv/imgshandle_20201030055426.py | 31fb8f916d0bfda891c0927036e367ce80096960 | []
| no_license | axiat/IRCRA | 7f0f8ef1a41b8b58d6bc836f960b9d5a29dcec0f | b55bfdd794bd8200fd6f74f57016effebdd9d3e6 | refs/heads/master | 2023-03-13T03:03:33.385625 | 2021-02-21T17:27:52 | 2021-02-21T17:27:52 | 305,881,747 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,852 | py | import cv2
import numpy as np
import time
from PyQt5.QtGui import QImage
# def QImageToCvMat(self,incomingImage):
# ''' Converts a QImage into an opencv MAT format '''
# incomingImage = incomingImage.convertToFormat(QtGui.QImage.Format_RGB888)
# width = incomingImage.width()
# height = incomingImage.height()
# ptr = incomingImage.constBits()
# arr = np.array(ptr).reshape(height, width, 4) # Copies the data
# return arr
def QImageToCvMat(incomingImage):
    """Convert a QImage into an OpenCV-style numpy array of shape (H, W, 3).

    The input is first normalized to ``Format_RGB888`` (exactly 3 bytes per
    pixel), then copied out honoring QImage's per-row alignment padding.

    Args:
        incomingImage: the QImage to convert.

    Returns:
        A ``numpy.ndarray`` of dtype uint8 and shape (height, width, 3)
        owning its own memory (safe to use after the QImage is freed).
    """
    image = incomingImage.convertToFormat(QImage.Format_RGB888)
    width = image.width()
    height = image.height()
    ptr = image.bits()
    # Bug fix: the previous code sized the buffer as height*width*4 and
    # reshaped to 4 channels, but RGB888 stores only 3 bytes per pixel.
    # QImage also pads each scan line to a 32-bit boundary, so rows must be
    # addressed via bytesPerLine() rather than assuming width * 3 bytes.
    bytes_per_line = image.bytesPerLine()
    ptr.setsize(height * bytes_per_line)
    rows = np.frombuffer(ptr, np.uint8).reshape((height, bytes_per_line))
    # Drop the alignment padding, then copy so the array outlives the QImage.
    return rows[:, : width * 3].reshape((height, width, 3)).copy()
def getTime():
    """Return the current local time formatted as ``YYYY-MM-DD HH:MM:SS``."""
    now = time.localtime()
    return time.strftime("%Y-%m-%d %H:%M:%S", now)
def timeImg(img):
    """Draw the current timestamp (see getTime) onto *img* and return it.

    The text is rendered in place at the top-left corner in red
    (BGR (0, 0, 255)), matching what the commented-out draft intended.

    Args:
        img: an OpenCV BGR image (numpy array); modified in place.

    Returns:
        The same image with the timestamp overlay.
    """
    # Bug fix: the previous version returned the undefined name ``img1``
    # (every assignment was commented out), raising NameError on any call.
    cv2.putText(img, getTime(), (5, 30),
                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 2)
    return img
def toGray(img):
    """Convert a BGR image to grayscale, returned as a 3-channel image."""
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Replicate the single gray plane into three identical channels so the
    # result keeps the (height, width, 3) color-image layout.
    img3 = np.stack((gray, gray, gray), axis=2)
    return img3
def toClear(img):
    # Placeholder: no sharpening/denoising implemented yet -- presumably a
    # "clarity" filter is intended here (TODO confirm); returns img unchanged.
    return img
def toBright(img, brightness=127):
    # Placeholder: brightness adjustment not implemented; the `brightness`
    # parameter (default 127) is currently ignored and img is returned as-is.
    return img
def toLine(img):
    # Placeholder: line/edge extraction not implemented yet; returns the
    # input unchanged.
    return img
| [
"[email protected]"
]
| |
ae089e7b09c6e18966e93cccf8067efcbfd52338 | 017fc2259e09ec8760bc473d436ad45e464044f4 | /pennylane/templates/subroutines/grover.py | 46f2d83ab8c0c452e0b5ed4121e61a5b35107d51 | [
"Apache-2.0"
]
| permissive | thomascherickal/pennylane | f1e8ef5b8ffa0c00601da30c39ff89318791e08f | 20dcbbc2f86c3fbd3f6fe2a416300e2d2fc172d0 | refs/heads/master | 2021-10-09T16:15:44.500819 | 2021-09-28T19:18:23 | 2021-09-28T19:18:23 | 199,557,166 | 2 | 1 | Apache-2.0 | 2021-09-28T19:18:23 | 2019-07-30T02:11:54 | Python | UTF-8 | Python | false | false | 5,426 | py | # Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains the Grover Operation template.
"""
import itertools
import functools
import numpy as np
import pennylane as qml
from pennylane.operation import AnyWires, Operation
from pennylane.ops import Hadamard, PauliZ, MultiControlledX
class GroverOperator(Operation):
    r"""Apply the Grover diffusion operator.

    .. math::

        G = 2 |s\rangle \langle s| - I
          = H^{\otimes n} \left( 2 |0\rangle \langle 0| - I \right) H^{\otimes n}

    where :math:`n` is the number of wires and
    :math:`|s\rangle = H^{\otimes n}|0\rangle` is the uniform superposition.

    The decomposition applies a layer of Hadamards, a zero-controlled
    multi-controlled :math:`X` on the last wire sandwiched between two
    :math:`Z` gates (leveraging the identity :math:`HXH = Z`), and a
    closing layer of Hadamards. Repeated oracle/diffusion rounds amplify
    the amplitude of the basis state whose phase the oracle flipped.

    Args:
        wires (Union[Wires, Sequence[int], or int]): the wires to apply to
        work_wires (Union[Wires, Sequence[int], or int]): optional auxiliary
            wires to assist in the decomposition of
            :class:`~.MultiControlledX`.
    """

    num_params = 0
    num_wires = AnyWires
    par_domain = None
    grad_method = None

    def __init__(self, wires=None, work_wires=None, do_queue=True, id=None):
        # The decomposition targets the last wire controlled on all the
        # others, so fewer than two wires cannot be decomposed.
        if not hasattr(wires, "__len__") or len(wires) < 2:
            raise ValueError("GroverOperator must have at least two wires provided.")

        self.work_wires = work_wires
        super().__init__(wires=wires, do_queue=do_queue, id=id)

    def expand(self):
        controls, target = self.wires[:-1], self.wires[-1]

        with qml.tape.QuantumTape() as tape:
            for wire in controls:
                Hadamard(wire)

            # Control on the all-zero string; with the surrounding Z gates
            # this realizes the 2|0><0| - I reflection up to global phase.
            PauliZ(target)
            MultiControlledX(
                control_values="0" * len(controls),
                control_wires=controls,
                wires=target,
                work_wires=self.work_wires,
            )
            PauliZ(target)

            for wire in controls:
                Hadamard(wire)

        return tape

    @property
    def matrix(self):
        # Redefined here because _matrix has a custom signature: it is
        # parametrized by the wire count, not by gate parameters.
        return self._matrix(len(self.wires))

    @classmethod
    def _matrix(cls, *params):
        num_wires = params[0]

        # Single-qubit plus state H|0>.
        plus = np.array([1, 1]) / np.sqrt(2)

        # Uniform superposition |s> over all wires (left-folded Kronecker
        # product, matching a reduce over num_wires copies of `plus`).
        state = plus
        for _ in range(num_wires - 1):
            state = np.kron(state, plus)

        # Grover diffusion operator 2|s><s| - I.
        return 2 * np.outer(state, state) - np.identity(2 ** num_wires)
| [
"[email protected]"
]
| |
c783408703830e9ce5af701f85c93ff834e2edcd | 325fde42058b2b82f8a4020048ff910cfdf737d7 | /src/automation/azext_automation/vendored_sdks/automation/aio/operations/_key_operations.py | 681d8e496bb3177248fc58d79101828166f94822 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | ebencarek/azure-cli-extensions | 46b0d18fe536fe5884b00d7ffa30f54c7d6887d1 | 42491b284e38f8853712a5af01836f83b04a1aa8 | refs/heads/master | 2023-04-12T00:28:44.828652 | 2021-03-30T22:34:13 | 2021-03-30T22:34:13 | 261,621,934 | 2 | 5 | MIT | 2020-10-09T18:21:52 | 2020-05-06T01:25:58 | Python | UTF-8 | Python | false | false | 4,781 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class KeyOperations:
    """KeyOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~automation_client.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def list_by_automation_account(
        self,
        resource_group_name: str,
        automation_account_name: str,
        **kwargs
    ) -> "models.KeyListResult":
        """Retrieve the automation keys for an account.

        :param resource_group_name: Name of an Azure Resource group.
        :type resource_group_name: str
        :param automation_account_name: The name of the automation account.
        :type automation_account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: KeyListResult, or the result of cls(response)
        :rtype: ~automation_client.models.KeyListResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.KeyListResult"]
        # Map well-known HTTP failure codes to dedicated azure-core exception
        # types; callers may extend/override the mapping via ``error_map``.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2015-10-31"
        accept = "application/json"

        # Construct URL
        url = self.list_by_automation_account.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._]+$'),
            'automationAccountName': self._serialize.url("automation_account_name", automation_account_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # listKeys is exposed by ARM as a POST action with no request body.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('KeyListResult', pipeline_response)

        if cls:
            # Hand the raw pipeline response and deserialized model to the
            # caller-supplied transformer instead of returning the model.
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_by_automation_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/listKeys'}  # type: ignore
| [
"[email protected]"
]
| |
3a100943bf425183c885363493f4a98d0dcd5d1f | 8890925319a25dc3df29f53d0d8125d347680f68 | /looker_client_31/looker_sdk/dialect_info.py | b4b7fbe63b0ad9721c9bda7186538f7328c6c3f0 | [
"MIT"
]
| permissive | ContrastingSounds/looker_sdk_31 | f5d300ae54aee1cc5a2621b36b49541db24ed248 | f973434049fff1b605b10086ab8b84f2f62e3489 | refs/heads/master | 2020-03-19T20:31:24.785373 | 2018-06-11T09:41:36 | 2018-06-11T09:41:36 | 136,802,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,857 | py | # coding: utf-8
"""
Experimental Looker API 3.1 Preview
This API 3.1 is in active development. Breaking changes are likely to occur to some API functions in future Looker releases until API 3.1 is officially launched and upgraded to beta status. If you have time and interest to experiment with new or modified services exposed in this embryonic API 3.1, we welcome your participation and feedback! For large development efforts or critical line-of-business projects, we strongly recommend you stick with the API 3.0 while API 3.1 is under construction. # noqa: E501
OpenAPI spec version: 3.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from looker_client_31.looker_sdk.dialect_info_options import DialectInfoOptions # noqa: F401,E501
class DialectInfo(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared Swagger type for each model attribute; consumed by the
    # generated (de)serialization machinery.
    swagger_types = {
        'name': 'str',
        'label': 'str',
        'label_for_database_equivalent': 'str',
        'default_port': 'str',
        'default_max_connections': 'str',
        'supported_options': 'DialectInfoOptions',
        'installed': 'bool',
        'can': 'dict(str, bool)'
    }

    # Maps pythonic attribute names to the JSON keys used on the wire.
    attribute_map = {
        'name': 'name',
        'label': 'label',
        'label_for_database_equivalent': 'label_for_database_equivalent',
        'default_port': 'default_port',
        'default_max_connections': 'default_max_connections',
        'supported_options': 'supported_options',
        'installed': 'installed',
        'can': 'can'
    }

    def __init__(self, name=None, label=None, label_for_database_equivalent=None, default_port=None, default_max_connections=None, supported_options=None, installed=None, can=None):  # noqa: E501
        """DialectInfo - a model defined in Swagger"""  # noqa: E501
        # Backing fields start as None; only explicitly supplied values are
        # assigned below (via the property setters).
        self._name = None
        self._label = None
        self._label_for_database_equivalent = None
        self._default_port = None
        self._default_max_connections = None
        self._supported_options = None
        self._installed = None
        self._can = None
        self.discriminator = None
        if name is not None:
            self.name = name
        if label is not None:
            self.label = label
        if label_for_database_equivalent is not None:
            self.label_for_database_equivalent = label_for_database_equivalent
        if default_port is not None:
            self.default_port = default_port
        if default_max_connections is not None:
            self.default_max_connections = default_max_connections
        if supported_options is not None:
            self.supported_options = supported_options
        if installed is not None:
            self.installed = installed
        if can is not None:
            self.can = can

    @property
    def name(self):
        """Gets the name of this DialectInfo.  # noqa: E501

        The name of the dialect  # noqa: E501

        :return: The name of this DialectInfo.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this DialectInfo.

        The name of the dialect  # noqa: E501

        :param name: The name of this DialectInfo.  # noqa: E501
        :type: str
        """

        self._name = name

    @property
    def label(self):
        """Gets the label of this DialectInfo.  # noqa: E501

        The human-readable label of the connection  # noqa: E501

        :return: The label of this DialectInfo.  # noqa: E501
        :rtype: str
        """
        return self._label

    @label.setter
    def label(self, label):
        """Sets the label of this DialectInfo.

        The human-readable label of the connection  # noqa: E501

        :param label: The label of this DialectInfo.  # noqa: E501
        :type: str
        """

        self._label = label

    @property
    def label_for_database_equivalent(self):
        """Gets the label_for_database_equivalent of this DialectInfo.  # noqa: E501

        What the dialect calls the equivalent of a normal SQL table  # noqa: E501

        :return: The label_for_database_equivalent of this DialectInfo.  # noqa: E501
        :rtype: str
        """
        return self._label_for_database_equivalent

    @label_for_database_equivalent.setter
    def label_for_database_equivalent(self, label_for_database_equivalent):
        """Sets the label_for_database_equivalent of this DialectInfo.

        What the dialect calls the equivalent of a normal SQL table  # noqa: E501

        :param label_for_database_equivalent: The label_for_database_equivalent of this DialectInfo.  # noqa: E501
        :type: str
        """

        self._label_for_database_equivalent = label_for_database_equivalent

    @property
    def default_port(self):
        """Gets the default_port of this DialectInfo.  # noqa: E501

        Default port number  # noqa: E501

        :return: The default_port of this DialectInfo.  # noqa: E501
        :rtype: str
        """
        return self._default_port

    @default_port.setter
    def default_port(self, default_port):
        """Sets the default_port of this DialectInfo.

        Default port number  # noqa: E501

        :param default_port: The default_port of this DialectInfo.  # noqa: E501
        :type: str
        """

        self._default_port = default_port

    @property
    def default_max_connections(self):
        """Gets the default_max_connections of this DialectInfo.  # noqa: E501

        Default number max connections  # noqa: E501

        :return: The default_max_connections of this DialectInfo.  # noqa: E501
        :rtype: str
        """
        return self._default_max_connections

    @default_max_connections.setter
    def default_max_connections(self, default_max_connections):
        """Sets the default_max_connections of this DialectInfo.

        Default number max connections  # noqa: E501

        :param default_max_connections: The default_max_connections of this DialectInfo.  # noqa: E501
        :type: str
        """

        self._default_max_connections = default_max_connections

    @property
    def supported_options(self):
        """Gets the supported_options of this DialectInfo.  # noqa: E501

        Option support details  # noqa: E501

        :return: The supported_options of this DialectInfo.  # noqa: E501
        :rtype: DialectInfoOptions
        """
        return self._supported_options

    @supported_options.setter
    def supported_options(self, supported_options):
        """Sets the supported_options of this DialectInfo.

        Option support details  # noqa: E501

        :param supported_options: The supported_options of this DialectInfo.  # noqa: E501
        :type: DialectInfoOptions
        """

        self._supported_options = supported_options

    @property
    def installed(self):
        """Gets the installed of this DialectInfo.  # noqa: E501

        Is the supporting driver installed  # noqa: E501

        :return: The installed of this DialectInfo.  # noqa: E501
        :rtype: bool
        """
        return self._installed

    @installed.setter
    def installed(self, installed):
        """Sets the installed of this DialectInfo.

        Is the supporting driver installed  # noqa: E501

        :param installed: The installed of this DialectInfo.  # noqa: E501
        :type: bool
        """

        self._installed = installed

    @property
    def can(self):
        """Gets the can of this DialectInfo.  # noqa: E501

        Operations the current user is able to perform on this object  # noqa: E501

        :return: The can of this DialectInfo.  # noqa: E501
        :rtype: dict(str, bool)
        """
        return self._can

    @can.setter
    def can(self, can):
        """Sets the can of this DialectInfo.

        Operations the current user is able to perform on this object  # noqa: E501

        :param can: The can of this DialectInfo.  # noqa: E501
        :type: dict(str, bool)
        """

        self._can = can

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models, lists of models, and dicts of
        # models by delegating to their own to_dict when available.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, DialectInfo):
            return False

        # Attribute-wise comparison via the instance dicts.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
7fb5057297d318075054c980e2bc6573327fbf36 | 4f79193bea8bd011f01966535eac14700d652783 | /wejapa/bin/django-admin.py | 600b6ab29608a37b3ae556fb1f93a0a66b4b3179 | []
| no_license | Janice-M/puppy-store | 8aa92ee687f87f96c485e5a19b66e943e0733401 | 4089346cbed5aafcee3e615fa7da33418f66c03c | refs/heads/main | 2023-01-24T06:41:33.355277 | 2020-11-30T20:50:13 | 2020-11-30T20:50:13 | 308,664,959 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 692 | py | #!/home/moringa/Documents/Django/pup/wejapa/bin/python3.6
# When the django-admin.py deprecation ends, remove this script.
import warnings

from django.core import management

try:
    from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
    raise ImportError(
        'django-admin.py was deprecated in Django 3.1 and removed in Django '
        '4.0. Please manually remove this script from your virtual environment '
        'and use django-admin instead.'
    )


def _main():
    """Emit the deprecation warning, then defer to the regular CLI entry point."""
    warnings.warn(
        'django-admin.py is deprecated in favor of django-admin.',
        RemovedInDjango40Warning,
    )
    management.execute_from_command_line()


if __name__ == "__main__":
    _main()
| [
"[email protected]"
]
| |
9e9f504f0fb585777d13b480599377adfccab439 | 4a28e3e3afb28c0455ea21cfb983c3a8284dc5dd | /I E4.py | ee1a12a4cf658e738806f55aa474029ddec31781 | []
| no_license | omdeshmukh20/Python-3-Programming | 60f6bc4e627de9d643a429e64878a636f3875cae | 9fb4c7fa54bc26d18b69141493c7a72e0f68f7d0 | refs/heads/main | 2023-08-28T04:37:27.001888 | 2021-10-29T17:03:34 | 2021-10-29T17:03:34 | 370,008,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | #Discription: List of pets using if else
# Description: prompt for a pet name and report whether it is one of my pets.
# Date: 06/09/21
# Author: Om Deshmukh
myPets = ['Zophie', 'Pooka', 'Fat-tail']
print('Enter a pet name:')
name = input()
if name in myPets:
    print(name + ' is my pet.')
else:
    print('I do not have a pet named ' + name)
| [
"[email protected]"
]
| |
60ad984973c35369bf1cb81993e908362db04f80 | a31c54cb9b27e315567ed865e07cb720fc1e5c8e | /revenge/engines/frida/memory/__init__.py | 822cf4949f39526faed7b7372b6be6f31b0ee14f | []
| no_license | bannsec/revenge | 212bc15e09f7d864c837a1829b3dc96410e369d3 | 2073b8fad76ff2ba21a5114be54e959297aa0cf9 | refs/heads/master | 2021-06-25T12:26:02.609076 | 2020-05-29T15:46:45 | 2020-05-29T15:46:45 | 188,461,358 | 51 | 6 | null | null | null | null | UTF-8 | Python | false | false | 305 | py |
import logging
logger = logging.getLogger(__name__)
from .memory_range import FridaMemoryRange as MemoryRange
from .map import FridaMemoryMap as MemoryMap
from .find import FridaMemoryFind as MemoryFind
from .memory_bytes import FridaMemoryBytes as MemoryBytes
from .memory import FridaMemory as Memory
| [
"[email protected]"
]
| |
89ae8ec0dd62583e9077cefef17f34b296f50654 | 1061216c2c33c1ed4ffb33e6211565575957e48f | /python/openapi_client/model/invalid_message_error.py | 222cb173171af386100ac7a72458aaa39665c43e | []
| no_license | MSurfer20/test2 | be9532f54839e8f58b60a8e4587348c2810ecdb9 | 13b35d72f33302fa532aea189e8f532272f1f799 | refs/heads/main | 2023-07-03T04:19:57.548080 | 2021-08-11T19:16:42 | 2021-08-11T19:16:42 | 393,920,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,092 | py | """
Zulip REST API
Powerful open source group chat # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from openapi_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from openapi_client.exceptions import ApiAttributeError
def lazy_import():
    """Import the composed-schema model classes on first use.

    Importing here (instead of at module top level) defers the import until
    after all model modules have loaded; the classes are then published into
    this module's globals so later lookups by name succeed.
    """
    from openapi_client.model.invalid_message_error_all_of import InvalidMessageErrorAllOf
    from openapi_client.model.json_success_base import JsonSuccessBase
    globals()['InvalidMessageErrorAllOf'] = InvalidMessageErrorAllOf
    globals()['JsonSuccessBase'] = JsonSuccessBase
class InvalidMessageError(ModelComposed):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained or range-validated attributes in this schema.
    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'result': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
            'msg': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
            'raw_content': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'result': 'result',  # noqa: E501
        'msg': 'msg',  # noqa: E501
        'raw_content': 'raw_content',  # noqa: E501
    }

    read_only_vars = {
    }

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """InvalidMessageError - a model defined in OpenAPI

        Keyword Args:
            result (bool, date, datetime, dict, float, int, list, str, none_type):
            msg (bool, date, datetime, dict, float, int, list, str, none_type):
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            raw_content (str): The raw content of the message. . [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass __init__ so read-only attributes can be populated from
        # server data without tripping the read_only_vars guard below.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        # Resolve the allOf/anyOf/oneOf composition and distribute kwargs
        # to the matching component model instances.
        composed_info = validate_get_composed_info(
            constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]

        for var_name, var_value in kwargs.items():
            if var_name in discarded_args and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self._additional_properties_model_instances:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Instance attributes used by the model machinery itself; these must not
    # be treated as schema properties.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
        '_composed_instances',
        '_var_name_to_model_instances',
        '_additional_properties_model_instances',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """InvalidMessageError - a model defined in OpenAPI

        Keyword Args:
            result (bool, date, datetime, dict, float, int, list, str, none_type):
            msg (bool, date, datetime, dict, float, int, list, str, none_type):
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            raw_content (str): The raw content of the message. . [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        composed_info = validate_get_composed_info(
            constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]

        for var_name, var_value in kwargs.items():
            if var_name in discarded_args and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self._additional_properties_model_instances:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, direct construction must reject
            # read-only attributes.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")

    @cached_property
    def _composed_schemas():
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        lazy_import()
        return {
          'anyOf': [
          ],
          'allOf': [
              InvalidMessageErrorAllOf,
              JsonSuccessBase,
          ],
          'oneOf': [
          ],
        }
| [
"[email protected]"
]
| |
d6ad8fc4e2dc393c5f84371c25df764c88bad91e | 4057e59cda5a7b680f44a823163c2e59df550139 | /aliyun-python-sdk-green/aliyunsdkgreen/request/v20180509/PostAsyncScanRequest.py | 8940bef200f5543e8282dcc33fb035d3a8971f5f | [
"Apache-2.0"
]
| permissive | Ma233/aliyun-openapi-python-sdk | 01af5ff520a99bddef6a146b4ab17a666c2cf32a | 7013a2673d20ac32355d0a31cd65a4c8fa36576b | refs/heads/master | 2023-04-04T13:07:45.188734 | 2021-04-12T01:57:58 | 2021-04-12T01:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,491 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
from aliyunsdkgreen.endpoint import endpoint_data
class PostAsyncScanRequest(RoaRequest):
    """Request object for the Green 2018-05-09 ``PostAsyncScan`` API."""

    def __init__(self):
        # Configure the ROA-style request for the async post-moderation scan
        # endpoint.
        RoaRequest.__init__(self, 'Green', '2018-05-09', 'PostAsyncScan', 'green')
        self.set_uri_pattern('/green/post/asyncscan')
        self.set_method('POST')
        # Populate endpoint metadata when the base class exposes the hooks.
        for attr, value_factory in (
            ("endpoint_map", endpoint_data.getEndpointMap),
            ("endpoint_regional", endpoint_data.getEndpointRegional),
        ):
            if hasattr(self, attr):
                setattr(self, attr, value_factory())

    def get_ClientInfo(self):
        """Return the ClientInfo query parameter (``None`` when unset)."""
        return self.get_query_params().get('ClientInfo')

    def set_ClientInfo(self, ClientInfo):
        """Set the ClientInfo query parameter."""
        self.add_query_param('ClientInfo', ClientInfo)
"[email protected]"
]
| |
822d582d60b187270c671e09858a3f9f60a052bf | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/FCOS/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py | 459af318e785d119b5afef5f25a3095c1cd4e665 | [
"Apache-2.0",
"GPL-1.0-or-later",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 122 | py | _base_ = '../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py'
# Learning policy: extend the inherited 20-epoch HTC schedule to 28 epochs,
# stepping the learning rate down after epochs 24 and 27.
lr_config = dict(step=[24, 27])
total_epochs = 28
| [
"[email protected]"
]
| |
0ff9d5df07821b0ccf07676e580cb14b38ecf5a9 | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/gui/scaleform/daapi/view/meta/browsermeta.py | 03baa92387d38a7e9c7d66f787f593bbf42db740 | []
| no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 1,992 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/BrowserMeta.py
from gui.Scaleform.framework.entities.BaseDAAPIComponent import BaseDAAPIComponent
class BrowserMeta(BaseDAAPIComponent):
    """DAAPI bridge for the in-game browser Flash component.

    The plain methods are Python-side callbacks that concrete views are
    expected to override; each base stub only reports the missing override.
    The ``as_*S`` methods forward into the Flash object and return ``None``
    whenever DAAPI has not been initialised.
    """

    def browserAction(self, action):
        self._printOverrideError('browserAction')

    def browserMove(self, x, y, z):
        self._printOverrideError('browserMove')

    def browserDown(self, x, y, z):
        self._printOverrideError('browserDown')

    def browserUp(self, x, y, z):
        self._printOverrideError('browserUp')

    def browserFocusOut(self):
        self._printOverrideError('browserFocusOut')

    def onBrowserShow(self, needRefresh):
        self._printOverrideError('onBrowserShow')

    def onBrowserHide(self):
        self._printOverrideError('onBrowserHide')

    def invalidateView(self):
        self._printOverrideError('invalidateView')

    def setBrowserSize(self, width, height, scale):
        self._printOverrideError('setBrowserSize')

    def as_loadBitmapS(self, url):
        if self._isDAAPIInited():
            return self.flashObject.as_loadBitmap(url)
        return None

    def as_resizeS(self, width, height):
        if self._isDAAPIInited():
            return self.flashObject.as_resize(width, height)
        return None

    def as_loadingStartS(self, showContentUnderWaiting):
        if self._isDAAPIInited():
            return self.flashObject.as_loadingStart(showContentUnderWaiting)
        return None

    def as_loadingStopS(self):
        if self._isDAAPIInited():
            return self.flashObject.as_loadingStop()
        return None

    def as_showServiceViewS(self, header, description):
        if self._isDAAPIInited():
            return self.flashObject.as_showServiceView(header, description)
        return None

    def as_hideServiceViewS(self):
        if self._isDAAPIInited():
            return self.flashObject.as_hideServiceView()
        return None

    def as_changeTitleS(self, title):
        if self._isDAAPIInited():
            return self.flashObject.as_changeTitle(title)
        return None
| [
"[email protected]"
]
| |
aa3bf61bfc07d94f00d6817e70534ec9f50a0ba4 | 0eaf0d3f0e96a839f2ef37b92d4db5eddf4b5e02 | /past12/k.py | a9705a280a1da017a04972e470fa5eb408fb0a6b | []
| no_license | silphire/atcoder | b7b02798a87048757745d99e8564397d1ca20169 | f214ef92f13bc5d6b290746d5a94e2faad20d8b0 | refs/heads/master | 2023-09-03T17:56:30.885166 | 2023-09-02T14:16:24 | 2023-09-02T14:16:24 | 245,110,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,904 | py | from collections import defaultdict
class UnionFind(object):
    """Union-Find (Disjoint Set Union) with union by size and path halving.

    All operations run in near-constant amortized time.
    """

    def __init__(self, n: int):
        """Create ``n`` singleton sets labelled ``0 .. n-1``.

        :raises ValueError: if ``n`` is not positive.
        """
        # Raise instead of assert: asserts are stripped under ``python -O``.
        if n <= 0:
            raise ValueError("n must be positive")
        self.parent = list(range(n))
        self.size = [1] * n

    def root(self, x: int) -> int:
        """Return the representative of the set containing ``x``.

        Iterative find with path halving — no recursion, so it cannot hit
        Python's recursion limit even on deep parent chains.
        """
        while self.parent[x] != x:
            # Point x at its grandparent while walking up (path halving).
            self.parent[x] = self.parent[self.parent[x]]
            x = self.parent[x]
        return x

    def is_same(self, x: int, y: int) -> bool:
        """Return True when ``x`` and ``y`` belong to the same set."""
        return self.root(x) == self.root(y)

    def unite(self, x: int, y: int) -> None:
        """Merge the sets containing ``x`` and ``y`` (no-op if already merged)."""
        rx = self.root(x)
        ry = self.root(y)
        if rx == ry:
            return
        # Union by size: attach the smaller tree under the larger one.
        if self.size[rx] < self.size[ry]:
            rx, ry = ry, rx
        self.parent[ry] = rx
        self.size[rx] += self.size[ry]

    def get_size(self, x: int) -> int:
        """Return the number of elements in the set containing ``x``."""
        return self.size[self.root(x)]
# Offline dynamic-connectivity: `g` holds the adjacency sets of the graph
# in its *current* state while the queries are replayed / rewound.
g = defaultdict(set)

n, m = map(int, input().split())
aa = [0] * m
bb = [0] * m
for i in range(m):
    aa[i], bb[i] = map(int, input().split())
    g[aa[i]].add(bb[i])
    g[bb[i]].add(aa[i])

q = int(input())
tt = [0] * q
xx = [0] * q
yy = [0] * q
# Forward pass: read all queries and apply every edge addition/removal so
# that `g` ends up describing the graph after the final query.
for i in range(q):
    tt[i], xx[i], yy[i] = map(int, input().split())
    if tt[i] == 1:
        g[xx[i]].add(yy[i])
        g[yy[i]].add(xx[i])
    elif tt[i] == 2:
        g[xx[i]].remove(yy[i])
        g[yy[i]].remove(xx[i])

# Union-Find over the final graph (vertices 1..n; slot 0 unused).
uf = UnionFind(n + 1)
for k, v in g.items():
    for x in v:
        uf.unite(k, x)

ans = []
# Backward pass: undo queries in reverse.  Undoing a type-1 (addition)
# deletes an edge, which Union-Find cannot do incrementally, so the whole
# structure is rebuilt (O(n + m) per deletion); undoing a type-2 (removal)
# re-adds the edge, which is a cheap incremental unite.
for i in range(q - 1, -1, -1):
    if tt[i] == 1:
        g[xx[i]].remove(yy[i])
        g[yy[i]].remove(xx[i])
        uf = UnionFind(n + 1)
        for k, v in g.items():
            for x in v:
                uf.unite(k, x)
    elif tt[i] == 2:
        g[xx[i]].add(yy[i])
        g[yy[i]].add(xx[i])
        uf.unite(xx[i], yy[i])
    else:
        # Type-3: connectivity query; answers are collected in reverse
        # order and flipped before printing.
        ans.append(uf.is_same(xx[i], yy[i]))

for a in reversed(ans):
    if a:
        print('Yes')
    else:
        print('No')
"[email protected]"
]
| |
21cc95eced5ffb9542d47d29cc4edd83e4b704eb | 3339d4a79ff8670b5cf81dcbfd9ff4d477adad46 | /setup.py | 6872dd60dbbc2cb1391205b9f9bccfda4a3b5955 | [
"MIT"
]
| permissive | djm/elasticsearch-django | a83314854a2c0b7e1dfcecf9190734e47eb5b752 | 2e607846ed4cc716aff9ac2b65182f45c21b6fd8 | refs/heads/master | 2020-05-23T10:19:43.984729 | 2017-01-13T16:23:59 | 2017-01-13T17:24:49 | 80,419,713 | 0 | 0 | null | 2017-01-30T12:31:47 | 2017-01-30T12:31:46 | null | UTF-8 | Python | false | false | 1,452 | py | # -*- coding: utf-8 -*-
from os import path, chdir, pardir
from setuptools import setup, find_packages

HERE = path.dirname(__file__)

# Long description shown on PyPI.
with open(path.join(HERE, 'README.rst')) as readme_file:
    README = readme_file.read()

# requirements.txt must be included in MANIFEST.in and include_package_data must be True
# in order for this to work; ensures that tox can use the setup to enforce requirements
# NOTE: read().splitlines() yields a clean list of requirement strings.
# The previous "'\n'.join(f.readlines())" doubled every newline, because
# readlines() already keeps the trailing '\n' on each line, and setuptools
# expects a list (or newline-separated string) of individual requirements.
with open(path.join(HERE, 'requirements.txt')) as requirements_file:
    REQUIREMENTS = requirements_file.read().splitlines()

# allow setup.py to be run from any path
chdir(path.normpath(path.join(path.abspath(__file__), pardir)))

setup(
    name="elasticsearch-django",
    version="0.5.2",
    packages=find_packages(),
    install_requires=REQUIREMENTS,
    include_package_data=True,
    license='MIT',
    description='Elasticsearch Django app',
    long_description=README,
    url='https://github.com/yunojuno/elasticsearch-django',
    author='Hugo Rodger-Brown',
    author_email='[email protected]',
    maintainer='Hugo Rodger-Brown',
    maintainer_email='[email protected]',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)
| [
"[email protected]"
]
| |
986af905818ae3d5de425d83b122f62b130e2ab9 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2205/60696/263490.py | 8c63e98e28ee66ededfef13e5763a6545d7bb62a | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 386 | py | class Solution:
def numberOfWays(self, n: int) -> int:
res = 1
for i in range(1, n // 2 + 1):
res *= n - i + 1
res //= i
return res // (n // 2 + 1) % (10**9 + 7)
if __name__ == '__main__':
    # First line: number of test cases; each following line: one n.
    n = int(input())
    for i in range(n):
        num = int(input())
        # Round down to the nearest even number before computing the answer.
        num = int(num/2) * 2
        print(Solution().numberOfWays(num))
"[email protected]"
]
| |
0c4b8efc0a4cfc2d7b71757971ac85a9988ef90b | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/labels/bundle_to_label_test.runfiles/pypi__apache_beam_2_9_0/apache_beam/typehints/typehints_test.py | 8de414a779dc8b7df9e42b5ff02f8e76712adad7 | [
"Apache-2.0"
]
| permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 149 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__apache_beam_2_9_0/apache_beam/typehints/typehints_test.py | [
"[email protected]"
]
| |
b2a403561e60d00efd9c1a6062fcb318dbe0fc98 | d8cbe9ce0469f72b8929af01538b6ceddff10a38 | /tests/components/repairs/test_websocket_api.py | 206cedbe6a537826737075ab92511bc267f0170a | [
"Apache-2.0"
]
| permissive | piitaya/home-assistant | 9c1ba162dac9604e4d43e035e74bad7bba327f0b | 48893738192431f96966998c4ff7a3723a2f8f4a | refs/heads/dev | 2023-03-07T16:13:32.117970 | 2023-01-10T17:47:48 | 2023-01-10T17:47:48 | 172,578,293 | 3 | 1 | Apache-2.0 | 2023-02-22T06:15:56 | 2019-02-25T20:19:40 | Python | UTF-8 | Python | false | false | 16,249 | py | """Test the repairs websocket API."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from http import HTTPStatus
from unittest.mock import ANY, AsyncMock, Mock
from aiohttp import ClientSession, ClientWebSocketResponse
from freezegun import freeze_time
import pytest
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.components.repairs.const import DOMAIN
from homeassistant.const import __version__ as ha_version
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry
from homeassistant.setup import async_setup_component
from tests.common import mock_platform
# Issue payload used by most tests; note `data` is intentionally absent
# (the fix flow then expects None, see EXPECTED_DATA below).
DEFAULT_ISSUES = [
    {
        "breaks_in_ha_version": "2022.9",
        "domain": "fake_integration",
        "issue_id": "issue_1",
        "is_fixable": True,
        "learn_more_url": "https://theuselessweb.com",
        "severity": "error",
        "translation_key": "abc_123",
        "translation_placeholders": {"abc": "123"},
    }
]
async def create_issues(hass, ws_client, issues=None):
    """Create issues in the registry and assert they are listed via the WS API."""

    def api_issue(issue):
        # Shape of an issue as returned by the websocket API: internal-only
        # `data` is stripped, `created` is matched loosely with ANY.
        excluded_keys = ("data",)
        return dict(
            {key: issue[key] for key in issue if key not in excluded_keys},
            created=ANY,
            dismissed_version=None,
            ignored=False,
            issue_domain=None,
        )

    if issues is None:
        issues = DEFAULT_ISSUES

    for issue in issues:
        issue_registry.async_create_issue(
            hass,
            issue["domain"],
            issue["issue_id"],
            breaks_in_ha_version=issue["breaks_in_ha_version"],
            data=issue.get("data"),
            is_fixable=issue["is_fixable"],
            is_persistent=False,
            learn_more_url=issue["learn_more_url"],
            severity=issue["severity"],
            translation_key=issue["translation_key"],
            translation_placeholders=issue["translation_placeholders"],
        )

    await ws_client.send_json({"id": 1, "type": "repairs/list_issues"})
    msg = await ws_client.receive_json()

    assert msg["success"]
    assert msg["result"] == {"issues": [api_issue(issue) for issue in issues]}

    return issues
# The `data` payload each mock fix flow expects for a given issue_id.
EXPECTED_DATA = {
    "issue_1": None,
    "issue_2": {"blah": "bleh"},
    "abort_issue1": None,
}
class MockFixFlow(RepairsFlow):
    """Handler for an issue fixing flow."""

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""
        # Verify the flow was created with the issue id / data it expects.
        assert self.issue_id in EXPECTED_DATA
        assert self.data == EXPECTED_DATA[self.issue_id]

        return await (self.async_step_custom_step())

    async def async_step_custom_step(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle a custom_step step of a fix flow."""
        # First call shows an (empty) form; the follow-up POST with any
        # user_input finishes the flow.
        if user_input is not None:
            return self.async_create_entry(data={})

        return self.async_show_form(step_id="custom_step", data_schema=vol.Schema({}))
class MockFixFlowAbort(RepairsFlow):
    """Handler for an issue fixing flow that aborts."""

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""
        # Always abort immediately; used to test that aborted flows do not
        # remove the underlying issue.
        return self.async_abort(reason="not_given")
@pytest.fixture(autouse=True)
async def mock_repairs_integration(hass):
    """Mock a repairs integration."""
    hass.config.components.add("fake_integration")

    def async_create_fix_flow(hass, issue_id, data):
        # Factory called by the repairs component; dispatches to the
        # aborting flow for the dedicated abort test issue.
        assert issue_id in EXPECTED_DATA
        assert data == EXPECTED_DATA[issue_id]

        if issue_id == "abort_issue1":
            return MockFixFlowAbort()
        return MockFixFlow()

    mock_platform(
        hass,
        "fake_integration.repairs",
        Mock(async_create_fix_flow=AsyncMock(wraps=async_create_fix_flow)),
    )
    # An integration declaring a repairs platform but no fix-flow factory.
    mock_platform(
        hass,
        "integration_without_repairs.repairs",
        Mock(spec=[]),
    )
async def test_dismiss_issue(hass: HomeAssistant, hass_ws_client) -> None:
    """Test we can ignore and un-ignore an issue via the websocket API."""
    assert await async_setup_component(hass, DOMAIN, {})

    client = await hass_ws_client(hass)

    issues = await create_issues(hass, client)

    # Ignoring an unknown issue fails.
    await client.send_json(
        {
            "id": 2,
            "type": "repairs/ignore_issue",
            "domain": "fake_integration",
            "issue_id": "no_such_issue",
            "ignore": True,
        }
    )
    msg = await client.receive_json()
    assert not msg["success"]

    await client.send_json(
        {
            "id": 3,
            "type": "repairs/ignore_issue",
            "domain": "fake_integration",
            "issue_id": "issue_1",
            "ignore": True,
        }
    )
    msg = await client.receive_json()
    assert msg["success"]
    assert msg["result"] is None

    # The ignored issue is still listed, now flagged with the HA version it
    # was dismissed on.
    await client.send_json({"id": 4, "type": "repairs/list_issues"})
    msg = await client.receive_json()

    assert msg["success"]
    assert msg["result"] == {
        "issues": [
            dict(
                issue,
                created=ANY,
                dismissed_version=ha_version,
                ignored=True,
                issue_domain=None,
            )
            for issue in issues
        ]
    }

    # Un-ignore the issue again.
    await client.send_json(
        {
            "id": 5,
            "type": "repairs/ignore_issue",
            "domain": "fake_integration",
            "issue_id": "issue_1",
            "ignore": False,
        }
    )
    msg = await client.receive_json()
    assert msg["success"]
    assert msg["result"] is None

    await client.send_json({"id": 6, "type": "repairs/list_issues"})
    msg = await client.receive_json()

    assert msg["success"]
    assert msg["result"] == {
        "issues": [
            dict(
                issue,
                created=ANY,
                dismissed_version=None,
                ignored=False,
                issue_domain=None,
            )
            for issue in issues
        ]
    }
async def test_fix_non_existing_issue(
    hass: HomeAssistant, hass_client, hass_ws_client
) -> None:
    """Test trying to fix an issue that doesn't exist."""
    assert await async_setup_component(hass, "http", {})
    assert await async_setup_component(hass, DOMAIN, {})

    ws_client = await hass_ws_client(hass)
    client = await hass_client()

    issues = await create_issues(hass, ws_client)

    # Unknown handler integration.
    url = "/api/repairs/issues/fix"
    resp = await client.post(
        url, json={"handler": "no_such_integration", "issue_id": "no_such_issue"}
    )

    assert resp.status != HTTPStatus.OK

    # Known handler, unknown issue id.
    url = "/api/repairs/issues/fix"
    resp = await client.post(
        url, json={"handler": "fake_integration", "issue_id": "no_such_issue"}
    )

    assert resp.status != HTTPStatus.OK

    # The original issues are untouched.
    await ws_client.send_json({"id": 3, "type": "repairs/list_issues"})
    msg = await ws_client.receive_json()

    assert msg["success"]
    assert msg["result"] == {
        "issues": [
            dict(
                issue,
                created=ANY,
                dismissed_version=None,
                ignored=False,
                issue_domain=None,
            )
            for issue in issues
        ]
    }
@pytest.mark.parametrize(
    "domain, step, description_placeholders",
    (
        # Integration with an explicit fix flow vs. the default handler.
        ("fake_integration", "custom_step", None),
        ("fake_integration_default_handler", "confirm", {"abc": "123"}),
    ),
)
async def test_fix_issue(
    hass: HomeAssistant,
    hass_client,
    hass_ws_client,
    domain,
    step,
    description_placeholders,
) -> None:
    """Test we can fix an issue."""
    assert await async_setup_component(hass, "http", {})
    assert await async_setup_component(hass, DOMAIN, {})

    ws_client = await hass_ws_client(hass)
    client = await hass_client()

    issues = [
        {
            **DEFAULT_ISSUES[0],
            "data": {"blah": "bleh"},
            "domain": domain,
            "issue_id": "issue_2",
        }
    ]
    await create_issues(hass, ws_client, issues=issues)

    # Start the fix flow; the first step renders a form.
    url = "/api/repairs/issues/fix"
    resp = await client.post(url, json={"handler": domain, "issue_id": "issue_2"})

    assert resp.status == HTTPStatus.OK
    data = await resp.json()

    flow_id = data["flow_id"]
    assert data == {
        "data_schema": [],
        "description_placeholders": description_placeholders,
        "errors": None,
        "flow_id": ANY,
        "handler": domain,
        "last_step": None,
        "step_id": step,
        "type": "form",
    }

    url = f"/api/repairs/issues/fix/{flow_id}"
    # Test we can get the status of the flow
    resp2 = await client.get(url)

    assert resp2.status == HTTPStatus.OK
    data2 = await resp2.json()

    assert data == data2

    # Submit the form, completing the flow.
    resp = await client.post(url)

    assert resp.status == HTTPStatus.OK
    data = await resp.json()

    flow_id = data["flow_id"]
    assert data == {
        "description": None,
        "description_placeholders": None,
        "flow_id": flow_id,
        "handler": domain,
        "type": "create_entry",
        "version": 1,
    }

    # Completing the flow removes the issue.
    await ws_client.send_json({"id": 4, "type": "repairs/list_issues"})
    msg = await ws_client.receive_json()

    assert msg["success"]
    assert msg["result"] == {"issues": []}
async def test_fix_issue_unauth(
    hass: HomeAssistant, hass_client, hass_admin_user
) -> None:
    """Test we can't start a fix flow if not authorized."""
    assert await async_setup_component(hass, "http", {})
    assert await async_setup_component(hass, DOMAIN, {})
    # Drop admin rights before attempting the fix.
    hass_admin_user.groups = []

    client = await hass_client()

    url = "/api/repairs/issues/fix"
    resp = await client.post(
        url, json={"handler": "fake_integration", "issue_id": "issue_1"}
    )

    assert resp.status == HTTPStatus.UNAUTHORIZED
async def test_get_progress_unauth(
    hass: HomeAssistant, hass_client, hass_ws_client, hass_admin_user
) -> None:
    """Test we can't read flow progress if not authorized."""
    assert await async_setup_component(hass, "http", {})
    assert await async_setup_component(hass, DOMAIN, {})

    ws_client = await hass_ws_client(hass)
    client = await hass_client()

    await create_issues(hass, ws_client)

    url = "/api/repairs/issues/fix"
    resp = await client.post(
        url, json={"handler": "fake_integration", "issue_id": "issue_1"}
    )
    assert resp.status == HTTPStatus.OK
    data = await resp.json()
    flow_id = data["flow_id"]

    # Revoke admin rights after the flow was started.
    hass_admin_user.groups = []

    url = f"/api/repairs/issues/fix/{flow_id}"
    # Test we can't get the status of the flow
    resp = await client.get(url)
    assert resp.status == HTTPStatus.UNAUTHORIZED
async def test_step_unauth(
    hass: HomeAssistant, hass_client, hass_ws_client, hass_admin_user
) -> None:
    """Test we can't advance a fix flow if not authorized."""
    assert await async_setup_component(hass, "http", {})
    assert await async_setup_component(hass, DOMAIN, {})

    ws_client = await hass_ws_client(hass)
    client = await hass_client()

    await create_issues(hass, ws_client)

    url = "/api/repairs/issues/fix"
    resp = await client.post(
        url, json={"handler": "fake_integration", "issue_id": "issue_1"}
    )
    assert resp.status == HTTPStatus.OK
    data = await resp.json()
    flow_id = data["flow_id"]

    # Revoke admin rights after the flow was started.
    hass_admin_user.groups = []

    url = f"/api/repairs/issues/fix/{flow_id}"
    # Test we can't post to the flow
    resp = await client.post(url)
    assert resp.status == HTTPStatus.UNAUTHORIZED
@freeze_time("2022-07-19 07:53:05")
async def test_list_issues(hass: HomeAssistant, hass_storage, hass_ws_client) -> None:
    """Test we can list issues."""

    # Add an inactive issue, this should not be exposed in the list
    hass_storage[issue_registry.STORAGE_KEY] = {
        "version": issue_registry.STORAGE_VERSION_MAJOR,
        "data": {
            "issues": [
                {
                    "created": "2022-07-19T09:41:13.746514+00:00",
                    "dismissed_version": None,
                    "domain": "test",
                    "is_persistent": False,
                    "issue_id": "issue_3_inactive",
                    "issue_domain": None,
                },
            ]
        },
    }

    assert await async_setup_component(hass, DOMAIN, {})

    client = await hass_ws_client(hass)

    # Nothing is listed until issues are (re)created this run.
    await client.send_json({"id": 1, "type": "repairs/list_issues"})
    msg = await client.receive_json()

    assert msg["success"]
    assert msg["result"] == {"issues": []}

    issues = [
        {
            "breaks_in_ha_version": "2022.9",
            "domain": "test",
            "is_fixable": True,
            "issue_id": "issue_1",
            "issue_domain": None,
            "learn_more_url": "https://theuselessweb.com",
            "severity": "error",
            "translation_key": "abc_123",
            "translation_placeholders": {"abc": "123"},
        },
        {
            "breaks_in_ha_version": "2022.8",
            "domain": "test",
            "is_fixable": False,
            "issue_id": "issue_2",
            "issue_domain": None,
            "learn_more_url": "https://theuselessweb.com/abc",
            "severity": "other",
            "translation_key": "even_worse",
            "translation_placeholders": {"def": "456"},
        },
    ]

    for issue in issues:
        issue_registry.async_create_issue(
            hass,
            issue["domain"],
            issue["issue_id"],
            breaks_in_ha_version=issue["breaks_in_ha_version"],
            is_fixable=issue["is_fixable"],
            is_persistent=False,
            learn_more_url=issue["learn_more_url"],
            severity=issue["severity"],
            translation_key=issue["translation_key"],
            translation_placeholders=issue["translation_placeholders"],
        )

    await client.send_json({"id": 2, "type": "repairs/list_issues"})
    msg = await client.receive_json()

    assert msg["success"]
    # `created` is deterministic thanks to freeze_time above.
    assert msg["result"] == {
        "issues": [
            dict(
                issue,
                created="2022-07-19T07:53:05+00:00",
                dismissed_version=None,
                ignored=False,
            )
            for issue in issues
        ]
    }
async def test_fix_issue_aborted(
    hass: HomeAssistant,
    hass_client: Callable[..., Awaitable[ClientSession]],
    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
) -> None:
    """Test an aborted fix flow leaves the issue in place."""
    assert await async_setup_component(hass, "http", {})
    assert await async_setup_component(hass, DOMAIN, {})

    ws_client = await hass_ws_client(hass)
    client = await hass_client()

    await create_issues(
        hass,
        ws_client,
        issues=[
            {
                **DEFAULT_ISSUES[0],
                "domain": "fake_integration",
                "issue_id": "abort_issue1",
            }
        ],
    )

    await ws_client.send_json({"id": 3, "type": "repairs/list_issues"})
    msg = await ws_client.receive_json()

    assert msg["success"]
    assert len(msg["result"]["issues"]) == 1
    first_issue = msg["result"]["issues"][0]
    assert first_issue["domain"] == "fake_integration"
    assert first_issue["issue_id"] == "abort_issue1"

    # MockFixFlowAbort aborts on its first step.
    resp = await client.post(
        "/api/repairs/issues/fix",
        json={"handler": "fake_integration", "issue_id": "abort_issue1"},
    )

    assert resp.status == HTTPStatus.OK
    data = await resp.json()

    flow_id = data["flow_id"]
    assert data == {
        "type": "abort",
        "flow_id": flow_id,
        "handler": "fake_integration",
        "reason": "not_given",
        "description_placeholders": None,
        "result": None,
    }

    # The aborted flow must not have removed the issue.
    await ws_client.send_json({"id": 4, "type": "repairs/list_issues"})
    msg = await ws_client.receive_json()

    assert msg["success"]
    assert len(msg["result"]["issues"]) == 1
    assert msg["result"]["issues"][0] == first_issue
| [
"[email protected]"
]
| |
73d320126cdceeed526c2541becb2e67e6003c88 | 033095b33abcef8ac34638a277de11ab734cf642 | /Chapter04/Refactoring/point_v3.py | 929c6d925f187b40c1dc7872edeb211b1229dadc | [
"MIT"
]
| permissive | PacktPublishing/Hands-On-Application-Development-with-PyCharm | fa2df6bfbe06272a4314a83f03cd90030ed2d466 | 9998906b754a44ced6789e850e8eed139bec0a97 | refs/heads/master | 2023-04-27T23:49:31.779700 | 2023-01-18T09:11:50 | 2023-01-18T09:11:50 | 175,549,552 | 40 | 45 | MIT | 2023-04-21T20:31:21 | 2019-03-14T04:43:12 | Python | UTF-8 | Python | false | false | 1,455 | py | from math import sqrt
from matplotlib import pyplot as plt
class Point():
    """A 2-D point supporting +, -, Euclidean distance and plotting."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __str__(self):
        return f'Point ({self.x}, {self.y})'

    def __add__(self, p):
        # Component-wise vector addition.
        return Point(self.x + p.x, self.y + p.y)

    def __sub__(self, p):
        # Component-wise vector subtraction.
        return Point(self.x - p.x, self.y - p.y)

    def distance(self, p):
        """Euclidean distance between this point and *p*."""
        delta = self - p
        return sqrt(delta.x ** 2 + delta.y ** 2)

    @staticmethod
    def draw(x, y):
        """Plot the point (x, y) with guide lines and axis arrows."""
        # Plot range is symmetric and just large enough to contain the point.
        limit = max(x, y) + 1
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.set_aspect('equal')

        # Guide lines dropping onto the x- and y-axes.
        plt.plot([x, x], [0, y], '-', c='blue', linewidth=3)
        plt.plot([0, x], [y, y], '-', c='blue', linewidth=3)

        # The point itself.
        plt.scatter(x, y, s=100, marker='o', c='red')

        ax.set_xlim((-limit, limit))
        ax.set_ylim((-limit, limit))

        # Arrow heads on both axes.
        left, right = ax.get_xlim()
        bottom, top = ax.get_ylim()
        plt.arrow(left, 0, right - left, 0, length_includes_head=True,
                  head_width=0.15)
        plt.arrow(0, bottom, 0, top - bottom, length_includes_head=True,
                  head_width=0.15)

        plt.grid()
        plt.show()
if __name__ == '__main__':
    # Demo: p1 is created but only p2 is plotted.
    p1 = Point(1, 0)
    p2 = Point(5, 3)
    Point.draw(p2.x, p2.y)
| [
"[email protected]"
]
| |
98930418fa6ba8447e3892b6d3617b8dd0694d2e | f62fd455e593a7ad203a5c268e23129473d968b6 | /swift-2.13.1/swift/obj/reconstructor.py | 35d70b01dfeb03763f5da60a33e64dc896e3371d | [
"Apache-2.0"
]
| permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 43,604 | py | # Copyright (c) 2010-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import errno
import os
from os.path import join
import random
import time
import itertools
from collections import defaultdict
import six
import six.moves.cPickle as pickle
import shutil
from eventlet import (GreenPile, GreenPool, Timeout, sleep, hubs, tpool,
spawn)
from eventlet.support.greenlets import GreenletExit
from swift import gettext_ as _
from swift.common.utils import (
whataremyips, unlink_older_than, compute_eta, get_logger,
dump_recon_cache, mkdirs, config_true_value, list_from_csv, get_hub,
tpool_reraise, GreenAsyncPile, Timestamp, remove_file)
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.bufferedhttp import http_connect
from swift.common.daemon import Daemon
from swift.common.ring.utils import is_local_device
from swift.obj.ssync_sender import Sender as ssync_sender
from swift.common.http import HTTP_OK, HTTP_NOT_FOUND, \
HTTP_INSUFFICIENT_STORAGE
from swift.obj.diskfile import DiskFileRouter, get_data_dir, \
get_tmp_dir
from swift.common.storage_policy import POLICIES, EC_POLICY
from swift.common.exceptions import ConnectionTimeout, DiskFileError, \
SuffixSyncError
# Reconstructor job types: plain suffix sync vs. reverting a handoff
# partition back to its primary nodes.
SYNC, REVERT = ('sync_only', 'sync_revert')

hubs.use_hub(get_hub())
def _get_partners(frag_index, part_nodes):
"""
Returns the left and right partners of the node whose index is
equal to the given frag_index.
:param frag_index: a fragment index
:param part_nodes: a list of primary nodes
:returns: [<node-to-left>, <node-to-right>]
"""
return [
part_nodes[(frag_index - 1) % len(part_nodes)],
part_nodes[(frag_index + 1) % len(part_nodes)],
]
class RebuildingECDiskFileStream(object):
    """
    Wraps the reconstructed fragment archive data and metadata in the
    DiskFile interface for ssync.
    """

    def __init__(self, datafile_metadata, frag_index, rebuilt_fragment_iter):
        # Start from the metadata of a participating fragment archive; the
        # rebuilt archive has the same length as the others in the set.
        self.datafile_metadata = datafile_metadata
        self._content_length = int(datafile_metadata['Content-Length'])

        # Stamp the new fragment index and drop any ETag: the receiving
        # object server recalculates it on its side.
        datafile_metadata['X-Object-Sysmeta-Ec-Frag-Index'] = frag_index
        for etag_key in ('ETag', 'Etag'):
            datafile_metadata.pop(etag_key, None)

        self.frag_index = frag_index
        self.rebuilt_fragment_iter = rebuilt_fragment_iter

    def get_metadata(self):
        return self.datafile_metadata

    def get_datafile_metadata(self):
        return self.datafile_metadata

    @property
    def content_length(self):
        return self._content_length

    def reader(self):
        # Stream rebuilt fragment chunks as they are produced.
        for chunk in self.rebuilt_fragment_iter:
            yield chunk
class ObjectReconstructor(Daemon):
"""
Reconstruct objects using erasure code. And also rebalance EC Fragment
Archive objects off handoff nodes.
Encapsulates most logic and data needed by the object reconstruction
process. Each call to .reconstruct() performs one pass. It's up to the
caller to do this in a loop.
"""
    def __init__(self, conf, logger=None):
        """
        :param conf: configuration object obtained from ConfigParser
        :param logger: logging object
        """
        self.conf = conf
        self.logger = logger or get_logger(
            conf, log_route='object-reconstructor')
        self.devices_dir = conf.get('devices', '/srv/node')
        self.mount_check = config_true_value(conf.get('mount_check', 'true'))
        self.swift_dir = conf.get('swift_dir', '/etc/swift')
        self.bind_ip = conf.get('bind_ip', '0.0.0.0')
        self.servers_per_port = int(conf.get('servers_per_port', '0') or 0)
        # With servers-per-port the bind port is per-worker, so a single
        # port cannot be pinned here.
        self.port = None if self.servers_per_port else \
            int(conf.get('bind_port', 6200))
        self.concurrency = int(conf.get('concurrency', 1))
        self.stats_interval = int(conf.get('stats_interval', '300'))
        self.ring_check_interval = int(conf.get('ring_check_interval', 15))
        self.next_check = time.time() + self.ring_check_interval
        self.partition_times = []
        # 'run_pause' is the legacy spelling of 'interval'.
        self.interval = int(conf.get('interval') or
                            conf.get('run_pause') or 30)
        self.http_timeout = int(conf.get('http_timeout', 60))
        self.lockup_timeout = int(conf.get('lockup_timeout', 1800))
        self.recon_cache_path = conf.get('recon_cache_path',
                                         '/var/cache/swift')
        self.rcache = os.path.join(self.recon_cache_path, "object.recon")
        # defaults subject to change after beta
        self.conn_timeout = float(conf.get('conn_timeout', 0.5))
        self.node_timeout = float(conf.get('node_timeout', 10))
        self.network_chunk_size = int(conf.get('network_chunk_size', 65536))
        self.disk_chunk_size = int(conf.get('disk_chunk_size', 65536))
        self.headers = {
            'Content-Length': '0',
            'user-agent': 'obj-reconstructor %s' % os.getpid()}
        # handoffs_first is deprecated; it seeds the default for
        # handoffs_only, which takes precedence when both are set.
        if 'handoffs_first' in conf:
            self.logger.warning(
                'The handoffs_first option is deprecated in favor '
                'of handoffs_only. This option may be ignored in a '
                'future release.')
            # honor handoffs_first for backwards compatibility
            default_handoffs_only = config_true_value(conf['handoffs_first'])
        else:
            default_handoffs_only = False
        self.handoffs_only = config_true_value(
            conf.get('handoffs_only', default_handoffs_only))
        if self.handoffs_only:
            self.logger.warning(
                'Handoff only mode is not intended for normal '
                'operation, use handoffs_only with care.')
        elif default_handoffs_only:
            self.logger.warning('Ignored handoffs_first option in favor '
                                'of handoffs_only.')
        self._df_router = DiskFileRouter(conf, self.logger)
    def load_object_ring(self, policy):
        """
        Make sure the policy's rings are loaded.

        :param policy: the StoragePolicy instance
        :returns: appropriate ring object
        """
        # NOTE(review): assumes policy.load_ring caches the loaded ring so
        # repeated calls are cheap — confirm against StoragePolicy.
        policy.load_ring(self.swift_dir)
        return policy.object_ring
    def check_ring(self, object_ring):
        """
        Check to see if the ring has been updated

        :param object_ring: the ring to check

        :returns: False only when the check interval has elapsed and the
                  on-disk ring is found to have changed; True otherwise
                  (i.e. the ring may still be treated as current)
        """
        if time.time() > self.next_check:
            self.next_check = time.time() + self.ring_check_interval
            if object_ring.has_changed():
                return False
        return True
    def _full_path(self, node, part, path, policy):
        # Human-readable location string used in log messages; nodes
        # without an 'index' key (handoffs) are labelled frag#handoff.
        return '%(replication_ip)s:%(replication_port)s' \
            '/%(device)s/%(part)s%(path)s ' \
            'policy#%(policy)d frag#%(frag_index)s' % {
                'replication_ip': node['replication_ip'],
                'replication_port': node['replication_port'],
                'device': node['device'],
                'part': part, 'path': path,
                'policy': policy,
                'frag_index': node.get('index', 'handoff'),
            }
    def _get_response(self, node, part, path, headers, policy):
        """
        Helper method for reconstruction that GETs a single EC fragment
        archive

        :param node: the node to GET from
        :param part: the partition
        :param path: full path of the desired EC archive
        :param headers: the headers to send
        :param policy: an instance of
                       :class:`~swift.common.storage_policy.BaseStoragePolicy`
        :returns: the response object, or None on error / 404
        """
        resp = None
        try:
            with ConnectionTimeout(self.conn_timeout):
                conn = http_connect(node['ip'], node['port'], node['device'],
                                    part, 'GET', path, headers=headers)
            with Timeout(self.node_timeout):
                resp = conn.getresponse()
            if resp.status not in [HTTP_OK, HTTP_NOT_FOUND]:
                self.logger.warning(
                    _("Invalid response %(resp)s from %(full_path)s"),
                    {'resp': resp.status,
                     'full_path': self._full_path(node, part, path, policy)})
                resp = None
            elif resp.status == HTTP_NOT_FOUND:
                # A 404 is an expected outcome, not worth logging.
                resp = None
        except (Exception, Timeout):
            self.logger.exception(
                _("Trying to GET %(full_path)s"), {
                    'full_path': self._full_path(node, part, path, policy)})
        return resp
def reconstruct_fa(self, job, node, datafile_metadata):
"""
Reconstructs a fragment archive - this method is called from ssync
after a remote node responds that is missing this object - the local
diskfile is opened to provide metadata - but to reconstruct the
missing fragment archive we must connect to multiple object servers.
:param job: job from ssync_sender
:param node: node that we're rebuilding to
:param datafile_metadata: the datafile metadata to attach to
the rebuilt fragment archive
:returns: a DiskFile like class for use by ssync
:raises DiskFileError: if the fragment archive cannot be reconstructed
"""
part_nodes = job['policy'].object_ring.get_part_nodes(
job['partition'])
part_nodes.remove(node)
# the fragment index we need to reconstruct is the position index
# of the node we're rebuilding to within the primary part list
fi_to_rebuild = node['index']
# KISS send out connection requests to all nodes, see what sticks.
# Use fragment preferences header to tell other nodes that we want
# fragments at the same timestamp as our fragment, and that they don't
# need to be durable.
headers = self.headers.copy()
headers['X-Backend-Storage-Policy-Index'] = int(job['policy'])
frag_prefs = [{'timestamp': datafile_metadata['X-Timestamp'],
'exclude': []}]
headers['X-Backend-Fragment-Preferences'] = json.dumps(frag_prefs)
pile = GreenAsyncPile(len(part_nodes))
path = datafile_metadata['name']
for node in part_nodes:
pile.spawn(self._get_response, node, job['partition'],
path, headers, job['policy'])
responses = []
etag = None
for resp in pile:
if not resp:
continue
resp.headers = HeaderKeyDict(resp.getheaders())
if str(fi_to_rebuild) == \
resp.headers.get('X-Object-Sysmeta-Ec-Frag-Index'):
continue
if resp.headers.get('X-Object-Sysmeta-Ec-Frag-Index') in set(
r.headers.get('X-Object-Sysmeta-Ec-Frag-Index')
for r in responses):
continue
responses.append(resp)
etag = sorted(responses, reverse=True,
key=lambda r: Timestamp(
r.headers.get('X-Backend-Timestamp')
))[0].headers.get('X-Object-Sysmeta-Ec-Etag')
responses = [r for r in responses if
r.headers.get('X-Object-Sysmeta-Ec-Etag') == etag]
if len(responses) >= job['policy'].ec_ndata:
break
else:
self.logger.error(
'Unable to get enough responses (%s/%s) '
'to reconstruct %s with ETag %s' % (
len(responses), job['policy'].ec_ndata,
self._full_path(node, job['partition'],
datafile_metadata['name'], job['policy']),
etag))
raise DiskFileError('Unable to reconstruct EC archive')
rebuilt_fragment_iter = self.make_rebuilt_fragment_iter(
responses[:job['policy'].ec_ndata], path, job['policy'],
fi_to_rebuild)
return RebuildingECDiskFileStream(datafile_metadata, fi_to_rebuild,
rebuilt_fragment_iter)
    def _reconstruct(self, policy, fragment_payload, frag_index):
        # Delegate to the policy's PyECLib driver to rebuild the single
        # missing fragment from the collected peer fragments.
        return policy.pyeclib_driver.reconstruct(fragment_payload,
                                                 [frag_index])[0]
    def make_rebuilt_fragment_iter(self, responses, path, policy, frag_index):
        """
        Turn a set of connections from backend object servers into a generator
        that yields up the rebuilt fragment archive for frag_index.
        """

        def _get_one_fragment(resp):
            # Read exactly one encoded fragment's worth of data from a
            # backend response (may take multiple reads).
            # NOTE(review): ''-accumulation assumes py2-style str chunks
            # from resp.read() — confirm before porting to py3.
            buff = ''
            remaining_bytes = policy.fragment_size
            while remaining_bytes:
                chunk = resp.read(remaining_bytes)
                if not chunk:
                    break
                remaining_bytes -= len(chunk)
                buff += chunk
            return buff

        def fragment_payload_iter():
            # We need a fragment from each connections, so best to
            # use a GreenPile to keep them ordered and in sync
            pile = GreenPile(len(responses))
            while True:
                for resp in responses:
                    pile.spawn(_get_one_fragment, resp)
                try:
                    with Timeout(self.node_timeout):
                        fragment_payload = [fragment for fragment in pile]
                except (Exception, Timeout):
                    self.logger.exception(
                        _("Error trying to rebuild %(path)s "
                          "policy#%(policy)d frag#%(frag_index)s"),
                        {'path': path,
                         'policy': policy,
                         'frag_index': frag_index,
                         })
                    break
                if not all(fragment_payload):
                    # A short/empty read from any peer ends the stream.
                    break
                rebuilt_fragment = self._reconstruct(
                    policy, fragment_payload, frag_index)
                yield rebuilt_fragment

        return fragment_payload_iter()
    def stats_line(self):
        """
        Logs various stats for the currently running reconstruction pass.
        """
        if (self.device_count and self.part_count and
                self.reconstruction_device_count):
            elapsed = (time.time() - self.start) or 0.000001
            rate = self.reconstruction_part_count / elapsed
            # scale observed partition count up to an estimate across all
            # local devices, since only some may have been visited so far
            total_part_count = (1.0 * self.part_count *
                                self.device_count /
                                self.reconstruction_device_count)
            self.logger.info(
                _("%(reconstructed)d/%(total)d (%(percentage).2f%%)"
                  " partitions of %(device)d/%(dtotal)d "
                  "(%(dpercentage).2f%%) devices"
                  " reconstructed in %(time).2fs "
                  "(%(rate).2f/sec, %(remaining)s remaining)"),
                {'reconstructed': self.reconstruction_part_count,
                 'total': self.part_count,
                 'percentage':
                 self.reconstruction_part_count * 100.0 / self.part_count,
                 'device': self.reconstruction_device_count,
                 'dtotal': self.device_count,
                 'dpercentage':
                 self.reconstruction_device_count * 100.0 / self.device_count,
                 'time': time.time() - self.start, 'rate': rate,
                 'remaining': '%d%s' %
                 compute_eta(self.start,
                             self.reconstruction_part_count,
                             total_part_count)})

        if self.suffix_count and self.partition_times:
            self.logger.info(
                _("%(checked)d suffixes checked - "
                  "%(hashed).2f%% hashed, %(synced).2f%% synced"),
                {'checked': self.suffix_count,
                 'hashed': (self.suffix_hash * 100.0) / self.suffix_count,
                 'synced': (self.suffix_sync * 100.0) / self.suffix_count})
            self.partition_times.sort()
            # report max / min / median per-partition processing times
            self.logger.info(
                _("Partition times: max %(max).4fs, "
                  "min %(min).4fs, med %(med).4fs"),
                {'max': self.partition_times[-1],
                 'min': self.partition_times[0],
                 'med': self.partition_times[
                     len(self.partition_times) // 2]})
        else:
            self.logger.info(
                _("Nothing reconstructed for %s seconds."),
                (time.time() - self.start))
def kill_coros(self):
"""Utility function that kills all coroutines currently running."""
for coro in list(self.run_pool.coroutines_running):
try:
coro.kill(GreenletExit)
except GreenletExit:
pass
    def heartbeat(self):
        """
        Loop that runs in the background during reconstruction.  It
        periodically logs progress.

        Spawned as a greenthread by ``reconstruct`` and killed when the
        pass finishes.
        """
        while True:
            sleep(self.stats_interval)
            self.stats_line()
def detect_lockups(self):
"""
In testing, the pool.waitall() call very occasionally failed to return.
This is an attempt to make sure the reconstructor finishes its
reconstruction pass in some eventuality.
"""
while True:
sleep(self.lockup_timeout)
if self.reconstruction_count == self.last_reconstruction_count:
self.logger.error(_("Lockup detected.. killing live coros."))
self.kill_coros()
self.last_reconstruction_count = self.reconstruction_count
def _get_hashes(self, policy, path, recalculate=None, do_listdir=False):
df_mgr = self._df_router[policy]
hashed, suffix_hashes = tpool_reraise(
df_mgr._get_hashes, path, recalculate=recalculate,
do_listdir=do_listdir)
self.logger.update_stats('suffix.hashes', hashed)
return suffix_hashes
def get_suffix_delta(self, local_suff, local_index,
remote_suff, remote_index):
"""
Compare the local suffix hashes with the remote suffix hashes
for the given local and remote fragment indexes. Return those
suffixes which should be synced.
:param local_suff: the local suffix hashes (from _get_hashes)
:param local_index: the local fragment index for the job
:param remote_suff: the remote suffix hashes (from remote
REPLICATE request)
:param remote_index: the remote fragment index for the job
:returns: a list of strings, the suffix dirs to sync
"""
suffixes = []
for suffix, sub_dict_local in local_suff.items():
sub_dict_remote = remote_suff.get(suffix, {})
if (sub_dict_local.get(None) != sub_dict_remote.get(None) or
sub_dict_local.get(local_index) !=
sub_dict_remote.get(remote_index)):
suffixes.append(suffix)
return suffixes
    def rehash_remote(self, node, job, suffixes):
        """
        Ask the remote node to rehash the given suffixes (after we've
        ssync'd to it) via a REPLICATE request.

        Best effort: failures are logged and otherwise ignored.

        :param node: the remote node dict
        :param job: the job dict, with the keys defined in ``_get_part_jobs``
        :param suffixes: a list of suffix dirs that were synced
        """
        try:
            with Timeout(self.http_timeout):
                conn = http_connect(
                    node['replication_ip'], node['replication_port'],
                    node['device'], job['partition'], 'REPLICATE',
                    '/' + '-'.join(sorted(suffixes)),
                    headers=self.headers)
                # drain the response; we only care that the request landed
                conn.getresponse().read()
        except (Exception, Timeout):
            self.logger.exception(
                _("Trying to sync suffixes with %s") % self._full_path(
                    node, job['partition'], '', job['policy']))
    def _get_suffixes_to_sync(self, job, node):
        """
        For SYNC jobs we need to make a remote REPLICATE request to get
        the remote node's current suffix's hashes and then compare to our
        local suffix's hashes to decide which suffixes (if any) are out
        of sync.

        :param job: the job dict, with the keys defined in ``_get_part_jobs``
        :param node: the remote node dict
        :returns: a (possibly empty) list of strings, the suffixes to be
                  synced with the remote node.
        :raises SuffixSyncError: if the remote suffix hashes could not be
                                 fetched (already logged here)
        """
        # get hashes from the remote node
        remote_suffixes = None
        try:
            with Timeout(self.http_timeout):
                resp = http_connect(
                    node['replication_ip'], node['replication_port'],
                    node['device'], job['partition'], 'REPLICATE',
                    '', headers=self.headers).getresponse()
                if resp.status == HTTP_INSUFFICIENT_STORAGE:
                    self.logger.error(
                        _('%s responded as unmounted'),
                        self._full_path(node, job['partition'], '',
                                        job['policy']))
                elif resp.status != HTTP_OK:
                    full_path = self._full_path(node, job['partition'], '',
                                                job['policy'])
                    self.logger.error(
                        _("Invalid response %(resp)s from %(full_path)s"),
                        {'resp': resp.status, 'full_path': full_path})
                else:
                    # suffix hashes arrive pickled; REPLICATE is an
                    # intra-cluster interface, not client-facing
                    remote_suffixes = pickle.loads(resp.read())
        except (Exception, Timeout):
            # all exceptions are logged here so that our caller can
            # safely catch our exception and continue to the next node
            # without logging
            self.logger.exception('Unable to get remote suffix hashes '
                                  'from %r' % self._full_path(
                                      node, job['partition'], '',
                                      job['policy']))

        if remote_suffixes is None:
            raise SuffixSyncError('Unable to get remote suffix hashes')

        suffixes = self.get_suffix_delta(job['hashes'],
                                         job['frag_index'],
                                         remote_suffixes,
                                         node['index'])
        # now recalculate local hashes for suffixes that don't
        # match so we're comparing the latest
        local_suff = self._get_hashes(job['policy'], job['path'],
                                      recalculate=suffixes)
        suffixes = self.get_suffix_delta(local_suff,
                                         job['frag_index'],
                                         remote_suffixes,
                                         node['index'])

        self.suffix_count += len(suffixes)
        return suffixes
def delete_reverted_objs(self, job, objects, frag_index):
"""
For EC we can potentially revert only some of a partition
so we'll delete reverted objects here. Note that we delete
the fragment index of the file we sent to the remote node.
:param job: the job being processed
:param objects: a dict of objects to be deleted, each entry maps
hash=>timestamp
:param frag_index: (int) the fragment index of data files to be deleted
"""
df_mgr = self._df_router[job['policy']]
for object_hash, timestamps in objects.items():
try:
df = df_mgr.get_diskfile_from_hash(
job['local_dev']['device'], job['partition'],
object_hash, job['policy'],
frag_index=frag_index)
df.purge(timestamps['ts_data'], frag_index)
except DiskFileError:
self.logger.exception(
'Unable to purge DiskFile (%r %r %r)',
object_hash, timestamps['ts_data'], frag_index)
continue
def process_job(self, job):
"""
Sync the local partition with the remote node(s) according to
the parameters of the job. For primary nodes, the SYNC job type
will define both left and right hand sync_to nodes to ssync with
as defined by this primary nodes index in the node list based on
the fragment index found in the partition. For non-primary
nodes (either handoff revert, or rebalance) the REVERT job will
define a single node in sync_to which is the proper/new home for
the fragment index.
N.B. ring rebalancing can be time consuming and handoff nodes'
fragment indexes do not have a stable order, it's possible to
have more than one REVERT job for a partition, and in some rare
failure conditions there may even also be a SYNC job for the
same partition - but each one will be processed separately
because each job will define a separate list of node(s) to
'sync_to'.
:param: the job dict, with the keys defined in ``_get_job_info``
"""
self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy'])
begin = time.time()
if job['job_type'] == REVERT:
self._revert(job, begin)
else:
self._sync(job, begin)
self.partition_times.append(time.time() - begin)
self.reconstruction_count += 1
def _sync(self, job, begin):
"""
Process a SYNC job.
"""
self.logger.increment(
'partition.update.count.%s' % (job['local_dev']['device'],))
# after our left and right partners, if there's some sort of
# failure we'll continue onto the remaining primary nodes and
# make sure they're in sync - or potentially rebuild missing
# fragments we find
dest_nodes = itertools.chain(
job['sync_to'],
# I think we could order these based on our index to better
# protect against a broken chain
[
n for n in
job['policy'].object_ring.get_part_nodes(job['partition'])
if n['id'] != job['local_dev']['id'] and
n['id'] not in (m['id'] for m in job['sync_to'])
],
)
syncd_with = 0
for node in dest_nodes:
if syncd_with >= len(job['sync_to']):
# success!
break
try:
suffixes = self._get_suffixes_to_sync(job, node)
except SuffixSyncError:
continue
if not suffixes:
syncd_with += 1
continue
# ssync any out-of-sync suffixes with the remote node
success, _ = ssync_sender(
self, node, job, suffixes)()
# let remote end know to rehash it's suffixes
self.rehash_remote(node, job, suffixes)
# update stats for this attempt
self.suffix_sync += len(suffixes)
self.logger.update_stats('suffix.syncs', len(suffixes))
if success:
syncd_with += 1
self.logger.timing_since('partition.update.timing', begin)
    def _revert(self, job, begin):
        """
        Process a REVERT job: ship this partition's suffixes to the
        node(s) in sync_to and, only if every target succeeded, purge
        the local fragment archives that are now safely in place
        remotely.

        :param job: the job dict, with the keys defined in ``_get_part_jobs``
        :param begin: timestamp when processing of this job started
        """
        self.logger.increment(
            'partition.delete.count.%s' % (job['local_dev']['device'],))
        syncd_with = 0
        reverted_objs = {}
        for node in job['sync_to']:
            success, in_sync_objs = ssync_sender(
                self, node, job, job['suffixes'])()
            if success:
                self.rehash_remote(node, job, job['suffixes'])
                syncd_with += 1
                reverted_objs.update(in_sync_objs)
        if syncd_with >= len(job['sync_to']):
            # everything made it to its new home - safe to delete locally
            self.delete_reverted_objs(
                job, reverted_objs, job['frag_index'])
        else:
            # at least one target failed; keep the handoff for a later pass
            self.handoffs_remaining += 1
        self.logger.timing_since('partition.delete.timing', begin)
    def _get_part_jobs(self, local_dev, part_path, partition, policy):
        """
        Helper function to build jobs for a partition, this method will
        read the suffix hashes and create job dictionaries to describe
        the needed work.  There will be one job for each fragment index
        discovered in the partition.

        For a fragment index which corresponds to this node's ring
        index, a job with job_type SYNC will be created to ensure that
        the left and right hand primary ring nodes for the part have the
        corresponding left and right hand fragment archives.

        A fragment index (or entire partition) for which this node is
        not the primary corresponding node, will create job(s) with
        job_type REVERT to ensure that fragment archives are pushed to
        the correct node and removed from this one.

        A partition may result in multiple jobs.  Potentially many
        REVERT jobs, and zero or one SYNC job.

        :param local_dev: the local device
        :param part_path: full path to partition
        :param partition: partition number
        :param policy: the policy
        :returns: a list of dicts of job info

        N.B. If this function ever returns an empty list of jobs the entire
        partition will be deleted.
        """
        # find all the fi's in the part, and which suffixes have them
        try:
            hashes = self._get_hashes(policy, part_path, do_listdir=True)
        except OSError as e:
            if e.errno != errno.ENOTDIR:
                raise
            self.logger.warning(
                'Unexpected entity %r is not a directory' % part_path)
            return []
        non_data_fragment_suffixes = []
        data_fi_to_suffixes = defaultdict(list)
        for suffix, fi_hash in hashes.items():
            if not fi_hash:
                # this is for sanity and clarity, normally an empty
                # suffix would get del'd from the hashes dict, but an
                # OSError trying to re-hash the suffix could leave the
                # value empty - it will log the exception; but there's
                # no way to properly address this suffix at this time.
                continue
            data_frag_indexes = [f for f in fi_hash if f is not None]
            if not data_frag_indexes:
                non_data_fragment_suffixes.append(suffix)
            else:
                for fi in data_frag_indexes:
                    data_fi_to_suffixes[fi].append(suffix)

        # helper to ensure consistent structure of jobs
        def build_job(job_type, frag_index, suffixes, sync_to):
            return {
                'job_type': job_type,
                'frag_index': frag_index,
                'suffixes': suffixes,
                'sync_to': sync_to,
                'partition': partition,
                'path': part_path,
                'hashes': hashes,
                'policy': policy,
                'local_dev': local_dev,
                # ssync likes to have it handy
                'device': local_dev['device'],
            }

        # aggregate jobs for all the fragment index in this part
        jobs = []

        # check the primary nodes - to see if the part belongs here
        part_nodes = policy.object_ring.get_part_nodes(partition)
        for node in part_nodes:
            if node['id'] == local_dev['id']:
                # this partition belongs here, we'll need a sync job
                frag_index = node['index']
                try:
                    suffixes = data_fi_to_suffixes.pop(frag_index)
                except KeyError:
                    # no local data for our own fragment index yet
                    suffixes = []
                sync_job = build_job(
                    job_type=SYNC,
                    frag_index=frag_index,
                    suffixes=suffixes,
                    sync_to=_get_partners(frag_index, part_nodes),
                )
                # ssync callback to rebuild missing fragment_archives
                sync_job['sync_diskfile_builder'] = self.reconstruct_fa
                jobs.append(sync_job)
                break

        # assign remaining data fragment suffixes to revert jobs
        # NOTE(review): this sorts *ascending* by suffix count, so when
        # there is no sync job, jobs[0] below is the revert job with the
        # FEWEST suffixes, despite the "most common" comment - confirm
        # whether reverse=True was intended
        ordered_fis = sorted((len(suffixes), fi) for fi, suffixes
                             in data_fi_to_suffixes.items())
        for count, fi in ordered_fis:
            revert_job = build_job(
                job_type=REVERT,
                frag_index=fi,
                suffixes=data_fi_to_suffixes[fi],
                sync_to=[part_nodes[fi]],
            )
            jobs.append(revert_job)

        # now we need to assign suffixes that have no data fragments
        if non_data_fragment_suffixes:
            if jobs:
                # the first job will be either the sync_job, or the
                # revert_job for the fragment index that is most common
                # among the suffixes
                jobs[0]['suffixes'].extend(non_data_fragment_suffixes)
            else:
                # this is an unfortunate situation, we need a revert job to
                # push partitions off this node, but none of the suffixes
                # have any data fragments to hint at which node would be a
                # good candidate to receive the tombstones.
                jobs.append(build_job(
                    job_type=REVERT,
                    frag_index=None,
                    suffixes=non_data_fragment_suffixes,
                    # this is super safe
                    sync_to=part_nodes,
                    # something like this would be probably be better
                    # sync_to=random.sample(part_nodes, 3),
                ))
        # return a list of jobs for this part
        return jobs
    def collect_parts(self, override_devices=None,
                      override_partitions=None):
        """
        Helper for yielding partitions in the top level reconstructor

        :param override_devices: if given, restrict to these device names
        :param override_partitions: if given, restrict to these partitions
        :returns: a shuffled list of part_info dicts with keys local_dev,
                  policy, partition and part_path
        """
        override_devices = override_devices or []
        override_partitions = override_partitions or []
        ips = whataremyips(self.bind_ip)
        # only EC policies are the reconstructor's business
        ec_policies = (policy for policy in POLICIES
                       if policy.policy_type == EC_POLICY)

        policy2devices = {}

        for policy in ec_policies:
            self.load_object_ring(policy)
            local_devices = list(six.moves.filter(
                lambda dev: dev and is_local_device(
                    ips, self.port,
                    dev['replication_ip'], dev['replication_port']),
                policy.object_ring.devs))

            if override_devices:
                local_devices = list(six.moves.filter(
                    lambda dev_info: dev_info['device'] in override_devices,
                    local_devices))

            policy2devices[policy] = local_devices
            self.device_count += len(local_devices)

        all_parts = []

        for policy, local_devices in policy2devices.items():
            df_mgr = self._df_router[policy]
            for local_dev in local_devices:
                self.reconstruction_device_count += 1
                dev_path = df_mgr.get_dev_path(local_dev['device'])
                if not dev_path:
                    self.logger.warning(_('%s is not mounted'),
                                        local_dev['device'])
                    continue
                data_dir = get_data_dir(policy)
                obj_path = join(dev_path, data_dir)
                tmp_path = join(dev_path, get_tmp_dir(int(policy)))
                # clean out stale temp files left over from prior runs
                unlink_older_than(tmp_path, time.time() -
                                  df_mgr.reclaim_age)
                if not os.path.exists(obj_path):
                    try:
                        mkdirs(obj_path)
                    except Exception:
                        self.logger.exception(
                            'Unable to create %s' % obj_path)
                    continue
                try:
                    partitions = os.listdir(obj_path)
                except OSError:
                    self.logger.exception(
                        'Unable to list partitions in %r' % obj_path)
                    continue

                self.part_count += len(partitions)
                for partition in partitions:
                    part_path = join(obj_path, partition)
                    if partition in ('auditor_status_ALL.json',
                                     'auditor_status_ZBF.json'):
                        # auditor bookkeeping files, not partition dirs
                        continue
                    if not partition.isdigit():
                        self.logger.warning(
                            'Unexpected entity in data dir: %r' % part_path)
                        remove_file(part_path)
                        self.reconstruction_part_count += 1
                        continue
                    partition = int(partition)
                    if override_partitions and (partition not in
                                                override_partitions):
                        continue
                    part_info = {
                        'local_dev': local_dev,
                        'policy': policy,
                        'partition': partition,
                        'part_path': part_path,
                    }
                    all_parts.append(part_info)
        # randomize to spread work evenly across devices/policies
        random.shuffle(all_parts)
        return all_parts
def build_reconstruction_jobs(self, part_info):
"""
Helper function for collect_jobs to build jobs for reconstruction
using EC style storage policy
N.B. If this function ever returns an empty list of jobs the entire
partition will be deleted.
"""
jobs = self._get_part_jobs(**part_info)
random.shuffle(jobs)
self.job_count += len(jobs)
return jobs
def _reset_stats(self):
self.start = time.time()
self.job_count = 0
self.part_count = 0
self.device_count = 0
self.suffix_count = 0
self.suffix_sync = 0
self.suffix_hash = 0
self.reconstruction_count = 0
self.reconstruction_part_count = 0
self.reconstruction_device_count = 0
self.last_reconstruction_count = -1
self.handoffs_remaining = 0
    def delete_partition(self, path):
        """
        Remove an entire partition directory tree (used when the
        partition no longer belongs on this node and has nothing left
        to revert).
        """
        self.logger.info(_("Removing partition: %s"), path)
        # rmtree can take a while on a big partition; run it in a native
        # thread so other greenthreads aren't blocked
        tpool.execute(shutil.rmtree, path, ignore_errors=True)
        # NOTE(review): remove_file presumably cleans up if *path* was a
        # file rather than a directory - confirm against its definition
        remove_file(path)
def reconstruct(self, **kwargs):
"""Run a reconstruction pass"""
self._reset_stats()
self.partition_times = []
stats = spawn(self.heartbeat)
lockup_detector = spawn(self.detect_lockups)
sleep() # Give spawns a cycle
try:
self.run_pool = GreenPool(size=self.concurrency)
for part_info in self.collect_parts(**kwargs):
if not self.check_ring(part_info['policy'].object_ring):
self.logger.info(_("Ring change detected. Aborting "
"current reconstruction pass."))
return
self.reconstruction_part_count += 1
jobs = self.build_reconstruction_jobs(part_info)
if not jobs:
# If this part belongs on this node, _get_part_jobs
# will *always* build a sync_job - even if there's
# no suffixes in the partition that needs to sync.
# If there's any suffixes in the partition then our
# job list would have *at least* one revert job.
# Therefore we know this part a) doesn't belong on
# this node and b) doesn't have any suffixes in it.
self.run_pool.spawn(self.delete_partition,
part_info['part_path'])
for job in jobs:
if (self.handoffs_only and job['job_type'] != REVERT):
self.logger.debug('Skipping %s job for %s '
'while in handoffs_only mode.',
job['job_type'], job['path'])
continue
self.run_pool.spawn(self.process_job, job)
with Timeout(self.lockup_timeout):
self.run_pool.waitall()
except (Exception, Timeout):
self.logger.exception(_("Exception in top-level"
"reconstruction loop"))
self.kill_coros()
finally:
stats.kill()
lockup_detector.kill()
self.stats_line()
if self.handoffs_only:
if self.handoffs_remaining > 0:
self.logger.info(_(
"Handoffs only mode still has handoffs remaining. "
"Next pass will continue to revert handoffs."))
else:
self.logger.warning(_(
"Handoffs only mode found no handoffs remaining. "
"You should disable handoffs_only once all nodes "
"are reporting no handoffs remaining."))
def run_once(self, *args, **kwargs):
start = time.time()
self.logger.info(_("Running object reconstructor in script mode."))
override_devices = list_from_csv(kwargs.get('devices'))
override_partitions = [int(p) for p in
list_from_csv(kwargs.get('partitions'))]
self.reconstruct(
override_devices=override_devices,
override_partitions=override_partitions)
total = (time.time() - start) / 60
self.logger.info(
_("Object reconstruction complete (once). (%.02f minutes)"), total)
if not (override_partitions or override_devices):
dump_recon_cache({'object_reconstruction_time': total,
'object_reconstruction_last': time.time()},
self.rcache, self.logger)
    def run_forever(self, *args, **kwargs):
        """
        Daemon-mode entry point: run reconstruction passes back to back
        forever, dumping recon stats and sleeping between passes.
        """
        self.logger.info(_("Starting object reconstructor in daemon mode."))
        # Run the reconstructor continually
        while True:
            start = time.time()
            self.logger.info(_("Starting object reconstruction pass."))
            # Run the reconstructor
            self.reconstruct()
            total = (time.time() - start) / 60
            self.logger.info(
                _("Object reconstruction complete. (%.02f minutes)"), total)
            dump_recon_cache({'object_reconstruction_time': total,
                              'object_reconstruction_last': time.time()},
                             self.rcache, self.logger)
            self.logger.debug('reconstruction sleeping for %s seconds.',
                              self.interval)
            sleep(self.interval)
| [
"[email protected]"
]
| |
5c1049545c0c2a10a0f9a258fb0b91f1efd85336 | 09122d8d733d17f5c7fa79c1f167b63ef83d6838 | /LeetCode_Algorithm_I/binary_search.py | d9c336c0ef5bf9bc398707f400382081fdfb60bb | []
| no_license | vipulsingh24/DSA | e294ec4aa1b6db2e0ab658f0c7d8389c42563fe2 | bcad07d073b064428bcb1bfe7c53f1e785ecc4eb | refs/heads/main | 2023-08-17T00:57:44.232545 | 2021-10-10T17:53:59 | 2021-10-10T17:53:59 | 391,599,514 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 988 | py | """
Given an array of integers nums which is sorted in ascending order, and an integer target, write a function to search target in nums. If target exists, then return its index. Otherwise, return -1.
You must write an algorithm with O(log n) runtime complexity.
"""
class Solution:
    """LeetCode 704: binary search over a sorted list of integers."""

    def search(self, nums: List[int], target: int) -> int:
        """
        Return the index of *target* in ascending-sorted *nums*, or -1
        if it is not present.

        Runs in O(log n) time and O(1) space.  Handles the empty list
        and single-element list without special-casing (the loop simply
        never runs, or runs once), and drops the defensive iteration
        counter the original carried - the bounds shrink every pass, so
        termination is guaranteed.
        """
        low, high = 0, len(nums) - 1
        while low <= high:
            # written overflow-safe out of habit; Python ints can't overflow
            mid = low + (high - low) // 2
            if nums[mid] == target:
                return mid
            elif nums[mid] < target:
                low = mid + 1
            else:
                high = mid - 1
        return -1
| [
"[email protected]"
]
| |
e14af7bd3ccfa9c72e9e4ce26896deedd4f79e9a | 4f325c55882487af0aab8853179a7ed5867953b9 | /psfr.py | b8fc850ab94f79025a8daf47044fb7bf8c9fc39a | []
| no_license | ravijaya/oct26 | 6ce7112f85db86fa41fbb2ff6f9f01650055ad6a | 64f7dbe759ddc652297365c8a635b239e1ef0cba | refs/heads/master | 2020-08-28T12:29:54.644234 | 2019-10-26T11:41:41 | 2019-10-26T11:41:41 | 217,700,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 76 | py | fp = open('passwd.txt')
for temp in fp:
print(temp, end='')
fp.close() | [
"[email protected]"
]
| |
ba918c9a370632727defbf27b9f61360d89b1d1e | 391a40002b63daff8bb056b2f0b2ae3f7ee52bb3 | /项目/4组_基于MTCNN与FaceNet的人脸识别/4组_基于MTCNN与FaceNet的人脸识别/src/face.py | 5c903bc4586d85543ca2a0df1e690a7e91cf937e | []
| no_license | realllcandy/USTC_SSE_Python | 7b40f0e4ae3531fc41683fd19f71a58ce3815cdb | 3ac15a95e8a99491c322481a70c14b6ab830082f | refs/heads/master | 2023-03-17T21:30:19.068695 | 2020-10-10T06:15:28 | 2020-10-10T06:15:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,374 | py | import pickle
import os
import cv2
import numpy as np
import tensorflow as tf
from scipy import misc
import align.detect_face
import facenet
# fraction of GPU memory the MTCNN session is allowed to claim
gpu_memory_fraction = 0.3
# pretrained FaceNet model directory (relative to this file)
facenet_model_checkpoint = os.path.dirname(__file__) + "/../src/models/20180402-114759"
# pickled (model, class_names) classifier loaded by Identifier
classifier_model = os.path.dirname(__file__) + "/../src/models/classifier.pkl"
# when True, show each detected face crop in an OpenCV window
debug = False
class Face:
    """
    Plain record for one detected face.

    Fields are filled in progressively by the pipeline: Detection sets
    container_image, bounding_box and image; Encoder sets embedding;
    Recognition/Identifier set name.  Everything starts as None.
    """

    def __init__(self):
        for attr in ('name', 'bounding_box', 'image',
                     'container_image', 'embedding'):
            setattr(self, attr, None)
class Recognition:
    """
    High-level facade tying the pipeline together:
    detect faces -> embed each face -> classify the embedding to a name.
    """

    def __init__(self):
        # NOTE: constructing these loads the MTCNN and FaceNet models
        self.detect = Detection()
        self.encoder = Encoder()
        self.identifier = Identifier()

    def add_identity(self, image, person_name):
        """
        Detect faces in *image*; if exactly one face is found, label it
        with *person_name* and compute its embedding.

        Returns the list of detected Face objects.  Note the faces are
        only annotated when exactly one was found; otherwise they are
        returned untouched.
        """
        faces = self.detect.find_faces(image)

        if len(faces) == 1:
            face = faces[0]
            face.name = person_name
            face.embedding = self.encoder.generate_embedding(face)

        return faces

    def identify(self, image):
        """
        Detect all faces in *image* and annotate each with its embedding
        and the classifier's predicted name; return the Face list.
        """
        faces = self.detect.find_faces(image)

        for i, face in enumerate(faces):
            if debug:
                # show each crop for visual inspection when debug is on
                cv2.imshow("Face: " + str(i), face.image)
            face.embedding = self.encoder.generate_embedding(face)
            face.name = self.identifier.identify(face)

        return faces
class Identifier:
    """Maps a face embedding to a person name via a pickled classifier."""

    def __init__(self):
        # NOTE(review): pickle.load can execute arbitrary code - only
        # ever load classifier files from a trusted source
        with open(classifier_model, 'rb') as infile:
            self.model, self.class_names = pickle.load(infile)

    def identify(self, face):
        """
        Return the most probable class name for *face*; implicitly
        returns None when the face has no embedding yet.
        """
        if face.embedding is not None:
            predictions = self.model.predict_proba([face.embedding])
            best_class_indices = np.argmax(predictions, axis=1)
            return self.class_names[best_class_indices[0]]
class Encoder:
    """Computes FaceNet embeddings for cropped face images."""

    def __init__(self):
        # dedicated TF session holding the pretrained FaceNet graph
        self.sess = tf.Session()
        with self.sess.as_default():
            facenet.load_model(facenet_model_checkpoint)

    def generate_embedding(self, face):
        """
        Run the FaceNet forward pass for *face* and return its embedding
        (the first - and only - row of the batch output).
        """
        # look up the graph's input/output tensors by name
        images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
        embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
        phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")

        # normalize pixel statistics the way the model expects
        prewhiten_face = facenet.prewhiten(face.image)

        # Run forward pass to calculate embeddings
        feed_dict = {images_placeholder: [prewhiten_face], phase_train_placeholder: False}
        return self.sess.run(embeddings, feed_dict=feed_dict)[0]
class Detection:
    """MTCNN-based face detector producing cropped, resized Face objects."""

    # MTCNN hyper-parameters
    minsize = 20  # minimum size of face
    threshold = [0.6, 0.7, 0.7]  # three steps's threshold
    factor = 0.709  # scale factor

    def __init__(self, face_crop_size=160, face_crop_margin=32):
        # face_crop_size: edge length (px) of the square output crop
        # face_crop_margin: extra context pixels kept around the raw box
        self.pnet, self.rnet, self.onet = self._setup_mtcnn()
        self.face_crop_size = face_crop_size
        self.face_crop_margin = face_crop_margin

    def _setup_mtcnn(self):
        # build the three MTCNN stage networks in their own graph/session
        with tf.Graph().as_default():
            gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_memory_fraction)
            sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=False))
            with sess.as_default():
                return align.detect_face.create_mtcnn(sess, None)

    def find_faces(self, image):
        """
        Detect all faces in *image*; return Face objects with margin-padded,
        image-clamped bounding boxes and resized square crops.
        """
        faces = []
        bounding_boxes, _ = align.detect_face.detect_face(image, self.minsize,
                                                          self.pnet, self.rnet, self.onet,
                                                          self.threshold, self.factor)
        for bb in bounding_boxes:
            face = Face()
            face.container_image = image
            face.bounding_box = np.zeros(4, dtype=np.int32)
            img_size = np.asarray(image.shape)[0:2]
            # pad the box by half the margin on each side, clamped to the
            # image bounds
            face.bounding_box[0] = np.maximum(bb[0] - self.face_crop_margin / 2, 0)
            face.bounding_box[1] = np.maximum(bb[1] - self.face_crop_margin / 2, 0)
            face.bounding_box[2] = np.minimum(bb[2] + self.face_crop_margin / 2, img_size[1])
            face.bounding_box[3] = np.minimum(bb[3] + self.face_crop_margin / 2, img_size[0])
            cropped = image[face.bounding_box[1]:face.bounding_box[3], face.bounding_box[0]:face.bounding_box[2], :]
            face.image = misc.imresize(cropped, (self.face_crop_size, self.face_crop_size), interp='bilinear')
            faces.append(face)

        return faces
| [
"[email protected]"
]
| |
f97edd5d3b27d38201267d468279199221e65350 | 275f3a3dce44f35a5dc1a626e2052c006bdb71bf | /python3/hackerrank_leetcode/longest_common_prefix/test.py | 72dd75f74c79b1a194638843eaa41b43c1a81fa7 | [
"MIT"
]
| permissive | seLain/codesnippets | 79b7655950b160af7f2b9fa94712594f95a4794d | ae9a1fa05b67f4b3ac1703cc962fcf5f6de1e289 | refs/heads/master | 2018-10-08T10:29:53.982156 | 2018-07-23T13:32:26 | 2018-07-23T13:32:26 | 114,009,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 574 | py | import unittest
from main import Solution
class TestSolutionMethods(unittest.TestCase):
    """Unit tests for Solution.longestCommonPrefix (imported from main)."""

    # a single shared instance is fine - the method under test is stateless
    solution = Solution()

    def test_longestCommonPrefix(self):
        # leetcode test
        self.assertEqual(self.solution.longestCommonPrefix([]), "")
        # customized test
        self.assertEqual(self.solution.longestCommonPrefix(["b", "c", "abc"]), "")
        self.assertEqual(self.solution.longestCommonPrefix(["ac", "ab", "abc"]), "a")
        self.assertEqual(self.solution.longestCommonPrefix(["abc", "abc"]), "abc")
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
]
| |
bd4e18303be56e4297aec515f2a890f7a7d2110e | 2b42b40ae2e84b438146003bf231532973f1081d | /spec/mgm4440359.3.spec | 7ed6f3d63fabce9fb95651b59b7b807fb943d9a4 | []
| no_license | MG-RAST/mtf | 0ea0ebd0c0eb18ec6711e30de7cc336bdae7215a | e2ddb3b145068f22808ef43e2bbbbaeec7abccff | refs/heads/master | 2020-05-20T15:32:04.334532 | 2012-03-05T09:51:49 | 2012-03-05T09:51:49 | 3,625,755 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 23,438 | spec | {
"id": "mgm4440359.3",
"metadata": {
"mgm4440359.3.metadata.json": {
"format": "json",
"provider": "metagenomics.anl.gov"
}
},
"providers": {
"metagenomics.anl.gov": {
"files": {
"100.preprocess.info": {
"compression": null,
"description": null,
"size": 736,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/100.preprocess.info"
},
"100.preprocess.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 936353,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/100.preprocess.passed.fna.gz"
},
"100.preprocess.passed.fna.stats": {
"compression": null,
"description": null,
"size": 314,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/100.preprocess.passed.fna.stats"
},
"100.preprocess.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 40025,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/100.preprocess.removed.fna.gz"
},
"100.preprocess.removed.fna.stats": {
"compression": null,
"description": null,
"size": 313,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/100.preprocess.removed.fna.stats"
},
"150.dereplication.info": {
"compression": null,
"description": null,
"size": 778,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/150.dereplication.info"
},
"150.dereplication.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 886243,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/150.dereplication.passed.fna.gz"
},
"150.dereplication.passed.fna.stats": {
"compression": null,
"description": null,
"size": 314,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/150.dereplication.passed.fna.stats"
},
"150.dereplication.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 38778,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/150.dereplication.removed.fna.gz"
},
"150.dereplication.removed.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/150.dereplication.removed.fna.stats"
},
"205.screen.h_sapiens_asm.info": {
"compression": null,
"description": null,
"size": 482,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/205.screen.h_sapiens_asm.info"
},
"205.screen.h_sapiens_asm.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 877,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/205.screen.h_sapiens_asm.removed.fna.gz"
},
"299.screen.info": {
"compression": null,
"description": null,
"size": 410,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/299.screen.info"
},
"299.screen.passed.fna.gcs": {
"compression": null,
"description": null,
"size": 4871,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/299.screen.passed.fna.gcs"
},
"299.screen.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 885592,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/299.screen.passed.fna.gz"
},
"299.screen.passed.fna.lens": {
"compression": null,
"description": null,
"size": 512,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/299.screen.passed.fna.lens"
},
"299.screen.passed.fna.stats": {
"compression": null,
"description": null,
"size": 314,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/299.screen.passed.fna.stats"
},
"350.genecalling.coding.faa.gz": {
"compression": "gzip",
"description": null,
"size": 588689,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/350.genecalling.coding.faa.gz"
},
"350.genecalling.coding.faa.stats": {
"compression": null,
"description": null,
"size": 119,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/350.genecalling.coding.faa.stats"
},
"350.genecalling.coding.fna.gz": {
"compression": "gzip",
"description": null,
"size": 842464,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/350.genecalling.coding.fna.gz"
},
"350.genecalling.coding.fna.stats": {
"compression": null,
"description": null,
"size": 314,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/350.genecalling.coding.fna.stats"
},
"350.genecalling.info": {
"compression": null,
"description": null,
"size": 714,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/350.genecalling.info"
},
"425.usearch.rna.fna.gz": {
"compression": "gzip",
"description": null,
"size": 205973,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/425.usearch.rna.fna.gz"
},
"425.usearch.rna.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/425.usearch.rna.fna.stats"
},
"440.cluster.rna97.fna.gz": {
"compression": "gzip",
"description": null,
"size": 202039,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/440.cluster.rna97.fna.gz"
},
"440.cluster.rna97.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/440.cluster.rna97.fna.stats"
},
"440.cluster.rna97.info": {
"compression": null,
"description": null,
"size": 947,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/440.cluster.rna97.info"
},
"440.cluster.rna97.mapping": {
"compression": null,
"description": null,
"size": 10454,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/440.cluster.rna97.mapping"
},
"440.cluster.rna97.mapping.stats": {
"compression": null,
"description": null,
"size": 47,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/440.cluster.rna97.mapping.stats"
},
"450.rna.expand.lca.gz": {
"compression": "gzip",
"description": null,
"size": 8154,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/450.rna.expand.lca.gz"
},
"450.rna.expand.rna.gz": {
"compression": "gzip",
"description": null,
"size": 2058,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/450.rna.expand.rna.gz"
},
"450.rna.sims.filter.gz": {
"compression": "gzip",
"description": null,
"size": 1615,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/450.rna.sims.filter.gz"
},
"450.rna.sims.gz": {
"compression": "gzip",
"description": null,
"size": 14423,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/450.rna.sims.gz"
},
"450.rna.sims.info": {
"compression": null,
"description": null,
"size": 1376,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/450.rna.sims.info"
},
"550.cluster.aa90.faa.gz": {
"compression": "gzip",
"description": null,
"size": 563878,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/550.cluster.aa90.faa.gz"
},
"550.cluster.aa90.faa.stats": {
"compression": null,
"description": null,
"size": 119,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/550.cluster.aa90.faa.stats"
},
"550.cluster.aa90.info": {
"compression": null,
"description": null,
"size": 1080,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/550.cluster.aa90.info"
},
"550.cluster.aa90.mapping": {
"compression": null,
"description": null,
"size": 11422,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/550.cluster.aa90.mapping"
},
"550.cluster.aa90.mapping.stats": {
"compression": null,
"description": null,
"size": 47,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/550.cluster.aa90.mapping.stats"
},
"640.loadAWE.info": {
"compression": null,
"description": null,
"size": 114,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/640.loadAWE.info"
},
"650.superblat.expand.lca.gz": {
"compression": "gzip",
"description": null,
"size": 1301497,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/650.superblat.expand.lca.gz"
},
"650.superblat.expand.ontology.gz": {
"compression": "gzip",
"description": null,
"size": 829578,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/650.superblat.expand.ontology.gz"
},
"650.superblat.expand.protein.gz": {
"compression": "gzip",
"description": null,
"size": 1510594,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/650.superblat.expand.protein.gz"
},
"650.superblat.sims.filter.gz": {
"compression": "gzip",
"description": null,
"size": 463431,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/650.superblat.sims.filter.gz"
},
"650.superblat.sims.gz": {
"compression": "gzip",
"description": null,
"size": 2436636,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/650.superblat.sims.gz"
},
"650.superblat.sims.info": {
"compression": null,
"description": null,
"size": 1343,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/650.superblat.sims.info"
},
"900.abundance.function.gz": {
"compression": "gzip",
"description": null,
"size": 1013973,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.abundance.function.gz"
},
"900.abundance.lca.gz": {
"compression": "gzip",
"description": null,
"size": 33218,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.abundance.lca.gz"
},
"900.abundance.md5.gz": {
"compression": "gzip",
"description": null,
"size": 346855,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.abundance.md5.gz"
},
"900.abundance.ontology.gz": {
"compression": "gzip",
"description": null,
"size": 1052943,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.abundance.ontology.gz"
},
"900.abundance.organism.gz": {
"compression": "gzip",
"description": null,
"size": 597617,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.abundance.organism.gz"
},
"900.loadDB.sims.filter.seq": {
"compression": null,
"description": null,
"size": 2532549,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.loadDB.sims.filter.seq"
},
"900.loadDB.source.stats": {
"compression": null,
"description": null,
"size": 690,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/900.loadDB.source.stats"
},
"999.done.COG.stats": {
"compression": null,
"description": null,
"size": 117,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.COG.stats"
},
"999.done.KO.stats": {
"compression": null,
"description": null,
"size": 154,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.KO.stats"
},
"999.done.NOG.stats": {
"compression": null,
"description": null,
"size": 111,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.NOG.stats"
},
"999.done.Subsystems.stats": {
"compression": null,
"description": null,
"size": 791,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.Subsystems.stats"
},
"999.done.class.stats": {
"compression": null,
"description": null,
"size": 2513,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.class.stats"
},
"999.done.domain.stats": {
"compression": null,
"description": null,
"size": 61,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.domain.stats"
},
"999.done.family.stats": {
"compression": null,
"description": null,
"size": 7984,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.family.stats"
},
"999.done.genus.stats": {
"compression": null,
"description": null,
"size": 11889,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.genus.stats"
},
"999.done.order.stats": {
"compression": null,
"description": null,
"size": 4335,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.order.stats"
},
"999.done.phylum.stats": {
"compression": null,
"description": null,
"size": 942,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.phylum.stats"
},
"999.done.rarefaction.stats": {
"compression": null,
"description": null,
"size": 22605,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.rarefaction.stats"
},
"999.done.sims.stats": {
"compression": null,
"description": null,
"size": 84,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.sims.stats"
},
"999.done.species.stats": {
"compression": null,
"description": null,
"size": 39213,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4440359.3/file/999.done.species.stats"
}
},
"id": "mgm4440359.3",
"provider": "metagenomics.anl.gov",
"providerId": "mgm4440359.3"
}
},
"raw": {
"mgm4440359.3.fna.gz": {
"compression": "gzip",
"format": "fasta",
"provider": "metagenomics.anl.gov",
"url": "http://api.metagenomics.anl.gov/reads/mgm4440359.3"
}
}
} | [
"[email protected]"
]
| |
b0e9f516a7ab0d93d6432ced24d61642820d19a9 | 900b98964288a9cb0aaf2e45706ae2b32f92657f | /tests/adspygoogle/common/alltests.py | a8a8e8f47269f3b9e2fd541f8114cc50a28eed09 | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | krux/adspygoogle | df2405c2042aa9c9a83d97b8442afe68572e3e2e | 6505a71122f45fe3e675f27f2c29f67a1768069b | refs/heads/master | 2022-02-22T08:55:19.777002 | 2022-02-11T22:42:19 | 2022-02-11T22:42:19 | 7,103,378 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | #!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script to run all existing unit tests."""
__author__ = '[email protected] (Stan Grinberg)'
import glob
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..'))
import unittest
from adspygoogle.adwords import LIB_SIG
from adspygoogle.common.Logger import Logger
LOG_NAME = 'common_api_lib'
LOGGER = Logger(LIB_SIG, os.path.join('..', '..', '..', 'logs'))

# Discover every *_test.py module in this directory and collect its tests
# into a single suite (built at import time).
suite = unittest.TestSuite()
loader = unittest.TestLoader()
tests = [filename[:-3] for filename in glob.glob('*_test.py')]  # strip '.py'
for test in tests:
    suite.addTest(loader.loadTestsFromModule(__import__(test)))

if __name__ == '__main__':
    LOGGER.Log(LOG_NAME, 'Start all unit tests.', log_level=Logger.DEBUG)
    unittest.TextTestRunner(verbosity=1).run(suite)
    LOGGER.Log(LOG_NAME, 'End all unit tests.', log_level=Logger.DEBUG)
| [
"[email protected]"
]
| |
dbc9dd7e5d35001f7256ece833eb55efa0b57991 | 00ab868fdab064e1674193d43204ef3d3f89b658 | /docs/conf.py | 6a19c95d2e15a7362bdb4490793c9df4bb30c775 | []
| no_license | jeffbuttars/django-futures | 83b29b37552f6a9c1f288197b7a5cb66b32fb203 | ba80a340d264c70bc411dd9944f7047f044245bb | refs/heads/master | 2020-05-18T10:40:00.908843 | 2014-08-24T16:45:03 | 2014-08-24T16:45:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,301 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# DjangoFutures documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 25 21:14:57 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Make the docs directory and the project sources importable so that
# sphinx.ext.autodoc can find the modules it documents.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath("../src"))
# Autodoc of the run_tornado management command needs an importable Django
# settings module, so a docs-only settings file is pointed to here.
os.environ['DJANGO_SETTINGS_MODULE'] = 'dfs.settings'
# BUG FIX: the original used the Python 2 print statement
# (`print "PATH", sys.path`), which is a SyntaxError under the
# `#!/usr/bin/env python3` shebang this file declares.
print("PATH", sys.path)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules enabled for this project (autodoc pulls
# docstrings from the source; viewcode links rendered docs to source).
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'DjangoFutures'
copyright = '2014, Jeff Buttars'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoFuturesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'DjangoFutures.tex', 'DjangoFutures Documentation',
'Jeff Buttars', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'djangofutures', 'DjangoFutures Documentation',
['Jeff Buttars'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'DjangoFutures', 'DjangoFutures Documentation',
'Jeff Buttars', 'DjangoFutures', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'DjangoFutures'
epub_author = 'Jeff Buttars'
epub_publisher = 'Jeff Buttars'
epub_copyright = '2014, Jeff Buttars'
# The basename for the epub file. It defaults to the project name.
#epub_basename = 'DjangoFutures'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
# intersphinx_mapping = {
# 'python': ('http://python.readthedocs.org/en/latest/', None),
# }
# ReadTheDocs sets READTHEDOCS=True in its build environment; there the
# sphinx_rtd_theme package cannot be imported but is applied by default.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
    # Use the same theme when building locally as on RTD.
    # ROBUSTNESS FIX: the original imported sphinx_rtd_theme
    # unconditionally here, so local builds crashed with ImportError when
    # the package was not installed; fall back to the stock theme instead.
    try:
        import sphinx_rtd_theme
        html_theme = 'sphinx_rtd_theme'
        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
    except ImportError:
        html_theme = 'default'

intersphinx_mapping = {'http://docs.python.org/': None}
| [
"[email protected]"
]
| |
4a462bf150a9a56a7a27908dbbbfb4f65c656ad2 | 3325f16c04ca8e641cbd58e396f983542b793091 | /Seção 07 - Coleções Python/Exercícios da Seção/Exercício_44.py | e74cdc81ca838e39ebaaefe45c5c38d333e4831a | []
| no_license | romulovieira777/Programacao_em_Python_Essencial | ac929fbbd6a002bcc689b8d6e54d46177632c169 | e81d219db773d562841203ea370bf4f098c4bd21 | refs/heads/master | 2023-06-11T16:06:36.971113 | 2021-07-06T20:57:25 | 2021-07-06T20:57:25 | 269,442,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 666 | py | """
44) Leia uma matriz 5 x 5. Leia também um valor X. O programa
deverá fazer uma busca desse valor na matriz e, ao final, escrever a localização
(linha e coluna) ou mensagem de "não encontrado".
"""
# Read the 5x5 matrix one value at a time, row by row.
lista1 = [[int(input("Enter a number: ")) for _ in range(5)]
          for _ in range(5)]

# Value to search for in the matrix.
x = int(input("\nEnter a number x: "))
print(f'\n{lista1}')

# Report every position where x occurs; count the matches so we can tell
# whether it was found at all.
cont = 0
for i, row in enumerate(lista1):
    for j, value in enumerate(row):
        if value == x:
            cont += 1
            print(f"\nThe value {x} is found in line {i} and in the column {j}")
if cont == 0:
    print("\nNot found")
| [
"[email protected]"
]
| |
a15cc26b1d7917df84ed6039e6ad915c5581b902 | 517d461257edd1d6b239200b931c6c001b99f6da | /HalloWing/CircuitPython/testing_transistors_w_motor.py | 025c05429c16510fc22b9c3cc91a10e59c07db35 | []
| no_license | cmontalvo251/Microcontrollers | 7911e173badff93fc29e52fbdce287aab1314608 | 09ff976f2ee042b9182fb5a732978225561d151a | refs/heads/master | 2023-06-23T16:35:51.940859 | 2023-06-16T19:29:30 | 2023-06-16T19:29:30 | 229,314,291 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | import board
import digitalio
import time
import touchio
import audioio
from analogio import AnalogIn,AnalogOut
#Set up Touch Buttons
touch1 = touchio.TouchIn(board.TOUCH1)
touch2 = touchio.TouchIn(board.TOUCH2)
touch3 = touchio.TouchIn(board.TOUCH3)
touch4 = touchio.TouchIn(board.TOUCH4)
#print(dir(board))
#analog_button = AnalogOut(board.A6)
transistor = digitalio.DigitalInOut(board.D4)
transistor.direction = digitalio.Direction.OUTPUT
transistor.value = False
while True:
if touch1.value or touch2.value or touch3.value or touch4.value:
print("Touched")
transistor.value = 65535
time.sleep(2)
transistor.value = 0
time.sleep(0.05)
| [
"[email protected]"
]
| |
830acf70ecae7fd6e2f7dc1714aff73773a35e2c | 7efee8fb6eb0fa0c3474eeee6344f0c3a5842ff1 | /discourse/__init__.py | 8cda3bd93fb73ad81fb65a473bed545b69e8a8f8 | []
| no_license | nyddle/Discourse | ca0f4eaec039b859a607423c81d97367442c2d3a | 031b5bbe7520bfd00addce291661f3e0d8d1e204 | refs/heads/master | 2021-01-15T21:20:39.699191 | 2014-05-16T01:55:06 | 2014-05-16T01:55:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 886 | py | from notice import subscribe, unsubscribe, is_subscribed, send_event, render_mail, send_mail
from models import (model_sig,
get_instance_from_sig,
comment_manipulate,
attachment_manipulate,
document_manipulate,
library_view,
document_view,
attachment_view,
comment_vote,
event)
from notice import send_event
### Helpers ###
def template_repr(var):
    """Return repr(var) with any Python 2 unicode prefix stripped.

    ``repr(u'x')`` yields ``u'x'`` under Python 2; dropping the leading
    ``u`` keeps the value usable inside rendered template tags.
    """
    text = repr(var)
    if text.startswith(("u'", 'u"')):
        return text[1:]
    return text
def tag_repr(name, *args, **kwargs):
    """Render a Django-style template tag: ``{% name arg k=repr(v) %}``.

    Positional arguments are appended verbatim; keyword arguments are rendered
    as ``key=template_repr(value)``, and any keyword whose value is ``None`` is
    omitted entirely.
    """
    pieces = [name]
    pieces.extend(args)
    pieces.extend(
        "%s=%s" % (key, template_repr(value))
        for key, value in kwargs.items()
        if value is not None
    )
    return "{%% %s %%}" % " ".join(pieces)
"[email protected]"
]
| |
09cdd6da39ecd793c55b596eeafdfec3a03a092e | 6daa3815511b1eb1f4ff3a40b7e9332fab38b8ef | /tastesavant/taste/tests/steps.py | 1de3767d55e2ec324a42e1607c1b534641af88ed | []
| no_license | kaizensoze/archived-projects | 76db01309453606e6b7dd9d2ff926cfee42bcb05 | d39ac099cb40131bac5de66bde7d0e2db5f74189 | refs/heads/master | 2021-05-31T12:16:17.800730 | 2016-02-23T00:27:56 | 2016-02-23T00:27:56 | 14,407,212 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,601 | py | # -*- coding: utf-8 -*-
from urlparse import urlparse
from django.contrib.auth import login, authenticate
from django.contrib.auth.models import User
from django.http import HttpRequest
from lettuce import step, world
from lettuce.django import django_url
from lettuce_webdriver.util import AssertContextManager
from taste.apps.restaurants.models import Restaurant
from taste.apps.reviews.models import Review
# Registry mapping model names used in the feature files to their Django model
# classes, so steps like 'there is a "Restaurant"' can resolve them by name.
# @todo: not very DRY; what's a better way?
MODELS = {
    'Restaurant': Restaurant,
    'User': User,
}
@step(r'I go to "(.*)"')
def go_to_url(step, url):
    """Navigate the shared browser to *url*, resolved against the test server."""
    with AssertContextManager(step):
        full_url = django_url(url)
        world.browser.visit(full_url)
@step(u'there is a "(.*?)"')
def there_is_a_model(step, model):
    """Assert that at least one instance of the named model exists."""
    with AssertContextManager(step):
        try:
            model = MODELS[model]
        except KeyError:
            raise AssertionError("No such model: %s" % model)
        # BUG FIX: the original compared the bound method itself
        # (``model.objects.count > 0``), which never queried the database and
        # was always truthy under Python 2, so this step could never fail.
        # Call count() to compare the actual row count.
        assert model.objects.count() > 0
@step(u'I am logged in')
def i_am_logged_in(step):
    """Log in through the real login form as the fixture user ``test``."""
    with AssertContextManager(step):
        # We assume a certain user present in the fixtures.
        full_url = django_url('/accounts/login/')
        world.browser.visit(full_url)
        world.browser.fill('username', 'test')
        world.browser.fill('password', 'foobar')
        world.browser.find_by_css('.button').last.click()


@step(u'my profile is complete')
def my_profile_is_complete(step):
    """Assert the fixture user's profile passes its own validity check."""
    with AssertContextManager(step):
        user = User.objects.get(username='test')
        assert user.get_profile().is_valid
## Search steps
# ---------------------------------------------------------------------------
@step(u'a user named "([^"]*)" exists')
def a_user_named_name_exists(step, name):
    """Assert that a User with the given username exists in the database."""
    with AssertContextManager(step):
        try:
            User.objects.get(username=name)
            assert True
        except User.DoesNotExist:
            assert False
@step(u'I search for a user named "([^"]*)"')
def i_search_for_a_user_named_name(step, name):
    """Submit the user-search form on /search/users/ for *name*."""
    with AssertContextManager(step):
        full_url = django_url('/search/users/')
        world.browser.visit(full_url)
        # We're using the .last ones here, because apparently Zope Testbrowser
        # doesn't get IDs with dashes in them well.
        world.browser.find_by_id('friend-input').last.fill(name)
        world.browser.find_by_id('friend-input-submit').last.click()


@step(u'I should see a results page showing "([^"]*)"')
def i_should_see_a_results_page_showing_name(step, name):
    """Assert *name* appears among the follow-suggestion results."""
    with AssertContextManager(step):
        elts = world.browser.find_by_css('.follow-suggestions-name')
        assert any(map(lambda x: x.value == name, elts))
@step(u'a restaurant named "([^"]*)" exists')
def a_restaurant_named_name_exists(step, name):
    """Assert that a Restaurant with the given name exists in the database."""
    with AssertContextManager(step):
        try:
            Restaurant.objects.get(name=name)
        except Restaurant.DoesNotExist:
            # BUG FIX: the original caught ``User.DoesNotExist`` (a copy-paste
            # from the user step). Each Django model has its own DoesNotExist
            # class, so a missing restaurant blew up with an unhandled
            # Restaurant.DoesNotExist instead of failing the step cleanly.
            assert False, "No restaurant named %s" % name
@step(u'I search for a restaurant named "([^"]*)"')
def i_search_for_a_restaurant_named_name(step, name):
    """Submit the homepage search form for a restaurant called *name*."""
    with AssertContextManager(step):
        full_url = django_url('/')
        world.browser.visit(full_url)
        world.browser.find_by_id('id_query').last.fill(name)
        world.browser.find_by_id('search-submit').first.click()


@step(u'I should see a results page showing a restaurant named "([^"]*)"')
def i_should_see_a_results_page_showing_a_restaurant_named_name(step, name):
    """Assert *name* appears among the restaurant search results."""
    with AssertContextManager(step):
        # Wait for the AJAX call
        world.browser.is_element_present_by_css('.result-link', wait_time=10)
        elts = world.browser.find_by_css('.restaurant-name')
        assert any(map(lambda x: x.value == name, elts))
## Review steps
# ---------------------------------------------------------------------------
# NOTE(review): the character class [^""] below is equivalent to [^"] -- the
# doubled quote is redundant but harmless; kept for byte-compatibility.
@step(u'I create a review for "([^""]*)"')
def i_create_a_review_for_name(step, name):
    """Open the create/edit review page for the named restaurant."""
    with AssertContextManager(step):
        restaurant = Restaurant.objects.get(name=name)
        full_url = django_url('/restaurant/%s/review/create_edit/' % restaurant.slug)
        world.browser.visit(full_url)


@step(u'a review should exist for "([^""]*)"')
def that_review_should_exist_for_name(step, name):
    """Assert the fixture user ``test`` has a review for the named restaurant."""
    with AssertContextManager(step):
        user = User.objects.get(username='test')
        restaurant = Restaurant.objects.get(name=name)
        try:
            Review.objects.get(user=user, restaurant=restaurant)
            assert True
        except Review.DoesNotExist:
            assert False, "The review does not exist."


@step(u'I should see it on the restaurant page for "([^""]*)"')
def i_should_see_it_on_the_restaurant_page_for_name(step, name):
    """Assert the fixture user's display name shows on the savants review tab."""
    with AssertContextManager(step):
        restaurant = Restaurant.objects.get(name=name)
        full_url = django_url('/restaurant/%s/review/savants/' % restaurant.slug)
        world.browser.visit(full_url)
        elts = world.browser.find_by_css('.review h2')
        # 'Test E.' is the rendered display name of the fixture user.
        assert any(map(lambda x: x.value == 'Test E.', elts))


@step(u'I should see a review for "([^""]*)" on my profile')
def i_should_see_a_review_for_name_on_my_profile(step, name):
    """Assert the named restaurant shows in the 'my reviews' profile section."""
    with AssertContextManager(step):
        restaurant = Restaurant.objects.get(name=name)
        full_url = django_url('/profiles/%s/' % 'test')
        world.browser.visit(full_url)
        elts = world.browser.find_by_css('#my-reviews .review h2')
        assert any(map(lambda x: x.value == restaurant.name, elts))
## Review steps
# ---------------------------------------------------------------------------
# @todo: parse the path element of the links, since the href is fully specified
@step(u'I should see critic reviews')
def i_should_see_critic_reviews(step):
    """Assert all review headline links point at critic pages."""
    with AssertContextManager(step):
        elts = world.browser.find_by_css('#reviews h2 a')
        assert len(elts) > 0
        assert all(map(lambda x: urlparse(x['href']).path.startswith('/critic/'), elts))


@step(u'I should see user reviews')
def i_should_see_user_reviews(step):
    """Assert all review headline links point at user profile pages."""
    with AssertContextManager(step):
        elts = world.browser.find_by_css('#reviews h2 a')
        assert len(elts) > 0
        assert all(map(lambda x: urlparse(x['href']).path.startswith('/profiles/'), elts))


@step(u'I should see friend reviews')
def i_should_see_friend_reviews(step):
    """Assert friend reviews (or the empty-state prompt) are shown."""
    with AssertContextManager(step):
        elts = world.browser.find_by_css('#reviews h2 a')
        if len(elts) > 0:
            # NOTE(review): this checks for '/critic/' links, which looks
            # copy-pasted from i_should_see_critic_reviews -- friend reviews
            # would presumably link to '/profiles/'. Confirm before changing.
            assert all(map(lambda x: urlparse(x['href']).path.startswith('/critic/'), elts))
        else:
            assert "Want to ask your friends about this Restaurant?" in world.browser.html
| [
"[email protected]"
]
| |
281e26ea1ec3a66c875c9fcbfcddf7bea0ab7124 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02735/s901108588.py | f6b1a85f048bbee1370c201a20c65c14f71472da | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | _,*s=open(0);t="."
b=t*101;q=range(101);i=0
for s in s:
a=[i];i+=1
for x,y,z,c in zip(b,t+s,s,q):a+=min(c+(t>z<x),a[-1]+(t>z<y)),
b,q=s,a[1:]
print(a[-2]) | [
"[email protected]"
]
| |
877aeeda55babf2eddf7c6b3e99bbcd34aa7de6b | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2065/60581/238639.py | 0b6140cd9552c6b8e6249ceccb99bf5ca41994cc | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | str = list(input())
number = ''
i = 0
judge = False
while i < len(str):
if not judge:
if str[i]==' ' or str[i]=='"':
i += 1
elif str[i]=='-':
judge = True
number += str[i]
i += 1
elif str[i]>='0' and str[i]<='9':
judge = True
number += str[i]
i += 1
else:
print(0)
break
else:
if str[i]>='0' and str[i]<='9':
number += str[i]
i += 1
if i == len(str):
number = int(number)
if number <= pow(2, 31) - 1 and number >= -pow(2, 31):
print(number)
elif number > pow(2, 31) - 1:
print(pow(2, 31) - 1)
elif number < -pow(2, 31):
print(-pow(2, 31))
else:
number = int(number)
if number <= pow(2,31)-1 and number >= -pow(2,31):
print(number)
elif number > pow(2,31)-1:
print(pow(2,31)-1)
elif number < -pow(2,31):
print(-pow(2,31))
break | [
"[email protected]"
]
| |
0ac9b7d50b3f82f281b0d8fbcb21e8b9c3ef7b46 | 6329ece221f3b2295cb3791168c876d260ed0c31 | /test_learn/chapter4基本窗口控件/对话框类控件/QFileDialog.py | ab66856621d3dd3617c5317bdf95c2ce59f1a629 | []
| no_license | dxcv/PythonGUI | 4908e049dde5e569a3ad916c6bac385c7953a243 | a39beef0006e3e4057e3a9bcb3e0e64ea790427e | refs/heads/master | 2020-05-17T17:14:58.250549 | 2019-04-11T09:32:52 | 2019-04-11T09:32:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,655 | py | # -*- coding:UTF-8 -*-
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import sys
class fileDialogDemo(QWidget):
    """Small PyQt5 QFileDialog demo.

    One button loads an image into a centred label; a second button loads a
    text file into a QTextEdit.
    """

    def __init__(self,parent=None):
        super(fileDialogDemo,self).__init__(parent)
        self.setWindowTitle("Demo")
        self.resize(500,500)
        self.move(200,200)
        layout=QVBoxLayout()
        self.btn=QPushButton("加载图片")  # button label: "load image"
        self.btn.clicked.connect(self.getfile)
        layout.addWidget(self.btn)
        # The next three lines create the label that will later hold the image.
        self.le=QLabel("")
        self.le.setAlignment(Qt.AlignCenter)
        layout.addWidget(self.le)
        self.btn1=QPushButton("加载文本文件")  # button label: "load text file"
        self.btn1.clicked.connect(self.getfiles)
        layout.addWidget(self.btn1)
        self.contents=QTextEdit()
        layout.addWidget(self.contents)
        self.setLayout(layout)

    def getfile(self):
        """Pick a .jpg/.gif (starting from the parent directory) and show it."""
        fname,_=QFileDialog.getOpenFileName(self,'Open file',"../","Image files(*.jpg *.gif)")
        self.le.setPixmap(QPixmap(fname))

    def getfiles(self):
        """Pick any file via a modal dialog and load its text into the editor."""
        dig=QFileDialog()
        dig.setFileMode(QFileDialog.AnyFile)
        # NOTE(review): setFilter(QDir.Files) filters the directory *listing*,
        # not the selectable file types -- presumably setNameFilter was
        # intended; confirm against the Qt docs before changing.
        dig.setFilter(QDir.Files)
        # Read the first selected file and put its contents into self.contents.
        # (Translated from the original Chinese note.)
        if dig.exec_():
            filenames=dig.selectedFiles()
            f=open(filenames[0],'r')
            with f:
                data=f.read()
                self.contents.setText(data)

if __name__=='__main__':
    app=QApplication(sys.argv)
    form=fileDialogDemo()
    form.show()
    sys.exit(app.exec_())
| [
"[email protected]"
]
| |
cbfb0a7ceefd20b47c2632746185783e57fbe556 | 6ef3b1919e7acbc72e5706b2dc6d716f8929e3d2 | /pytorch_lightning/trainer/optimizers.py | 663871d98b419c6f3037dcb83eff927bbacd78ae | [
"MIT"
]
| permissive | linshaoxin-maker/taas | 04f7dcc7c0d2818718e6b245531e017ca5370231 | 34e11fab167a7beb78fbe6991ff8721dc9208793 | refs/heads/main | 2023-01-19T20:58:04.459980 | 2020-11-27T02:28:36 | 2020-11-27T02:28:36 | 329,522,465 | 6 | 0 | MIT | 2021-01-14T06:02:08 | 2021-01-14T06:02:07 | null | UTF-8 | Python | false | false | 6,897 | py | from abc import ABC
from typing import List, Tuple
import torch
from torch import optim
from torch.optim.optimizer import Optimizer
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.utilities import rank_zero_warn
class TrainerOptimizersMixin(ABC):
    """Trainer mixin that normalises the output of ``configure_optimizers``."""

    def init_optimizers(
            self,
            model: LightningModule
    ) -> Tuple[List, List, List]:
        """Call ``model.configure_optimizers()`` and normalise its result.

        Returns a 3-tuple ``(optimizers, lr_schedulers, optimizer_frequencies)``
        regardless of which of the supported return shapes the model used.
        Raises ``ValueError`` for unrecognised shapes.
        """
        optim_conf = model.configure_optimizers()

        # No optimizer at all: warn and substitute a no-op stand-in so the
        # training loop can run unchanged.
        if optim_conf is None:
            rank_zero_warn('`LightningModule.configure_optimizers` returned `None`, '
                           'this fit will run with no optimizer', UserWarning)
            optim_conf = _MockOptimizer()

        # single output, single optimizer
        if isinstance(optim_conf, Optimizer):
            return [optim_conf], [], []

        # two lists: a list of optimizers plus a list of lr schedulers
        elif isinstance(optim_conf, (list, tuple)) and len(optim_conf) == 2 \
                and isinstance(optim_conf[0], list):
            optimizers, lr_schedulers = optim_conf
            lr_schedulers = self.configure_schedulers(lr_schedulers)
            return optimizers, lr_schedulers, []

        # single dictionary with an 'optimizer' key and optional 'lr_scheduler'
        elif isinstance(optim_conf, dict):
            optimizer = optim_conf["optimizer"]
            lr_scheduler = optim_conf.get("lr_scheduler", [])
            if lr_scheduler:
                lr_schedulers = self.configure_schedulers([lr_scheduler])
            else:
                lr_schedulers = []
            return [optimizer], lr_schedulers, []

        # multiple dictionaries, one per optimizer
        elif isinstance(optim_conf, (list, tuple)) and isinstance(optim_conf[0], dict):
            optimizers = [opt_dict["optimizer"] for opt_dict in optim_conf]
            # keep only the lr schedulers that are present and not None
            lr_schedulers = [
                opt_dict["lr_scheduler"] for opt_dict in optim_conf if opt_dict.get("lr_scheduler")
            ]
            # keep only the frequencies that are present and not None
            optimizer_frequencies = [
                opt_dict["frequency"] for opt_dict in optim_conf if opt_dict.get("frequency") is not None
            ]

            # normalise the scheduler list into the canonical dict form
            if lr_schedulers:
                lr_schedulers = self.configure_schedulers(lr_schedulers)
            # if frequencies are present, they must be given for all optimizers
            if optimizer_frequencies and len(optimizer_frequencies) != len(optimizers):
                raise ValueError("A frequency must be given to each optimizer.")
            return optimizers, lr_schedulers, optimizer_frequencies

        # single list or tuple of optimizers
        elif isinstance(optim_conf, (list, tuple)):
            return list(optim_conf), [], []

        # unknown configuration
        else:
            raise ValueError(
                'Unknown configuration for model optimizers.'
                ' Output from `model.configure_optimizers()` should either be:'
                ' * single output, single `torch.optim.Optimizer`'
                ' * single output, list of `torch.optim.Optimizer`'
                ' * single output, a dictionary with `optimizer` key (`torch.optim.Optimizer`)'
                ' and an optional `lr_scheduler` key (`torch.optim.lr_scheduler`)'
                ' * two outputs, first being a list of `torch.optim.Optimizer` second being'
                ' a list of `torch.optim.lr_scheduler`'
                ' * multiple outputs, dictionaries as described with an optional `frequency` key (int)')

    def configure_schedulers(self, schedulers: list):
        """Convert each scheduler into a dict carrying scheduling metadata.

        Fills in defaults for 'interval', 'frequency', 'reduce_on_plateau' and
        'monitor'; raises ``ValueError`` for unsupported inputs.
        """
        lr_schedulers = []
        default_config = {'interval': 'epoch',  # default: step every epoch
                          'frequency': 1,  # default: every epoch/batch
                          'reduce_on_plateau': False,  # most schedulers are not ReduceLROnPlateau
                          'monitor': 'val_loss'}  # default metric for ReduceLROnPlateau
        for scheduler in schedulers:
            if isinstance(scheduler, dict):
                if 'scheduler' not in scheduler:
                    raise ValueError('Lr scheduler should have key `scheduler`',
                                     ' with item being a lr scheduler')
                # Derive the plateau flag from the wrapped scheduler's type.
                scheduler['reduce_on_plateau'] = isinstance(
                    scheduler['scheduler'], optim.lr_scheduler.ReduceLROnPlateau)

                # User-provided keys override the defaults.
                lr_schedulers.append({**default_config, **scheduler})

            elif isinstance(scheduler, optim.lr_scheduler.ReduceLROnPlateau):
                lr_schedulers.append({**default_config, 'scheduler': scheduler,
                                      'reduce_on_plateau': True})

            elif isinstance(scheduler, optim.lr_scheduler._LRScheduler):
                lr_schedulers.append({**default_config, 'scheduler': scheduler})
            else:
                raise ValueError(f'Input {scheduler} to lr schedulers '
                                 'is a invalid input.')
        return lr_schedulers

    def reinit_scheduler_properties(self, optimizers: list, schedulers: list):
        """Re-run each scheduler's base ``__init__`` against its optimizer.

        This restores the ``optimizer.step`` wrapping that schedulers install,
        then reloads the scheduler's saved state so no progress is lost.
        """
        for scheduler in schedulers:
            scheduler = scheduler['scheduler']

            for optimizer in optimizers:
                # only touch the scheduler that belongs to this optimizer
                if scheduler.optimizer == optimizer:
                    # Find the MRO entry of the base lr scheduler class.
                    state = None
                    for i, mro in enumerate(scheduler.__class__.__mro__):
                        if (
                            mro == optim.lr_scheduler._LRScheduler
                            or mro == optim.lr_scheduler.ReduceLROnPlateau
                        ):
                            idx = i
                            state = scheduler.state_dict()
                            # BUG FIX: stop at the first match. The original
                            # kept iterating and reset ``state`` to None on
                            # every subsequent MRO entry (e.g. ``object``), so
                            # the captured state was never restored below.
                            break

                    scheduler.__class__.__mro__[idx].__init__(scheduler, optimizer)
                    if state is not None:
                        scheduler.load_state_dict(state)
class _MockOptimizer(Optimizer):
"""The `_MockOptimizer` will be used inplace of an optimizer in the event that `None`
is returned from `configure_optimizers`.
"""
def __init__(self):
super().__init__([torch.zeros(1)], {})
def add_param_group(self, param_group):
pass # Do Nothing
def load_state_dict(self, state_dict):
pass # Do Nothing
def state_dict(self):
return {} # Return Empty
def step(self, closure=None):
if closure is not None:
closure()
def zero_grad(self):
pass # Do Nothing
def __repr__(self):
return 'No Optimizer'
| [
"[email protected]"
]
| |
cd62d851128b56dfad4c05a46bf175597a5648dd | ef7eabdd5f9573050ef11d8c68055ab6cdb5da44 | /topCoder/srms/500s/srm501/div2/fox_progression.py | 7d232b6426e31efc69bb07c2a286459f0cdd6a0f | [
"WTFPL"
]
| permissive | gauravsingh58/algo | cdbf68e28019ba7c3e4832e373d32c71902c9c0d | 397859a53429e7a585e5f6964ad24146c6261326 | refs/heads/master | 2022-12-28T01:08:32.333111 | 2020-09-30T19:37:53 | 2020-09-30T19:37:53 | 300,037,652 | 1 | 1 | WTFPL | 2020-10-15T09:26:32 | 2020-09-30T19:29:29 | Java | UTF-8 | Python | false | false | 570 | py | class FoxProgression:
def theCount(self, seq):
if len(seq) == 1:
return -1
a, m = seq[1]-seq[0], seq[1]/float(seq[0])
af, mf = True, m == int(m)
c = sum((af, mf))
for i, j in zip(seq[1:], seq[2:]):
if af and j-i != a:
af = False
c -= 1
if mf and j/float(i) != m:
mf = False
c -= 1
if not af and not mf:
break
if af and mf and seq[-1] + a == seq[-1] * m:
c -= 1
return c
| [
"[email protected]"
]
| |
a1eca980f1e7db4ea2bb2c1345ea2b21ac48c02d | edfdc0d3a2fdeed95ba7aa3d0e198eb9dafe4064 | /operator_api/auditor/serializers/wallet_state.py | 1112734b67db81e9f2b2170501516b8880cfed53 | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | xiaobai900/nocust-hub | 880e72ba4e1d324ae36adea6c03c9761a7d91621 | 76f49f9b8a6c264fcbe9e0c110e98031d463c0a8 | refs/heads/master | 2023-05-28T08:18:17.402228 | 2020-11-01T19:48:17 | 2020-11-01T19:48:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,988 | py | from rest_framework import serializers
from ledger.models import Transfer, Deposit, WithdrawalRequest, Withdrawal
from django.db.models import Q
from .admission import AdmissionSerializer
from .proof import ProofSerializer
from .transfer import TransactionSerializer
from .deposit import DepositSerializer
from .withdrawal import WithdrawalSerializer
from .withdrawal_request import WithdrawalRequestSerializer
class WalletStateSerializer(serializers.Serializer):
    """Read-only snapshot of a wallet's state for one eon.

    Aggregates the wallet's admission record, its balance merkle proof, and
    every transfer/deposit/withdrawal belonging to the requested eon.
    """
    registration = AdmissionSerializer(read_only=True)
    merkle_proof = ProofSerializer(read_only=True)
    transactions = TransactionSerializer(many=True, read_only=True)
    deposits = DepositSerializer(many=True, read_only=True)
    withdrawal_requests = WithdrawalRequestSerializer(
        many=True, read_only=True)
    withdrawals = WithdrawalSerializer(many=True, read_only=True)

    def to_representation(self, wallet_data_request):
        """Assemble the response dict from the ORM.

        ``wallet_data_request`` carries the wallet, the eon number, and the
        transfer id to resume from.
        """
        # Latest balance allotment for this eon (None if the wallet has none);
        # its merkle proof lets the client audit the checkpoint.
        balance = wallet_data_request.wallet\
            .exclusivebalanceallotment_set\
            .filter(eon_number=wallet_data_request.eon_number).last()

        # Transfers in this eon from ``transfer_id`` onward where the wallet
        # is either sender or recipient; select_related avoids N+1 queries.
        transactions = Transfer.objects\
            .filter(eon_number=wallet_data_request.eon_number, id__gte=wallet_data_request.transfer_id)\
            .filter(Q(wallet=wallet_data_request.wallet) | Q(recipient=wallet_data_request.wallet))\
            .select_related('recipient')\
            .select_related('wallet')\
            .select_related('recipient__token')\
            .select_related('wallet__token')\
            .order_by('id')

        deposits = Deposit.objects \
            .filter(wallet=wallet_data_request.wallet)\
            .filter(eon_number=wallet_data_request.eon_number) \
            .order_by('id')

        withdrawal_requests = WithdrawalRequest.objects \
            .filter(wallet=wallet_data_request.wallet)\
            .filter(eon_number=wallet_data_request.eon_number) \
            .order_by('id')

        withdrawals = Withdrawal.objects \
            .filter(wallet=wallet_data_request.wallet)\
            .filter(eon_number=wallet_data_request.eon_number) \
            .order_by('id')

        return {
            'registration':
                AdmissionSerializer(
                    wallet_data_request.wallet, read_only=True).data,
            # merkle_proof is None when no balance allotment exists this eon.
            'merkle_proof':
                ProofSerializer(
                    balance, read_only=True).data if balance is not None else None,
            # wallet_id in the context tells the serializer which side of each
            # transfer this wallet is on.
            'transactions':
                TransactionSerializer(transactions, context={
                    'wallet_id': wallet_data_request.wallet.id}, many=True, read_only=True).data,
            'deposits':
                DepositSerializer(deposits, many=True, read_only=True).data,
            'withdrawal_requests':
                WithdrawalRequestSerializer(
                    withdrawal_requests, many=True, read_only=True).data,
            'withdrawals':
                WithdrawalSerializer(
                    withdrawals, many=True, read_only=True).data
        }
| [
"[email protected]"
]
| |
0d1b22f3e6aacb8bee17e5edebb65a999f43f842 | 612325535126eaddebc230d8c27af095c8e5cc2f | /depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/boto/boto/codedeploy/__init__.py | f48c2dae9e305a94834d5aaafbadc02808f60652 | [
"Apache-2.0",
"MIT",
"BSD-3-Clause"
]
| permissive | TrellixVulnTeam/proto-quic_1V94 | 1a3a03ac7a08a494b3d4e9857b24bb8f2c2cd673 | feee14d96ee95313f236e0f0e3ff7719246c84f7 | refs/heads/master | 2023-04-01T14:36:53.888576 | 2019-10-17T02:23:04 | 2019-10-17T02:23:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,703 | py | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
    """
    Get all available regions for the AWS CodeDeploy service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported locally, presumably to avoid a circular import between this
    # package __init__ and layer1 -- confirm before moving to module level.
    from boto.codedeploy.layer1 import CodeDeployConnection
    return get_regions('codedeploy', connection_cls=CodeDeployConnection)
def connect_to_region(region_name, **kw_params):
    """Return a CodeDeploy connection for *region_name*, or ``None`` if no
    region by that name exists. Extra keyword arguments are forwarded to the
    connection constructor."""
    match = next((r for r in regions() if r.name == region_name), None)
    return match.connect(**kw_params) if match is not None else None
| [
"[email protected]"
]
| |
b13acb7fc13d6d4bd873a484ebf7f63ace15afc0 | 9eaa2c64a777bd24a3cccd0230da5f81231ef612 | /study/1905/month01/code/Stage4/day10/02_renrenlogin2.py | 31c6c8f20308f336b60ec8fed5843bcce90c5158 | [
"MIT"
]
| permissive | Dython-sky/AID1908 | 4528932f2ca66b844d8a3fcab5ed8bf84d20eb0c | 46cd54a7b36b5f009974f2bbb7005a4ad440ca1a | refs/heads/master | 2022-04-14T12:23:30.426270 | 2020-04-01T18:05:19 | 2020-04-01T18:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,801 | py | import requests
from lxml import etree
class RenrenLogin(object):
    """Scrape one field from a renren.com profile page using a pre-captured
    session cookie instead of performing a real login."""

    def __init__(self):
        # URL of the profile page to scrape
        self.url = 'http://www.renren.com/973116780/profile'
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36'
        }

    def get_cookies(self):
        """Split the captured browser cookie string into a dict for requests."""
        cookies = {}
        cookies_str = 'anonymid=k4c0r1yz-pzvxkf; depovince=GW; _r01_=1; JSESSIONID=abcDAiw02y0mU64ddRB8w; ick_login=83f80a9a-2b55-40f3-8685-88a6144f0626; t=f41b47d87ea76c271cb60f84bd6706660; societyguester=f41b47d87ea76c271cb60f84bd6706660; id=973116780; xnsid=e4530f4b; ver=7.0; loginfrom=null; jebe_key=77eabbe7-277c-4414-ab89-64253f69f03f%7Cccea8a8b81a47ac4b3a5c48154653dfb%7C1576717335984%7C1%7C1576717341787; jebe_key=77eabbe7-277c-4414-ab89-64253f69f03f%7Cccea8a8b81a47ac4b3a5c48154653dfb%7C1576717335984%7C1%7C1576717341792; wp_fold=0; jebecookies=cd8b6928-eee7-4bde-a161-7933a4f284d8|||||; XNESSESSIONID=ead277ab8d81'
        for kv in cookies_str.split('; '):
            key = kv.split('=')[0]
            value = kv.split('=')[1]
            cookies[key] = value
        return cookies

    def get_parse_html(self):
        """Fetch the profile page with the session cookies and print the first
        entry extracted by the XPath below."""
        cookies = self.get_cookies()
        html = requests.get(
            url=self.url,
            headers=self.headers,
            # cookies parameter: a plain dict
            cookies=cookies,
        ).text
        parse_obj = etree.HTML(html)
        xpath_bds = '//*[@id="operate_area"]/div[1]/ul/li[1]/span/text()'
        r_list = parse_obj.xpath(xpath_bds)
        # e.g. r_list == ['就读于国家检察官学院']
        # ("attends the National Prosecutors College")
        print(r_list)

    def run(self):
        """Entry point: fetch and print the profile field."""
        self.get_parse_html()

if __name__ == '__main__':
    spider = RenrenLogin()
    spider.run()
"[email protected]"
]
| |
57d19a3a065a491b3910d9971156d10e1ef56fb4 | ce5362e8871b53b02c0a95f82fba8df98ccccad1 | /Service/debug.py | f62461eaaf82a35cabf20b378a19acb411186ea5 | []
| no_license | git-hacker/TeamA_NOISES | d7c810ed44d6694b5e6933e87ac3ac221f806e24 | def359e752b84aab0b16cfb2cecf3412f684bfab | refs/heads/master | 2020-03-28T08:57:45.337321 | 2018-09-10T15:45:08 | 2018-09-10T15:45:08 | 148,002,844 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 707 | py | import librosa
import numpy as np
def read_wav(path, fs=16000):
    """Load the audio at *path* resampled to *fs* Hz, with leading/trailing
    zeros stripped."""
    data, _ = librosa.load(path, sr=fs)
    return np.trim_zeros(data)

def play(array, fs=16000):
    """Play *array* through the default audio device (blocking)."""
    import sounddevice as sd
    print("Playing audio...")
    sd.play(array, fs, blocking=True)
    print("Stop playing.\n")

def denoise(wave, sr=16000):
    """Denoise *wave* by calling the Octave logmmse implementation.

    Requires oct2py and an Octave installation with api/logmmse on its path.
    Returns a 1-D float32 array.
    """
    from oct2py import octave
    x = octave.feval('api/logmmse', wave, sr)
    x = np.float32(x)
    return np.squeeze(x)

def draw_wave(*wave):
    """Plot one or more waveforms on a single matplotlib figure."""
    from matplotlib import pyplot as plt
    plt.figure()
    for w in wave:
        plt.plot(w, linewidth=.2)
    plt.show()

# Ad-hoc manual test: denoise one sample file and dump the result.
wave = read_wav('media/noise_2jts5xo.wav')
clean = denoise(wave)
print()
# play(wave)
# play(clean)
print(clean)
"[email protected]"
]
| |
e102783efdf5acc7145bb6ca5f1590a771086b2d | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/KISS/testcase/firstcases/testcase3_003.py | 15f1fa5f77c982ba7400d547ef607b57ab7e9616 | []
| no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,016 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
# Appium capabilities for driving the KISS launcher on an Android 4.4 emulator,
# with Jacoco instrumentation enabled so coverage can be dumped afterwards.
desired_caps = {
    'platformName' : 'Android',
    'deviceName' : 'Android Emulator',
    'platformVersion' : '4.4',
    'appPackage' : 'fr.neamar.kiss',
    'appActivity' : 'fr.neamar.kiss.MainActivity',
    'resetKeyboard' : True,
    'androidCoverage' : 'fr.neamar.kiss/fr.neamar.kiss.JacocoInstrumentation',
    'noReset' : True
}
def command(cmd, timeout=5):
    """Run *cmd* in a shell, give it *timeout* seconds, then terminate it.

    Used for fire-and-forget adb calls; output is captured but discarded.
    """
    p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
    time.sleep(timeout)
    p.terminate()
    return

def getElememt(driver, str) :
    """Find an element by UiAutomator selector, retrying up to 5 times.

    As a last resort taps (50, 50) to wake the screen and tries once more.
    NOTE(review): the name looks like a typo for ``getElement`` and ``str``
    shadows the builtin, but both belong to the generated script's interface.
    """
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    os.popen("adb shell input tap 50 50")
    element = driver.find_element_by_android_uiautomator(str)
    return element

def getElememtBack(driver, str1, str2) :
    """Try selector *str1* (2 attempts), then fall back to *str2* (5 attempts),
    finally tapping the screen and retrying *str2* once more."""
    for i in range(0, 2, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str1)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str2)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    os.popen("adb shell input tap 50 50")
    element = driver.find_element_by_android_uiautomator(str2)
    return element

def swipe(driver, startxper, startyper, endxper, endyper) :
    """Swipe across the screen using window-size fractions (0..1 per axis),
    retrying once on WebDriverException."""
    size = driver.get_window_size()
    width = size["width"]
    height = size["height"]
    try:
        driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
                     end_y=int(height * endyper), duration=2000)
    except WebDriverException:
        time.sleep(1)
        driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
                     end_y=int(height * endyper), duration=2000)
    return
# testcase003
# Generated Appium scenario (Python 2): type queries into the KISS search bar,
# long-press/tap result items, clear the field, then dump Jacoco coverage and
# force-stop any foreign foreground app.
try :
    starttime = time.time()
    driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    # Fill the search field and exercise various result widgets.
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/searchEditText\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("Search apps, contacts, .");
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/searchEditText\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("1test");
    element = getElememtBack(driver, "new UiSelector().text(\"clock alarm timer stopwatch\")", "new UiSelector().className(\"android.widget.TextView\").instance(2)")
    TouchAction(driver).long_press(element).release().perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/item_app_icon\").className(\"android.widget.ImageView\")")
    TouchAction(driver).tap(element).perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/searchEditText\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("testt");
    element = getElememtBack(driver, "new UiSelector().text(\"Clock\")", "new UiSelector().className(\"android.widget.TextView\").instance(1)")
    TouchAction(driver).tap(element).perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/searchEditText\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("testt");
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/clearButton\").className(\"android.widget.ImageView\")")
    TouchAction(driver).tap(element).perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/searchEditText\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("Search apps, contacts, .");
    element = getElememtBack(driver, "new UiSelector().text(\"Search Google for “12st”\")", "new UiSelector().className(\"android.widget.TextView\")")
    TouchAction(driver).long_press(element).release().perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/searchEditText\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("1test");
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/clearButton\").className(\"android.widget.ImageView\")")
    TouchAction(driver).tap(element).perform()

except Exception, e:
    # Any driver failure: report and fall through to the coverage dump.
    print 'FAIL'
    print 'str(e):\t\t', str(e)
    print 'repr(e):\t', repr(e)
    print traceback.format_exc()
else:
    print 'OK'
finally:
    # Always dump Jacoco coverage and restore the launcher, pass or fail.
    cpackage = driver.current_package
    endtime = time.time()
    print 'consumed time:', str(endtime - starttime), 's'
    command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"3_003\"")
    jacocotime = time.time()
    print 'jacoco time:', str(jacocotime - endtime), 's'
    driver.quit()
    # If the scenario navigated away from KISS, force-stop that app.
    if (cpackage != 'fr.neamar.kiss'):
        cpackage = "adb shell am force-stop " + cpackage
        os.popen(cpackage)
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.