blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aa0f59f38a582475a55c56c36a78be79bab75599 | fe203d5c28e2010cdc78a4b29755e148d58045db | /p02/q07_miles_to_kilometres.py | 1e02355748970bfbdec15c9a409bb1cc807d34ef | [] | no_license | sp0002/cp2019 | d2a9aa5bfe7c82de3ed3f96f281c39be8704d3bd | 6c48528f948dad01f4d6571e3bb22dbf253c423c | refs/heads/master | 2020-04-24T23:24:21.324069 | 2019-04-13T11:13:01 | 2019-04-13T11:13:01 | 171,574,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | print("Miles Kilometers Kilometres Miles")
for i in range(10):
print(str(i+1) + (" "*(6-len(str(i+1)))) + "{:.3f}".format(round(((i+1)*1.60934), 3)) +
(" "*(11-len(str(round(((i+1)*1.60934), 3))))) + str(i*5+20) +
(" "*(11-len(str(i*5+20))) + "{:.3f}".format(round((i*5+20)/1.60934, 3)))) | [
"k"
] | k |
ca592bb99a1866b3bd5f87d00cf9884fb0e2e036 | fab39aa4d1317bb43bc11ce39a3bb53295ad92da | /nncf/torch/dynamic_graph/operation_address.py | f9fe1e55d976d86a8bab71816c910f10257af01d | [
"Apache-2.0"
] | permissive | dupeljan/nncf | 8cdce27f25f01ce8e611f15e1dc3036fb8548d6e | 0abfd7103ca212888a946ba4d0fbdb9d436fdaff | refs/heads/develop | 2023-06-22T00:10:46.611884 | 2021-07-22T10:32:11 | 2021-07-22T10:32:11 | 388,719,455 | 0 | 0 | Apache-2.0 | 2021-07-23T07:46:15 | 2021-07-23T07:43:43 | null | UTF-8 | Python | false | false | 1,715 | py | """
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from nncf.torch.dynamic_graph.scope import Scope
class OperationAddress:
def __init__(self, operator_name: str, scope_in_model: Scope, call_order: int):
self.operator_name = operator_name
self.scope_in_model = scope_in_model
self.call_order = call_order
def __eq__(self, other: 'OperationAddress'):
return isinstance(other, OperationAddress) and \
(self.operator_name == other.operator_name) and \
(self.scope_in_model == other.scope_in_model) and \
(self.call_order == other.call_order)
def __str__(self):
return str(self.scope_in_model) + '/' + \
self.operator_name + "_" + str(self.call_order)
def __hash__(self):
return hash((self.operator_name, self.scope_in_model, self.call_order))
@staticmethod
def from_str(s: str):
scope_and_op, _, call_order_str = s.rpartition('_')
scope_str, _, op_name = scope_and_op.rpartition('/')
return OperationAddress(op_name,
Scope.from_str(scope_str),
int(call_order_str))
| [
"[email protected]"
] | |
cdad51f5c22f7a1acdf954745aa1ca7cd922befa | ba3231b25c60b73ca504cd788efa40d92cf9c037 | /nitro-python-13.0.36/nssrc/com/citrix/netscaler/nitro/resource/config/network/ptp.py | 27fa36e319f1e78d559dded8881328498c85d2f6 | [
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | zhuweigh/vpx13 | f6d559ae85341e56472e3592cbc67062dac34b93 | b36caa3729d3ca5515fa725f2d91aeaabdb2daa9 | refs/heads/master | 2020-07-04T22:15:16.595728 | 2019-09-20T00:19:56 | 2019-09-20T00:19:56 | 202,435,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,505 | py | #
# Copyright (c) 2008-2019 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class ptp(base_resource) :
""" Configuration for Precision Time Protocol resource. """
def __init__(self) :
self._state = None
@property
def state(self) :
r"""Enables or disables Precision Time Protocol (PTP) on the appliance. If you disable PTP, make sure you enable Network Time Protocol (NTP) on the cluster.<br/>Default value: ENABLE<br/>Possible values = DISABLE, ENABLE.
"""
try :
return self._state
except Exception as e:
raise e
@state.setter
def state(self, state) :
r"""Enables or disables Precision Time Protocol (PTP) on the appliance. If you disable PTP, make sure you enable Network Time Protocol (NTP) on the cluster.<br/>Default value: ENABLE<br/>Possible values = DISABLE, ENABLE
"""
try :
self._state = state
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(ptp_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.ptp
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
r""" Use this API to update ptp.
"""
try :
if type(resource) is not list :
updateresource = ptp()
updateresource.state = resource.state
return updateresource.update_resource(client)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
r""" Use this API to fetch all the ptp resources that are configured on netscaler.
"""
try :
if not name :
obj = ptp()
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
class State:
DISABLE = "DISABLE"
ENABLE = "ENABLE"
class ptp_response(base_response) :
def __init__(self, length=1) :
self.ptp = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.ptp = [ptp() for _ in range(length)]
| [
"[email protected]"
] | |
71de8ab94c91d087323136dda99bddbbcd9ec73f | 73c01a3f052f8ef63890ec3c2e28403ad41e9a71 | /service/migrations/0007_ticket_photo.py | 806039291d087ae7c08ce662cfe1a5f5ce6385fb | [] | no_license | Jokey90/aho | 4c007c65c819efb726a732a8f36067c5a0226100 | 8bcd41e9ef7d40f07499429f385d4fec590636f6 | refs/heads/master | 2020-03-21T22:28:36.395996 | 2018-06-29T09:25:05 | 2018-06-29T09:25:05 | 139,128,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-08-16 12:57
from __future__ import unicode_literals
from django.db import migrations, models
import service.models.ticket
class Migration(migrations.Migration):
dependencies = [
('service', '0006_auto_20170804_1420'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='photo',
field=models.FileField(blank=True, null=True, upload_to=service.models.ticket.file_path, verbose_name='Фото'),
),
]
| [
"[email protected]"
] | |
fb24842332a1d4553a27ced6b2f8e60c9554ad3d | c50fb310d8c52284be2c636f951de796eededae9 | /47.py | f181b2c679dcf18046f30a11c88ec47c1b317684 | [] | no_license | Deepakdk7/Playerset3 | 6f46f638f22d894b9cc93d81b27c221f9dcdaad3 | 636e1feed0f97bbc9e9495a5dbb81a512ed980c5 | refs/heads/master | 2020-06-03T07:35:23.203780 | 2019-08-06T08:56:16 | 2019-08-06T08:56:16 | 191,497,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | ax=list(map(int,input().split()))
if ax[0]+ax[1]+ax[2]==180 and ax[0]!=0 and ax[1]!=0 and ax[2]!=0:
print('yes')
else:
print('no')
| [
"[email protected]"
] | |
3c6d1df4a078962da98463d4f64e2fde79dabb3f | 0e0bd9d0082bf71918db9f6c92c2cefd32fd23bd | /guild/plugins/summary_util.py | 212871690080b5a3fec85b0eb605897c66cddc10 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | christabella/guildai | b911d9758296503c431b571dc4696a3690f44b3d | 10d34eb9aa02aa4a374c340e75b5d44d9f3d8a25 | refs/heads/master | 2022-12-17T18:34:45.766299 | 2020-08-31T12:42:25 | 2020-08-31T12:42:25 | 294,189,964 | 0 | 0 | Apache-2.0 | 2020-09-09T18:02:13 | 2020-09-09T18:02:12 | null | UTF-8 | Python | false | false | 8,533 | py | # Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import time
import warnings
from guild import python_util
from guild import util
from guild.plugin import Plugin
class SummaryPlugin(Plugin):
"""Summary plugin base class.
Summary plugins log additional summary values (e.g. GPU usage,
etc.) per logged summary. This class is used to patch the TF env
to handle `add_summary` of `tensorflow.summary.FileWriter` and of
`tensorboardX.writer.SummaryToEventTransformer`.
"""
provides = Plugin.provides + ["all", "summary"]
MIN_SUMMARY_INTERVAL = 5
def __init__(self, ep):
super(SummaryPlugin, self).__init__(ep)
self._summary_cache = SummaryCache(self.MIN_SUMMARY_INTERVAL)
def patch_env(self):
self.log.debug("patching tensorflow")
self._patch_guild_summary()
self._try_patch_tensorboardX()
self._try_patch_tensorflow()
def _patch_guild_summary(self):
from guild import summary
python_util.listen_method(
summary.SummaryWriter, "add_scalar", self._handle_guild_scalar
)
def _try_patch_tensorboardX(self):
try:
from tensorboardX import SummaryWriter
except ImportError:
pass
else:
self.log.debug("wrapping tensorboardX.SummaryWriter.add_scalar")
python_util.listen_method(SummaryWriter, "add_scalar", self._handle_scalar)
def _try_patch_tensorflow(self):
try:
import tensorflow as _
except ImportError:
pass
else:
util.try_apply(
[
self._try_listen_tf_v2,
self._try_listen_tf_v1,
self._try_listen_tf_legacy,
self._listen_tf_failed,
]
)
def _try_listen_tf_v2(self):
if not _tf_version().startswith("2."):
raise util.TryFailed()
self._listen_tb_v2_summary()
self._listen_tf_v2_summary()
self._listen_tf_summary()
def _listen_tb_v2_summary(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", Warning)
from tensorboard.plugins.scalar import summary_v2
self.log.debug("wrapping tensorboard.plugins.scalar.summary_v2.scalar")
python_util.listen_function(summary_v2, "scalar", self._handle_scalar)
def _listen_tf_v2_summary(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", Warning)
# pylint: disable=import-error,no-name-in-module
from tensorflow.python.ops import summary_ops_v2
self.log.debug("wrapping tensorflow.python.ops summary_ops_v2.scalar")
python_util.listen_function(
summary_ops_v2, "scalar", self._handle_scalar_ops_v2
)
def _listen_tf_summary(self):
# pylint: disable=import-error,no-name-in-module
from tensorflow import summary
self.log.debug("wrapping tensorflow.summary.scalar")
python_util.listen_function(summary, "scalar", self._handle_scalar)
def _try_listen_tf_v1(self):
if not _tf_version().startswith("1."):
raise util.TryFailed()
try:
# pylint: disable=import-error,no-name-in-module
from tensorflow.compat.v1.summary import FileWriter
except Exception as e:
self.log.debug(
"error importing tensorflow.compat.v1.summary.FileWriter: %s", e
)
raise util.TryFailed()
else:
self.log.debug(
"wrapping tensorflow.compat.v1.summary.FileWriter.add_summary"
)
python_util.listen_method(FileWriter, "add_summary", self._handle_summary)
def _try_listen_tf_legacy(self):
if not _tf_version().startswith("1."):
raise util.TryFailed()
try:
# pylint: disable=import-error,no-name-in-module
from tensorflow.summary import FileWriter
except Exception as e:
self.log.debug("error importing tensorflow.summary.FileWriter: %s", e)
raise util.TryFailed()
else:
self.log.debug("wrapping tensorflow.summary.FileWriter.add_summary")
python_util.listen_method(FileWriter, "add_summary", self._handle_summary)
def _listen_tf_failed(self):
self.log.warning(
"unable to find TensorFlow summary writer, skipping " "summaries for %s",
self.name,
)
def _handle_guild_scalar(self, add_scalar, _tag, _value, step=None):
"""Handler for guild.summary.SummaryWriter.add_scalar.
"""
vals = self._summary_values(step)
if vals:
self.log.debug("summary values via add_scalar: %s", vals)
for tag, val in vals.items():
if val is not None:
add_scalar(tag, val, step)
def _handle_summary(self, add_summary, _summary, global_step=None):
"""Callback to apply summary values via add_summary callback.
This is the TF 1.x API for logging scalars.
See SummaryPlugin docstring above for background.
"""
vals = self._summary_values(global_step)
if vals:
self.log.debug("summary values via add_summary: %s", vals)
summary = tf_scalar_summary(vals)
add_summary(summary, global_step)
def _summary_values(self, global_step):
if self._summary_cache.expired():
self.log.debug("reading summary values")
try:
vals = self.read_summary_values(global_step)
except:
self.log.exception("reading summary values")
vals = {}
self._summary_cache.reset_for_step(global_step, vals)
return self._summary_cache.for_step(global_step)
def _handle_scalar(self, scalar, _name, _data, step=None, description=None):
"""Callback to apply summary values via scalars API.
This is the TF 2.x and tensorboardX API for logging scalars.
"""
# pylint: disable=unused-argument
vals = self._summary_values(step)
if vals:
self.log.debug("summary values via scalar: %s", vals)
for tag, val in vals.items():
if val is None:
continue
scalar(tag, val, step)
def _handle_scalar_ops_v2(self, scalar, _name, _tensor, family=None, step=None):
"""Callback to apply summary values from summary_ops_v2.
"""
# pylint: disable=unused-argument
vals = self._summary_values(step)
if vals:
self.log.debug("summary values via scalar: %s", vals)
for tag, val in vals.items():
if val is None:
continue
scalar(tag, val, step=step)
@staticmethod
def read_summary_values(_global_step):
"""Overridden by subclasses."""
return {}
def _tf_version():
try:
import tensorflow
except ImportError:
return ""
else:
return tensorflow.__version__
def tf_scalar_summary(vals):
# pylint: disable=import-error,no-name-in-module
from tensorflow.core.framework.summary_pb2 import Summary
return Summary(
value=[Summary.Value(tag=key, simple_value=val) for key, val in vals.items()]
)
class SummaryCache(object):
def __init__(self, timeout):
self._timeout = timeout
self._expires = None
self._step = None
self._val = None
def expired(self):
return self._expires is None or time.time() >= self._expires
def reset_for_step(self, step, val):
self._expires = time.time() + self._timeout
self._step = step
self._val = val
def for_step(self, step):
return self._val if step == self._step else None
| [
"[email protected]"
] | |
4ad331771c2d6a9e184381b0059bd839356bffe9 | 89b45e528f3d495f1dd6f5bcdd1a38ff96870e25 | /PythonCrashCourse/chapter_04/exercise4_07.py | d80abad384bb92b361dcbd5dacbb69f125bfe00a | [] | no_license | imatyukin/python | 2ec6e712d4d988335fc815c7f8da049968cc1161 | 58e72e43c835fa96fb2e8e800fe1a370c7328a39 | refs/heads/master | 2023-07-21T13:00:31.433336 | 2022-08-24T13:34:32 | 2022-08-24T13:34:32 | 98,356,174 | 2 | 0 | null | 2023-07-16T02:31:48 | 2017-07-25T22:45:29 | Python | UTF-8 | Python | false | false | 75 | py | #!/usr/bin/env python3
numbers = [i for i in range(3,31,3)]
print(numbers) | [
"[email protected]"
] | |
a7c65bdf2c3d76a9d0bef4bc19e8fa9001e6129b | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/dialogflow/__init__.py | 53f4af3ef55ac638fe4d82d7edb0b86755a4101d | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 1,194 | py | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gcloud dialogflow command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Dialogflow(base.Group):
"""Interact with and manage Dialogflow agents, entities, and intents.
"""
category = base.AI_AND_MACHINE_LEARNING_CATEGORY
def Filter(self, context, args):
# TODO(b/190532304): Determine if command group works with project number
base.RequireProjectID(args)
del context, args
| [
"[email protected]"
] | |
27d1a3c411b12208e8d4fb289eb2af4bf85cb440 | ca75f7099b93d8083d5b2e9c6db2e8821e63f83b | /z2/part3/updated_part2_batch/jm/parser_errors_2/239061968.py | 6824dca6f32823cb12a5f3a0a45879a6c8761224 | [
"MIT"
] | permissive | kozakusek/ipp-2020-testy | 210ed201eaea3c86933266bd57ee284c9fbc1b96 | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | refs/heads/master | 2022-10-04T18:55:37.875713 | 2020-06-09T21:15:37 | 2020-06-09T21:15:37 | 262,290,632 | 0 | 0 | MIT | 2020-06-09T21:15:38 | 2020-05-08T10:10:47 | C | UTF-8 | Python | false | false | 1,180 | py | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 239061968
"""
"""
random actions, total chaos
"""
board = gamma_new(2, 3, 2, 4)
assert board is not None
assert gamma_move(board, 1, 0, 1) == 1
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 2, 0, 2) == 1
assert gamma_move(board, 1, 1, 2) == 1
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_free_fields(board, 2) == 3
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 1, 1, 1) == 1
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_free_fields(board, 2) == 2
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 2, 1, 2) == 0
assert gamma_move(board, 1, 0, 0) == 1
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 1, 1, 1) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_golden_possible(board, 2) == 1
gamma_delete(board)
| [
"[email protected]"
] | |
325ce60a54f9b633e41e0764919b0fe431c94705 | 4fd5860beb1e6809eee297509bcc776dfca40aca | /phase_coh_1871_2016/codes/figs.py | 2ab4b1d42585720a11853ce5c3f3f57bcec0ac8f | [] | no_license | manmeet3591/fingerprint-volcano-enso-im | 40a41eca517abdd09079feb7ae58cc866343d6a8 | 21f39125ece4d03c5ee2961e4aae3768ee61cdb8 | refs/heads/master | 2021-07-05T09:49:28.858614 | 2021-04-19T02:55:45 | 2021-04-19T02:55:45 | 229,057,834 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,588 | py | #! /usr/local/opt/python/libexec/bin/python
"""
Functions used to plot the output figures
=========================================
"""
# Created: Thu Nov 16, 2017 02:05PM
# Last modified: Fri Nov 17, 2017 09:02AM
# Copyright: Bedartha Goswami <[email protected]>
import numpy as np
import datetime as dt
from scipy import signal
import matplotlib.pyplot as pl
import matplotlib.dates as mdates
def input_timeseries(time, nino_dat, ismr_dat):
"""
Plots input time series, filtered time series, and phase space plots.
"""
# parse the input data
nino_anom = nino_dat["anom"]
nino_filt = nino_dat["filt"]
nino_grad = nino_dat["grad"]
ismr_anom = ismr_dat["anom"]
ismr_filt = ismr_dat["filt"]
ismr_grad = ismr_dat["grad"]
# set up figure
fig = pl.figure(figsize=[8.5, 8.5])
axlabfs, tiklabfs, splabfs = 11, 9, 13
# set up first axis and plot the NINO index
ax1 = fig.add_axes([0.10, 0.725, 0.85, 0.210])
ax1.plot(time, nino_anom,
c="SteelBlue", label="Original")
ax1.plot(time, nino_filt,
c="Tomato",
label="Filtered")
# set up second axis and plot the ISMR index
ax2 = fig.add_axes([0.10, 0.550, 0.85, 0.175])
ax2.plot(time, ismr_anom,
c="SteelBlue", label="Original")
ax2.plot(time, ismr_filt,
c="Tomato",
label="Filtered")
# set up third axis and plot the estimated phases from filtered NINO
ax3 = fig.add_axes([0.125, 0.10, 0.35, 0.35])
ax3.plot(nino_filt, np.imag(signal.hilbert(nino_filt)),
c="Tomato", )
# set up fourth axis and plot the estimated phases from filtered ISMR
ax4 = fig.add_axes([0.625, 0.10, 0.35, 0.35])
ax4.plot(nino_grad, np.imag(signal.hilbert(nino_grad)*12),
c="Tomato", )
# prettify ax1 and ax2
xlo, xhi = dt.datetime(1870, 1, 1), dt.datetime(1930, 12, 31)
for ax in [ax1, ax2]:
ax.set_xlim(xlo, xhi)
XMajorLocator = mdates.YearLocator(base=10, month=6, day=15)
XMinorLocator = mdates.YearLocator(base=2, month=6, day=15)
# XMinorLocator = mdates.MonthLocator(bymonthday=15, interval=3)
XMajorFormatter = mdates.DateFormatter("%Y")
ax.xaxis.set_major_locator(XMajorLocator)
ax.xaxis.set_minor_locator(XMinorLocator)
ax.xaxis.set_major_formatter(XMajorFormatter)
ax.set_ylim(-3.0, 3.0)
ax.set_yticks(np.arange(-2.0, 2.01, 1.0))
ax.grid(which="both")
ax.tick_params(which="major", axis="both", size=8, direction="out")
ax.tick_params(which="minor", axis="both", size=5, direction="out")
ax.tick_params(axis="both", labelsize=tiklabfs)
leg = ax1.legend(loc="upper right")
for txt in leg.get_texts():
txt.set_size(tiklabfs)
ax1.set_ylim(-4., 4.)
ax1.tick_params(bottom="off", top="on", which="both",
labelbottom="off", labeltop="on")
ax2.set_xlabel("Time", fontsize=axlabfs)
ax1.set_xlabel("Time", fontsize=axlabfs)
ax1.xaxis.set_label_position("top")
ax1.set_ylabel("Nino 3", fontsize=axlabfs)
ax2.set_ylabel("ISMR", fontsize=axlabfs)
# prettify ax3 and ax4
ax3.set_xlabel("Filtered Nino 3 signal",
fontsize=axlabfs)
ax3.set_ylabel("Hilbert transform[K/year]",
fontsize=axlabfs)
ax4.set_xlabel("Derivative of filtered Nino 3 signal",
fontsize=axlabfs)
ax4.set_ylabel("Hilbert transform",
fontsize=axlabfs)
for ax in [ax3, ax4]:
ax.grid()
ax.tick_params(axis="both", labelsize=tiklabfs)
ax.tick_params(which="major", size=8)
# save figure
figname = "../plots/01_input_timeseries.png"
pl.savefig(figname)
print("figure saved to: %s" % figname)
return None
def amplitude_timeseries(ct, ampl, nino_grad, nino_hilbert ):
"""
Plots the amplitude, smoothed derivative of nino 3 time series and hilbert transform
"""
fig = pl.figure(figsize=[16.5, 4.5])
axlabfs, ticklabfs, splabfs = 12, 10, 14
ax = fig.add_axes([0.15, 0.15, 0.7, 0.7])
ax.plot(ct, ampl*12,'b', ct, nino_grad*12,'r--', ct, nino_hilbert*12, 'g--', ct, -ampl*12, c='b' )
# prettify ax
xlo, xhi = dt.datetime(1900, 1, 1), dt.datetime(1930, 12, 31)
ax.set_xlim(xlo, xhi)
XMajorLocator = mdates.YearLocator(base=5, month=6, day=15)
XMinorLocator = mdates.YearLocator(base=5, month=6, day=15)
XMajorFormatter = mdates.DateFormatter("%Y")
ax.xaxis.set_major_locator(XMajorLocator)
ax.xaxis.set_minor_locator(XMinorLocator)
ax.set_xlabel("Time[years]", fontsize=axlabfs)
ax.set_ylabel("derivative [K/year]", fontsize=axlabfs)
# save figure
figname = "../plots/04_amplitude_timeseries.png"
pl.savefig(figname)
print("figure saved to: %s" % figname)
return None
def delphi_timeseries(ct, del_phi, te, volc_time, dvolc, aismr):
"""
Plots the instantaneous phase diff with periods of phase sync highlighted.
"""
# set up figure
fig = pl.figure(figsize=[12, 12])
axlabfs, tiklabfs, splabfs = 12, 10, 14
# set up ax1 and plot delPhi and event series there
ax1 = fig.add_axes([0.1, 0.38, 0.85, 0.6])
ax1.plot(ct, -del_phi/6.28,
c="Maroon", zorder=5,
)
ylo, yhi = ax1.get_ylim()
ax1.bar(left = te,
width = 31 * np.ones(len(te)),
height = (yhi - ylo) * np.ones(len(te)),
bottom = ylo * np.ones(len(te)),
edgecolor="none", facecolor="Turquoise",
zorder=1,
)
# set up second ax2 and plot the volcanic radiative forcing
ax2 = fig.add_axes([0.1, 0.1, 0.85, 0.14])
ax2.plot(volc_time, -dvolc/7.5, c="Gray", zorder=5)
# prettify ax1
xlo, xhi = dt.datetime(1870, 1, 1), dt.datetime(2016, 12, 31)
ax1.set_xlim(xlo, xhi)
XMajorLocator = mdates.YearLocator(base=10, month=6, day=15)
XMinorLocator = mdates.YearLocator(base=2, month=6, day=15)
XMajorFormatter = mdates.DateFormatter("%Y")
ax1.xaxis.set_major_locator(XMajorLocator)
ax1.xaxis.set_minor_locator(XMinorLocator)
ax1.xaxis.set_major_formatter(XMajorFormatter)
ax1.set_ylim(ylo, yhi)
ax1.grid(which="both")
ax1.tick_params(which="major", size=8, direction="out")
ax1.tick_params(which="minor", size=5, direction="out")
ax1.tick_params(axis="both", labelsize=tiklabfs)
ax1.set_xlabel("Time", fontsize=axlabfs)
ax1.set_ylabel(r"$\Delta\phi = \phi_{ISMR} - \phi_{NINO}[2\pi]$",
fontsize=axlabfs)
# prettify ax2
xlo, xhi = dt.datetime(1870, 1, 1), dt.datetime(2016, 12, 31)
ax2.set_xlim(xlo, xhi)
XMajorLocator = mdates.YearLocator(base=10, month=6, day=15)
XMinorLocator = mdates.YearLocator(base=2, month=6, day=15)
XMajorFormatter = mdates.DateFormatter("%Y")
ax2.xaxis.set_major_locator(XMajorLocator)
ax2.xaxis.set_minor_locator(XMinorLocator)
ax2.xaxis.set_major_formatter(XMajorFormatter)
ylo, yhi = ax2.get_ylim()
ax2.set_ylim(ylo, yhi)
ax2.grid(which="both")
ax2.set_xlabel("Time", fontsize=axlabfs)
ax2.set_ylabel("VRF (W/$m^2$)", fontsize=axlabfs)
# save figure
figname = "../plots/02_delphi_timeseries.png"
pl.savefig(figname)
print("figure saved to: %s" % figname)
return None
def delphi_histogram(del_phi_dot, lothres, hithres):
"""
Plots the histogram of instantaneous phase differences.
"""
# set up figure
fig = pl.figure(figsize=[6.5, 6.5])
axlabfs, tiklabfs, splabfs = 12, 10, 14
# plot histogram of derivative of del_phi
ax1 = fig.add_axes([0.12, 0.12, 0.85, 0.85])
h, be = np.histogram(del_phi_dot, bins="fd")
bc = 0.5 * (be[1:] + be[:-1])
ax1.fill_between(bc, h,
color="Maroon",
)
ax1.fill_between(bc, h,
color="Turquoise",
where=(bc >= lothres) * (bc <= hithres),
)
# show vertical lines to indicate the interval we choose for del_phi ~ 0
ax1.axvline(lothres, color="k", linestyle="--")
ax1.axvline(hithres, color="k", linestyle="--")
# prettify ax1
ax1.grid()
ax1.set_xlabel(r"$\frac{\Delta\phi}{\mathrm{d}t}$",
fontsize=axlabfs)
ax1.set_ylabel("Histogram counts", fontsize=axlabfs)
ax1.tick_params(axis="both", labelsize=tiklabfs)
_, yhi = ax1.get_ylim()
ax1.set_ylim(0., yhi)
# save figure
figname = "../plots/03_delphi_histogram.png"
pl.savefig(figname)
print("figure saved to: %s" % figname)
return None
| [
"[email protected]"
] | |
7a877964c195ba8b4611fc1c614aab2598a7d346 | b2301365d220ff0295b8beddbed38b0581f9610d | /Django/landscapes/landscapes/urls.py | 6bb380064f3a5011c960add9367daf6a83339d72 | [] | no_license | JoA-MoS/Python | db246a5ff2201c6ef1dfb9d9b0fd8a37e1d7c46d | 4547c2667f3eaf0a001532bb2b103aab3c344fbe | refs/heads/master | 2021-08-16T11:18:20.420868 | 2017-07-21T05:52:18 | 2017-07-21T05:52:18 | 96,125,892 | 0 | 0 | null | 2021-06-10T18:40:09 | 2017-07-03T15:34:52 | Python | UTF-8 | Python | false | false | 786 | py | """landscapes URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^', include('apps.landscape.urls')),
]
| [
"[email protected]"
] | |
ecd93e403f59177af30c79be215d4ebf01bfa725 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_068/ch27_2020_03_11_11_55_56_463054.py | 48928ddb084204cf2ffd0f1ab7892e1dc62e802c | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | a = input("Você tem dúvidas? ")
while a != "não":
print("Pratique mais")
if a == "não":
print("Até a próxima")
| [
"[email protected]"
] | |
cddfc9177257df40d436d50cbb8e5b8c6df0014d | 0b86600e0288c0fefc081a0f428277a68b14882e | /code/test-tex.py | 88494d5f2d9c294df67876d5ebb5c013470ef015 | [] | no_license | Byliguel/python1-exo7 | 9ede37a8d2b8f384d1ebe3d612e8c25bbe47a350 | fbf6b08f4c1e94dd9f170875eee871a84849399e | refs/heads/master | 2020-09-22T10:16:34.044141 | 2019-12-01T11:52:51 | 2019-12-01T11:52:51 | 225,152,986 | 1 | 0 | null | 2019-12-01T11:51:37 | 2019-12-01T11:51:36 | null | UTF-8 | Python | false | false | 128 | py | # Ceci est un test de code Python
# Accents a verifier à é ù
def fonc(x):
"""Fait ceci à é ù"""
return x ** 2 + 1 | [
"[email protected]"
] | |
06610cfadfa7b7f1355f379fc9b4d330bce025b0 | a1e7457b5d1ef03ea9d891a6886718b3029c2ba4 | /zoe_scheduler/state/blobs/__init__.py | 35e47533ccc22fcd06c1ecf2657d097af0742752 | [
"Apache-2.0"
] | permissive | ddcy/zoe | 06bd104b0d3b632ed18ff8a8cc5b580b1f140b1f | bd1ac8cdefeda3ebd1ccc941243b781cb7c0beb2 | refs/heads/master | 2020-12-26T21:46:17.128925 | 2016-02-26T17:52:20 | 2016-02-26T17:52:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class BaseBlobs:
    """Abstract interface for a blob (binary object) store backend.

    Concrete backends must override the storage methods below; calling them
    on this base class raises NotImplementedError.  Blobs appear to be
    namespaced by a ``kind`` and addressed by a ``name`` within that kind —
    exact semantics are defined by the implementing subclasses.
    """
    def init(self):
        # Optional backend initialization hook; no-op by default.
        pass
    def store_blob(self, kind, name, data):
        # Persist `data` under (kind, name).
        raise NotImplementedError
    def load_blob(self, kind, name):
        # Retrieve the data previously stored under (kind, name).
        raise NotImplementedError
    def delete_blob(self, kind, name):
        # Remove the blob stored under (kind, name).
        raise NotImplementedError
    def list_blobs(self, kind):
        # Enumerate the blobs of the given kind.
        raise NotImplementedError
| [
"[email protected]"
] | |
d71f2466df1c9554cae2cf12998556a240bc251a | 711756b796d68035dc6a39060515200d1d37a274 | /output_exocyst/optimized_46960.py | 25862edd43771c3123d24cf36d218bdacaf22006 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,505 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
# Bead placement for each exocyst subunit.  Each entry is
# (marker name, (x, y, z) position, (r, g, b) color, radius).
# GFP-terminus and anchor beads use radius 18.4716; chain beads use 17.1475.
# This table replaces 69 copies of the identical create-or-reuse/place pattern.
marker_data = [
    ('Sec3_GFPN', (535.641, 593.745, 553.103), (0.15, 0.4, 0.6), 18.4716),
    ('Sec3_0', (553.71, 580.669, 571.582), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_1', (575.902, 558.256, 572.128), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_2', (607.414, 559.565, 572.279), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_3', (633.528, 567.66, 579.303), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_4', (644.235, 576.598, 603.825), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_5', (635.298, 582.337, 629.96), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_6', (623.491, 567.126, 650.574), (0.21, 0.49, 0.72), 17.1475),
    ('Sec3_GFPC', (521.31, 569.568, 598.694), (0.3, 0.6, 0.8), 18.4716),
    ('Sec3_Anch', (726.375, 562.158, 700.437), (0.3, 0.6, 0.8), 18.4716),
    ('Sec5_GFPN', (566.996, 618.569, 556.866), (0.5, 0.3, 0.6), 18.4716),
    ('Sec5_0', (575.78, 620.274, 567.253), (0.6, 0.31, 0.64), 17.1475),
    ('Sec5_1', (594.291, 622.046, 588.341), (0.6, 0.31, 0.64), 17.1475),
    ('Sec5_2', (599.483, 602.969, 608.325), (0.6, 0.31, 0.64), 17.1475),
    ('Sec5_3', (583.061, 582.352, 618.092), (0.6, 0.31, 0.64), 17.1475),
    ('Sec5_4', (569.005, 558.015, 617.547), (0.6, 0.31, 0.64), 17.1475),
    ('Sec5_5', (555.937, 533.32, 620.569), (0.6, 0.31, 0.64), 17.1475),
    ('Sec5_GFPC', (545.611, 538.15, 607.269), (0.7, 0.4, 0.7), 18.4716),
    ('Sec6_GFPN', (547.051, 622.667, 621.128), (1, 1, 0), 18.4716),
    ('Sec6_0', (566.186, 606.943, 600.914), (1, 1, 0.2), 17.1475),
    ('Sec6_1', (586.652, 589.842, 579.387), (1, 1, 0.2), 17.1475),
    ('Sec6_2', (597.33, 581.345, 547.897), (1, 1, 0.2), 17.1475),
    ('Sec6_3', (608.175, 572.651, 516.548), (1, 1, 0.2), 17.1475),
    ('Sec6_4', (619.11, 563.897, 485.274), (1, 1, 0.2), 17.1475),
    ('Sec6_5', (627.923, 555.66, 453.171), (1, 1, 0.2), 17.1475),
    ('Sec6_GFPC', (668.461, 556.663, 518.953), (1, 1, 0.4), 18.4716),
    ('Sec6_Anch', (617.586, 542.531, 361.748), (1, 1, 0.4), 18.4716),
    ('Sec8_0', (559.803, 502.574, 591.807), (0.65, 0.34, 0.16), 17.1475),
    ('Sec8_1', (580.228, 520.543, 598.827), (0.65, 0.34, 0.16), 17.1475),
    ('Sec8_2', (596.841, 542.385, 604.85), (0.65, 0.34, 0.16), 17.1475),
    ('Sec8_3', (611.713, 566.201, 605.545), (0.65, 0.34, 0.16), 17.1475),
    ('Sec8_4', (630.962, 549.17, 623.006), (0.65, 0.34, 0.16), 17.1475),
    ('Sec8_5', (649.369, 535.136, 643.744), (0.65, 0.34, 0.16), 17.1475),
    ('Sec8_GFPC', (650.391, 496.924, 542.858), (0.7, 0.4, 0), 18.4716),
    ('Sec8_Anch', (662.636, 557.115, 753.535), (0.7, 0.4, 0), 18.4716),
    ('Sec10_GFPN', (704.281, 503.596, 493.349), (0.2, 0.6, 0.2), 18.4716),
    ('Sec10_0', (690.658, 504.371, 490.44), (0.3, 0.69, 0.29), 17.1475),
    ('Sec10_1', (663.156, 507.597, 485.691), (0.3, 0.69, 0.29), 17.1475),
    ('Sec10_2', (635.613, 509.191, 480.401), (0.3, 0.69, 0.29), 17.1475),
    ('Sec10_3', (602.968, 508.097, 475.047), (0.3, 0.69, 0.29), 17.1475),
    ('Sec10_4', (586.247, 523.518, 491.549), (0.3, 0.69, 0.29), 17.1475),
    ('Sec10_5', (585.654, 547.485, 506.206), (0.3, 0.69, 0.29), 17.1475),
    ('Sec10_GFPC', (540.06, 454.674, 611.871), (0.4, 0.75, 0.3), 18.4716),
    ('Sec10_Anch', (631.347, 641.621, 402.759), (0.4, 0.75, 0.3), 18.4716),
    ('Sec15_GFPN', (568.942, 549.602, 529.226), (0.9, 0.5, 0.7), 18.4716),
    ('Sec15_0', (580.106, 548.401, 539.571), (0.97, 0.51, 0.75), 17.1475),
    ('Sec15_1', (590.072, 523.574, 530.925), (0.97, 0.51, 0.75), 17.1475),
    ('Sec15_2', (597.195, 497.707, 522.44), (0.97, 0.51, 0.75), 17.1475),
    ('Sec15_3', (600.263, 470.982, 514.214), (0.97, 0.51, 0.75), 17.1475),
    ('Sec15_4', (600.512, 444.112, 505.95), (0.97, 0.51, 0.75), 17.1475),
    ('Sec15_5', (599.478, 417.328, 497.39), (0.97, 0.51, 0.75), 17.1475),
    ('Sec15_GFPC', (670.499, 448.5, 527.213), (1, 0.6, 0.8), 18.4716),
    ('Sec15_Anch', (528.487, 386.001, 467.526), (1, 0.6, 0.8), 18.4716),
    ('Exo70_GFPN', (524.04, 568.181, 572.181), (0.8, 0, 0), 18.4716),
    ('Exo70_0', (523.921, 571.485, 557.484), (0.89, 0.1, 0.1), 17.1475),
    ('Exo70_1', (528.33, 581.152, 531.143), (0.89, 0.1, 0.1), 17.1475),
    ('Exo70_2', (547.113, 577.925, 510.045), (0.89, 0.1, 0.1), 17.1475),
    ('Exo70_3', (568.232, 574.657, 491.328), (0.89, 0.1, 0.1), 17.1475),
    ('Exo70_4', (589.106, 571.826, 472.266), (0.89, 0.1, 0.1), 17.1475),
    ('Exo70_GFPC', (705.748, 474.225, 557.579), (1, 0.2, 0.2), 18.4716),
    ('Exo70_Anch', (477.603, 670.358, 384.03), (1, 0.2, 0.2), 18.4716),
    ('Exo84_GFPN', (568.263, 615.714, 542.602), (0.9, 0.4, 0), 18.4716),
    ('Exo84_0', (560.374, 589.759, 539.33), (1, 0.5, 0), 17.1475),
    ('Exo84_1', (547.303, 553.03, 533.597), (1, 0.5, 0), 17.1475),
    ('Exo84_2', (536.621, 516.145, 528.22), (1, 0.5, 0), 17.1475),
    ('Exo84_3', (527.727, 485.256, 523.73), (1, 0.5, 0), 17.1475),
    ('Exo84_GFPC', (568.791, 511.347, 608.299), (1, 0.6, 0.1), 18.4716),
    ('Exo84_Anch', (482.881, 430.091, 445.773), (1, 0.6, 0.1), 18.4716),
]
for name, xyz, rgb, radius in marker_data:
    # Create each named marker set on first use, then reuse it — mirrors the
    # original per-marker `if name not in marker_sets` guard.
    if name not in marker_sets:
        s = new_marker_set(name)
        marker_sets[name] = s
    s = marker_sets[name]
    mark = s.place_marker(xyz, rgb, radius)
# Register every generated surface model with Chimera's open-model list.
for surface_model in surf_sets.values():
    chimera.openModels.add([surface_model])
| [
"[email protected]"
] | |
47ca697e284e6664d1e966bda357bb112aa56356 | d3b80b8de39d4bb4bab02b0e8b5092c2e32aff24 | /login-getmileage.py | d3256d39233544095f5551a9f87de3fbfbd22e09 | [] | no_license | brightparagon/learn-python-crawl-scrape | 658a058fee4ecdb31c92be54b5cb684f5d4d4658 | f28769fd469f69a485e560dc42d20aa08c51dc77 | refs/heads/master | 2021-05-02T05:23:13.512441 | 2018-07-02T14:58:54 | 2018-07-02T14:58:54 | 120,919,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin
USER = "<TEST>"
PASS = "<TEST>"
# start a session
session = requests.session()
# login
login_info = {
"m_id": USER,
"m_passwd": PASS
}
url_login = "http://www.hanbit.co.kr/member/login_proc.php"
res = session.post(url_login, data=login_info)
res.raise_for_status()
# access to mypage
url_mypage = "http://www.hanbit.co.kr/myhanbit/myhanbit.html"
res = session.get(url_mypage)
res.raise_for_status()
# get mileage and ecoin
soup = BeautifulSoup(res.text, "html.parser")
mileage = soup.select_one(".mileage_section1 span").get_text()
ecoin = soup.select_one(".mileage_section2 span").get_text()
print("milage: ", mileage)
print("ecoi: ", ecoin)
| [
"[email protected]"
] | |
1367272170c6c2fe4e9fdfa587a3138dc85931cb | 8ec02d2d8537b83420f5a0bdbb28b9d3f4fd64ef | /rotkehlchen/tests/utils/history.py | 5a8272b8490e3fadbad03dca13de372191d00ceb | [
"BSD-3-Clause"
] | permissive | sponnet/rotki | c571f5cc7586592711aa2af3107f83528ef8ae47 | 7991d254f60ee6db2764db7b12296a5b5a732e18 | refs/heads/master | 2020-12-14T04:37:05.447563 | 2020-01-04T09:12:54 | 2020-01-04T09:28:38 | 234,641,916 | 0 | 0 | BSD-3-Clause | 2020-01-17T21:45:13 | 2020-01-17T21:45:12 | null | UTF-8 | Python | false | false | 25,577 | py | from typing import Any, Dict, List, Union
from unittest.mock import patch
from rotkehlchen.constants.assets import A_BTC, A_ETH
from rotkehlchen.exchanges.data_structures import AssetMovement, MarginPosition, Trade
from rotkehlchen.fval import FVal
from rotkehlchen.rotkehlchen import Rotkehlchen
from rotkehlchen.tests.utils.constants import (
ETH_ADDRESS1,
ETH_ADDRESS2,
ETH_ADDRESS3,
MOCK_INPUT_DATA,
MOCK_INPUT_DATA_HEX,
TX_HASH_STR1,
TX_HASH_STR2,
TX_HASH_STR3,
)
from rotkehlchen.tests.utils.exchanges import POLONIEX_MOCK_DEPOSIT_WITHDRAWALS_RESPONSE
from rotkehlchen.tests.utils.mock import MockResponse
from rotkehlchen.transactions import EthereumTransaction
from rotkehlchen.typing import AssetAmount, AssetMovementCategory, Location, Timestamp, TradeType
from rotkehlchen.utils.misc import hexstring_to_bytes
# End timestamp handed to process_history in these tests.
TEST_END_TS = 1559427707
# Prices queried by cryptocompare @ 02/10/2019
# Mapping: from_asset -> to_asset -> {timestamp: price as FVal}.  Used to make
# historical price lookups deterministic/offline in the history tests.
prices = {
    'BTC': {
        'EUR': {
            1446979735: FVal(355.9),
            1449809536: FVal(386.175),
            1464393600: FVal(422.9),
            1473505138: FVal(556.435),
            1473897600: FVal(542.87),
            1475042230: FVal(537.805),
            1476536704: FVal(585.96),
            1476979735: FVal(578.505),
            1479200704: FVal(667.185),
            1480683904: FVal(723.505),
            1484629704: FVal(810.49),
            1486299904: FVal(942.78),
            1487289600: FVal(979.39),
            1491177600: FVal(1039.935),
            1495969504: FVal(1964.685),
            1498694400: FVal(2244.465),
            1512693374: FVal(14415.365),
        },
    },
    'ETH': {
        'EUR': {
            1446979735: FVal(0.8583),
            1463184190: FVal(9.187),
            1463508234: FVal(10.785),
            1473505138: FVal(10.36),
            1475042230: FVal(11.925),
            1476536704: FVal(10.775),
            1479510304: FVal(8.9145),
            1491062063: FVal(47.865),
            1493291104: FVal(53.175),
            1511626623: FVal(396.56),
        },
    },
    'XMR': {
        'EUR': {
            1449809536: FVal(0.39665),
        },
    },
    'DASH': {
        'EUR': {
            1479200704: FVal(9.0015),
            1480683904: FVal(8.154),
            1483351504: FVal(11.115),
            1484629704: FVal(12.88),
            1485252304: FVal(13.48),
            1486299904: FVal(15.29),
            1487027104: FVal(16.08),
            1502715904: FVal(173.035),
        },
    },
}
def check_result_of_history_creation(
        start_ts: Timestamp,
        end_ts: Timestamp,
        trade_history: List[Union[Trade, MarginPosition]],
        loan_history: Dict,
        asset_movements: List[AssetMovement],
        eth_transactions: List[EthereumTransaction],
) -> Dict[str, Any]:
    """This function offers some simple assertions on the result of the
    created history. The entire processing part of the history is mocked
    away by this checking function.

    The repeated per-index assertion runs of the original are replaced with
    expectation tables that are compared entry by entry, so adding or fixing
    an expected action is a one-line change.
    """
    assert start_ts == 0, 'should be same as given to process_history'
    assert end_ts == TEST_END_TS, 'should be same as given to process_history'

    # TODO: Add more assertions/check for each action
    # OR instead do it in tests for conversion of actions(trades, loans, deposits e.t.c.)
    # from exchange to our format for each exchange
    # Expected (location, pair, trade_type) per index; None marks the margin
    # positions which are checked separately below.
    expected_trades = [
        (Location.KRAKEN, 'ETH_EUR', TradeType.BUY),
        (Location.KRAKEN, 'BTC_EUR', TradeType.BUY),
        (Location.BITTREX, 'LTC_BTC', TradeType.BUY),
        (Location.BITTREX, 'LTC_ETH', TradeType.SELL),
        None,  # bitmex margin position
        (Location.BINANCE, 'ETH_BTC', TradeType.BUY),
        (Location.BINANCE, 'RDN_ETH', TradeType.SELL),
        (Location.POLONIEX, 'ETH_BTC', TradeType.SELL),
        (Location.POLONIEX, 'ETH_BTC', TradeType.BUY),
        (Location.POLONIEX, 'XMR_ETH', TradeType.BUY),
        None,  # second bitmex margin position
    ]
    assert len(trade_history) == len(expected_trades)  # 11
    for idx, expected in enumerate(expected_trades):
        if expected is None:
            continue
        location, pair, trade_type = expected
        assert trade_history[idx].location == location
        assert trade_history[idx].pair == pair
        assert trade_history[idx].trade_type == trade_type
    assert isinstance(trade_history[4], MarginPosition)
    assert trade_history[4].profit_loss == FVal('0.05')
    # TODO: investigate why this new bitmex position popped up
    assert isinstance(trade_history[10], MarginPosition)
    assert trade_history[10].profit_loss == FVal('5E-9')

    assert len(loan_history) == 2
    assert loan_history[0].currency == A_ETH
    assert loan_history[0].earned == AssetAmount(FVal('0.00000001'))
    assert loan_history[1].currency == A_BTC
    assert loan_history[1].earned == AssetAmount(FVal('0.00000005'))

    # Expected (location, category, asset) for each deposit/withdrawal in order.
    expected_movements = [
        (Location.KRAKEN, AssetMovementCategory.DEPOSIT, A_BTC),
        (Location.KRAKEN, AssetMovementCategory.DEPOSIT, A_ETH),
        (Location.KRAKEN, AssetMovementCategory.WITHDRAWAL, A_BTC),
        (Location.KRAKEN, AssetMovementCategory.WITHDRAWAL, A_ETH),
        (Location.POLONIEX, AssetMovementCategory.WITHDRAWAL, A_BTC),
        (Location.POLONIEX, AssetMovementCategory.WITHDRAWAL, A_ETH),
        (Location.POLONIEX, AssetMovementCategory.DEPOSIT, A_BTC),
        (Location.POLONIEX, AssetMovementCategory.DEPOSIT, A_ETH),
        (Location.BITMEX, AssetMovementCategory.DEPOSIT, A_BTC),
        (Location.BITMEX, AssetMovementCategory.WITHDRAWAL, A_BTC),
        # TODO: investigate why this new bitmex withdrawal popped up
        (Location.BITMEX, AssetMovementCategory.WITHDRAWAL, A_BTC),
    ]
    assert len(asset_movements) == len(expected_movements)  # 11
    for movement, (location, category, asset) in zip(asset_movements, expected_movements):
        assert movement.location == location
        assert movement.category == category
        assert movement.asset == asset

    # The history creation for these is not yet tested
    # Expected (block_number, tx_hash, from_address, to_address, value) per tx;
    # every mocked transaction carries MOCK_INPUT_DATA as input.
    expected_txs = [
        (54092, TX_HASH_STR1, ETH_ADDRESS1, '', FVal('11901464239480000000000000')),
        (54093, TX_HASH_STR2, ETH_ADDRESS2, ETH_ADDRESS1, FVal('40000300')),
        (54094, TX_HASH_STR3, ETH_ADDRESS3, ETH_ADDRESS1, FVal('500520300')),
    ]
    assert len(eth_transactions) == len(expected_txs)  # 3
    for tx, (block_number, tx_hash, from_address, to_address, value) in zip(
            eth_transactions, expected_txs,
    ):
        assert tx.block_number == block_number
        assert tx.tx_hash == hexstring_to_bytes(tx_hash)
        assert tx.from_address == from_address
        assert tx.to_address == to_address
        assert tx.value == value
        assert tx.input_data == MOCK_INPUT_DATA

    return {}
def check_result_of_history_creation_for_remote_errors(
        start_ts: Timestamp,
        end_ts: Timestamp,
        trade_history: List[Trade],
        loan_history: Dict,
        asset_movements: List[AssetMovement],
        eth_transactions: List[EthereumTransaction],
) -> Dict[str, Any]:
    """Assert that remote errors during history creation yield empty results.

    When every exchange query fails remotely no actions should have been
    collected, so each of the returned collections must be empty.
    """
    for collection in (trade_history, loan_history, asset_movements, eth_transactions):
        assert len(collection) == 0
    return {}
def mock_exchange_responses(rotki: Rotkehlchen, remote_errors: bool):
invalid_payload = "[{"
def mock_binance_api_queries(url):
if remote_errors:
payload = invalid_payload
elif 'myTrades' in url:
# Can't mock unknown assets in binance trade query since
# only all known pairs are queried
payload = '[]'
if 'symbol=ETHBTC' in url:
payload = """[{
"symbol": "ETHBTC",
"id": 1,
"orderId": 1,
"price": "0.0063213",
"qty": "5.0",
"commission": "0.005",
"commissionAsset": "ETH",
"time": 1512561941000,
"isBuyer": true,
"isMaker": false,
"isBestMatch": true
}]"""
elif 'symbol=RDNETH' in url:
payload = """[{
"symbol": "RDNETH",
"id": 2,
"orderId": 2,
"price": "0.0063213",
"qty": "5.0",
"commission": "0.005",
"commissionAsset": "RDN",
"time": 1512561942000,
"isBuyer": false,
"isMaker": false,
"isBestMatch": true
}]"""
elif 'depositHistory.html' in url:
payload = '{"success": true, "depositList": []}'
elif 'withdrawHistory.html' in url:
payload = '{"success": true, "withdrawList": []}'
else:
raise RuntimeError(f'Binance test mock got unexpected/unmocked url {url}')
return MockResponse(200, payload)
def mock_poloniex_api_queries(url, req): # pylint: disable=unused-argument
payload = ''
if remote_errors:
payload = invalid_payload
elif 'returnTradeHistory' == req['command']:
payload = """{
"BTC_ETH": [{
"globalTradeID": 394131412,
"tradeID": "5455033",
"date": "2018-10-16 18:05:17",
"rate": "0.06935244",
"amount": "1.40308443",
"total": "0.09730732",
"fee": "0.00100000",
"orderNumber": "104768235081",
"type": "sell",
"category": "exchange"
}, {
"globalTradeID": 394131413,
"tradeID": "5455034",
"date": "2018-10-16 18:07:17",
"rate": "0.06935244",
"amount": "1.40308443",
"total": "0.09730732",
"fee": "0.00100000",
"orderNumber": "104768235081",
"type": "buy",
"category": "exchange"
}],
"ETH_XMR": [{
"globalTradeID": 394131415,
"tradeID": "5455036",
"date": "2018-10-16 18:07:18",
"rate": "0.06935244",
"amount": "1.40308443",
"total": "0.09730732",
"fee": "0.00100000",
"orderNumber": "104768235081",
"type": "buy",
"category": "exchange"
}],
"ETH_NOEXISTINGASSET": [{
"globalTradeID": 394131416,
"tradeID": "5455036",
"date": "2018-10-16 18:07:17",
"rate": "0.06935244",
"amount": "1.40308443",
"total": "0.09730732",
"fee": "0.00100000",
"orderNumber": "104768235081",
"type": "buy",
"category": "exchange"
}],
"ETH_BALLS": [{
"globalTradeID": 394131417,
"tradeID": "5455036",
"date": "2018-10-16 18:07:17",
"rate": "0.06935244",
"amount": "1.40308443",
"total": "0.09730732",
"fee": "0.00100000",
"orderNumber": "104768235081",
"type": "buy",
"category": "exchange"
}]
}"""
elif 'returnLendingHistory' == req['command']:
payload = """[{
"id": 246300115,
"currency": "BTC",
"rate": "0.00013890",
"amount": "0.33714830",
"duration": "0.00090000",
"interest": "0.00000005",
"fee": "0.00000000",
"earned": "0.00000005",
"open": "2017-01-01 23:41:37",
"close": "2017-01-01 23:42:51"
}, {
"id": 246294775,
"currency": "ETH",
"rate": "0.00013890",
"amount": "0.03764586",
"duration": "0.00150000",
"interest": "0.00000001",
"fee": "0.00000000",
"earned": "0.00000001",
"open": "2017-01-01 23:36:32",
"close": "2017-01-01 23:38:45"
}, {
"id": 246294776,
"currency": "NOTEXISTINGASSET",
"rate": "0.00013890",
"amount": "0.03764586",
"duration": "0.00150000",
"interest": "0.00000001",
"fee": "0.00000000",
"earned": "0.00000001",
"open": "2017-01-01 23:36:32",
"close": "2017-01-01 23:38:45"
}, {
"id": 246294777,
"currency": "BDC",
"rate": "0.00013890",
"amount": "0.03764586",
"duration": "0.00150000",
"interest": "0.00000001",
"fee": "0.00000000",
"earned": "0.00000001",
"open": "2017-01-01 23:36:32",
"close": "2017-01-01 23:38:45"
}]"""
elif 'returnDepositsWithdrawals' == req['command']:
payload = POLONIEX_MOCK_DEPOSIT_WITHDRAWALS_RESPONSE
else:
raise RuntimeError(
f'Poloniex test mock got unexpected/unmocked command {req["command"]}',
)
return MockResponse(200, payload)
def mock_bittrex_api_queries(url):
if remote_errors:
payload = invalid_payload
elif 'getorderhistory' in url:
payload = """
{
"success": true,
"message": "''",
"result": [{
"OrderUuid": "fd97d393-e9b9-4dd1-9dbf-f288fc72a185",
"Exchange": "BTC-LTC",
"TimeStamp": "2017-05-01T15:00:00.00",
"OrderType": "LIMIT_BUY",
"Limit": 1e-8,
"Quantity": 667.03644955,
"QuantityRemaining": 0,
"Commission": 0.00004921,
"Price": 0.01968424,
"PricePerUnit": 0.0000295,
"IsConditional": false,
"ImmediateOrCancel": false
}, {
"OrderUuid": "ad97d393-e9b9-4dd1-9dbf-f288fc72a185",
"Exchange": "ETH-LTC",
"TimeStamp": "2017-05-02T15:00:00.00",
"OrderType": "LIMIT_SELL",
"Limit": 1e-8,
"Quantity": 667.03644955,
"QuantityRemaining": 0,
"Commission": 0.00004921,
"Price": 0.01968424,
"PricePerUnit": 0.0000295,
"IsConditional": false,
"ImmediateOrCancel": false
}, {
"OrderUuid": "ed97d393-e9b9-4dd1-9dbf-f288fc72a185",
"Exchange": "PTON-ETH",
"TimeStamp": "2017-05-02T15:00:00.00",
"OrderType": "LIMIT_SELL",
"Limit": 1e-8,
"Quantity": 667.03644955,
"QuantityRemaining": 0,
"Commission": 0.00004921,
"Price": 0.01968424,
"PricePerUnit": 0.0000295,
"IsConditional": false,
"ImmediateOrCancel": false
}, {
"OrderUuid": "1d97d393-e9b9-4dd1-9dbf-f288fc72a185",
"Exchange": "ETH-IDONTEXIST",
"TimeStamp": "2017-05-02T15:00:00.00",
"OrderType": "LIMIT_SELL",
"Limit": 1e-8,
"Quantity": 667.03644955,
"QuantityRemaining": 0,
"Commission": 0.00004921,
"Price": 0.01968424,
"PricePerUnit": 0.0000295,
"IsConditional": false,
"ImmediateOrCancel": false
}, {
"OrderUuid": "2d97d393-e9b9-4dd1-9dbf-f288fc72a185",
"Exchange": "%$#%$#%#$%",
"TimeStamp": "2017-05-02T15:00:00.00",
"OrderType": "LIMIT_BUY",
"Limit": 1e-8,
"Quantity": 667.03644955,
"QuantityRemaining": 0,
"Commission": 0.00004921,
"Price": 0.01968424,
"PricePerUnit": 0.0000295,
"IsConditional": false,
"ImmediateOrCancel": false
}]
}
"""
elif 'getdeposithistory' in url or 'getwithdrawalhistory' in url:
# For now no deposits or withdrawals for bittrex in the big history test
payload = '{"success": true, "message": "''", "result": []}'
else:
raise RuntimeError(f'Bittrex test mock got unexpected/unmocked url {url}')
return MockResponse(200, payload)
def mock_bitmex_api_queries(url, data):
    """Fake the Bitmex REST API for the history tests.

    ``remote_errors``, ``invalid_payload`` and ``MockResponse`` come from the
    enclosing scope/module. Only the wallet-history endpoint is mocked; any
    other URL is an error in the test setup.
    """
    if remote_errors:
        # Simulate a broken remote by returning an unparseable body.
        payload = invalid_payload
    elif 'user/walletHistory' in url:
        # Canned wallet history: one deposit, two RealisedPNL entries and two
        # withdrawals. Currency "XBt" means amounts are in satoshi.
        # NOTE(review): the transactTime of id3/id4 contains a double
        # fractional part ("15:00:00.00.929Z") — presumably deliberate
        # malformed-timestamp test data; confirm before "fixing".
        payload = """[{
        "transactID": "id1",
        "account": 0,
        "currency": "XBt",
        "transactType": "Deposit",
        "amount": 15000000,
        "fee": 0,
        "transactStatus": "foo",
        "address": "foo",
        "tx": "foo",
        "text": "foo",
        "transactTime": "2017-04-03T15:00:00.929Z",
        "timestamp": "2017-04-03T15:00:00.929Z"
        },{
        "transactID": "id2",
        "account": 0,
        "currency": "XBt",
        "transactType": "RealisedPNL",
        "amount": 5000000,
        "fee": 0.01,
        "transactStatus": "foo",
        "address": "foo",
        "tx": "foo",
        "text": "foo",
        "transactTime": "2017-05-02T15:00:00.929Z",
        "timestamp": "2017-05-02T15:00:00.929Z"
        },{
        "transactID": "id3",
        "account": 0,
        "currency": "XBt",
        "transactType": "Withdrawal",
        "amount": 1000000,
        "fee": 0.001,
        "transactStatus": "foo",
        "address": "foo",
        "tx": "foo",
        "text": "foo",
        "transactTime": "2017-05-23T15:00:00.00.929Z",
        "timestamp": "2017-05-23T15:00:00.929Z"
        },{
        "transactID": "id4",
        "account": 0,
        "currency": "XBt",
        "transactType": "Withdrawal",
        "amount": 0.5,
        "fee": 0.001,
        "transactStatus": "foo",
        "address": "foo",
        "tx": "foo",
        "text": "foo",
        "transactTime": "2019-08-23T15:00:00.00.929Z",
        "timestamp": "2019-08-23T15:00:00.929Z"
        },{
        "transactID": "id5",
        "account": 0,
        "currency": "XBt",
        "transactType": "RealisedPNL",
        "amount": 0.5,
        "fee": 0.001,
        "transactStatus": "foo",
        "address": "foo",
        "tx": "foo",
        "text": "foo",
        "transactTime": "2019-08-23T15:00:00.929Z",
        "timestamp": "2019-08-23T15:00:00.929Z"
        }]"""
    else:
        # Fail loudly so an unmocked endpoint is caught during the test run.
        raise RuntimeError(f'Bitmex test mock got unexpected/unmocked url {url}')

    return MockResponse(200, payload)
# TODO: Turn this into a loop of all exchanges and return a list of patches
poloniex = rotki.exchange_manager.connected_exchanges.get('poloniex', None)
polo_patch = None
if poloniex:
polo_patch = patch.object(
poloniex.session,
'post',
side_effect=mock_poloniex_api_queries,
)
binance = rotki.exchange_manager.connected_exchanges.get('binance', None)
binance_patch = None
if binance:
binance_patch = patch.object(
binance.session,
'get',
side_effect=mock_binance_api_queries,
)
bittrex = rotki.exchange_manager.connected_exchanges.get('bittrex', None)
bittrex_patch = None
if bittrex:
bittrex_patch = patch.object(
bittrex.session,
'get',
side_effect=mock_bittrex_api_queries,
)
bitmex = rotki.exchange_manager.connected_exchanges.get('bitmex', None)
bitmex_patch = None
if bitmex:
bitmex_patch = patch.object(
bitmex.session,
'get',
side_effect=mock_bitmex_api_queries,
)
return polo_patch, binance_patch, bittrex_patch, bitmex_patch
def mock_history_processing(rotki: Rotkehlchen, remote_errors=False):
    """Return a patch object that replaces the accountant's history processing.

    The substituted side effect asserts on the created history instead of
    running real accounting; with ``remote_errors=True`` the variant that
    expects remote failures is installed.
    """
    side_effect = (
        check_result_of_history_creation_for_remote_errors
        if remote_errors
        else check_result_of_history_creation
    )
    return patch.object(rotki.accountant, 'process_history', side_effect=side_effect)
def mock_etherscan_transaction_response():
    """Return a patch that answers etherscan transaction queries with fixtures.

    One canned normal transaction per known test address; internal-transaction
    queries always return an empty result. Constants (TX_HASH_STR*,
    ETH_ADDRESS*, MOCK_INPUT_DATA_HEX, MockResponse) come from module scope.
    """
    def mocked_request_dict(url, timeout):
        # Guard against the test accidentally hitting a non-etherscan URL.
        if 'etherscan' not in url:
            raise AssertionError(
                'Requested non-etherscan url for transaction response test',
            )

        addr1_tx = f"""{{"blockNumber":"54092","timeStamp":"1439048640","hash":"{TX_HASH_STR1}","nonce":"0","blockHash":"0xd3cabad6adab0b52ea632c386ea19403680571e682c62cb589b5abcd76de2159","transactionIndex":"0","from":"{ETH_ADDRESS1}","to":"","value":"11901464239480000000000000","gas":"2000000","gasPrice":"10000000000000","isError":"0","txreceipt_status":"","input":"{MOCK_INPUT_DATA_HEX}","contractAddress":"0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae","cumulativeGasUsed":"1436963","gasUsed":"1436963","confirmations":"8569454"}}
"""
        addr2_tx = f"""{{"blockNumber":"54093","timeStamp":"1439048643","hash":"{TX_HASH_STR2}","nonce":"0","blockHash":"0xf3cabad6adab0b52eb632c386ea194036805713682c62cb589b5abcd76df2159","transactionIndex":"0","from":"{ETH_ADDRESS2}","to":"{ETH_ADDRESS1}","value":"40000300","gas":"2000000","gasPrice":"10000000000000","isError":"0","txreceipt_status":"","input":"{MOCK_INPUT_DATA_HEX}","contractAddress":"0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae","cumulativeGasUsed":"1436963","gasUsed":"1436963","confirmations":"8569454"}}
"""
        addr3_tx = f"""{{"blockNumber":"54094","timeStamp":"1439048645","hash":"{TX_HASH_STR3}","nonce":"0","blockHash":"0xe3cabad6adab0b52eb632c3165a194036805713682c62cb589b5abcd76de2159","transactionIndex":"0","from":"{ETH_ADDRESS3}","to":"{ETH_ADDRESS1}","value":"500520300","gas":"2000000","gasPrice":"10000000000000","isError":"0","txreceipt_status":"","input":"{MOCK_INPUT_DATA_HEX}","contractAddress":"0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae","cumulativeGasUsed":"1436963","gasUsed":"1436963","confirmations":"8569454"}}
"""
        if 'txlistinternal' in url:
            # don't return any internal transactions
            payload = '{"status":"1","message":"OK","result":[]}'
        else:
            # And depending on the given query return corresponding mock transactions for address
            if ETH_ADDRESS1 in url:
                tx_str = addr1_tx
            elif ETH_ADDRESS2 in url:
                tx_str = addr2_tx
            elif ETH_ADDRESS3 in url:
                tx_str = addr3_tx
            else:
                raise AssertionError(
                    'Requested etherscan transactions for unknown address in tests',
                )
            payload = f'{{"status":"1","message":"OK","result":[{tx_str}]}}'
        return MockResponse(200, payload)

    # Patch the low-level requests.get used by rotkehlchen's URL helper.
    return patch(
        'rotkehlchen.utils.misc.requests.get',
        side_effect=mocked_request_dict,
    )
def mock_history_processing_and_exchanges(rotki: Rotkehlchen, remote_errors=False):
    """Bundle every patch needed for a full history-query test.

    Returns a 6-tuple: the accountant patch, the four exchange patches
    (poloniex, binance, bittrex, bitmex) and the etherscan patch, in that
    order.
    """
    exchange_patches = mock_exchange_responses(rotki, remote_errors)
    return (
        (mock_history_processing(rotki, remote_errors=remote_errors),)
        + exchange_patches
        + (mock_etherscan_transaction_response(),)
    )
| [
"[email protected]"
] | |
5c94e0ecd48a3e1e6b1341ab6049f1b1c6cc7455 | e0e55462707e8257559736f212ad086fbb5f9af5 | /util/path_config.py | 47d0bb3fd2df3e4777e54233ca5de1acb9a0a277 | [] | no_license | jtpils/SSRNet | 45e5c97f82e21c4f672d3c7e61de0c6036b7a95c | 05d70706f4ecdecd502890a799b0d316db15ebd3 | refs/heads/master | 2022-11-26T11:46:24.597002 | 2020-07-28T02:56:33 | 2020-07-28T02:56:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | import sys
open3d_path = '/mnt/A/jokery/projects/Open3D_test3/src/build/lib/'
tc_path = '/mnt/A/jokery/projects/08_2/'
sys.path.append(open3d_path)
from py3d import *
def get_tc_path():
    """Return the hard-coded project data directory (module constant ``tc_path``)."""
    return tc_path
| [
"[email protected]"
] | |
0a895f1f4937acd75b9c99d191c3306bff7ea81a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02951/s718036718.py | 3cccf0c6bef0cc9a94bcd2b254316aabdb7fd13e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | a, b, c = map(int, input().split())
# Pour as much as possible into the first bottle (capacity-limited by a),
# then print what remains in the second bottle.
# NOTE(review): presumably a = capacity of bottle 1, b = water already in it,
# c = water in bottle 2 (competitive-programming input) — confirm against the
# problem statement.
a = min(b + c, a)  # water in bottle 1 after the transfer, capped at its capacity
c = b + c - a      # leftover water that did not fit
print(c)
"[email protected]"
] | |
b306dcb0889d1ec282242b257ade90f599f43fea | 227b02ca30168c31accd1b7d38c3436b737a2f8e | /lenstronomy/Data/image_noise.py | 3ab46bd7ca164bfecd4003235a32c6a0ead736ba | [
"MIT"
] | permissive | Thomas-01/lenstronomy | b7d802c6213f0b7780acb7946b9bb150081b5d34 | 36db4c7f43ba28d6bdecdab1f15c537043f4a286 | refs/heads/master | 2020-06-03T04:22:06.123622 | 2020-03-31T18:33:47 | 2020-03-31T18:33:47 | 191,435,987 | 0 | 0 | MIT | 2019-07-11T03:22:29 | 2019-06-11T19:21:27 | Python | UTF-8 | Python | false | false | 4,711 | py | import numpy as np
class ImageNoise(object):
    """
    class that deals with noise properties of imaging data
    """
    def __init__(self, image_data, exposure_time=None, background_rms=None, noise_map=None, verbose=True):
        """

        :param image_data: numpy array, pixel data values
        :param exposure_time: int or array of size the data; exposure time
        (common for all pixels or individually for each individual pixel)
        :param background_rms: root-mean-square value of Gaussian background noise
        :param noise_map: int or array of size the data; joint noise sqrt(variance) of each individual pixel.
        Overwrites meaning of background_rms and exposure_time.
        """
        if exposure_time is not None:
            # make sure no negative exposure values are present no dividing by zero
            # NOTE(review): the array branch clamps in place, i.e. mutates the
            # caller's exposure_time array — confirm that is acceptable.
            if isinstance(exposure_time, int) or isinstance(exposure_time, float):
                if exposure_time <= 10 ** (-10):
                    exposure_time = 10 ** (-10)
            else:
                exposure_time[exposure_time <= 10 ** (-10)] = 10 ** (-10)
        self._exp_map = exposure_time
        self._background_rms = background_rms
        self._noise_map = noise_map
        if noise_map is not None:
            # A noise map must match the data pixel-for-pixel.
            assert np.shape(noise_map) == np.shape(image_data)
        else:
            if background_rms is not None and exposure_time is not None:
                # Warn when counts are so low that the Gaussian approximation
                # of the Poisson noise becomes unreliable.
                if background_rms * np.max(exposure_time) < 1 and verbose is True:
                    print("WARNING! sigma_b*f %s < 1 count may introduce unstable error estimates with a Gaussian"
                          " error function for a Poisson distribution with mean < 1." % (
                            background_rms * np.max(exposure_time)))
        self._data = image_data

    @property
    def background_rms(self):
        """

        :return: rms value of background noise
        """
        # Lazily derived from the noise map (its median) when not given explicitly.
        if self._background_rms is None:
            if self._noise_map is None:
                raise ValueError("rms background value as 'background_rms' not specified!")
            self._background_rms = np.median(self._noise_map)
        return self._background_rms

    @property
    def exposure_map(self):
        """
        Units of data and exposure map should result in:
        number of flux counts = data * exposure_map

        :return: exposure map for each pixel
        """
        if self._exp_map is None:
            if self._noise_map is None:
                raise ValueError("Exposure map has not been specified in Noise() class!")
        return self._exp_map

    @property
    def C_D(self):
        """
        Covariance matrix of all pixel values in 2d numpy array (only diagonal component)
        The covariance matrix is estimated from the data.
        WARNING: For low count statistics, the noise in the data may lead to biased estimates of the covariance matrix.

        :return: covariance matrix of all pixel values in 2d numpy array (only diagonal component).
        """
        # Cached on first access; a noise map, when present, takes precedence.
        if not hasattr(self, '_C_D'):
            if self._noise_map is not None:
                self._C_D = self._noise_map ** 2
            else:
                self._C_D = covariance_matrix(self._data, self.background_rms, self.exposure_map)
        return self._C_D

    def C_D_model(self, model):
        """

        :param model: model (same as data but without noise)
        :return: estimate of the noise per pixel based on the model flux
        """
        # NOTE(review): unlike C_D this uses the raw attributes, so it raises
        # when background_rms/exposure_time were never set and no noise map
        # exists — confirm callers guarantee that.
        if self._noise_map is not None:
            return self._noise_map ** 2
        else:
            return covariance_matrix(model, self._background_rms, self._exp_map)
def covariance_matrix(data, background_rms, exposure_map):
    """Diagonal covariance estimate: Gaussian background plus Poisson term.

    Notes:
    - the exposure map must be positive definite; values deviating strongly
      from the mean exposure time should already have been floored so the
      Poisson component is not under-predicted.
    - the data must be positive semi-definite for the Poisson estimate;
      pixels < 0 (possible after mean subtraction) contribute no Poisson term.

    :param data: data array, e.g. in units of photons/second
    :param background_rms: background noise rms, e.g. in units (photons/second)^2
    :param exposure_map: exposure time per pixel, e.g. in units of seconds
    :return: per-pixel variance of background and Poisson components; (photons/second)^2
    """
    # Negative pixels get a zero Poisson contribution; only the background
    # variance remains for them.
    poisson_term = np.where(data >= 0, data, 0) / exposure_map
    return poisson_term + background_rms ** 2
| [
"[email protected]"
] | |
ec3e99c49cd07250adacf2d417ff14a17a27c5f3 | eea704186322a0441124bae2eaefc185c75a69f1 | /setup.py | 581f4773531a0ab6f4988a45d2bc94e64bcfe9dd | [
"BSD-3-Clause"
] | permissive | ScottTaing/taolib | ff5c78c8e6ba0522f5d932975fdc8805c0564b4e | fbd4138d1be9a3ef032284a52662213833921efc | refs/heads/master | 2021-01-17T23:27:14.511401 | 2011-05-06T05:04:55 | 2011-05-06T05:04:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,423 | py | #!/usr/bin/env python
import os
import sys
from distutils.core import setup, Extension
def main():
    """Configure and run distutils setup for the taolib package collection."""
    # NOTE(review): parsing sys.version[:3] as a float is fragile (e.g. it
    # misreads "3.10" as 3.1) — works only for the 2.x versions this script
    # targets; confirm before reusing on modern Python.
    if not float(sys.version[:3])>=2.4:
        sys.stderr.write("CRITICAL: Python version must be greater than or equal to 2.4! python 2.6.2 is recommended!\n")
        sys.exit(1)

    setup(name="taolib",
          version="1.0",
          description="Tao's libraries",
          author='Tao (Foo) Liu',
          author_email='[email protected]',
          url='http://vladimirliu.com/~taoliu/',
          package_dir={'taolib' : '.'},
          packages=['taolib','taolib.CoreLib',
                    'taolib.CoreLib.DB','taolib.CoreLib.FeatIO',
                    'taolib.CoreLib.BasicStat','taolib.CoreLib.WWW',
                    'taolib.CoreLib.Parser','taolib.CoreLib.SeqIO',
                    'taolib.CoreLib.BinKeeper','taolib.CoreLib.Algorithm',
                    'taolib.Assoc',
                    'taolib.ExtApp',
                    'taolib.Motif',
                    # 'taolib.IntegrativeBioinformatics',
                    # 'taolib.IntegrativeBioinformatics.elements',
                    # 'taolib.IntegrativeBioinformatics.networks',
                    # 'taolib.IntegrativeBioinformatics.algos',
                    # 'taolib.IntegrativeBioinformatics.features',
                    # 'taolib.IntegrativeBioinformatics.links',
                    # 'taolib.IntegrativeBioinformatics.apache',
                    ],
          # Command-line entry points shipped with the library.
          scripts=['Scripts/motif_enrich.py',
                   'Scripts/qc_chIP_peak.py',
                   'Scripts/qc_chIP_whole.py',
                   'Scripts/count_probes_in_peaks.py',
                   'Scripts/count_probes_in_ranges.py',
                   'Scripts/xyz2image.py',
                   'Scripts/refine_peak.py',
                   'Scripts/fq2fa.py',
                   'Scripts/wiggle_reformat.py',
                   'Scripts/wig_correlation.py',
                   'Scripts/wig_correlation_in_bed_file.py',
                   'Scripts/conservation_plot.py',
                   'Scripts/wig_extract_chrom.py',
                   'Scripts/wig_split.py',
                   'Scripts/wig_call_peaks.py',
                   'Scripts/wig_call_peaks2.py',
                   'Scripts/naive_call_peaks.py',
                   'Scripts/wig2bedGraphBins.py',
                   'Scripts/bed_correlation.py',
                   'Scripts/ce_histone_matrix.py',
                   'Scripts/rand_pos.py',
                   'Scripts/draw_BED.py',
                   'Scripts/norm.py',
                   'Scripts/cutoff.py',
                   'Scripts/ChIP-seq_Pipeline1.py',
                   'Scripts/convert_gene_ids.py',
                   # 'Scripts/hmm_conception.py',
                   ],
          classifiers=[
              'Development Status :: 4 - Beta',
              'Environment :: Console',
              'Environment :: Web Environment',
              'Intended Audience :: Developers',
              'License :: OSI Approved :: Artistic License',
              'Operating System :: MacOS :: MacOS X',
              'Operating System :: Microsoft :: Windows',
              'Operating System :: POSIX',
              'Programming Language :: Python',
              'Topic :: Database',
              ],
          requires=['MySQL_python','PIL']
          )

if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
7bcb803514e2cca016a206f9f03f15936cec735d | 66a9c25cf0c53e2c3029b423018b856103d709d4 | /sleekxmpp/features/feature_starttls/starttls.py | eb5eee1d5f5b8798c4f721522214b325b276dc94 | [
"MIT",
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | fritzy/SleekXMPP | 1b02d3e2b22efeb6bf3f8f487e6c0343b9b85baf | cc1d470397de768ffcc41d2ed5ac3118d19f09f5 | refs/heads/develop | 2020-05-22T04:14:58.568822 | 2020-02-18T22:54:57 | 2020-02-18T22:54:57 | 463,405 | 658 | 254 | NOASSERTION | 2023-06-27T20:05:54 | 2010-01-08T05:54:45 | Python | UTF-8 | Python | false | false | 2,100 | py | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import logging
from sleekxmpp.stanza import StreamFeatures
from sleekxmpp.xmlstream import RestartStream, register_stanza_plugin
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.xmlstream.matcher import MatchXPath
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.features.feature_starttls import stanza
log = logging.getLogger(__name__)
class FeatureSTARTTLS(BasePlugin):
    """Stream-feature plugin negotiating STARTTLS per RFC 6120."""

    name = 'feature_starttls'
    description = 'RFC 6120: Stream Feature: STARTTLS'
    dependencies = set()
    stanza = stanza

    def plugin_init(self):
        """Register stanzas and handlers for the STARTTLS negotiation."""
        # React to the server's <proceed/> while the stream is still live.
        self.xmpp.register_handler(
                Callback('STARTTLS Proceed',
                        MatchXPath(stanza.Proceed.tag_name()),
                        self._handle_starttls_proceed,
                        instream=True))
        # Offering STARTTLS forces a stream restart once negotiated.
        self.xmpp.register_feature('starttls',
                self._handle_starttls,
                restart=True,
                order=self.config.get('order', 0))

        self.xmpp.register_stanza(stanza.Proceed)
        self.xmpp.register_stanza(stanza.Failure)
        register_stanza_plugin(StreamFeatures, stanza.STARTTLS)

    def _handle_starttls(self, features):
        """
        Handle notification that the server supports TLS.

        Arguments:
            features -- The stream:features element.

        Returns True when the <starttls/> request was sent, False when TLS
        is already active or disabled by configuration.
        """
        if 'starttls' in self.xmpp.features:
            # We have already negotiated TLS, but the server is
            # offering it again, against spec.
            return False
        elif not self.xmpp.use_tls:
            return False
        else:
            self.xmpp.send(features['starttls'], now=True)
            return True

    def _handle_starttls_proceed(self, proceed):
        """Restart the XML stream when TLS is accepted."""
        log.debug("Starting TLS")
        if self.xmpp.start_tls():
            self.xmpp.features.add('starttls')
            # RestartStream signals the core to renegotiate over the new
            # encrypted transport.
            raise RestartStream()
| [
"[email protected]"
] | |
73ccc88785e3b447db6019e216b1ee9fca44786e | 1b87d5f7cba7e068f7b2ea902bba494599d20a78 | /tools/upload/googlecode_upload.py | 7d179f3a0d96db814dd7cef9a9724e7b4579bd08 | [
"BSD-3-Clause"
] | permissive | jpaalasm/pyglet | 906d03fe53160885665beaed20314b5909903cc9 | bf1d1f209ca3e702fd4b6611377257f0e2767282 | refs/heads/master | 2021-01-25T03:27:08.941964 | 2014-01-25T17:50:57 | 2014-01-25T17:50:57 | 16,236,090 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 6,896 | py | #!/usr/bin/env python
#
# Copyright 2006 Google Inc. All Rights Reserved.
# Author: [email protected] (David Anderson)
#
# Script for uploading files to a Google Code project.
#
# This is intended to be both a useful script for people who want to
# streamline project uploads and a reference implementation for
# uploading files to Google Code projects.
#
# To upload a file to Google Code, you need to provide a path to the
# file on your local machine, a small summary of what the file is, a
# project name, and a valid account that is a member or owner of that
# project. You can optionally provide a list of labels that apply to
# the file. The file will be uploaded under the same name that it has
# in your local filesystem (that is, the "basename" or last path
# component). Run the script with '--help' to get the exact syntax
# and available options.
#
# Note that the upload script requests that you enter your
# googlecode.com password. This is NOT your Gmail account password!
# This is the password you use on googlecode.com for committing to
# Subversion and uploading files. You can find your password by going
# to http://code.google.com/hosting/settings when logged in with your
# Gmail account.
#
# If you are looking at this script as a reference for implementing
# your own Google Code file uploader, then you should take a look at
# the upload() function, which is the meat of the uploader. You
# basically need to build a multipart/form-data POST request with the
# right fields and send it to https://PROJECT.googlecode.com/files .
# Authenticate the request using HTTP Basic authentication, as is
# shown below.
#
# Licensed under the terms of the Apache Software License 2.0:
# http://www.apache.org/licenses/LICENSE-2.0
#
# Questions, comments, feature requests and patches are most welcome.
# Please direct all of these to the Google Code users group:
# http://groups-beta.google.com/group/google-code-hosting
"""Google Code file uploader script.
"""
__author__ = '[email protected] (David Anderson)'
import httplib
import os.path
import optparse
import getpass
import base64
def upload(file, project_name, user_name, password, summary, labels=None):
  """Upload a file to a Google Code project's file server.

  Args:
    file: The local path to the file.
    project_name: The name of your project on Google Code.
    user_name: Your Google account name.
    password: The googlecode.com password for your account.
              Note that this is NOT your global Google Account password!
    summary: A small description for the file.
    labels: an optional list of label strings with which to tag the file.

  Returns: a tuple:
    http_status: 201 if the upload succeeded, something else if an
                 error occured.
    http_reason: The human-readable string associated with http_status
    file_url: If the upload succeeded, the URL of the file on Google
              Code, None otherwise.
  """
  # The login is the user part of [email protected]. If the login provided
  # is in the full user@domain form, strip it down.
  if '@' in user_name:
    user_name = user_name[:user_name.index('@')]

  form_fields = [('summary', summary)]
  if labels is not None:
    form_fields.extend([('label', l.strip()) for l in labels])

  content_type, body = encode_upload_request(form_fields, file)

  upload_host = '%s.googlecode.com' % project_name
  upload_uri = '/files'
  # HTTP Basic auth over HTTPS, as required by the upload endpoint.
  auth_token = base64.b64encode('%s:%s'% (user_name, password))
  headers = {
    'Authorization': 'Basic %s' % auth_token,
    'User-Agent': 'Googlecode.com uploader v0.9.4',
    'Content-Type': content_type,
    }

  server = httplib.HTTPSConnection(upload_host)
  server.request('POST', upload_uri, body, headers)
  resp = server.getresponse()
  server.close()

  # 201 Created carries the new file's URL in the Location header.
  if resp.status == 201:
    location = resp.getheader('Location', None)
  else:
    location = None
  return resp.status, resp.reason, location
def encode_upload_request(fields, file_path):
  """Encode metadata fields and one file into a multipart form body.

  fields is a sequence of (name, value) pairs; the file at file_path is
  uploaded under its basename.

  Returns: (content_type, body) ready for httplib.HTTP instance
  """
  BOUNDARY = '----------Googlecode_boundary_reindeer_flotilla'
  CRLF = '\r\n'

  parts = []
  # One form-data section per metadata field.
  for name, value in fields:
    parts += [
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % name,
        '',
        value,
    ]

  # Then the file section itself; the server infers the real mime type.
  handle = open(file_path)
  try:
    file_content = handle.read()
  finally:
    handle.close()

  parts += [
      '--' + BOUNDARY,
      'Content-Disposition: form-data; name="filename"; filename="%s"'
      % os.path.basename(file_path),
      'Content-Type: application/octet-stream',
      '',
      file_content,
      '--' + BOUNDARY + '--',
      '',
  ]

  return 'multipart/form-data; boundary=%s' % BOUNDARY, CRLF.join(parts)
def main():
  """Parse command-line options, prompt for the password and run the upload."""
  # NOTE: this is Python 2 code (print statements, httplib) and must run
  # under a Python 2 interpreter.
  parser = optparse.OptionParser(usage='googlecode-upload.py -s SUMMARY '
                                 '-p PROJECT -u USERNAME FILE')
  parser.add_option('-s', '--summary', dest='summary',
                    help='Short description of the file')
  parser.add_option('-p', '--project', dest='project',
                    help='Google Code project name')
  parser.add_option('-u', '--user', dest='user',
                    help='Your Google Code username')
  parser.add_option('-l', '--labels', dest='labels',
                    help='An optional list of labels to attach to the file')

  options, args = parser.parse_args()

  # All of summary/project/user plus exactly one file argument are required.
  if not options.summary:
    parser.error('File summary is missing.')
  elif not options.project:
    parser.error('Project name is missing.')
  elif not options.user:
    parser.error('User name is missing.')
  elif len(args) < 1:
    parser.error('File to upload not provided.')

  print 'Please enter your googlecode.com password.'
  print '** Note that this is NOT your Gmail account password! **'
  print 'It is the password you use to access Subversion repositories,'
  print 'and can be found here: http://code.google.com/hosting/settings'
  password = getpass.getpass()
  file_path = args[0]

  if options.labels:
    labels = options.labels.split(',')
  else:
    labels = None

  status, reason, url = upload(file_path, options.project,
                               options.user, password,
                               options.summary, labels)

  if url:
    print 'The file was uploaded successfully.'
    print 'URL: %s' % url
  else:
    print 'An error occurred. Your file was not uploaded.'
    print 'Google Code upload server said: %s (%s)' % (reason, status)
| [
"[email protected]"
] | |
038857cb63e1a53e8498e0e7db5a344f570b070f | b501a5eae1018c1c26caa96793c6ee17865ebb2d | /Networking/socket/socket_echo_client_dgram.py | 9a94a43d153d9f68fb0d97cdb5884623b3572b54 | [] | no_license | jincurry/standard_Library_Learn | 12b02f9e86d31ca574bb6863aefc95d63cc558fc | 6c7197f12747456e0f1f3efd09667682a2d1a567 | refs/heads/master | 2022-10-26T07:28:36.545847 | 2018-05-04T12:54:50 | 2018-05-04T12:54:50 | 125,447,397 | 0 | 1 | null | 2022-10-02T17:21:50 | 2018-03-16T01:32:50 | Python | UTF-8 | Python | false | false | 439 | py | import socket
import sys
# Simple UDP echo client: send one datagram to localhost:10000 and print
# whatever single datagram comes back.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # UDP, not TCP

server_address = ('localhost', 10000)
message = b'This is the message, it will be repeated'

try:
    print('Sending {!r}'.format(message))
    sent = sock.sendto(message, server_address)
    # Block until the server echoes a datagram (up to 4096 bytes).
    print('Waiting for receive')
    data, server = sock.recvfrom(4096)
    print('Received {!r}'.format(data))
finally:
    # Always release the socket, even if the receive fails.
    print('Closing socket')
    sock.close()
| [
"[email protected]"
] | |
052a2fec1e7d06a2ba0f6997bf07d9d453dcf143 | f31fda8014ecadf6af7d4e3392fb917c49e0352a | /HeavyIonsAnalysis/JetAnalysis/python/jets/akFilter2CaloJetSequence_pp_mc_cff.py | be232d658ab95be08e158cc0de98688f77436bd0 | [] | no_license | jniedzie/lightbylight | acea5051f053c49824a49a0b78bac3a2247ee75f | f5a4661fcf3fd3c0e9ccd8893a46a238e30c2aa8 | refs/heads/master | 2020-03-18T12:24:31.970468 | 2018-02-09T15:50:00 | 2018-02-09T15:50:00 | 134,724,759 | 0 | 1 | null | 2018-05-24T14:11:12 | 2018-05-24T14:11:12 | null | UTF-8 | Python | false | false | 14,936 | py |
import FWCore.ParameterSet.Config as cms
from HeavyIonsAnalysis.JetAnalysis.patHeavyIonSequences_cff import patJetGenJetMatch, patJetPartonMatch, patJetCorrFactors, patJets
from HeavyIonsAnalysis.JetAnalysis.inclusiveJetAnalyzer_cff import *
from HeavyIonsAnalysis.JetAnalysis.bTaggers_cff import *
from RecoJets.JetProducers.JetIDParams_cfi import *
from RecoJets.JetProducers.nJettinessAdder_cfi import Njettiness
# Match reconstructed akFilter2 calo jets to ungroomed ak2 generator jets
# within deltaR < 0.2 (best geometric match, not match-quality resolved).
akFilter2Calomatch = patJetGenJetMatch.clone(
    src = cms.InputTag("akFilter2CaloJets"),
    matched = cms.InputTag("ak2GenJets"),
    resolveByMatchQuality = cms.bool(False),
    maxDeltaR = 0.2
    )

# Same matching, but for the groomed (filtered) generator jet collection.
akFilter2CalomatchGroomed = patJetGenJetMatch.clone(
    src = cms.InputTag("akFilter2GenJets"),
    matched = cms.InputTag("ak2GenJets"),
    resolveByMatchQuality = cms.bool(False),
    maxDeltaR = 0.2
    )

# Parton-level matching for flavour identification.
akFilter2Caloparton = patJetPartonMatch.clone(src = cms.InputTag("akFilter2CaloJets")
                                                        )

# Jet energy corrections: L2Relative + L3Absolute with the AK2Calo payload,
# no pileup (NPV/rho) dependence.
akFilter2Calocorr = patJetCorrFactors.clone(
    useNPV = cms.bool(False),
    useRho = cms.bool(False),
#    primaryVertices = cms.InputTag("hiSelectedVertex"),
    levels   = cms.vstring('L2Relative','L3Absolute'),
    src = cms.InputTag("akFilter2CaloJets"),
    payload = "AK2Calo_offline"
    )

# Jet identification variables for the calo jets.
akFilter2CaloJetID= cms.EDProducer('JetIDProducer', JetIDParams, src = cms.InputTag('akFilter2CaloJets'))

#akFilter2Caloclean   = heavyIonCleanedGenJets.clone(src = cms.InputTag('ak2GenJets'))

# Factory bundling all b-tagging modules for this jet collection (R = 0.2).
akFilter2CalobTagger = bTaggers("akFilter2Calo",0.2)
#create objects locally since they dont load properly otherwise
#akFilter2Calomatch = akFilter2CalobTagger.match
akFilter2Caloparton = patJetPartonMatch.clone(src = cms.InputTag("akFilter2CaloJets"), matched = cms.InputTag("genParticles"))
akFilter2CaloPatJetFlavourAssociationLegacy = akFilter2CalobTagger.PatJetFlavourAssociationLegacy
akFilter2CaloPatJetPartons = akFilter2CalobTagger.PatJetPartons
akFilter2CaloJetTracksAssociatorAtVertex = akFilter2CalobTagger.JetTracksAssociatorAtVertex
akFilter2CaloJetTracksAssociatorAtVertex.tracks = cms.InputTag("highPurityTracks")
akFilter2CaloSimpleSecondaryVertexHighEffBJetTags = akFilter2CalobTagger.SimpleSecondaryVertexHighEffBJetTags
akFilter2CaloSimpleSecondaryVertexHighPurBJetTags = akFilter2CalobTagger.SimpleSecondaryVertexHighPurBJetTags
akFilter2CaloCombinedSecondaryVertexBJetTags = akFilter2CalobTagger.CombinedSecondaryVertexBJetTags
akFilter2CaloCombinedSecondaryVertexV2BJetTags = akFilter2CalobTagger.CombinedSecondaryVertexV2BJetTags
akFilter2CaloJetBProbabilityBJetTags = akFilter2CalobTagger.JetBProbabilityBJetTags
akFilter2CaloSoftPFMuonByPtBJetTags = akFilter2CalobTagger.SoftPFMuonByPtBJetTags
akFilter2CaloSoftPFMuonByIP3dBJetTags = akFilter2CalobTagger.SoftPFMuonByIP3dBJetTags
akFilter2CaloTrackCountingHighEffBJetTags = akFilter2CalobTagger.TrackCountingHighEffBJetTags
akFilter2CaloTrackCountingHighPurBJetTags = akFilter2CalobTagger.TrackCountingHighPurBJetTags
akFilter2CaloPatJetPartonAssociationLegacy = akFilter2CalobTagger.PatJetPartonAssociationLegacy
akFilter2CaloImpactParameterTagInfos = akFilter2CalobTagger.ImpactParameterTagInfos
akFilter2CaloImpactParameterTagInfos.primaryVertex = cms.InputTag("offlinePrimaryVertices")
akFilter2CaloJetProbabilityBJetTags = akFilter2CalobTagger.JetProbabilityBJetTags
akFilter2CaloSecondaryVertexTagInfos = akFilter2CalobTagger.SecondaryVertexTagInfos
akFilter2CaloSimpleSecondaryVertexHighEffBJetTags = akFilter2CalobTagger.SimpleSecondaryVertexHighEffBJetTags
akFilter2CaloSimpleSecondaryVertexHighPurBJetTags = akFilter2CalobTagger.SimpleSecondaryVertexHighPurBJetTags
akFilter2CaloCombinedSecondaryVertexBJetTags = akFilter2CalobTagger.CombinedSecondaryVertexBJetTags
akFilter2CaloCombinedSecondaryVertexV2BJetTags = akFilter2CalobTagger.CombinedSecondaryVertexV2BJetTags
akFilter2CaloSecondaryVertexNegativeTagInfos = akFilter2CalobTagger.SecondaryVertexNegativeTagInfos
akFilter2CaloNegativeSimpleSecondaryVertexHighEffBJetTags = akFilter2CalobTagger.NegativeSimpleSecondaryVertexHighEffBJetTags
akFilter2CaloNegativeSimpleSecondaryVertexHighPurBJetTags = akFilter2CalobTagger.NegativeSimpleSecondaryVertexHighPurBJetTags
akFilter2CaloNegativeCombinedSecondaryVertexBJetTags = akFilter2CalobTagger.NegativeCombinedSecondaryVertexBJetTags
akFilter2CaloPositiveCombinedSecondaryVertexBJetTags = akFilter2CalobTagger.PositiveCombinedSecondaryVertexBJetTags
akFilter2CaloNegativeCombinedSecondaryVertexV2BJetTags = akFilter2CalobTagger.NegativeCombinedSecondaryVertexV2BJetTags
akFilter2CaloPositiveCombinedSecondaryVertexV2BJetTags = akFilter2CalobTagger.PositiveCombinedSecondaryVertexV2BJetTags
akFilter2CaloSoftPFMuonsTagInfos = akFilter2CalobTagger.SoftPFMuonsTagInfos
akFilter2CaloSoftPFMuonsTagInfos.primaryVertex = cms.InputTag("offlinePrimaryVertices")
akFilter2CaloSoftPFMuonBJetTags = akFilter2CalobTagger.SoftPFMuonBJetTags
akFilter2CaloSoftPFMuonByIP3dBJetTags = akFilter2CalobTagger.SoftPFMuonByIP3dBJetTags
akFilter2CaloSoftPFMuonByPtBJetTags = akFilter2CalobTagger.SoftPFMuonByPtBJetTags
akFilter2CaloNegativeSoftPFMuonByPtBJetTags = akFilter2CalobTagger.NegativeSoftPFMuonByPtBJetTags
akFilter2CaloPositiveSoftPFMuonByPtBJetTags = akFilter2CalobTagger.PositiveSoftPFMuonByPtBJetTags
akFilter2CaloPatJetFlavourIdLegacy = cms.Sequence(akFilter2CaloPatJetPartonAssociationLegacy*akFilter2CaloPatJetFlavourAssociationLegacy)
#Not working with our PU sub, but keep it here for reference
#akFilter2CaloPatJetFlavourAssociation = akFilter2CalobTagger.PatJetFlavourAssociation
#akFilter2CaloPatJetFlavourId = cms.Sequence(akFilter2CaloPatJetPartons*akFilter2CaloPatJetFlavourAssociation)
akFilter2CaloJetBtaggingIP = cms.Sequence(akFilter2CaloImpactParameterTagInfos *
(akFilter2CaloTrackCountingHighEffBJetTags +
akFilter2CaloTrackCountingHighPurBJetTags +
akFilter2CaloJetProbabilityBJetTags +
akFilter2CaloJetBProbabilityBJetTags
)
)
akFilter2CaloJetBtaggingSV = cms.Sequence(akFilter2CaloImpactParameterTagInfos
*
akFilter2CaloSecondaryVertexTagInfos
* (akFilter2CaloSimpleSecondaryVertexHighEffBJetTags+
akFilter2CaloSimpleSecondaryVertexHighPurBJetTags+
akFilter2CaloCombinedSecondaryVertexBJetTags+
akFilter2CaloCombinedSecondaryVertexV2BJetTags
)
)
# b-tagging with negative/positive secondary-vertex tag infos (mistag-rate studies).
akFilter2CaloJetBtaggingNegSV = cms.Sequence(akFilter2CaloImpactParameterTagInfos
*
akFilter2CaloSecondaryVertexNegativeTagInfos
* (akFilter2CaloNegativeSimpleSecondaryVertexHighEffBJetTags+
akFilter2CaloNegativeSimpleSecondaryVertexHighPurBJetTags+
akFilter2CaloNegativeCombinedSecondaryVertexBJetTags+
akFilter2CaloPositiveCombinedSecondaryVertexBJetTags+
akFilter2CaloNegativeCombinedSecondaryVertexV2BJetTags+
akFilter2CaloPositiveCombinedSecondaryVertexV2BJetTags
)
)
# Soft-muon b-taggers (not included in the default b-tagging sequence below).
akFilter2CaloJetBtaggingMu = cms.Sequence(akFilter2CaloSoftPFMuonsTagInfos * (akFilter2CaloSoftPFMuonBJetTags
+
akFilter2CaloSoftPFMuonByIP3dBJetTags
+
akFilter2CaloSoftPFMuonByPtBJetTags
+
akFilter2CaloNegativeSoftPFMuonByPtBJetTags
+
akFilter2CaloPositiveSoftPFMuonByPtBJetTags
)
)
# Master b-tagging sequence: impact-parameter + SV + negative-SV taggers
# (the soft-muon taggers stay disabled, see the commented line).
akFilter2CaloJetBtagging = cms.Sequence(akFilter2CaloJetBtaggingIP
*akFilter2CaloJetBtaggingSV
*akFilter2CaloJetBtaggingNegSV
# *akFilter2CaloJetBtaggingMu
)
# PAT jets with b-tag discriminators, flavour information and MC matching attached.
akFilter2CalopatJetsWithBtagging = patJets.clone(jetSource = cms.InputTag("akFilter2CaloJets"),
genJetMatch = cms.InputTag("akFilter2Calomatch"),
genPartonMatch = cms.InputTag("akFilter2Caloparton"),
jetCorrFactorsSource = cms.VInputTag(cms.InputTag("akFilter2Calocorr")),
JetPartonMapSource = cms.InputTag("akFilter2CaloPatJetFlavourAssociationLegacy"),
JetFlavourInfoSource = cms.InputTag("akFilter2CaloPatJetFlavourAssociation"),
trackAssociationSource = cms.InputTag("akFilter2CaloJetTracksAssociatorAtVertex"),
useLegacyJetMCFlavour = True,
discriminatorSources = cms.VInputTag(cms.InputTag("akFilter2CaloSimpleSecondaryVertexHighEffBJetTags"),
cms.InputTag("akFilter2CaloSimpleSecondaryVertexHighPurBJetTags"),
cms.InputTag("akFilter2CaloCombinedSecondaryVertexBJetTags"),
cms.InputTag("akFilter2CaloCombinedSecondaryVertexV2BJetTags"),
cms.InputTag("akFilter2CaloJetBProbabilityBJetTags"),
cms.InputTag("akFilter2CaloJetProbabilityBJetTags"),
#cms.InputTag("akFilter2CaloSoftPFMuonByPtBJetTags"),
#cms.InputTag("akFilter2CaloSoftPFMuonByIP3dBJetTags"),
cms.InputTag("akFilter2CaloTrackCountingHighEffBJetTags"),
cms.InputTag("akFilter2CaloTrackCountingHighPurBJetTags"),
),
jetIDMap = cms.InputTag("akFilter2CaloJetID"),
addBTagInfo = True,
addTagInfos = True,
addDiscriminators = True,
addAssociatedTracks = True,
addJetCharge = False,
addJetID = False,
getJetMCFlavour = True,
addGenPartonMatch = True,
addGenJetMatch = True,
embedGenJetMatch = True,
embedGenPartonMatch = True,
# embedCaloTowers = False,
# embedPFCandidates = True
)
# N-subjettiness (tau1..tau3) computed for the R = 0.2 jets.
akFilter2CaloNjettiness = Njettiness.clone(
src = cms.InputTag("akFilter2CaloJets"),
R0 = cms.double( 0.2)
)
akFilter2CalopatJetsWithBtagging.userData.userFloats.src += ['akFilter2CaloNjettiness:tau1','akFilter2CaloNjettiness:tau2','akFilter2CaloNjettiness:tau3']
# Ntuple-producing analyzer, configured for MC (gen jets, sub-events, gen taus).
akFilter2CaloJetAnalyzer = inclusiveJetAnalyzer.clone(jetTag = cms.InputTag("akFilter2CalopatJetsWithBtagging"),
genjetTag = 'ak2GenJets',
rParam = 0.2,
matchJets = cms.untracked.bool(False),
matchTag = 'patJetsWithBtagging',
pfCandidateLabel = cms.untracked.InputTag('particleFlow'),
trackTag = cms.InputTag("generalTracks"),
fillGenJets = True,
isMC = True,
doSubEvent = True,
useHepMC = cms.untracked.bool(False),
genParticles = cms.untracked.InputTag("genParticles"),
eventInfoTag = cms.InputTag("generator"),
doLifeTimeTagging = cms.untracked.bool(True),
doLifeTimeTaggingExtras = cms.untracked.bool(False),
bTagJetName = cms.untracked.string("akFilter2Calo"),
jetName = cms.untracked.string("akFilter2Calo"),
genPtMin = cms.untracked.double(5),
hltTrgResults = cms.untracked.string('TriggerResults::'+'HISIGNAL'),
doTower = cms.untracked.bool(False),
doSubJets = cms.untracked.bool(True),
doGenSubJets = cms.untracked.bool(False),
subjetGenTag = cms.untracked.InputTag("akFilter2GenJets"),
doGenTaus = True
)
# Full MC sequence: gen matching, parton association, corrections, flavour id,
# track association, b-tagging, substructure, PAT jets, then the analyzer.
akFilter2CaloJetSequence_mc = cms.Sequence(
#akFilter2Caloclean
#*
akFilter2Calomatch
#*
#akFilter2CalomatchGroomed
*
akFilter2Caloparton
*
akFilter2Calocorr
*
#akFilter2CaloJetID
#*
akFilter2CaloPatJetFlavourIdLegacy
#*
#akFilter2CaloPatJetFlavourId # Use legacy algo till PU implemented
*
akFilter2CaloJetTracksAssociatorAtVertex
*
akFilter2CaloJetBtagging
*
akFilter2CaloNjettiness
*
akFilter2CalopatJetsWithBtagging
*
akFilter2CaloJetAnalyzer
)
# Data sequence: the same chain without the gen-level matching/flavour steps.
akFilter2CaloJetSequence_data = cms.Sequence(akFilter2Calocorr
*
#akFilter2CaloJetID
#*
akFilter2CaloJetTracksAssociatorAtVertex
*
akFilter2CaloJetBtagging
*
akFilter2CaloNjettiness
*
akFilter2CalopatJetsWithBtagging
*
akFilter2CaloJetAnalyzer
)
# The JEC / minimum-bias / default variants all reuse the MC sequence.
akFilter2CaloJetSequence_jec = cms.Sequence(akFilter2CaloJetSequence_mc)
akFilter2CaloJetSequence_mb = cms.Sequence(akFilter2CaloJetSequence_mc)
akFilter2CaloJetSequence = cms.Sequence(akFilter2CaloJetSequence_mc)
| [
"[email protected]"
] | |
f72480accb059522869217f8cf37e191dde1a7b4 | 201f07e3ddfd4f1b24c24fc794aa980a255fb2ab | /barriers/views/assessments/overview.py | e1bd181b2078e37846d386e93ed49544758a1f6b | [
"MIT"
] | permissive | uktrade/market-access-python-frontend | 7dab68eed0b35205f4c78758ab88c815f65631c2 | 9510c31f7264c4092f76fce336d05b6709802b1c | refs/heads/master | 2023-09-05T09:38:42.473232 | 2023-08-31T08:42:49 | 2023-08-31T08:42:49 | 225,406,107 | 7 | 3 | MIT | 2023-09-13T09:41:55 | 2019-12-02T15:20:46 | Python | UTF-8 | Python | false | false | 586 | py | from django.conf import settings
from django.views.generic import TemplateView
from ..mixins import BarrierMixin
class AssessmentOverview(BarrierMixin, TemplateView):
    """Render the assessments overview page for a barrier."""

    template_name = "barriers/assessments/overview.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Hide the strategic-assessment section entirely when the feature
        # flag is not configured.
        css_classes = ["assessment-item"]
        if not settings.PRIORITISATION_STRATEGIC_ASSESSMENTS:
            css_classes.append("visually-hidden")
        context["strategic_ass"] = " ".join(css_classes)
        return context
| [
"[email protected]"
] | |
29ded85963427005447945c2553a28c700c64f2c | 50ee2f4f1a7d2e5ff7ac35118c5ac45f9b923865 | /0x04-python-more_data_structures/11-mutiply_list_map.py | e358dac1321fc511f9324487fbc1dcfae81e9a3d | [] | no_license | spencerhcheng/holbertonschool-higher_level_programming | b489fbe8eba6109ef1eaa0d9363f3477e7eb16c4 | f8e1dbc24fcf8fb40ca135d2700872eb773e481e | refs/heads/master | 2021-01-20T06:54:35.044899 | 2018-05-20T05:09:59 | 2018-05-20T05:09:59 | 89,943,332 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | #!/usr/bin/python3
def mutiply_list_map(my_list=[], number=0):
    """Return a new list with every element of my_list multiplied by number,
    using map() as required by the exercise."""
    return list(map(lambda item: number * item, my_list))
| [
"[email protected]"
] | |
71522388895aa9b96e91d33115a18a44030a8f11 | dc182e5b4597bdd104d6695c03744a12ebfe2533 | /Hackerrank Solutions/array.py | 548b0132cc10c538e8612616e21c82103c603c13 | [] | no_license | srinaveendesu/Programs | 06fb4a4b452445e4260f9691fe632c732078d54d | f6dbd8db444678b7ae7658126b59b381b3ab0bab | refs/heads/master | 2023-01-27T14:42:40.989127 | 2023-01-18T22:36:14 | 2023-01-18T22:36:14 | 129,948,488 | 1 | 0 | null | 2022-09-13T23:06:04 | 2018-04-17T18:30:13 | Python | UTF-8 | Python | false | false | 4,002 | py | #QQ# #https://www.hackerrank.com/challenges/array-left-rotation/problem
#!/bin/python3
import math
import os
import random
import re
import sys
if __name__ == '__main__':
    # Read "n d" (array length and left-rotation count) from stdin.
    nd = input().split()
    n = int(nd[0])
    d = int(nd[1])
    a = list(map(int, input().rstrip().split()))
    # Left-rotate by d: drop the first d elements, append them at the end.
    # (a[::] takes a shallow copy first, so `a` itself is never modified.)
    l = (a[::][d:]+ a[:d])
    print (' '.join(map(str,l)))
#QQ# https://www.hackerrank.com/challenges/sparse-arrays/problem
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the matchingStrings function below.
def matchingStrings(strings, queries):
    """Return, for each query, how many times it occurs in ``strings``.

    :param strings: list of strings to index
    :param queries: list of strings to look up
    :return: list of occurrence counts, parallel to ``queries``
    """
    # Count occurrences of each distinct string once up front.
    counts = {}
    for s in strings:
        counts[s] = counts.get(s, 0) + 1
    # Queries that never appear count as 0.  (The original also printed the
    # whole dict to stdout -- leftover debugging, removed.)
    return [counts.get(q, 0) for q in queries]
if __name__ == '__main__':
    # HackerRank harness: results are written to the file named by OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    strings_count = int(input())
    strings = []
    for _ in range(strings_count):
        strings_item = input()
        strings.append(strings_item)
    queries_count = int(input())
    queries = []
    for _ in range(queries_count):
        queries_item = input()
        queries.append(queries_item)
    res = matchingStrings(strings, queries)
    # One occurrence count per line.
    fptr.write('\n'.join(map(str, res)))
    fptr.write('\n')
    fptr.close()
#QQ# https://www.hackerrank.com/challenges/missing-numbers/problem
# !/bin/python3
import math
import os
import random
import re
import sys
# Complete the missingNumbers function below.
def missingNumbers(arr, brr):
    """Return the sorted distinct values that occur more times in ``brr``
    than in ``arr`` (the "missing numbers" of the HackerRank problem).

    :param arr: list of ints (a sub-multiset of brr)
    :param brr: list of ints (the original multiset)
    :return: sorted list of distinct missing values
    """
    # extra[v] = count(v in brr) - count(v in arr); positive means missing.
    extra = {}
    for val in brr:
        extra[val] = extra.get(val, 0) + 1
    for val in arr:
        extra[val] = extra.get(val, 0) - 1
    # The original sorted both inputs in place (an unnecessary side effect on
    # the caller's lists) and printed the counting dict; both removed.
    return sorted(val for val, diff in extra.items() if diff > 0)
if __name__ == '__main__':
    # HackerRank harness: reads arr and brr, writes the missing numbers.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    n = int(input())
    arr = list(map(int, input().rstrip().split()))
    m = int(input())
    brr = list(map(int, input().rstrip().split()))
    result = missingNumbers(arr, brr)
    # Space-separated on a single line.
    fptr.write(' '.join(map(str, result)))
    fptr.write('\n')
    fptr.close()
#QQ# https://www.hackerrank.com/challenges/sherlock-and-array/problem
# !/bin/python3
import math
import os
import random
import re
import sys
# Complete the balancedSums function below.
def balancedSums(arr):
    """Return 'YES' if some element of ``arr`` splits it into a left part and
    a right part with equal sums, else 'NO'.

    The original version used float division ((s - arr[i]) / 2) and tried to
    break out of the loop by assigning ``i = n``, which has no effect on a
    ``for`` loop; this version compares in integers and returns early.
    """
    total = sum(arr)
    left = 0
    for val in arr:
        # val balances at this position when left == total - val - left.
        if total - val == 2 * left:
            return 'YES'
        left += val
    return 'NO'
if __name__ == '__main__':
    # HackerRank harness: T independent test cases, one YES/NO line each.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    T = int(input().strip())
    for T_itr in range(T):
        n = int(input().strip())
        arr = list(map(int, input().rstrip().split()))
        result = balancedSums(arr)
        fptr.write(result + '\n')
    fptr.close()
#QQ# https://www.hackerrank.com/challenges/beautiful-triplets/problem
# !/bin/python3
import math
import os
import random
import re
import sys
# Complete the beautifulTriplets function below.
def beautifulTriplets(d, arr):
    """Count triplets (a, a + d, a + 2d) whose members all occur in ``arr``.

    :param d: the required common difference
    :param arr: the (increasing) input sequence
    :return: number of beautiful triplets

    The original did O(n) list-membership tests per element (O(n^2) total),
    collected string concatenations just to take their length, and printed
    the list for debugging; this counts directly with an O(1) set lookup.
    """
    values = set(arr)
    return sum(1 for a in arr if a + d in values and a + 2 * d in values)
if __name__ == '__main__':
    # HackerRank harness: reads "n d" then the array, writes the triplet count.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    nd = input().split()
    n = int(nd[0])
    d = int(nd[1])
    arr = list(map(int, input().rstrip().split()))
    result = beautifulTriplets(d, arr)
    fptr.write(str(result) + '\n')
    fptr.close()
| [
"[email protected]"
] | |
26d0cb12f5092278fa8ddcee18b2141fd5dd5574 | 9382a3acd3637a1c242045bff8109dee844d869a | /src/webhook_server_for_evaluation.py | 10a64c5634e81ba458af050880bc5ec18bf38dba | [
"MIT"
] | permissive | byeongkyu/dialogflow_dialog | 4cf19133f73d8ea82b8fb98e33a661804217a5db | c7ae5ce65f8fb6fa830817ab186a9851a26473e0 | refs/heads/master | 2020-03-17T20:07:05.222328 | 2018-08-23T23:42:39 | 2018-08-23T23:42:39 | 133,893,838 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,691 | py | #!/usr/bin/env python
#-*- encoding: utf8 -*-
import os
import time
import rospy
import threading
import logging
import json
import requests
import random
from std_msgs.msg import String, Int16, Empty
from flask import Flask, Response, request, make_response, jsonify
WEATHER_TEXT = [
"The weather in {city} now is {current_weather_desc}, current temperature is {current_temp} degree and wind speed is {current_wind_speed} m/s.",
]
class WebhookServer:
    """Dialogflow fulfillment webhook served with Flask and bridged to ROS.

    Responses for several actions change with the active evaluation scenario,
    which is selected at runtime via the /select_evaluation_scenario topic.
    (Python 2 code: note the `except KeyError, e` syntax below.)
    """
    def __init__(self):
        self.app = Flask(__name__)
        self.app.add_url_rule('/', 'fulfillment', self.handle_fulfillment, methods=['POST'])
        self.app.add_url_rule('/', 'index', self.handle_index, methods=['GET'])
        # 0: Neutral, 1: Forward Lean, 2: Self disclosure, 3: voice pitch
        self.current_scenario = 0 # Neutral
        rospy.Subscriber('/select_evaluation_scenario', Int16, self.handle_select_scenario)
        self.pub_complete = rospy.Publisher('/complete_execute_scenario', Empty, queue_size=1)
        self.port_num = rospy.get_param('~port_num', default=8888)
        # Load the OpenWeatherMap API key from the JSON file named by ~weather_api.
        # NOTE(review): only KeyError is handled; a missing or unreadable file
        # would raise IOError uncaught -- confirm that is intended.
        try:
            with open(rospy.get_param('~weather_api')) as f:
                self.weather_api_key = json.loads(f.read())
        except KeyError, e:
            logging.error('Need parameter ~weather_api')
            exit(-1)
        # print self.weather_api_key
    def run(self):
        """Run the Flask app (blocking) on all interfaces at ~port_num."""
        self.app.run(host="0.0.0.0", port=self.port_num)
    def handle_select_scenario(self, msg):
        """ROS callback: switch the active evaluation scenario (Int16, 0-3)."""
        self.current_scenario = msg.data
    def handle_index(self):
        """GET /: trivial index page for sanity checks."""
        return "<h1>This page is index page of UoA webhook server...</h1>"
    def handle_fulfillment(self):
        """POST /: Dialogflow fulfillment endpoint.

        Chooses a response text for the recognized action (possibly modified
        by the current scenario), publishes a completion message on
        /complete_execute_scenario, and returns the fulfillment JSON.
        """
        req = request.get_json(silent=True, force=True)
        try:
            action = req.get('queryResult').get('action')
        except AttributeError:
            # req was None or malformed JSON.
            rospy.logwarn('JSON error from fulfillment request')
            return "json error"
        if action == 'weather':
            res = self.get_weather(req)
        elif action == 'welcome':
            if self.current_scenario == 2: # 2: Self disclosure
                res = "Hi there, my name is Nao, the receptionist robot. I'm a little nervous about this task, but how may I help you?"
            else:
                res = "Hi there, my name is Nao, the receptionist robot. How may I help you?"
        elif action == "prescription_not_ready":
            if self.current_scenario == 3: # 3: voice pitch
                res = '''
            <prosody pitch="-15%"> I'm sorry Sam, your doctor has not yet written your prescription and so it is not ready for collection at the moment</prosody>.
            <prosody pitch="-15%"> However, I have sent a message to your doctor</prosody>.
            <prosody pitch="-15%"> Once the prescription has been written, someone will call you and let you know</prosody>.
            <prosody pitch="-15%"> Is there anything else I can help you with</prosody>?
            '''
            else:
                # Fall back to the response text configured in Dialogflow.
                res = req.get('queryResult').get('fulfillmentText')
        elif action == "dontknow_doctor_name":
            if self.current_scenario == 2: # 2: Self disclosure
                res = '''
            No problem Sam, I forget things too sometimes.
            I can see that you have an appointment with Dr Jones today and have checked you in. Is there anything else I can help you with?
            '''
            elif self.current_scenario == 3: # 3: voice pitch
                res = '''
            <prosody pitch="10%"> No problem Sam, I can see that you have an appointment with Dr Jones today and have checked you in</prosody>.
            <prosody pitch="10%"> Is there anything else I can help you with</prosody>?
            '''
            else:
                res = req.get('queryResult').get('fulfillmentText')
        elif action == "request_bathroom":
            if self.current_scenario == 3: # 3: voice pitch
                res = '''
            %pointing=objects:door% <prosody pitch="10%"> Certainly, the bathroom is located down the hall, second door on the right</prosody>.
            '''
            else:
                res = req.get('queryResult').get('fulfillmentText')
        elif action == "goodbye":
            if self.current_scenario == 3: # 3: voice pitch
                res = '''
            <prosody pitch="10%"> I hope you have a nice day, Sam</prosody>.
            '''
            else:
                res = req.get('queryResult').get('fulfillmentText')
        # Notify listeners that a scenario exchange finished, then answer.
        self.pub_complete.publish()
        return make_response(jsonify({'fulfillmentText': res}))
    def get_weather(self, req):
        """Fetch current weather for the request's geo-city parameter and
        return it formatted with one of the WEATHER_TEXT templates."""
        parameters = req.get('queryResult').get('parameters')
        result = requests.get('http://api.openweathermap.org/data/2.5/weather?q=%s&appid=%s'%(parameters['geo-city'], self.weather_api_key['api_key']))
        weather_data = json.loads(result.text)
        # print weather_data
        current_city = weather_data['name']
        current_weather = weather_data['weather'][0]['main']
        current_weather_desc = weather_data['weather'][0]['description']
        current_temp = weather_data['main']['temp'] - 273.15 # Kelvin to Celcius
        current_wind_speed = weather_data['wind']['speed']
        output_string = random.choice(WEATHER_TEXT)
        return output_string.format(city=current_city, current_weather_desc=current_weather_desc, current_temp=current_temp, current_wind_speed=current_wind_speed)
if __name__ == '__main__':
    # Start the ROS node on a background thread so Flask can own the main thread.
    threading.Thread(target=lambda: rospy.init_node('webhook_server_node', disable_signals=True)).start()
    time.sleep(0.5)
    m = WebhookServer()
    m.run()
| [
"[email protected]"
] | |
3455029efbae033f2c0da3c1c4522c8c9b33cb33 | e6a90d21e9a983476adda3cccc832a828875cea9 | /Lib/site-packages/ffc/jitobject.py | 19a9d2db3872418e1f60e0ccf70f4753e65356c4 | [] | no_license | maciekswat/dolfin_python_deps | e28c27780d535c961e4b3129bb17628a4ff7167a | 7af15cd0ab522436ca285f8422faa42675345f55 | refs/heads/master | 2021-01-25T12:14:55.779591 | 2014-04-23T22:51:19 | 2014-04-23T22:51:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,931 | py | # Copyright (C) 2008-2013 Anders Logg
#
# This file is part of FFC.
#
# FFC is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FFC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with FFC. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Martin Alnaes, 2013
#
# First added: 2008-09-04
# Last changed: 2013-01-25
# Python modules.
from hashlib import sha1
# Instant modules.
from instant import get_swig_version
# UFL modules.
import ufl
# FFC modules.
from constants import FFC_VERSION
# UFC modules.
import ufc_utils
# Compute signature of all ufc headers combined
ufc_signature = sha1(''.join(getattr(ufc_utils, header)  # concatenate every ufc_utils *_header string
for header in
(k for k in vars(ufc_utils).keys()
if k.endswith("_header")))
).hexdigest()  # hex SHA-1 identifying this set of UFC headers
# NOTE(review): vars() ordering is not guaranteed across interpreter builds,
# so the concatenation order (and hence the digest) may vary -- confirm.
class JITObject:
    """This class is a wrapper for a compiled object in the context of
    specific compiler parameters. A JITObject is identified either by its
    hash value or by its signature. The hash value is valid only in a
    single instance of an application (at runtime). The signature is
    persistent and may be used for caching modules on disk."""

    def __init__(self, form, parameters):
        "Create JITObject for given form and parameters"
        assert(isinstance(form, ufl.Form))
        # Store data
        self.form = form
        self.parameters = parameters
        # Lazily computed caches (see __hash__ and signature()).
        self._hash = None
        self._signature = None

    def __hash__(self):
        "Return unique integer for form + parameters"
        # Compute (and cache) the hash from the signature on first use.
        if self._hash is None:
            self._hash = int(self.signature(), 16)
        return self._hash

    def __eq__(self, other):
        "Check for equality"
        # Equality is defined via the hash, i.e. via the persistent signature.
        return hash(self) == hash(other)

    def __ne__(self, other):
        "Check for inequality"
        # Python 2 does not derive __ne__ from __eq__, so define it explicitly
        # to keep == and != consistent.
        return not self.__eq__(other)

    def signature(self):
        "Return unique string for form + parameters"
        # Check if we have computed the signature before
        if not self._signature is None:
            return self._signature
        # Get signature from assumed precomputed form_data
        form_signature = self.form.form_data().signature
        # Compute other relevant signatures
        parameters_signature = _parameters_signature(self.parameters)
        ffc_signature = str(FFC_VERSION)
        swig_signature = str(get_swig_version())
        cell_signature = str(self.form.form_data().cell)
        # Build common signature
        # NOTE(review): swig_signature is computed but not folded into the
        # signature below -- confirm whether it should be included.
        signatures = [form_signature,
                      parameters_signature,
                      ffc_signature,
                      cell_signature,
                      ufc_signature]
        string = ";".join(signatures)
        self._signature = sha1(string).hexdigest()
        # Uncomment for debugging
        #print "form_signature =", form_signature
        #print "parameters_signature =", parameters_signature
        #print "ffc_signature =", ffc_signature
        #print "cell_signature =", cell_signature
        #print "signature =", self._signature
        return self._signature
def _parameters_signature(parameters):
"Return parameters signature (some parameters must be ignored)."
parameters = parameters.copy()
ignores = ["log_prefix"]
for ignore in ignores:
if ignore in parameters:
del parameters[ignore]
return str(parameters)
| [
"[email protected]"
] | |
e47ec77483d8ea21ac2c7f17ceca3591cb18192a | df858cb8172f73aad1af25496ac86e637a203bf4 | /Introdução a Programação com Python - exercícios baixados do site oficial/Listagens/07.45 - Jogo da forca.py | bf6002fa6bef2b078f9bd73b91e7c106e5860d59 | [] | no_license | emersonleite/python | 33cd48788e4f641da244ba9fd0460b9a5b1ef0bc | 8157fcd5c7ee7f942a4503ad386e7d2054d5acfc | refs/heads/master | 2020-03-08T04:09:57.857429 | 2019-03-27T14:56:46 | 2019-03-27T14:56:46 | 127,913,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,790 | py | ##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: Nilo Ney Coutinho Menezes
# Editora Novatec (c) 2010-2014
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Primeira reimpressão - Outubro/2011
# Segunda reimpressão - Novembro/1012
# Terceira reimpressão - Agosto/2013
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Site: http://python.nilo.pro.br/
#
# Arquivo: capitulo 07\07.45 - Jogo da forca.py
##############################################################################
# Hangman ("forca") game: user-facing text is intentionally in Portuguese.
palavra = input("Digite a palavra secreta:").lower().strip()
# Scroll the secret word off the screen by printing many blank lines.
for x in range(100):
    print()
digitadas = []  # letters already tried
acertos = []  # tried letters that are in the word
erros = 0  # number of wrong guesses (6 means hanged)
while True:
    # Build the masked word: reveal guessed letters, hide the rest with dots.
    senha = ""
    for letra in palavra:
        senha += letra if letra in acertos else "."
    print(senha)
    if senha == palavra:
        print("Você acertou!")
        break
    tentativa = input("\nDigite uma letra:").lower().strip()
    if tentativa in digitadas:
        print("Você já tentou esta letra!")
        continue
    else:
        digitadas += tentativa
    if tentativa in palavra:
        acertos += tentativa
    else:
        erros += 1
        print("Você errou!")
    # Draw the gallows and as much of the hangman as the error count requires.
    print("X==:==\nX : ")
    print("X O " if erros >= 1 else "X")
    linha2 = ""
    if erros == 2:
        linha2 = " | "
    elif erros == 3:
        linha2 = " \| "
    elif erros >= 4:
        linha2 = " \|/ "
    print("X%s" % linha2)
    linha3 = ""
    if erros == 5:
        linha3 += " / "
    elif erros >= 6:
        linha3 += " / \ "
    print("X%s" % linha3)
    print("X\n===========")
    if erros == 6:
        print("Enforcado!")
        break
| [
"[email protected]"
] | |
7287bd809c049bf9525538a634f1b9f8bb262e56 | 866a3a0b02ad10ba7e0f4db18efd38a213b0e3c5 | /18_0.py | b4b60322a0693c11ff34a160f7ce3c5641122b56 | [] | no_license | SleepwalkerCh/Leetcode- | 6da7f5859184ae86c6f16d10e3570f837f2508d6 | 9472b9dd388a8774339ecf9d8ff6cb61b7ea821f | refs/heads/master | 2021-03-07T10:58:25.086406 | 2020-03-19T07:50:32 | 2020-03-19T07:50:32 | 246,260,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | #18. 4Sum18
# 很简陋的做法,大概率会超时,在O(n^4)基础上做了一些小优化,但是结果未进行查重
# WRONG ANSWER
class Solution:
    def fourSum(self, nums: List[int], target: int) -> List[List[int]]:
        """Return all unique quadruplets [a, b, c, d] from nums summing to target.

        Replaces the original O(n^4) brute force (flagged WRONG ANSWER: it
        produced duplicate quadruplets and printed debug output).  Sort the
        input, fix the two smallest members with nested loops, and close each
        pair with a two-pointer scan: O(n^3) time, no duplicates.
        """
        nums.sort()
        n = len(nums)
        result = []
        for i in range(n - 3):
            # Skip repeated values for the first member.
            if i > 0 and nums[i] == nums[i - 1]:
                continue
            for j in range(i + 1, n - 2):
                # Skip repeated values for the second member.
                if j > i + 1 and nums[j] == nums[j - 1]:
                    continue
                lo, hi = j + 1, n - 1
                while lo < hi:
                    total = nums[i] + nums[j] + nums[lo] + nums[hi]
                    if total < target:
                        lo += 1
                    elif total > target:
                        hi -= 1
                    else:
                        result.append([nums[i], nums[j], nums[lo], nums[hi]])
                        # Move both pointers past any duplicate values.
                        while lo < hi and nums[lo] == nums[lo + 1]:
                            lo += 1
                        while lo < hi and nums[hi] == nums[hi - 1]:
                            hi -= 1
                        lo += 1
                        hi -= 1
        return result
| [
"[email protected]"
] | |
c73ae77ac18bd3d3bd5970245667f5e8fd8b2471 | 6b6f68f507746e3e39b0e8789af5d044e27d6b0a | /Math/0204_CountPrimes_E.py | e266761a3e920a2ddefc2e292ae002021428d1b4 | [] | no_license | PFZ86/LeetcodePractice | bb0012d8b3120451dda1745875836278d3362e45 | 6db9db1934bc0a8142124d8b56bf6c07bdf43d79 | refs/heads/master | 2021-08-28T08:43:27.343395 | 2021-08-17T20:38:32 | 2021-08-17T20:38:32 | 230,925,656 | 1 | 1 | null | 2021-08-17T20:38:32 | 2019-12-30T14:01:27 | Python | UTF-8 | Python | false | false | 1,430 | py | # https://leetcode.com/problems/count-primes/
# Solution 1: the naive method, time complexity O(n^{1.5})
class Solution(object):
    """Naive prime counting by trial division (O(n^1.5) overall)."""

    def isPrime(self, num):
        """Return True if num is prime, False otherwise."""
        if num <= 1:
            return False
        divisor = 2
        # Only divisors up to sqrt(num) matter; compare divisor*divisor with
        # num instead of calling the more expensive sqrt().
        while divisor * divisor <= num:
            if num % divisor == 0:
                return False
            divisor += 1
        return True

    def countPrimes(self, n):
        """
        :type n: int
        :rtype: int
        """
        # 0 and 1 are rejected by isPrime, so starting at 2 is equivalent.
        return sum(1 for candidate in range(2, n) if self.isPrime(candidate))
# Solution 2: the Sieve method; time complexity O(nloglogn), space complexity O(n)
class Solution(object):
    """Count primes below n with the Sieve of Eratosthenes.

    Time O(n log log n), space O(n).
    """

    def countPrimes(self, n):
        """
        :type n: int
        :rtype: int
        """
        sieve = [True] * n
        limit = 2
        while limit * limit <= n:
            if sieve[limit]:
                # Multiples below limit*limit were already crossed off by
                # smaller prime factors, so start the marking there.
                for multiple in range(limit * limit, n, limit):
                    sieve[multiple] = False
            limit += 1
        # Indices 0 and 1 are skipped by the slice; every remaining True
        # entry marks a prime.
        return sum(sieve[2:])
| [
"[email protected]"
] | |
478995ae08d50e8625c61409890afdaa49990940 | d41d18d3ea6edd2ec478b500386375a8693f1392 | /plotly/validators/layout/polar/angularaxis/_tickcolor.py | c767e805316121395caad30ece9326f5d675e0b8 | [
"MIT"
] | permissive | miladrux/plotly.py | 38921dd6618650d03be9891d6078e771ffccc99a | dbb79e43e2cc6c5762251537d24bad1dab930fff | refs/heads/master | 2020-03-27T01:46:57.497871 | 2018-08-20T22:37:38 | 2018-08-20T22:37:38 | 145,742,203 | 1 | 0 | MIT | 2018-08-22T17:37:07 | 2018-08-22T17:37:07 | null | UTF-8 | Python | false | false | 458 | py | import _plotly_utils.basevalidators
class TickcolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``layout.polar.angularaxis.tickcolor`` property."""

    def __init__(self, plotly_name='tickcolor',
                 parent_name='layout.polar.angularaxis', **kwargs):
        # Tick color is a style-role property redrawn with a 'plot' edit.
        super(TickcolorValidator, self).__init__(plotly_name=plotly_name,
                                                 parent_name=parent_name,
                                                 edit_type='plot',
                                                 role='style',
                                                 **kwargs)
| [
"[email protected]"
] | |
f5048e620a1d249b16bfa06ee8a33bc414722ba4 | 9d831207b43422b40e54cf6258a29b2f92b66290 | /src/boot_navigation/reports.py | 9348679320e16ccddb3aafca90894e6209798d01 | [] | no_license | AndreaCensi/yc1304 | 714e70e972e9ee31ac011bdb94a57a8ab568f853 | 0bc2b759423db7da73fac47572719a37a80fee0e | refs/heads/master | 2020-06-04T12:52:10.902082 | 2013-07-19T07:00:49 | 2013-07-19T07:00:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,822 | py | from .navigation_map import NavigationMap
from .plots import plot_arrow_SE2, plot_arrow_se2
from contracts import contract
from geometry import se2_from_linear_angular
from reprep import Report, rgb_zoom, scale
import numpy as np
import warnings
__all__ = ['display_nmap']
@contract(report=Report, nmap=NavigationMap)
def display_nmap(report, nmap):
    # Adds a 'sensing' subsection plus a top-down map plot showing each pose
    # (black arrow) and its commanded velocity (green arrow).
    with report.subsection('sensing') as sub:
        display_nmap_sensing(sub, nmap)
    f = report.figure()
    with f.plot('map') as pylab:
        for bd, pose in nmap.data:
            commands = bd['commands']
            warnings.warn('redo this properly')
            # Commands are either (x, y, omega) or just (x, y) with no rotation.
            if len(commands) == 3:
                x, y, omega = commands
            else:
                x, y = commands
                omega = 0
            vel = se2_from_linear_angular([x, y], omega)
            plot_arrow_SE2(pylab, pose)
            plot_arrow_se2(pylab, pose, vel, length=0.04, color='g')
    # NOTE(review): `pylab` is used after the `with` block has exited; confirm
    # the axis call still affects the saved figure.
    pylab.axis('equal')
@contract(report=Report, nmap=NavigationMap)
def display_nmap_sensing(report, nmap):
    # Shows the raw observation matrix plus a "striped" version where each
    # observation is repeated 4 times and followed by one NaN spacer row.
    observations = list(nmap.get_all_observations())
    plain = np.vstack(observations)
    repeats = 4
    spacers = 1
    rows = []
    for obs in observations:
        rows.extend([obs] * repeats)
        rows.extend([obs * np.nan] * spacers)
    striped = np.vstack(rows)
    f = report.figure(cols=1)
    f.data_rgb('observations', _nmapobs_to_rgb(plain))
    f.data_rgb('observations2', _nmapobs_to_rgb(striped))
def _nmapobs_to_rgb(m):
    # Convert a stacked observation array to a zoomed RGB image.
    # (Removed the leftover Python-2 debug statement `print m.shape`.)
    m = m.T
    # Observations are scaled into [0, 1]; NaNs (spacer rows) render light green.
    rgb = scale(m, min_value=0, max_value=1, nan_color=[.6, 1, .6])
    return rgb_zoom(rgb, 4)
@contract(obss='list(array)')
def _nmapobslist_to_rgb(obss):
    # Stack a list of observation arrays and convert the result to RGB.
    return _nmapobs_to_rgb(np.vstack(obss))
| [
"[email protected]"
] | |
5ac6bb0b7e9b88f4f929ade58c1f69f32bbdda68 | 61a3cba75de7f6da049bcf34343b9839291bda75 | /mrjob/conf.py | 1536217cf2186e0724b02dce429b8aad1effa0da | [
"Apache-2.0"
] | permissive | joshuamckenty/mrjob | b309ed094945b24a471fa8214bb10d35e364a65a | f24991ffae9e7a2dad9fd3403d5e96635ededa4a | refs/heads/master | 2020-12-24T15:22:45.869655 | 2010-10-28T18:07:00 | 2010-10-28T18:07:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,079 | py | # Copyright 2009-2010 Yelp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""""mrjob.conf" is the name of both this module, and the global config file
for :py:mod:`mrjob`.
We look for :file:`mrjob.conf` in these locations:
- :file:`~/.mrjob`
- :file:`mrjob.conf` anywhere in your :envvar:`$PYTHONPATH`
- :file:`/etc/mrjob.conf`
The point of :file:`mrjob.conf` is to let you set up things you want every
job to have access to so that you don't have to think about it. For example:
- libraries and source code you want to be available for your jobs
- where temp directories and logs should go
- security credentials
:file:`mrjob.conf` is just a `YAML <http://www.yaml.org>`_-encoded dictionary
containing default values to pass in to the constructors of the various runner
classes. Here's a minimal :file:`mrjob.conf`::
runners:
emr:
cmdenv:
TZ: America/Los_Angeles
Now whenever you run ``mr_your_script.py -r emr``,
:py:class:`~mrjob.emr.EMRJobRunner` will automatically set :envvar:`$TZ` to
``America/Los_Angeles`` in your job's environment when it runs on EMR.
Options specified on the command-line take precedence over
:file:`mrjob.conf`. Usually this means simply overriding the option in
:file:`mrjob.conf`. However, we know that *cmdenv*, contains environment
variables, so we do the right thing. For example, if your :file:`mrjob.conf`
contained::
runners:
emr:
cmdenv:
PATH: /usr/local/bin
TZ: America/Los_Angeles
and you ran your job as::
mr_your_script.py -r emr --cmdenv TZ=Europe/Paris --cmdenv PATH=/usr/sbin
We'd automatically handle the :envvar:`$PATH`
variables and your job's environment would be::
{'TZ': 'Europe/Paris', 'PATH': '/usr/sbin:/usr/local/bin'}
What's going on here is that *cmdenv* is associated with
:py:func:`combine_envs`. Each option is associated with an appropriate
combiner function that that combines options in an appropriate way.
Combiners can also do useful things like expanding environment variables and
globs in paths. For example, you could set::
runners:
local:
upload_files: &upload_files
- $DATA_DIR/*.db
hadoop:
upload_files: *upload_files
emr:
upload_files: *upload_files
and every time you ran a job, every job in your ``.db`` file in ``$DATA_DIR``
would automatically be loaded into your job's current working directory.
Also, if you specified additional files to upload with :option:`--file`, those
files would be uploaded in addition to the ``.db`` files, rather than instead
of them.
See :doc:`configs-runners` for the entire dizzying array of configurable
options.
"""
from __future__ import with_statement
import glob
import logging
import os
try:
import simplejson as json # preferred because of C speedups
except ImportError:
import json # built in to Python 2.6 and later
# yaml is nice to have, but we can fall back on json if need be
try:
import yaml
except ImportError:
yaml = None
# Module-level logger.  NOTE(review): the logger name 'mrjob.emr' looks
# copy-pasted from mrjob.emr; a logger for this module would be 'mrjob.conf'.
log = logging.getLogger('mrjob.emr')
### READING AND WRITING mrjob.conf ###
def find_mrjob_conf():
    """Look for :file:`mrjob.conf`, and return its path. Places we look:
    - :file:`~/.mrjob`
    - :file:`mrjob.conf` in any directory in :envvar:`$PYTHONPATH`
    - :file:`/etc/mrjob.conf`
    Return ``None`` if we can't find it.
    """
    def candidates():
        # Search order: home directory, every $PYTHONPATH entry, then /etc.
        if 'HOME' in os.environ:
            yield os.path.join(os.environ['HOME'], '.mrjob')
        if os.environ.get('PYTHONPATH'):
            for dirname in os.environ['PYTHONPATH'].split(os.pathsep):
                yield os.path.join(dirname, 'mrjob.conf')
        yield '/etc/mrjob.conf'
    for path in candidates():
        log.debug('looking for configs in %s' % path)
        if os.path.exists(path):
            log.info('using configs in %s' % path)
            return path
    log.info("no configs found; falling back on auto-configuration")
    return None
def load_mrjob_conf(conf_path=None):
    """Load the entire data structure in :file:`mrjob.conf`, which should
    look something like this::
        {'runners':
            'local': {'OPTION': VALUE, ...}
            'emr': {'OPTION': VALUE, ...}
            'hadoop: {'OPTION': VALUE, ...}
        }
    Returns ``None`` if we can't find :file:`mrjob.conf`.
    :type conf_path: str
    :param conf_path: an alternate place to look for mrjob.conf. If this is ``False``, we'll always return ``None``.
    """
    if conf_path is False:
        return None
    path = conf_path if conf_path is not None else find_mrjob_conf()
    if path is None:
        return None
    # Prefer YAML when available; JSON is an acceptable subset otherwise.
    with open(path) as f:
        loader = yaml.safe_load if yaml else json.load
        return loader(f)
def load_opts_from_mrjob_conf(runner_alias, conf_path=None):
    """Load the options to initialize a runner from mrjob.conf, or return
    ``{}`` if we can't find them.
    :type conf_path: str
    :param conf_path: an alternate place to look for mrjob.conf. If this is ``False``, we'll always return ``{}``.
    """
    conf = load_mrjob_conf(conf_path=conf_path)
    if conf is None:
        return {}
    try:
        return conf['runners'][runner_alias] or {}
    except (KeyError, TypeError, ValueError):
        # Covers a missing 'runners' section, a non-dict conf, or an unknown
        # runner alias; a None/empty entry also falls back to {} via `or`.
        log.warning('no configs for runner type %r; returning {}' %
                    runner_alias)
        return {}
def dump_mrjob_conf(conf, f):
    """Write out configuration options to a file.
    Useful if you don't want to bother to figure out YAML.
    *conf* should look something like this:
        {'runners':
            'local': {'OPTION': VALUE, ...}
            'emr': {'OPTION': VALUE, ...}
            'hadoop: {'OPTION': VALUE, ...}
        }
    :param f: a file object to write to (e.g. ``open('mrjob.conf', 'w')``)
    """
    if yaml:
        yaml.safe_dump(conf, f, default_flow_style=False)
    else:
        # Fixed: this was json.dumps(conf, f, indent=2).  dumps() returns a
        # string and its second positional argument is `skipkeys`, so nothing
        # was ever written to f.  json.dump() actually writes to the file.
        json.dump(conf, f, indent=2)
    f.flush()
### COMBINING OPTIONS ###
# combiners generally consider earlier values to be defaults, and later
# options to override or add on to them.
def combine_values(*values):
    """Return the last value in *values* that is not ``None``.
    The default combiner; useful for simple values (booleans, strings, numbers).
    """
    # Walk from the end so the latest (highest-priority) value wins.
    for value in reversed(values):
        if value is not None:
            return value
    return None
def combine_lists(*seqs):
    """Concatenate the given sequences into a list. Ignore ``None`` values.
    Generally this is used for a list of commands we want to run; the
    "default" commands get run before any commands specific to your job.
    """
    # Flatten all truthy sequences in order; None and empty sequences vanish.
    return [item for seq in seqs if seq for item in seq]
def combine_dicts(*dicts):
    """Combine zero or more dictionaries. Values from dicts later in the list
    take precedence over values earlier in the list.
    If you pass in ``None`` in place of a dictionary, it will be ignored.
    """
    combined = {}
    for d in dicts:
        # update() with an empty mapping is a no-op, which also covers None.
        combined.update(d or {})
    return combined
def combine_envs(*envs):
    """Combine zero or more dictionaries containing environment variables.

    Environment variables later from dictionaries later in the list take
    priority over those earlier in the list. For variables ending with
    ``PATH``, we prepend (and add a colon) rather than overwriting.

    If you pass in ``None`` in place of a dictionary, it will be ignored.
    """
    result = {}
    for env in envs:
        if not env:
            continue
        # .items() works on both Python 2 and 3; the original used
        # .iteritems(), which doesn't exist on Python 3 dicts
        for key, value in env.items():
            if key.endswith('PATH') and result.get(key):
                # PATH-like variables accumulate: the later value wins
                # priority by being prepended, colon-separated
                result[key] = '%s:%s' % (value, result[key])
            else:
                result[key] = value
    return result
def combine_paths(*paths):
    """Returns the last value in *paths* that is not ``None``.

    Resolve ``~`` (home dir) and environment variables."""
    # pick the winning path the same way combine_values() does, then expand
    # ~ and $VARS in it (expand_path passes None through unchanged)
    return expand_path(combine_values(*paths))
def combine_path_lists(*path_seqs):
    """Concatenate the given sequences into a list. Ignore None values.

    Resolve ``~`` (home dir) and environment variables, and expand globs
    that refer to the local filesystem."""
    results = []
    for path in combine_lists(*path_seqs):
        expanded = expand_path(path)
        # if we can't expand a glob, leave as-is (maybe it refers to
        # S3 or HDFS)
        # glob matches are sorted so the resulting order is deterministic
        paths = sorted(glob.glob(expanded)) or [expanded]
        results.extend(paths)
    return results
def combine_opts(combiners, *opts_list):
    """The master combiner, used to combine dictionaries of options with
    appropriate sub-combiners.

    :param combiners: a map from option name to a combine_*() function to combine options by that name. By default, we combine options using :py:func:`combine_values`.
    :param opts_list: one or more dictionaries to combine
    """
    # gather every option name that appears in any of the dicts
    all_keys = set()
    for opts in opts_list:
        if opts:
            all_keys.update(opts)

    combined = {}
    for key in all_keys:
        # collect this option's values in the order the dicts were given
        values = [opts[key] for opts in opts_list if opts and key in opts]
        combiner = combiners.get(key) or combine_values
        combined[key] = combiner(*values)
    return combined
def expand_path(path):
    """Resolve ``~`` (home dir) and environment variables in *path*.

    If *path* is ``None``, return ``None``.
    """
    return None if path is None else os.path.expanduser(os.path.expandvars(path))
| [
"[email protected]"
] | |
d364a514127247742d43f012ec5b553a968c4bf0 | f4dd8aa4e5476ffde24e27273dd47913c7f9177a | /Dlv2_safe2/tests/parser/range.3.test.py | 5291d84363c7b37476ea8dd1fa8f8da8958c4baa | [
"Apache-2.0"
] | permissive | dave90/Dlv_safe2 | e56071ec1b07c45defda571cb721852e2391abfb | f127f413e3f35d599554e64aaa918bc1629985bc | refs/heads/master | 2020-05-30T10:44:13.473537 | 2015-07-12T12:35:22 | 2015-07-12T12:35:22 | 38,256,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | input = """
%#maxint=10.
f(a).
% intuitively, one could expect: g(4). g(3). g(2). here
% but this does not produce any g():
%g(4..2).
%h(1..3).
f(b).
intersect(X) :- g(X), h(X).
"""
output = """
%#maxint=10.
f(a).
% intuitively, one could expect: g(4). g(3). g(2). here
% but this does not produce any g():
%g(4..2).
%h(1..3).
f(b).
intersect(X) :- g(X), h(X).
"""
| [
"davide@davide-All-Series"
] | davide@davide-All-Series |
7220d2c71f026f768b003347430670f8bafceab5 | 9321d3460ffbbb6cd7917b2bac77ce8321e04737 | /contributions/Legacy/MOO/optimization/master/master_to_slave.py | d57ff0e707a482d8afa58f4b3774b4524cec5be3 | [
"MIT"
] | permissive | muehleisen/CEAforArcGIS | b820d837cd5373b95851b4e5dda609d69f054b97 | b6aeca5a9d70835381625a9162d5695714e1a02b | refs/heads/master | 2021-01-11T21:24:18.482264 | 2017-01-06T05:28:48 | 2017-01-06T05:28:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,673 | py | """Data required for Slave from Master"""
"""
This File sets all variables for the slave optimization, that have to be set by the Master
"""
#import os
#Energy_Models_path ="/Users/Tim/Desktop/ETH/Masterarbeit/Github_Files/urben/Masterarbeit/EnergySystem_Models"
#Network_Raw_Data_Path = "/Users/Tim/Desktop/ETH/Masterarbeit/Tools/Results/Network_loads"
#os.chdir(Energy_Models_path)
import contributions.Legacy.moo.globalVar as gV
import numpy as np
reload(gV)
class MasterSlaveVariables(object):
def __init__(self):
# Name the file which should be loaded:
self.configKey = ""
self.NETWORK_DATA_FILE = ""
self.nBuildingsConnected = 0
self.fNameTotalCSV = ""
#self.Network_Supply_Temp = 70 + 273.0
# Electricity_Type:
self.EL_TYPE = 'normal' # type normal or green (=green power)
# Geothermal Heat Pump,
#self.GHP_max_i = gV.GHP_Cmax_Size_th # [W] Heat power (thermal output)
self.GHP_number = 0.0 # number of probes
#self.GHP_max = self.GHP_number * self.GHP_max_i
self.GHP_SEASON_ON = 0 # Hour in Year, when to switch on GHP
self.GHP_SEASON_OFF = 8760 # Hour in Year, when to switch off GHP
# Sewage Heat Pump
self.HPSew_maxSize = 0
# Lake Heat Pump
self.HPLake_maxSize = 0
# Furnace
self.Furnace_Q_max = 0
self.Furn_Moist_type = "wet" #gV.Furn_Moist_type # set the moisture content of wood chips, either "dry" or "wet"
# GAS TURBINE VARIABLES
#self.gt_size = 1.0E6 # in Watt
self.CC_GT_SIZE = 0
self.gt_fuel = "NG"
# Boiler - Thermal output power!
# add BG / NG Story for both peak and normal boilers
self.Boiler_Q_max = 0
self.BoilerPeak_Q_max = 0
self.BoilerType = "NG" #Choose "NG" or "BG"
self.BoilerPeakType = "NG" #Choose "NG" or "BG"
self.BoilerBackupType = "NG" #Choose "NG" or "BG"
# Cooling Tower :
#self.CT_Qdesign = 0
# Storage
self.STORAGE_SIZE = 1000000.0 # in m^3 - size of hot water storage tank (up to now a random variable)
self.STORAGE_HEIGHT = 3.0 # in m - height of hot water storage tank
self.A_storage_outside = self.STORAGE_SIZE/self.STORAGE_HEIGHT + 2 * np.pi * \
(self.STORAGE_SIZE/self.STORAGE_HEIGHT / np.pi)**0.5 #neglecting ground area for heat losses
self.alpha_loss = 0.0111 # EnergyPRO: 0.3 * 0.037 ; \
# Saplamidis: 0.293542 # Wh / h= 0( .005 / (math.log10(26/25.0) ) ,
# from Vassilis-Storage Optimization Code ** ACHTUNG !! CHANGE - SCALES WITH SIZE (?!)
self.Storage_conv_loss = 0.0111 # losses due to energy conversion from and to storage
self.T_storage_initial = 10 + 273.0 # initial Storage Temperature
self.T_storage_zero = 10 + 273.0 # Reference Temperature Storage
self.Q_in_storage_zero = self.STORAGE_SIZE * 1/ 3600 * 983.21 * 4185 * (self.T_storage_zero - self.T_storage_initial)
self.dT_buffer = 5 # maintain a buffer for "uncertainties", never go below this temperature
# Storage is initially empty
self.T_ST_MAX = 90 + 273.0 # Maximum Temperature of storage allowed
self.T_ST_MIN = 10 + 273.0
# Solar
self.SOLCOL_TYPE_PVT = "PVT_35.csv" # file used as PVT type of collectors
self.SOLCOL_TYPE_SC = "SC_75.csv"
self.SOLCOL_TYPE_PV = "Pv.csv"
self.SOLAR_PART_PVT = 0.0 # [%] How much of the total area is available for PVT
self.SOLAR_PART_SC = 0.0 # How much of the total area is available for Solar Collectors
self.SOLAR_PART_PV = 0.0 # How much of the total area is available for PV (no thermal output, selling electricity)
self.nPVT_installations = 2 # number of PVT installations, required for PVT average size, which goes into KEV remuneration
self.nPV_installations = 2 # number of PVT installations, required for PVT average size, which goes into KEV remuneration
# declare, which power plants will be used : USED = 1 ; NOT USED = 0
self.Boiler_on = 0
self.BoilerPeak_on = 0
self.Furnace_on = 0
self.GHP_on = 0
self.HP_Lake_on = 0
self.HP_Sew_on = 0
self.CC_on = 0
self.WasteServersHeatRecovery = 0 # server heat
self.WasteCompressorHeatRecovery = 0
| [
"[email protected]"
] | |
c15199f76236b5e1a4aa7c00237e9015dab2015a | 9c9512d92f4693a40e80e2dc8df9a74ef34a9b02 | /archive/fibcoll_cmass_pm.py | 0bf73edf6ea47071ec8819e7d74dcb28dacd6166 | [] | no_license | changhoonhahn/FiberCollisions | 9184600bbd596f861755425c46b311b2ab342af5 | ee0bfab26cc0167982822d8bc5c5654eaccbe2ef | refs/heads/master | 2021-01-18T21:19:12.457250 | 2017-01-31T20:33:29 | 2017-01-31T20:33:29 | 34,329,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,587 | py | import numpy as np
import pylab as py
from scipy.integrate import simps
from scipy.optimize import curve_fit
from matplotlib import rc
rc('text', usetex=True)
rc('font', family='serif')
prismdir = '/global/data/scr/chh327/powercode/data/'
disp_los = np.loadtxt(prismdir+'cmass-dr11v2-N-Anderson-disp_los_pm.dat')
disp_perp = np.loadtxt(prismdir+'cmass-dr11v2-N-Anderson-disp_perp.dat')
disp_los_tail_red = np.loadtxt(prismdir+'cmass-dr11v2-N-Anderson-tail_red.dat')
disp_los_disttail_red = disp_los_tail_red[ (disp_los_tail_red < 0.7) & (disp_los_tail_red > 0.43) ]
data = np.loadtxt('/global/data/scr/chh327/powercode/data/cmass-dr11v2-N-Anderson-nzw-zlim.dat')
mpc_bin = -1000.0+0.1*np.array(range(20001))
mpc_bin_perp = 0.05*np.array(range(21))
red_bin = 0.01*np.array(range(101))
fig4 = py.figure(4)
dump = fig4.add_subplot(111)
fig1 = py.figure(1)
ax1 = fig1.add_subplot(111)
fig3 = py.figure(3)
ax12 = fig3.add_subplot(111)
hist_disp_los = dump.hist(disp_los,mpc_bin, label='Line of Sight Displacement Histogram')
hist_disp_perp = ax12.hist(disp_perp,mpc_bin_perp, log='True',label=r'Histogram of $d_{\perp}$')
disp_los_x = [ (hist_disp_los[1][i] + hist_disp_los[1][i+1])/2.0 for i in range(len(hist_disp_los[1])-1) ]
disp_perp_x = [ (hist_disp_perp[1][i] + hist_disp_perp[1][i+1])/2.0 for i in range(len(hist_disp_perp[1])-1) ]
def gauss(x,sig):
return np.max(hist_disp_los[0])*np.exp(-0.5*x**2/sig**2)
def expon(x,sig):
return np.max(hist_disp_los[0])*np.exp(-x/sig)
popt, pcov = curve_fit(expon, np.array(disp_los_x[10000:10500]), hist_disp_los[0][10000:10500])
print popt
ax1.plot(disp_los_x, hist_disp_los[0],linewidth=3, label=r'Histogram of $d_{LOS}$')
ax1.plot(np.array(disp_los_x[10000:10500]), expon(np.array(disp_los_x[10000:10500]), popt[0]), 'r', linewidth=3, label=r'Exponential distribution with $\sigma=$'+str(popt))
#ax1.set_yscale('log')
ax1.set_xlim([-50,50])
ax1.set_ylim([0,300])
ax1.set_title(r'$d_{\rm{LOS}}$ Distribution of CMASS DR11v2 North Fiber Collided Pairs')
ax1.set_xlabel('Displacement (Mpc)')
ax1.set_ylabel('Number of Galaxies')
ax1.legend(loc='best')
# Writing the normalized histogram to file:
hist_disp_los_normed = dump.hist( disp_los, mpc_bin, normed=1 )
output = np.zeros(2*len(disp_los_x)).reshape((len(disp_los_x),2))
output[:,0] = disp_los_x
output[:,1] = hist_disp_los_normed[0]
np.savetxt(prismdir+'cmass-dr11v2-N-Anderson-disp_los_hist_normed_pm.dat', output)
#for d in [20, 30, 40]:
# RMSfrac = float(len(disp_los[(disp_los<d) & (disp_los>0)]))/float(len(disp_los))*100.0
# caption = r''+str(np.int(RMSfrac))+"$\%$"
# ax1.annotate(caption, (float(d),200), xycoords='data', xytext=(float(d), 500), textcoords='data',
# arrowprops=dict(arrowstyle="fancy", facecolor='black', connectionstyle="angle3,angleA=0,angleB=-90"),
# fontsize=20, horizontalalignment='center', verticalalignment='top')
fig2 = py.figure(2,figsize=(10,10))
ax2 = fig2.add_subplot(111)
hist_disttail_red = ax2.hist( disp_los_disttail_red, red_bin, normed=1, label=r'Redshift Distribution of Galaxies with $d_{LOS} > 30$')
hist_data_red = dump.hist(data[:,2], red_bin, normed=1,label='Redshift Distribution of Galaxies for data')
hist_data_red_x = [ (hist_data_red[1][i] + hist_data_red[1][i+1])/2.0 for i in range(len(hist_data_red[1])-1) ]
ax2.plot(hist_data_red_x, hist_data_red[0],'r', linewidth=3, label='Redshift Distribution of Galaxies for CMASS dr11v2 NGC')
ax2.set_xlim([0.4, 0.8])
ax2.set_ylim([0.0, 8.0])
ax2.set_xlabel('Redshift (z)')
ax2.set_ylabel('Galaxies')
ax2.legend(loc='best')
py.show()
| [
"[email protected]"
] | |
02256d1be416fd37e092a3e263c29dcedad1ef63 | 78a15793be1ba71ea7eecee33abef4ecbe11d8f2 | /apps/users/migrations/0016_auto_20151102_1457.py | 5a3e982e5c71bdaae3706ce02e4d2db9cbd42842 | [] | no_license | teresaylin/my2009 | f5df9c62492d4c88931f6aa45af31ee88dbe3a1a | 2486750ad73df313d596497b0eb7f4c47518e6a6 | refs/heads/master | 2021-03-21T23:53:55.581074 | 2016-06-01T18:13:44 | 2016-06-01T18:13:44 | 23,392,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
('users', '0015_auto_20151102_1456'),
]
operations = [
migrations.AddField(
model_name='commentthread',
name='content_type',
field=models.ForeignKey(to='contenttypes.ContentType', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='commentthread',
name='object_id',
field=models.PositiveIntegerField(null=True),
preserve_default=True,
),
migrations.AlterUniqueTogether(
name='commentthread',
unique_together=set([('content_type', 'object_id')]),
),
]
| [
"[email protected]"
] | |
b68b7e14f09c7cdcf1d4e14991aaea2461b218bd | e986ebbf73a6dff7ccc58feb886e54afa57e49d9 | /sdk/python/pulumi_awsx/_utilities.py | 83985732e9f67955c5ab392bc27761660f9323b9 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | pulumi/pulumi-awsx | 5a5bdd77afaa674e9a5dd9f26540ddea5a1cde1c | 45136c540f29eb3dc6efa5b4f51cfe05ee75c7d8 | refs/heads/master | 2023-09-01T21:47:40.877155 | 2023-08-24T04:14:12 | 2023-08-24T04:14:12 | 132,053,036 | 186 | 107 | Apache-2.0 | 2023-09-13T07:28:54 | 2018-05-03T21:46:28 | TypeScript | UTF-8 | Python | false | false | 8,056 | py | # coding=utf-8
# *** WARNING: this file was generated by pulumi-gen-awsx. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import importlib.util
import inspect
import json
import os
import pkg_resources
import sys
import typing
import pulumi
import pulumi.runtime
from semver import VersionInfo as SemverVersion
from parver import Version as PEP440Version
def get_env(*args):
    """Return the value of the first environment variable in *args* that is
    set, or ``None`` if none of them are set."""
    for name in args:
        val = os.getenv(name)
        if val is not None:
            return val
    return None
def get_env_bool(*args):
    """Return the first set environment variable in *args* parsed as a
    boolean, or ``None`` if unset or not a recognized boolean string."""
    # renamed the local from ``str`` (it shadowed the builtin)
    value = get_env(*args)
    if value is not None:
        # NOTE: these values are taken from https://golang.org/src/strconv/atob.go?s=351:391#L1, which is what
        # Terraform uses internally when parsing boolean values.
        if value in ["1", "t", "T", "true", "TRUE", "True"]:
            return True
        if value in ["0", "f", "F", "false", "FALSE", "False"]:
            return False
    return None
def get_env_int(*args):
    """Return the first set environment variable in *args* parsed as an
    int, or ``None`` if unset or not a valid integer."""
    value = get_env(*args)
    if value is not None:
        try:
            return int(value)
        except ValueError:
            # narrowed from a bare ``except:``, which would also have
            # swallowed KeyboardInterrupt/SystemExit
            return None
    return None
def get_env_float(*args):
    """Return the first set environment variable in *args* parsed as a
    float, or ``None`` if unset or not a valid float."""
    value = get_env(*args)
    if value is not None:
        try:
            return float(value)
        except ValueError:
            # narrowed from a bare ``except:``, which would also have
            # swallowed KeyboardInterrupt/SystemExit
            return None
    return None
def _get_semver_version():
# __name__ is set to the fully-qualified name of the current module, In our case, it will be
# <some module>._utilities. <some module> is the module we want to query the version for.
root_package, *rest = __name__.split('.')
# pkg_resources uses setuptools to inspect the set of installed packages. We use it here to ask
# for the currently installed version of the root package (i.e. us) and get its version.
# Unfortunately, PEP440 and semver differ slightly in incompatible ways. The Pulumi engine expects
# to receive a valid semver string when receiving requests from the language host, so it's our
# responsibility as the library to convert our own PEP440 version into a valid semver string.
pep440_version_string = pkg_resources.require(root_package)[0].version
pep440_version = PEP440Version.parse(pep440_version_string)
(major, minor, patch) = pep440_version.release
prerelease = None
if pep440_version.pre_tag == 'a':
prerelease = f"alpha.{pep440_version.pre}"
elif pep440_version.pre_tag == 'b':
prerelease = f"beta.{pep440_version.pre}"
elif pep440_version.pre_tag == 'rc':
prerelease = f"rc.{pep440_version.pre}"
elif pep440_version.dev is not None:
prerelease = f"dev.{pep440_version.dev}"
# The only significant difference between PEP440 and semver as it pertains to us is that PEP440 has explicit support
# for dev builds, while semver encodes them as "prerelease" versions. In order to bridge between the two, we convert
# our dev build version into a prerelease tag. This matches what all of our other packages do when constructing
# their own semver string.
return SemverVersion(major=major, minor=minor, patch=patch, prerelease=prerelease)
# Determine the version once and cache the value, which measurably improves program performance.
_version = _get_semver_version()
_version_str = str(_version)
def get_version():
return _version_str
def get_resource_opts_defaults() -> pulumi.ResourceOptions:
return pulumi.ResourceOptions(
version=get_version(),
plugin_download_url=get_plugin_download_url(),
)
def get_invoke_opts_defaults() -> pulumi.InvokeOptions:
return pulumi.InvokeOptions(
version=get_version(),
plugin_download_url=get_plugin_download_url(),
)
def get_resource_args_opts(resource_args_type, resource_options_type, *args, **kwargs):
"""
Return the resource args and options given the *args and **kwargs of a resource's
__init__ method.
"""
resource_args, opts = None, None
# If the first item is the resource args type, save it and remove it from the args list.
if args and isinstance(args[0], resource_args_type):
resource_args, args = args[0], args[1:]
# Now look at the first item in the args list again.
# If the first item is the resource options class, save it.
if args and isinstance(args[0], resource_options_type):
opts = args[0]
# If resource_args is None, see if "args" is in kwargs, and, if so, if it's typed as the
# the resource args type.
if resource_args is None:
a = kwargs.get("args")
if isinstance(a, resource_args_type):
resource_args = a
# If opts is None, look it up in kwargs.
if opts is None:
opts = kwargs.get("opts")
return resource_args, opts
# Temporary: just use pulumi._utils.lazy_import once everyone upgrades.
def lazy_import(fullname):
import pulumi._utils as u
f = getattr(u, 'lazy_import', None)
if f is None:
f = _lazy_import_temp
return f(fullname)
# Copied from pulumi._utils.lazy_import, see comments there.
def _lazy_import_temp(fullname):
m = sys.modules.get(fullname, None)
if m is not None:
return m
spec = importlib.util.find_spec(fullname)
m = sys.modules.get(fullname, None)
if m is not None:
return m
loader = importlib.util.LazyLoader(spec.loader)
spec.loader = loader
module = importlib.util.module_from_spec(spec)
m = sys.modules.get(fullname, None)
if m is not None:
return m
sys.modules[fullname] = module
loader.exec_module(module)
return module
class Package(pulumi.runtime.ResourcePackage):
def __init__(self, pkg_info):
super().__init__()
self.pkg_info = pkg_info
def version(self):
return _version
def construct_provider(self, name: str, typ: str, urn: str) -> pulumi.ProviderResource:
if typ != self.pkg_info['token']:
raise Exception(f"unknown provider type {typ}")
Provider = getattr(lazy_import(self.pkg_info['fqn']), self.pkg_info['class'])
return Provider(name, pulumi.ResourceOptions(urn=urn))
class Module(pulumi.runtime.ResourceModule):
def __init__(self, mod_info):
super().__init__()
self.mod_info = mod_info
def version(self):
return _version
def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
class_name = self.mod_info['classes'].get(typ, None)
if class_name is None:
raise Exception(f"unknown resource type {typ}")
TheClass = getattr(lazy_import(self.mod_info['fqn']), class_name)
return TheClass(name, pulumi.ResourceOptions(urn=urn))
def register(resource_modules, resource_packages):
resource_modules = json.loads(resource_modules)
resource_packages = json.loads(resource_packages)
for pkg_info in resource_packages:
pulumi.runtime.register_resource_package(pkg_info['pkg'], Package(pkg_info))
for mod_info in resource_modules:
pulumi.runtime.register_resource_module(
mod_info['pkg'],
mod_info['mod'],
Module(mod_info))
_F = typing.TypeVar('_F', bound=typing.Callable[..., typing.Any])
def lift_output_func(func: typing.Any) -> typing.Callable[[_F], _F]:
"""Decorator internally used on {fn}_output lifted function versions
to implement them automatically from the un-lifted function."""
func_sig = inspect.signature(func)
def lifted_func(*args, opts=None, **kwargs):
bound_args = func_sig.bind(*args, **kwargs)
# Convert tuple to list, see pulumi/pulumi#8172
args_list = list(bound_args.args)
return pulumi.Output.from_input({
'args': args_list,
'kwargs': bound_args.kwargs
}).apply(lambda resolved_args: func(*resolved_args['args'],
opts=opts,
**resolved_args['kwargs']))
return (lambda _: lifted_func)
def get_plugin_download_url():
return None
| [
"[email protected]"
] | |
c4674a7fdc765d2349e6a916e2744d418ebef5eb | 97f2852420d6fdc98e5a4a0321c35920ff070d41 | /examples/scripts/csc/cbpdn_ams_clr.py | 9b7714d8854ec1722a6998cd4d1d3d001830f1ee | [
"BSD-3-Clause"
] | permissive | eglxiang/sporco | 93595f3afb6acda758425f7332513eeb892fa51f | e4a716b32b675d6e23ba0bfc3b2d7c6f9bc5d7a3 | refs/heads/master | 2021-05-04T23:03:57.430340 | 2018-01-19T19:03:01 | 2018-01-19T19:03:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,460 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
"""
CSC with a Spatial Mask
=======================
This example demonstrates the use of :class:`.cbpdn.AddMaskSim` for convolutional sparse coding with a spatial mask :cite:`wohlberg-2016-boundary`. The example problem is inpainting of randomly distributed corruption of a colour image :cite:`wohlberg-2016-convolutional`.
"""
from __future__ import print_function
from builtins import input
from builtins import range
import numpy as np
from sporco.admm import tvl2
from sporco.admm import cbpdn
from sporco import util
from sporco import metric
from sporco import plot
"""
Load a reference image.
"""
img = util.ExampleImages().image('monarch.png', zoom=0.5, scaled=True,
idxexp=np.s_[:, 160:672])
"""
Create random mask and apply to reference image to obtain test image. (The call to ``numpy.random.seed`` ensures that the pseudo-random noise is reproducible.)
"""
t = 0.5
np.random.seed(12345)
msk = np.random.randn(*(img.shape))
msk[np.abs(msk) > t] = 1;
msk[np.abs(msk) < t] = 0;
imgw = msk * img
"""
Define pad and crop functions.
"""
pn = 8
spad = lambda x: np.pad(x, ((pn, pn), (pn, pn), (0, 0)), mode='symmetric')
zpad = lambda x: np.pad(x, ((pn, pn), (pn, pn), (0, 0)), mode='constant')
crop = lambda x: x[pn:-pn, pn:-pn]
"""
Construct padded mask and test image.
"""
mskp = zpad(msk)
imgwp = spad(imgw)
"""
:math:`\ell_2`-TV denoising with a spatial mask as a non-linear lowpass
filter.
"""
lmbda = 0.05
opt = tvl2.TVL2Denoise.Options({'Verbose': False, 'MaxMainIter': 200,
'DFidWeight': mskp, 'gEvalY': False,
'AutoRho': {'Enabled': True}})
b = tvl2.TVL2Denoise(imgwp, lmbda, opt, caxis=2)
sl = b.solve()
sh = imgwp - sl
"""
Load dictionary.
"""
D = util.convdicts()['RGB:8x8x3x64']
"""
Set up :class:`.admm.cbpdn.ConvBPDN` options.
"""
lmbda = 2e-2
opt = cbpdn.ConvBPDN.Options({'Verbose': True, 'MaxMainIter': 250,
'HighMemSolve': True, 'RelStopTol': 1e-3,
'AuxVarObj': False, 'RelaxParam': 1.8,
'rho': 5e1*lmbda + 1e-1, 'AutoRho': {'Enabled': False,
'StdResiduals': True}})
"""
Construct :class:`.admm.cbpdn.AddMaskSim` wrapper for :class`.admm.cbpdn.ConvBPDN` and solve via wrapper. This example could also have made use of :class`.admm.cbpdn.ConvBPDNMaskDcpl`, which has very similar performance in this application, but :class:`.admm.cbpdn.AddMaskSim` has the advantage of greater flexibility in that the wrapper can be applied to a variety of CSC solver objects.
"""
ams = cbpdn.AddMaskSim(cbpdn.ConvBPDN, D, sh, mskp, lmbda, opt=opt)
X = ams.solve()
"""
Reconstruct from representation.
"""
imgr = crop(sl + ams.reconstruct().squeeze())
"""
Display solve time and reconstruction performance.
"""
print("AddMaskSim wrapped ConvBPDN solve time: %.2fs" %
ams.timer.elapsed('solve'))
print("Corrupted image PSNR: %5.2f dB" % metric.psnr(img, imgw))
print("Recovered image PSNR: %5.2f dB" % metric.psnr(img, imgr))
"""
Display reference, test, and reconstructed image
"""
fig = plot.figure(figsize=(21, 7))
plot.subplot(1, 3, 1)
plot.imview(img, fgrf=fig, title='Reference image')
plot.subplot(1, 3, 2)
plot.imview(imgw, fgrf=fig, title='Corrupted image')
plot.subplot(1, 3, 3)
plot.imview(imgr, fgrf=fig, title='Reconstructed image')
fig.show()
"""
Display lowpass component and sparse representation
"""
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(sl, fgrf=fig, cmap=plot.cm.Blues, title='Lowpass component')
plot.subplot(1, 2, 2)
plot.imview(np.squeeze(np.sum(abs(X), axis=ams.cri.axisM)), fgrf=fig,
cmap=plot.cm.Blues, title='Sparse representation')
fig.show()
"""
Plot functional value, residuals, and rho
"""
its = ams.getitstat()
fig = plot.figure(figsize=(21, 7))
plot.subplot(1, 3, 1)
plot.plot(its.ObjFun, fgrf=fig, xlbl='Iterations', ylbl='Functional')
plot.subplot(1, 3, 2)
plot.plot(np.vstack((its.PrimalRsdl, its.DualRsdl)).T, fgrf=fig,
ptyp='semilogy', xlbl='Iterations', ylbl='Residual',
lgnd=['Primal', 'Dual'])
plot.subplot(1, 3, 3)
plot.plot(its.Rho, fgrf=fig, xlbl='Iterations', ylbl='Penalty Parameter')
fig.show()
# Wait for enter on keyboard
input()
| [
"[email protected]"
] | |
951f433acec27ae7970c718810065a209f4c17b4 | c2602ec4c504914c0831ab061b6cee779be344a2 | /python/aead/kms_envelope_aead.py | ba9b8f25dc2ed14a763e30b393d4095f7add5406 | [
"Apache-2.0"
] | permissive | shigakio/tink | 85f8da6033139154528bf00bdadd8f3f800f6223 | a61aaeeb5da223357b5ec3513231bc8e493a4ac1 | refs/heads/master | 2021-02-06T14:41:38.074226 | 2020-02-20T06:18:02 | 2020-02-20T06:18:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,463 | py | # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for envelope encryption with KMS."""
from __future__ import absolute_import
from __future__ import division
# Placeholder for import for type annotations
from __future__ import print_function
import struct
from tink.proto import tink_pb2
from tink.python import core
from tink.python.aead import aead
from tink.python.core import tink_error
# Defines in how many bytes the DEK length will be encoded.
DEK_LEN_BYTES = 4
class KmsEnvelopeAead(aead.Aead):
"""Implements envelope encryption.
Envelope encryption generates a data encryption key (DEK) which is used
to encrypt the payload. The DEK is then send to a KMS to be encrypted and
the encrypted DEK is attached to the ciphertext. In order to decrypt the
ciphertext, the DEK first has to be decrypted by the KMS, and then the DEK
can be used to decrypt the ciphertext. For further information see
https://cloud.google.com/kms/docs/envelope-encryption.
The ciphertext structure is as follows:
* Length of the encrypted DEK: 4 bytes (big endian)
* Encrypted DEK: variable length, specified by the previous 4 bytes
* AEAD payload: variable length
"""
def __init__(self, key_template: tink_pb2.KeyTemplate, remote: aead.Aead):
self.key_template = key_template
self.remote_aead = remote
def encrypt(self, plaintext: bytes, associated_data: bytes) -> bytes:
# Get new key from template
dek = core.Registry.new_key_data(self.key_template)
dek_aead = core.Registry.primitive(dek, aead.Aead)
# Encrypt plaintext
ciphertext = dek_aead.encrypt(plaintext, associated_data)
# Wrap DEK key values with remote
encrypted_dek = self.remote_aead.encrypt(dek.value, b'')
# Construct ciphertext, DEK length encoded as big endian
enc_dek_len = struct.pack('>I', len(encrypted_dek))
return enc_dek_len + encrypted_dek + ciphertext
def decrypt(self, ciphertext: bytes, associated_data: bytes) -> bytes:
ct_len = len(ciphertext)
# Recover DEK length
if ct_len < DEK_LEN_BYTES:
raise tink_error.TinkError
dek_len = struct.unpack('>I', ciphertext[0:DEK_LEN_BYTES])[0]
# Basic check if DEK length can be valid.
if dek_len > (ct_len - DEK_LEN_BYTES) or dek_len < 0:
raise tink_error.TinkError
# Decrypt DEK with remote AEAD
encrypted_dek_bytes = ciphertext[DEK_LEN_BYTES:DEK_LEN_BYTES + dek_len]
dek_bytes = self.remote_aead.decrypt(encrypted_dek_bytes, b'')
# Get AEAD primitive based on DEK
dek = tink_pb2.KeyData()
dek.type_url = self.key_template.type_url
dek.value = dek_bytes
dek.key_material_type = tink_pb2.KeyData.KeyMaterialType.SYMMETRIC
dek_aead = core.Registry.primitive(dek, aead.Aead)
# Extract ciphertext payload and decrypt
ct_bytes = ciphertext[DEK_LEN_BYTES + dek_len:]
return dek_aead.decrypt(ct_bytes, associated_data)
| [
"[email protected]"
] | |
def PiWallis(elementos):
    """Accumulate *elementos* factors of the Wallis product (an
    approximation of pi/2) and return twice the running product."""
    num, den = 2, 1
    produto = 1
    for passo in range(elementos):
        produto *= num / den
        # alternate which side of the fraction grows by 2 each step
        if passo % 2 == 0:
            den += 2
        else:
            num += 2
    return produto * 2
"[email protected]"
] | |
def sliding_window(nums, k):
    """Return the maximum of every contiguous window of length *k* in
    *nums* (the list is also printed, matching the original behavior)."""
    maxes = [max(nums[start:start + k]) for start in range(len(nums) - (k - 1))]
    print(maxes)
    return maxes
nums = [1, 3, -1, -3, 5, 3, 6, 7, 7, 8, 1, 34, -9]
k = 3
rv = sliding_window(nums, k)
print(rv)
| [
"[email protected]"
] | |
28092363189055508902a6a5c0f2d91bdbd1ce62 | c065ff2a6a377aea2303b7b8482558049958a7ec | /shoe/1562059380/tactile.tac | cd120fe362dfa9c3097804be7f1c3def6e34d680 | [] | no_license | waedbara/vision2tactile | 7bc9861eecb4247fd254ea58dc508ed18a03b1af | edbc9dfee61b4a4b1f0caebb2f16faef090dff32 | refs/heads/master | 2022-04-02T20:43:16.621687 | 2019-12-11T08:07:39 | 2019-12-11T08:07:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | tac | ,3632,3750,3712,3742,3519,3722,3759,3345,3575,3584,3489,3674,3255,3344,3162,3038,3148,2864,2829,2696,3728,3718,3659,3515,1999,2024,1896,2125,2652,3265,3474,3326,3423,3297,3090,3153,3077,2165,2254,3127,3435,3151,3294,3310,3224,3297,3235,2820,2675,3464,3467,3457,3360,2046,2061,2965,2037,2463 | [
"[email protected]"
] | |
3dd7edb585e13d632ba412a0b12b8b9348c2948a | 578db86c51d44ebddd0dc7b1738985b3dc69eb74 | /corehq/apps/hqadmin/migrations/0015_rename_sqlhqdeploy.py | 0b35e450b0a336e0722dad15f4ee9786d7198ab2 | [
"BSD-3-Clause"
] | permissive | dimagi/commcare-hq | a43c7dd32b5f89c89fd5aa1b1359ab7301f4ff6b | e7391ddae1af1dbf118211ecb52c83fc508aa656 | refs/heads/master | 2023-08-16T22:38:27.853437 | 2023-08-16T19:07:19 | 2023-08-16T19:07:19 | 247,278 | 499 | 203 | BSD-3-Clause | 2023-09-14T19:03:24 | 2009-07-09T17:00:07 | Python | UTF-8 | Python | false | false | 512 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-31 20:24
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('hqadmin', '0014_remove_sqlhqdeploy_couch_id'),
]
operations = [
migrations.RenameModel(
old_name='SQLHqDeploy',
new_name='HqDeploy',
),
migrations.AlterModelTable(
name='hqdeploy',
table=None,
),
]
| [
"[email protected]"
] | |
41003197a8029e2bdd2fb2389695572510a70bda | 76e9afdf16eabcc9e1a3facd308e56362112efc4 | /plot/lossplot3.py | 22ff012e1ffa7ab7c93816de25e1713f6ecdc95c | [] | no_license | rerejii/pwb_work_2021 | c65c5e787ad98b7d847cb63ebadc24a02f001e90 | 8ecfb2a98d9d396ed505ecc939e384cf6400412d | refs/heads/main | 2023-03-30T10:43:18.115386 | 2021-03-24T05:38:41 | 2021-03-24T05:38:41 | 350,954,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 755 | py | import os
import glob
import csv
import sys
import numpy as np
import pandas as pd
from natsort import natsorted
import matplotlib.pyplot as plt
# import matplotlib
markset = ['A', 'B', 'C', 'D', 'E', 'F']
for mark in markset:
csv_path = 'Z:/hayakawa/binary/20210227/unet_use-bias_beta/unet_use-bias_beta-'+mark+'/CsvDatas/train_loss.csv'
# csv_path = 'Z:/hayakawa/binary/20210227/unet_use-bias_beta_loss/unet_use-bias_beta_loss-A/CsvDatas/train_loss.csv'
df = pd.read_csv(csv_path)
# print(df)
df.plot(y=['loss'])
# print(df['loss'].values)
# print(df.index.values)
# x = df.index.values
# y = df['loss'].values
# plt.plot(x, y)
plt.ylim([0.00, 0.05])
plt.savefig('train_loss-'+mark+'.png')
plt.show() | [
"[email protected]"
] | |
4255372facaf9a7101262b53db5d91bb11fa70e9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/103/usersdata/222/50669/submittedfiles/av1_3.py | a078720d7ccf91267555d632f67ab1c0ab99e466 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | # -*- coding: utf-8 -*-
import math
a=int(input('a:'))
b=int(input('b:'))
r=1
resto=1
cont=0
while r>0:
r=a%b
a=b
b=r
cont=cont+1
resto=resto+1
print(resto)
print(cont) | [
"[email protected]"
] | |
700c496a84a90d5c782ada2ec88467c3c5ab4266 | a913309bda87feee7f0637cb73901b4bcdca44bd | /0x0C-python-input_output/2-read_lines.py | a37333dfcb5deb5dd181b3116277abe6647cebde | [] | no_license | KamalTaleb/holbertonschool-python | e77b4f88a7ae60db158c0defa6c3f7737ad96562 | 29ffbccf1d02c7cf76a5df04d9386105dc149a81 | refs/heads/master | 2023-01-20T20:24:32.388729 | 2020-11-26T12:22:57 | 2020-11-26T12:22:57 | 292,331,337 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | #!/usr/bin/python3
"""read lines"""
def read_lines(filename="", nb_lines=0):
"""
read_lines
"""
line_number = 0
with open(filename, 'r', encoding='utf-8') as f:
for line in f:
line_number += 1
if line_number <= nb_lines and nb_lines > 0:
print(line, end="")
elif nb_lines <= 0:
print(line, end="")
else:
break
| [
"[email protected]"
] | |
0e365224111f952db09c0c48889ec831a0dc1b5c | d7b4e2e391e1f15fd7cb4fbf4d9aee598131b007 | /models/BiLSTM1d.py | 38efbe949140c842932c9d98b0a6c2aa0f9616d9 | [
"MIT"
] | permissive | wuyou33/DL-based-Intelligent-Diagnosis-Benchmark | eba2ce6f948b5abe68069e749f64501a32e1d7ca | e534f925cf454d07352f7ef82d75a8d6dac5355c | refs/heads/master | 2021-01-02T15:06:29.041349 | 2019-12-28T21:47:21 | 2019-12-28T21:47:21 | 239,673,952 | 1 | 0 | MIT | 2020-02-11T04:15:21 | 2020-02-11T04:15:20 | null | UTF-8 | Python | false | false | 1,692 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class BiLSTM(nn.Module):
def __init__(self, in_channel=1, out_channel=10):
super(BiLSTM, self).__init__()
self.hidden_dim = 64
self.kernel_num = 16
self.num_layers = 2
self.V = 25
self.embed1 = nn.Sequential(
nn.Conv1d(in_channel, self.kernel_num, kernel_size=3, padding=1),
nn.BatchNorm1d(self.kernel_num),
nn.ReLU(inplace=True),
nn.MaxPool1d(kernel_size=2, stride=2))
self.embed2 = nn.Sequential(
nn.Conv1d(self.kernel_num, self.kernel_num*2, kernel_size=3, padding=1),
nn.BatchNorm1d(self.kernel_num*2),
nn.ReLU(inplace=True),
nn.AdaptiveMaxPool1d(self.V))
self.hidden2label1 = nn.Sequential(nn.Linear(self.V * 2 * self.hidden_dim, self.hidden_dim * 4), nn.ReLU(), nn.Dropout())
self.hidden2label2 = nn.Linear(self.hidden_dim * 4, out_channel)
self.bilstm = nn.LSTM(self.kernel_num*2, self.hidden_dim,
num_layers=self.num_layers, bidirectional=True,
batch_first=True, bias=False)
def forward(self, x):
x = self.embed1(x)
x = self.embed2(x)
x = x.view(-1, self.kernel_num*2, self.V)
x = torch.transpose(x, 1, 2)
bilstm_out, _ = self.bilstm(x)
bilstm_out = torch.tanh(bilstm_out)
bilstm_out = bilstm_out.view(bilstm_out.size(0), -1)
logit = self.hidden2label1(bilstm_out)
logit = self.hidden2label2(logit)
return logit
| [
"[email protected]"
] | |
e54a254830aa0ee6382ff75d8b3544e326d9c316 | 3b7d82cc23bb9a760e897e881a2cbfb1d4cb954f | /labcoat/attributes.py | 720be9a8f39e048d6a4c02aa7c26741a4478da53 | [] | no_license | gulopine/labcoat | af972a17e7f165d49e9333c7d1a779d6f616d608 | 4a6bfe05f97ad5f63d6c4d097553ae659bad2312 | refs/heads/master | 2021-01-17T06:25:21.033563 | 2011-03-13T15:30:34 | 2011-03-13T15:30:34 | 1,448,589 | 10 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,523 | py | class AttributeTester(object):
def __init__(self, specimen):
self.__dict__['specimen'] = specimen
self.__dict__['instance'] = specimen.instance
self.__dict__['results'] = specimen.results
def __getattr__(self, name):
self.__dict__['name'] = name
result = self.test(self.__dict__['instance'], name)
self.__dict__['results'].append((result, self.test.output, self.__dict__))
class S(AttributeTester):
def __getattr__(self, name):
self.__dict__['name'] = name
return AttributeComparison(self, name)
def __setattr__(self, name, value):
self.__dict__['name'] = name
setattr(self.instance, name, value)
class AttributeComparison:
def __init__(self, specimen, name):
self.instance = specimen.instance
self.results = specimen.results
self.name = name
def test(self, func, other, display):
value = getattr(self.instance, self.name)
success = func(value, other)
data = dict(self.__dict__, value=other)
return (success, display, data)
def __le__(self, other):
self.results.append(self.test(lambda a, b: a <= b, other, '%s is at most %r'))
def __lt__(self, other):
self.results.append(self.test(lambda a, b: a < b, other, '%s is less than %r'))
def __eq__(self, other):
self.results.append(self.test(lambda a, b: a == b, other, '%s is equal to %r'))
def __ne__(self, other):
self.results.append(self.test(lambda a, b: a != b, other, '%s is different from %r'))
def __gt__(self, other):
self.results.append(self.test(lambda a, b: a > b, other, '%s is greater than %r'))
def __ge__(self, other):
self.results.append(self.test(lambda a, b: a >= b, other, '%s is at least %r'))
class Has(AttributeTester):
def test(self, instance, name):
# Passing requires that the attribute exist and evaluate to True
return hasattr(instance, name) and bool(getattr(instance, name))
test.output = 'has %(name)s'
def __call__(self, num):
return HasNum(self.specimen, num)
class HasNum(AttributeTester):
def __init__(self, specimen, num, **kwargs):
super(HasNum, self).__init__(specimen, **kwargs)
self.__dict__['num'] = num
def test(self, instance, name):
# Passing requires that the attribute exist and evaluate to True
return hasattr(instance, name) and len(getattr(instance, name)) == self.__dict__['num']
test.output = 'has %(num)s %(name)s'
@property
def or_less(self):
return HasNumOrLess(self.specimen, self.num)
@property
def or_more(self):
return HasNumOrMore(self.specimen, self.num)
class HasNumOrMore(HasNum):
def test(self, instance, name):
return hasattr(instance, name) and len(getattr(instance, name)) >= self.num
test.output = 'has %(num)s or more %(name)s'
class HasNumOrLess(HasNum):
def test(self, instance, name):
return hasattr(instance, name) and len(getattr(instance, name)) <= self.num
test.output = 'has %(num)s or less %(name)s'
class Lacks(AttributeTester):
def test(self, instance, name):
# Passing requires that the attribute evaluate to False or not exist
return not (hasattr(instance, name) and bool(getattr(instance, name)))
test.output = 'lacks %(name)s'
def __call__(self, num):
return LacksNum(self.specimen, num)
class LacksNum(Lacks):
def __init__(self, specimen, num, **kwargs):
super(LacksNum, self).__init__(specimen, **kwargs)
self.__dict__['num'] = num
def test(self, instance, name):
return not hasattr(instance, name) or len(getattr(instance, name)) != self.num
test.output = 'lacks %(num)s %(name)s'
@property
def or_less(self):
return LacksNumOrLess(self.specimen, self.num)
@property
def or_more(self):
return LacksNumOrMore(self.specimen, self.num)
class LacksNumOrMore(LacksNum):
def test(self, instance, name):
return hasattr(instance, name) and len(getattr(instance, name)) < self.num
test.output = 'lacks %(num)s or more %(name)s'
class LacksNumOrLess(LacksNum):
def test(self, instance, name):
return hasattr(instance, name) and len(getattr(instance, name)) > self.num
test.output = 'lacks %(num)s or less %(name)s'
| [
"[email protected]"
] | |
1885e133317728b591d8e215221e805903af13f0 | 867b5d6efc6761e97412613c19d41c4fbe927238 | /demos/callLineEdit.py | b58032c82c1b8cca29274ff8e02ab12d8525c398 | [] | no_license | LouisLu78/pyqt5 | 516bdcd35a6678b1add300a4a14854ef61165a08 | 10e0ab9b186c88131180dba19ded483431c6966f | refs/heads/master | 2020-09-26T13:55:54.350566 | 2020-04-23T14:33:49 | 2020-04-23T14:33:49 | 226,268,644 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | # -*- coding: utf-8 -*-
# author: Guangqiang Lu time:2019/12/2
#The codes below are copied from textbook.
import sys
from PyQt5.QtWidgets import QDialog, QApplication
from demos.demoLineEdit import *
class MyForm(QDialog):
def __init__(self):
super().__init__()
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.ui.ButtonClickMe.clicked.connect(self.dispmessage)
self.show()
def dispmessage(self):
self.ui.labelResponse.setText("Hello "
+self.ui.lineEditName.text())
if __name__=="__main__":
app = QApplication(sys.argv)
w = MyForm()
w.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
1be1c7204f14485d21ef5d7127501591e9648bad | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/cisco/aci/plugins/modules/aci_epg_to_contract.py | 45be2dd127a031e62cb74fd7e45089bff716e049 | [
"MIT",
"GPL-3.0-only"
] | permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 9,989 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_epg_to_contract
short_description: Bind EPGs to Contracts (fv:RsCons, fv:RsProv)
description:
- Bind EPGs to Contracts on Cisco ACI fabrics.
notes:
- The C(tenant), C(app_profile), C(EPG), and C(Contract) used must exist before using this module in your playbook.
The M(cisco.aci.aci_tenant), M(cisco.aci.aci_ap), M(cisco.aci.aci_epg), and M(cisco.aci.aci_contract) modules can be used for this.
options:
ap:
description:
- Name of an existing application network profile, that will contain the EPGs.
type: str
aliases: [ app_profile, app_profile_name ]
contract:
description:
- The name of the contract.
type: str
aliases: [ contract_name ]
contract_type:
description:
- Determines if the EPG should Provide or Consume the Contract.
type: str
required: yes
choices: [ consumer, provider ]
epg:
description:
- The name of the end point group.
type: str
aliases: [ epg_name ]
priority:
description:
- QoS class.
- The APIC defaults to C(unspecified) when unset during creation.
type: str
choices: [ level1, level2, level3, unspecified ]
provider_match:
description:
- The matching algorithm for Provided Contracts.
- The APIC defaults to C(at_least_one) when unset during creation.
type: str
choices: [ all, at_least_one, at_most_one, none ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
tenant:
description:
- Name of an existing tenant.
type: str
aliases: [ tenant_name ]
extends_documentation_fragment:
- cisco.aci.aci
seealso:
- module: cisco.aci.aci_ap
- module: cisco.aci.aci_epg
- module: cisco.aci.aci_contract
- name: APIC Management Information Model reference
description: More information about the internal APIC classes B(fv:RsCons) and B(fv:RsProv).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Jacob McGill (@jmcgill298)
'''
EXAMPLES = r'''
- name: Add a new contract to EPG binding
cisco.aci.aci_epg_to_contract:
host: apic
username: admin
password: SomeSecretPassword
tenant: anstest
ap: anstest
epg: anstest
contract: anstest_http
contract_type: provider
state: present
delegate_to: localhost
- name: Remove an existing contract to EPG binding
cisco.aci.aci_epg_to_contract:
host: apic
username: admin
password: SomeSecretPassword
tenant: anstest
ap: anstest
epg: anstest
contract: anstest_http
contract_type: provider
state: absent
delegate_to: localhost
- name: Query a specific contract to EPG binding
cisco.aci.aci_epg_to_contract:
host: apic
username: admin
password: SomeSecretPassword
tenant: anstest
ap: anstest
epg: anstest
contract: anstest_http
contract_type: provider
state: query
delegate_to: localhost
register: query_result
- name: Query all provider contract to EPG bindings
cisco.aci.aci_epg_to_contract:
host: apic
username: admin
password: SomeSecretPassword
contract_type: provider
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.aci.plugins.module_utils.aci import ACIModule, aci_argument_spec
ACI_CLASS_MAPPING = dict(
consumer={
'class': 'fvRsCons',
'rn': 'rscons-',
},
provider={
'class': 'fvRsProv',
'rn': 'rsprov-',
},
)
PROVIDER_MATCH_MAPPING = dict(
all='All',
at_least_one='AtleastOne',
at_most_one='AtmostOne',
none='None',
)
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
contract_type=dict(type='str', required=True, choices=['consumer', 'provider']),
ap=dict(type='str', aliases=['app_profile', 'app_profile_name']), # Not required for querying all objects
epg=dict(type='str', aliases=['epg_name']), # Not required for querying all objects
contract=dict(type='str', aliases=['contract_name']), # Not required for querying all objects
priority=dict(type='str', choices=['level1', 'level2', 'level3', 'unspecified']),
provider_match=dict(type='str', choices=['all', 'at_least_one', 'at_most_one', 'none']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
tenant=dict(type='str', aliases=['tenant_name']), # Not required for querying all objects
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['ap', 'contract', 'epg', 'tenant']],
['state', 'present', ['ap', 'contract', 'epg', 'tenant']],
],
)
ap = module.params.get('ap')
contract = module.params.get('contract')
contract_type = module.params.get('contract_type')
epg = module.params.get('epg')
priority = module.params.get('priority')
provider_match = module.params.get('provider_match')
if provider_match is not None:
provider_match = PROVIDER_MATCH_MAPPING[provider_match]
state = module.params.get('state')
tenant = module.params.get('tenant')
aci_class = ACI_CLASS_MAPPING[contract_type]["class"]
aci_rn = ACI_CLASS_MAPPING[contract_type]["rn"]
if contract_type == "consumer" and provider_match is not None:
module.fail_json(msg="the 'provider_match' is only configurable for Provided Contracts")
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='fvTenant',
aci_rn='tn-{0}'.format(tenant),
module_object=tenant,
target_filter={'name': tenant},
),
subclass_1=dict(
aci_class='fvAp',
aci_rn='ap-{0}'.format(ap),
module_object=ap,
target_filter={'name': ap},
),
subclass_2=dict(
aci_class='fvAEPg',
aci_rn='epg-{0}'.format(epg),
module_object=epg,
target_filter={'name': epg},
),
subclass_3=dict(
aci_class=aci_class,
aci_rn='{0}{1}'.format(aci_rn, contract),
module_object=contract,
target_filter={'tnVzBrCPName': contract},
),
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class=aci_class,
class_config=dict(
matchT=provider_match,
prio=priority,
tnVzBrCPName=contract,
),
)
aci.get_diff(aci_class=aci_class)
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
8e6086f8659c6e50223d4ade9b087dd525b2aabc | 6294e1613c812612d4463da83cfc24d8c213d3f6 | /arjuna/interact/gui/dispatcher/driver/driver_commands.py | 449247a61e551959f2f3511cbf38eef4b541f5b8 | [
"Apache-2.0"
] | permissive | prabhudatta22/arjuna | 52747bc2d9600f1cd04457b29c919221464a7b88 | 37a9afe3f8f2b2e82da854c3e497a67a77c6749f | refs/heads/master | 2022-12-06T04:10:21.346896 | 2020-08-31T06:03:13 | 2020-08-31T06:03:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,897 | py | # This file is a part of Arjuna
# Copyright 2015-2020 Rahul Verma
# Website: www.RahulVerma.net
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from selenium.webdriver.common.action_chains import ActionChains
class DriverCommands:
@classmethod
def go_to_url(cls, driver, url):
driver.get(url)
@classmethod
def refresh_browser(cls, driver):
driver.refresh()
@classmethod
def go_back_in_browser(cls, driver):
driver.back()
@classmethod
def go_forward_in_browser(cls, driver):
driver.forward()
@classmethod
def quit(cls, driver):
driver.quit()
@classmethod
def get_page_title(cls, driver):
return driver.title
@classmethod
def get_url(cls, driver):
return driver
@classmethod
def get_source(cls, driver):
return driver.page_source
@classmethod
def send_keys(cls, driver, key_str):
print(key_str)
ActionChains(driver).send_keys(key_str).perform()
@classmethod
def is_web_alert_present(cls, driver):
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
try:
WebDriverWait(driver, 1).until(EC.alert_is_present(),'Timed out.')
return True
except Exception as e:
return False
@classmethod
def confirm_web_alert(cls, driver):
driver.switch_to.alert.accept()
@classmethod
def dismiss_web_alert(cls, driver):
driver.switch_to.alert.dismiss()
@classmethod
def send_text_to_web_alert(cls, driver, text):
driver.switch_to.alert.send_keys(text)
@classmethod
def get_text_from_web_alert(cls, driver):
return driver.switch_to.alert.text
@classmethod
def focus_on_frame(cls, driver, element):
driver.switch_to.frame(element)
@classmethod
def focus_on_dom_root(cls, driver):
return driver.switch_to.default_content()
@classmethod
def focus_on_parent_frame(cls, driver):
driver.switch_to.parent_frame()
@classmethod
def execute_javascript(cls, driver, script, *args):
from arjuna import log_debug
log_debug("Executing JavaScript {} with args {}.".format(script, args))
return driver.execute_script(script, *args)
@classmethod
def take_screenshot(cls, driver, file_path):
return driver.save_screenshot(file_path)
@classmethod
def take_screenshot_as_base64(cls, driver):
return driver.get_screenshot_as_base64()
@classmethod
def set_window_size(cls, driver, width, height):
driver.set_window_size(width, height)
@classmethod
def maximize_window(cls, driver):
driver.maximize_window()
@classmethod
def get_current_window_handle(cls, driver):
return driver.current_window_handle
@classmethod
def focus_on_window(cls, driver, window_handle):
driver.switch_to.window(window_handle)
@classmethod
def close_current_window(cls, driver):
driver.close()
@classmethod
def get_window_title(cls, driver):
return driver.title
@classmethod
def get_current_window_size(cls, driver):
return driver.get_window_size()
@classmethod
def get_all_winodw_handles(cls, driver):
return driver.window_handles
@classmethod
def replace_with_element(cls, setu_driver, value_tuple):
if value_tuple[1] == True:
return setu_driver.get_element_for_setu_id(value_tuple[0])
else:
return value_tuple[0]
@classmethod
def perform_action_chain(cls, setu_driver, driver, action_chain):
chain = ActionChains(driver)
for action in action_chain:
kwargs = {k:cls.replace_with_element(setu_driver, v) for k,v in action[1].items()}
getattr(chain, action[0])(**kwargs)
chain.perform()
@classmethod
def hover_on_element(cls, driver, webelement):
chain = ActionChains(driver).move_to_element(webelement).perform()
@classmethod
def mouse_click_on_element(cls, driver, webelement):
chain = ActionChains(driver).click(webelement).perform()
@classmethod
def scroll_to_element(cls, driver, webelement):
cls.execute_javascript(driver, "arguments[0].scrollIntoView(true);", webelement)
| [
"[email protected]"
] | |
9b48c9385f3523743bd3f869ee21796c098b6f19 | 728871b962f2a5ec8d8ec7d5b607def074fb8864 | /W261/HW13-Questions/PageRank.py | e2a57722c38f075390050316f7fe32acfb01088f | [] | no_license | leiyang-mids/MIDS | 0191ffbaf9f7f6ec0e77522241c3e76d012850f1 | 918b0d8afc395840626eb31c451ad6c4b2f3bc39 | refs/heads/master | 2020-05-25T15:46:56.480467 | 2019-03-28T16:16:17 | 2019-03-28T16:16:17 | 35,463,263 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,302 | py | from time import time
from datetime import datetime
def initialize(line):
# parse line
nid, adj = line.strip().split('\t', 1)
exec 'adj = %s' %adj
# initialize node struct
node = {'a':adj.keys(), 'p':0}
rankMass = 1.0/len(adj)
# emit pageRank mass and node
return [(m, rankMass) for m in node['a']] + [(nid.strip('"'), node)]
def accumulateMass(a, b):
if isinstance(a, float) and isinstance(b, float):
return a+b
if isinstance(a, float) and not isinstance(b, float):
b['p'] += a
return b
else:
a['p'] += b
return a
def getDangling(node):
global nDangling
if isinstance(node[1], float):
nDangling += 1
return (node[0], {'a':[], 'p':node[1]})
else:
return node
def redistributeMass(node):
node[1]['p'] = (p_dangling.value+node[1]['p'])*damping + alpha
return node
def distributeMass(node):
global lossMass
mass, adj = node[1]['p'], node[1]['a']
node[1]['p'] = 0
if len(adj) == 0:
lossMass += mass
return [node]
else:
rankMass = mass/len(adj)
return [(x, rankMass) for x in adj]+[node]
def getIndex(line):
elem = line.strip().split('\t')
return (elem[1], elem[0])
def logTime():
return str(datetime.now())
| [
"[email protected]"
] | |
9bc3b610843612d19d76e61bd47db7d4bfb9af9d | 08b74293c409086681eda77310f61831552478f1 | /instafilter/model.py | 8861ab8fc61abd65f2855c66af97c9cde85038f8 | [] | no_license | jiaxinwang/instafilter | d895928a3c311edf8ce14f49e716334842a51acf | cdc84b1b1055fd3d8b5ba81db69f9abeef5346e7 | refs/heads/master | 2023-07-21T19:57:38.976494 | 2021-09-06T14:17:42 | 2021-09-06T14:17:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | import torch
from torch import nn
class ColorNet(nn.Module):
def __init__(self):
super().__init__()
self.fc1 = nn.Linear(5, 25)
self.fc2 = nn.Linear(25, 25)
self.fc3 = nn.Linear(25, 25)
self.fc4 = nn.Linear(25, 5)
def forward(self, x):
x = torch.tanh(self.fc1(x))
x = torch.tanh(self.fc2(x))
x = torch.tanh(self.fc3(x))
x = torch.tanh(self.fc4(x))
return x
| [
"[email protected]"
] | |
dcf71ee1082f20b11c686ad9ed9d87c549416a0d | adbbeeae023ffd1f4932210efd9bd0e1e326e501 | /flair/datasets.py | 178937afd07955c6b1bf78cc94045bea054311fa | [
"MIT"
] | permissive | sidney1994/CLNER | 504747882587d030532e03c1d15bffae8552c84b | 7340566c3f10d8b532f357d1d3fcfad6348b02fa | refs/heads/main | 2023-06-28T20:30:09.914547 | 2021-08-03T09:40:22 | 2021-08-03T09:40:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185,662 | py | import os, csv
import json
from abc import abstractmethod
from torch.utils.data import Dataset, random_split
from typing import List, Dict, Union
import re
import logging
from pathlib import Path
import torch.utils.data.dataloader
from torch.utils.data.dataset import Subset, ConcatDataset
import flair
from flair.data import Sentence, Corpus, Token, FlairDataset
from flair.file_utils import cached_path
import pdb
from flair.image_encoder import *
log = logging.getLogger("flair")
class ColumnCorpus(Corpus):
def __init__(
self,
data_folder: Union[str, Path],
column_format: Dict[int, str],
train_file=None,
test_file=None,
dev_file=None,
tag_to_bioes=None,
comment_symbol: str = None,
in_memory: bool = True,
):
"""
Instantiates a Corpus from CoNLL column-formatted task data such as CoNLL03 or CoNLL2000.
:param data_folder: base folder with the task data
:param column_format: a map specifying the column format
:param train_file: the name of the train file
:param test_file: the name of the test file
:param dev_file: the name of the dev file, if None, dev data is sampled from train
:param tag_to_bioes: whether to convert to BIOES tagging scheme
:param comment_symbol: if set, lines that begin with this symbol are treated as comments
:param in_memory: If set to True, the dataset is kept in memory as Sentence objects, otherwise does disk reads
:return: a Corpus with annotated train, dev and test data
"""
if type(data_folder) == str:
data_folder: Path = Path(data_folder)
if train_file is not None:
train_file = data_folder / train_file
if test_file is not None:
test_file = data_folder / test_file
if dev_file is not None:
dev_file = data_folder / dev_file
# automatically identify train / test / dev files
if train_file is None:
for file in data_folder.iterdir():
file_name = file.name
if file_name.endswith(".gz") or file_name.endswith(".swp") or file_name.endswith(".pkl"):
continue
if "train" in file_name:
train_file = file
if "dev" in file_name:
dev_file = file
if "testa" in file_name:
dev_file = file
if "testb" in file_name:
test_file = file
# if no test file is found, take any file with 'test' in name
if test_file is None:
for file in data_folder.iterdir():
file_name = file.name
if file_name.endswith(".gz"):
continue
if "test" in file_name:
test_file = file
log.info("Reading data from {}".format(data_folder))
log.info("Train: {}".format(train_file))
log.info("Dev: {}".format(dev_file))
log.info("Test: {}".format(test_file))
# get train data
train = ColumnDataset(
train_file,
column_format,
tag_to_bioes,
comment_symbol=comment_symbol,
in_memory=in_memory,
)
# read in test file if exists, otherwise sample 10% of train data as test dataset
if test_file is not None:
test = ColumnDataset(
test_file,
column_format,
tag_to_bioes,
comment_symbol=comment_symbol,
in_memory=in_memory,
)
else:
train_length = len(train)
test_size: int = round(train_length / 10)
splits = random_split(train, [train_length - test_size, test_size])
train = splits[0]
test = splits[1]
# read in dev file if exists, otherwise sample 10% of train data as dev dataset
if dev_file is not None:
dev = ColumnDataset(
dev_file,
column_format,
tag_to_bioes,
comment_symbol=comment_symbol,
in_memory=in_memory,
)
else:
train_length = len(train)
dev_size: int = round(train_length / 10)
splits = random_split(train, [train_length - dev_size, dev_size])
train = splits[0]
dev = splits[1]
super(ColumnCorpus, self).__init__(train, dev, test, name=data_folder.name)
class UniversalDependenciesCorpus(Corpus):
def __init__(
self,
data_folder: Union[str, Path],
train_file=None,
test_file=None,
dev_file=None,
in_memory: bool = True,
add_root: bool = False,
spliter = '\t',
):
"""
Instantiates a Corpus from CoNLL-U column-formatted task data such as the UD corpora
:param data_folder: base folder with the task data
:param train_file: the name of the train file
:param test_file: the name of the test file
:param dev_file: the name of the dev file, if None, dev data is sampled from train
:param in_memory: If set to True, keeps full dataset in memory, otherwise does disk reads
:return: a Corpus with annotated train, dev and test data
"""
if type(data_folder) == str:
data_folder: Path = Path(data_folder)
# automatically identify train / test / dev files
if train_file is None:
for file in data_folder.iterdir():
file_name = file.name
if "train" in file_name:
train_file = file
if "test" in file_name:
test_file = file
if "dev" in file_name:
dev_file = file
if "testa" in file_name:
dev_file = file
if "testb" in file_name:
test_file = file
log.info("Reading data from {}".format(data_folder))
log.info("Train: {}".format(train_file))
log.info("Test: {}".format(test_file))
log.info("Dev: {}".format(dev_file))
# get train data
train = UniversalDependenciesDataset(train_file, in_memory=in_memory, add_root=add_root, spliter=spliter)
# get test data
test = UniversalDependenciesDataset(test_file, in_memory=in_memory, add_root=add_root, spliter=spliter)
# get dev data
dev = UniversalDependenciesDataset(dev_file, in_memory=in_memory, add_root=add_root, spliter=spliter)
super(UniversalDependenciesCorpus, self).__init__(
train, dev, test, name=data_folder.name
)
class UD(UniversalDependenciesCorpus):
    def __init__(self, treebank: str, base_path: Union[str, Path] = None, in_memory: bool = True, add_root: bool = False):
        """Corpus for a single UD treebank located under <base>/<lower-cased class name>/<treebank>."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        folder = base_path / self.__class__.__name__.lower() / treebank
        super(UD, self).__init__(folder, in_memory=in_memory, add_root=add_root)
# class SRL(UniversalDependenciesCorpus):
# def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', add_root: bool = True):
# if type(base_path) == str:
# base_path: Path = Path(base_path)
# # this dataset name
# dataset_name = self.__class__.__name__.lower()
# # default dataset folder is the cache root
# if not base_path:
# base_path = Path(flair.cache_root) / "datasets"
# data_folder = base_path / dataset_name / Path(lang)
# # if not os.path.exists(data_folder):
# # os.call("git clone https://github.com/UniversalDependencies/"+treebank+" "+data_folder+"/"+treebank)
# # # # download data if necessary
# # # web_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Tamil-TTB/master"
# # # cached_path(f"{web_path}/ta_ttb-ud-dev.conllu", Path("datasets") / dataset_name)
# # # cached_path(
# # # f"{web_path}/ta_ttb-ud-test.conllu", Path("datasets") / dataset_name
# # # )
# # # cached_path(
# # # f"{web_path}/ta_ttb-ud-train.conllu", Path("datasets") / dataset_name
# # )
# super(SRL, self).__init__(data_folder, in_memory=in_memory, add_root=add_root)
class SRL(Corpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, train_file=None, test_file=None, dev_file=None, lang='en'):
        """Semantic-role-labeling corpus read from <base>/srl/<lang> in CoNLL-U layout."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / 'srl' / Path(lang)
        # auto-detect the split files by their names when none were given
        if train_file is None:
            for candidate in data_folder.iterdir():
                name = candidate.name
                if "train" in name:
                    train_file = candidate
                if "test" in name:
                    test_file = candidate
                if "dev" in name or "testa" in name:
                    dev_file = candidate
                if "testb" in name:
                    test_file = candidate
        # evaluate on dev when no dedicated test split exists
        if test_file is None:
            test_file = dev_file
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(train_file))
        log.info("Test: {}".format(test_file))
        log.info("Dev: {}".format(dev_file))
        # every split carries the synthetic root token
        train, test, dev = [
            UniversalDependenciesDataset(split, in_memory=in_memory, add_root=True)
            for split in (train_file, test_file, dev_file)
        ]
        super(SRL, self).__init__(
            train, dev, test, name='srl-'+lang
        )
class UD_PROJ(UniversalDependenciesCorpus):
    def __init__(self, treebank: str, base_path: Union[str, Path] = None, in_memory: bool = True, add_root: bool = False):
        """Corpus for a projectivized UD treebank under <base>/<lower-cased class name>/<treebank>."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        folder = base_path / self.__class__.__name__.lower() / treebank
        super(UD_PROJ, self).__init__(folder, in_memory=in_memory, add_root=add_root)
class PTB(Corpus):
    def __init__(self, treebank: str = None, base_path: Union[str, Path] = None, in_memory: bool = True, add_root: bool = True, tag_to_bioes=None):
        """Penn Treebank dependency corpus with fixed train/dev/test CoNLL-U file names."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets" / "ptb_3.3.0_modified"
        data_folder = base_path
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(data_folder/'train_modified.conllu'))
        log.info("Test: {}".format(data_folder/'test.conllu'))
        log.info("Dev: {}".format(data_folder/'dev.conllu'))
        # every split carries the synthetic root token
        splits = {
            split: UniversalDependenciesDataset(data_folder / fname, in_memory=in_memory, add_root=True)
            for split, fname in (("train", 'train_modified.conllu'), ("test", 'test.conllu'), ("dev", 'dev.conllu'))
        }
        super(PTB, self).__init__(
            splits["train"], splits["dev"], splits["test"], name=treebank
        )
class WSJ_POS(Corpus):
    def __init__(self, treebank: str = None, base_path: Union[str, Path] = None, in_memory: bool = True, add_root: bool = True, tag_to_bioes=None):
        """WSJ part-of-speech corpus with fixed train/dev/test CoNLL-U file names."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets" / "wsj_pos"
        data_folder = base_path
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(data_folder/'train.conllu'))
        log.info("Test: {}".format(data_folder/'test.conllu'))
        log.info("Dev: {}".format(data_folder/'dev.conllu'))
        # every split carries the synthetic root token
        splits = {
            split: UniversalDependenciesDataset(data_folder / fname, in_memory=in_memory, add_root=True)
            for split, fname in (("train", 'train.conllu'), ("test", 'test.conllu'), ("dev", 'dev.conllu'))
        }
        super(WSJ_POS, self).__init__(
            splits["train"], splits["dev"], splits["test"], name=treebank
        )
class CTB(Corpus):
    def __init__(self, treebank: str = None, base_path: Union[str, Path] = None, in_memory: bool = True, add_root: bool = True, tag_to_bioes=None):
        """Chinese Treebank 5.1 dependency corpus with fixed split file names."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets" / "CTB5_YM"
        data_folder = base_path
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(data_folder/'CTB5.1-train.gp_modified.conll'))
        log.info("Test: {}".format(data_folder/'CTB5.1-test.gp_modified.conll'))
        log.info("Dev: {}".format(data_folder/'CTB5.1-devel.gp_modified.conll'))
        # every split carries the synthetic root token
        splits = {
            split: UniversalDependenciesDataset(data_folder / fname, in_memory=in_memory, add_root=True)
            for split, fname in (
                ("train", 'CTB5.1-train.gp_modified.conll'),
                ("test", 'CTB5.1-test.gp_modified.conll'),
                ("dev", 'CTB5.1-devel.gp_modified.conll'),
            )
        }
        super(CTB, self).__init__(
            splits["train"], splits["dev"], splits["test"], name=treebank
        )
class ENHANCEDUD(Corpus):
    def __init__(self, treebank: str, base_path: Union[str, Path] = None, in_memory: bool = True, train_file=None, test_file=None, dev_file=None, eud_path = 'enhanced_ud'):
        """Enhanced-UD corpus; a 'UNREL' marker in the treebank name selects the *_unrel data folder."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        if 'UNREL' in treebank:
            # e.g. 'en-UNREL' -> folder '<eud_path>_unrel', treebank 'en'
            eud_path = eud_path+'_unrel'
            treebank = treebank.split('-')[0]
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / eud_path / treebank
        # auto-detect the split files by their names when none were given
        if train_file is None:
            for candidate in data_folder.iterdir():
                name = candidate.name
                if "train" in name:
                    train_file = candidate
                if "test" in name:
                    test_file = candidate
                if "dev" in name or "testa" in name:
                    dev_file = candidate
                if "testb" in name:
                    test_file = candidate
        # evaluate on dev when no dedicated test split exists
        if test_file is None:
            test_file = dev_file
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(train_file))
        log.info("Test: {}".format(test_file))
        log.info("Dev: {}".format(dev_file))
        # every split carries the synthetic root token
        train, test, dev = [
            UniversalDependenciesDataset(split, in_memory=in_memory, add_root=True)
            for split in (train_file, test_file, dev_file)
        ]
        super(ENHANCEDUD, self).__init__(
            train, dev, test, name=treebank
        )
class UNREL_ENHANCEDUD(Corpus):
    def __init__(self, treebank: str, base_path: Union[str, Path] = None, in_memory: bool = True, train_file=None, test_file=None, dev_file=None, eud_path = 'unrel_enhanced_ud'):
        """
        Enhanced-UD corpus variant without relation labels.
        :param treebank: treebank (language) folder name under the eud_path directory
        :param base_path: dataset root; defaults to the flair cache datasets folder
        :param in_memory: if True, keeps the full dataset in memory, otherwise reads from disk
        :param train_file: explicit train split file; auto-detected when None
        :param test_file: explicit test split file; falls back to dev when missing
        :param dev_file: explicit dev split file
        :param eud_path: subfolder holding the unrel enhanced-UD data
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        # BUG FIX: the original referenced the undefined bare name
        # `unrel_enhanced_ud` here (NameError at runtime); the `eud_path`
        # parameter, whose default is 'unrel_enhanced_ud', was intended.
        data_folder = base_path / eud_path / treebank
        # auto-detect the split files by their names when none were given
        if train_file is None:
            for file in data_folder.iterdir():
                file_name = file.name
                if "train" in file_name:
                    train_file = file
                if "test" in file_name:
                    test_file = file
                if "dev" in file_name:
                    dev_file = file
                if "testa" in file_name:
                    dev_file = file
                if "testb" in file_name:
                    test_file = file
        # evaluate on dev when no dedicated test split exists
        if test_file is None:
            test_file = dev_file
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(train_file))
        log.info("Test: {}".format(test_file))
        log.info("Dev: {}".format(dev_file))
        # get train data
        train = UniversalDependenciesDataset(train_file, in_memory=in_memory, add_root=True)
        # get test data
        test = UniversalDependenciesDataset(test_file, in_memory=in_memory, add_root=True)
        # get dev data
        dev = UniversalDependenciesDataset(dev_file, in_memory=in_memory, add_root=True)
        super(UNREL_ENHANCEDUD, self).__init__(
            train, dev, test, name=treebank
        )
class SDP(Corpus):
    def __init__(self, treebank: str, base_path: Union[str, Path] = None, in_memory: bool = True, train_file=None, test_file=None, dev_file=None,):
        """Semantic-dependency-parsing corpus read from <base>/sdp/<treebank>."""
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default dataset root is the flair cache
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / 'sdp' / treebank
        # auto-detect the split files by their names when none were given
        if train_file is None:
            for candidate in data_folder.iterdir():
                name = candidate.name
                if "train" in name:
                    train_file = candidate
                if "test" in name:
                    test_file = candidate
                if "dev" in name or "testa" in name:
                    dev_file = candidate
                if "testb" in name:
                    test_file = candidate
        # evaluate on dev when no dedicated test split exists
        if test_file is None:
            test_file = dev_file
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(train_file))
        log.info("Test: {}".format(test_file))
        log.info("Dev: {}".format(dev_file))
        # every split carries the synthetic root token
        train, test, dev = [
            UniversalDependenciesDataset(split, in_memory=in_memory, add_root=True)
            for split in (train_file, test_file, dev_file)
        ]
        super(SDP, self).__init__(
            train, dev, test, name=treebank
        )
class ClassificationCorpus(Corpus):
    def __init__(
        self,
        data_folder: Union[str, Path],
        train_file=None,
        test_file=None,
        dev_file=None,
        use_tokenizer: bool = True,
        max_tokens_per_doc: int = -1,
        max_chars_per_doc: int = -1,
        in_memory: bool = False,
    ):
        """
        Builds a Corpus from text-classification-formatted task data.
        :param data_folder: base folder with the task data
        :param train_file: name of the train file; if None, split files are auto-detected
        :param test_file: name of the test file
        :param dev_file: name of the dev file; if None, 10% of train is held out as dev
        :param use_tokenizer: if True, tokenizes the dataset, otherwise splits on whitespace
        :param max_tokens_per_doc: if > 0, truncates each Sentence to this many Tokens
        :param max_chars_per_doc: if > 0, truncates each Sentence to this many chars
        :param in_memory: if True, keeps Sentences in memory, otherwise only raw strings
        """
        if type(data_folder) == str:
            data_folder: Path = Path(data_folder)
        # resolve explicitly given file names relative to the data folder
        train_file, test_file, dev_file = [
            data_folder / f if f is not None else None
            for f in (train_file, test_file, dev_file)
        ]
        # auto-detect the split files by their names when none were given
        if train_file is None:
            for candidate in data_folder.iterdir():
                name = candidate.name
                if "train" in name:
                    train_file = candidate
                if "test" in name:
                    test_file = candidate
                if "dev" in name or "testa" in name:
                    dev_file = candidate
                if "testb" in name:
                    test_file = candidate
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(train_file))
        log.info("Dev: {}".format(dev_file))
        log.info("Test: {}".format(test_file))

        def make_dataset(split_file) -> Dataset:
            # one reader configuration shared by all splits
            return ClassificationDataset(
                split_file,
                use_tokenizer=use_tokenizer,
                max_tokens_per_doc=max_tokens_per_doc,
                max_chars_per_doc=max_chars_per_doc,
                in_memory=in_memory,
            )

        train = make_dataset(train_file)
        test = make_dataset(test_file)
        if dev_file is not None:
            dev = make_dataset(dev_file)
        else:
            # hold out 10% of train as the dev split
            total = len(train)
            held_out: int = round(total / 10)
            train, dev = random_split(train, [total - held_out, held_out])
        super(ClassificationCorpus, self).__init__(
            train, dev, test, name=data_folder.name
        )
class CSVClassificationCorpus(Corpus):
    def __init__(
        self,
        data_folder: Union[str, Path],
        column_name_map: Dict[int, str],
        train_file=None,
        test_file=None,
        dev_file=None,
        use_tokenizer: bool = True,
        max_tokens_per_doc=-1,
        max_chars_per_doc=-1,
        in_memory: bool = False,
        skip_header: bool = False,
        **fmtparams,
    ):
        """
        Builds a text-classification Corpus from CSV column-formatted data.
        :param data_folder: base folder with the task data
        :param column_name_map: maps column index to 'text' or a 'label...' name
        :param train_file: name of the train file; if None, split files are auto-detected
        :param test_file: name of the test file; if None, 10% of train is held out as test
        :param dev_file: name of the dev file; if None, 10% of train is held out as dev
        :param use_tokenizer: if True, tokenizes the dataset, otherwise splits on whitespace
        :param max_tokens_per_doc: if > 0, truncates each Sentence to this many Tokens
        :param max_chars_per_doc: if > 0, truncates each Sentence to this many chars
        :param in_memory: if True, keeps Sentences in memory, otherwise only raw rows
        :param skip_header: if True, skips the first CSV row (header)
        :param fmtparams: additional parameters for the CSV file reader
        """
        if type(data_folder) == str:
            data_folder: Path = Path(data_folder)
        # resolve explicitly given file names relative to the data folder
        train_file, test_file, dev_file = [
            data_folder / f if f is not None else None
            for f in (train_file, test_file, dev_file)
        ]
        # auto-detect the split files by their names when none were given
        if train_file is None:
            for candidate in data_folder.iterdir():
                name = candidate.name
                if "train" in name:
                    train_file = candidate
                if "test" in name:
                    test_file = candidate
                if "dev" in name or "testa" in name:
                    dev_file = candidate
                if "testb" in name:
                    test_file = candidate
        log.info("Reading data from {}".format(data_folder))
        log.info("Train: {}".format(train_file))
        log.info("Dev: {}".format(dev_file))
        log.info("Test: {}".format(test_file))

        def make_dataset(split_file) -> Dataset:
            # one reader configuration shared by all splits
            return CSVClassificationDataset(
                split_file,
                column_name_map,
                use_tokenizer=use_tokenizer,
                max_tokens_per_doc=max_tokens_per_doc,
                max_chars_per_doc=max_chars_per_doc,
                in_memory=in_memory,
                skip_header=skip_header,
                **fmtparams,
            )

        train = make_dataset(train_file)
        if test_file is not None:
            test = make_dataset(test_file)
        else:
            # hold out 10% of train as the test split
            total = len(train)
            held_out: int = round(total / 10)
            train, test = random_split(train, [total - held_out, held_out])
        if dev_file is not None:
            dev = make_dataset(dev_file)
        else:
            # hold out 10% of the (remaining) train as the dev split
            total = len(train)
            held_out: int = round(total / 10)
            train, dev = random_split(train, [total - held_out, held_out])
        super(CSVClassificationCorpus, self).__init__(
            train, dev, test, name=data_folder.name
        )
class SentenceDataset(FlairDataset):
    """
    A simple Dataset object to wrap a List of Sentence
    """

    def __init__(self, sentences: Union[Sentence, List[Sentence]]):
        """
        Instantiate SentenceDataset
        :param sentences: Sentence or List of Sentence that make up SentenceDataset
        """
        # cast to list if necessary
        if type(sentences) == Sentence:
            sentences = [sentences]
        self.sentences = sentences

    def is_in_memory(self) -> bool:
        # The wrapped sentences always live in memory.
        # BUG FIX: the original decorated this concrete override with
        # @abstractmethod, which marks the class abstract (uninstantiable
        # under ABCMeta) and is misleading for a fully implemented method.
        return True

    def __len__(self):
        return len(self.sentences)

    def __getitem__(self, index: int = 0) -> Sentence:
        return self.sentences[index]
class ColumnDataset(FlairDataset):
    def __init__(
        self,
        path_to_column_file: Path,
        column_name_map: Dict[int, str],
        tag_to_bioes: str = None,
        comment_symbol: str = None,
        in_memory: bool = True,
    ):
        """
        Instantiates a column dataset (typically used for sequence labeling or word-level prediction).
        :param path_to_column_file: path to the file with the column-formatted data
        :param column_name_map: a map specifying the column format
        :param tag_to_bioes: whether to convert to BIOES tagging scheme
        :param comment_symbol: if set, lines that begin with this symbol are treated as comments
        :param in_memory: If set to True, the dataset is kept in memory as Sentence objects, otherwise does disk reads
        """
        assert path_to_column_file.exists()
        self.path_to_column_file = path_to_column_file
        self.tag_to_bioes = tag_to_bioes
        self.column_name_map = column_name_map
        self.comment_symbol = comment_symbol
        # store either Sentence objects in memory, or only file offsets
        self.in_memory = in_memory
        if self.in_memory:
            self.sentences: List[Sentence] = []
        else:
            # byte offsets of sentence starts, used by __getitem__ for lazy reads
            self.indices: List[int] = []
        self.total_sentence_count: int = 0
        # most data sets have the token text in the first column, if not, pass 'text' as column
        self.text_column: int = 0
        for column in self.column_name_map:
            if column_name_map[column] == "text":
                self.text_column = column
        # determine encoding of text file: probe the first 10 characters as
        # UTF-8 and fall back to latin-1 if decoding fails
        encoding = "utf-8"
        try:
            lines: List[str] = open(str(path_to_column_file), encoding="utf-8").read(
                10
            ).strip().split("\n")
        except:
            log.info(
                'UTF-8 can\'t read: {} ... using "latin-1" instead.'.format(
                    path_to_column_file
                )
            )
            encoding = "latin1"
        sentence: Sentence = Sentence()
        with open(str(self.path_to_column_file), encoding=encoding) as f:
            line = f.readline()
            position = 0
            while line:
                # skip comment lines entirely
                if self.comment_symbol is not None and line.startswith(comment_symbol):
                    line = f.readline()
                    continue
                if line.isspace():
                    # a blank line terminates the current sentence
                    if len(sentence) > 0:
                        sentence.infer_space_after()
                        if self.in_memory:
                            if self.tag_to_bioes is not None:
                                sentence.convert_tag_scheme(
                                    tag_type=self.tag_to_bioes, target_scheme="iobes"
                                )
                            self.sentences.append(sentence)
                        else:
                            # remember where this sentence started; the next one
                            # starts at the current file position
                            self.indices.append(position)
                            position = f.tell()
                        self.total_sentence_count += 1
                    sentence: Sentence = Sentence()
                else:
                    # one token per line; columns separated by whitespace
                    fields: List[str] = re.split("\s+", line)
                    token = Token(fields[self.text_column])
                    for column in column_name_map:
                        if len(fields) > column:
                            if column != self.text_column:
                                token.add_tag(
                                    self.column_name_map[column], fields[column]
                                )
                    sentence.add_token(token)
                line = f.readline()
        # flush a trailing sentence that is not followed by a blank line
        if len(sentence.tokens) > 0:
            sentence.infer_space_after()
            if self.in_memory:
                if self.tag_to_bioes is not None:
                    sentence.convert_tag_scheme(
                        tag_type=self.tag_to_bioes, target_scheme="iobes"
                    )
                self.sentences.append(sentence)
            else:
                self.indices.append(position)
            self.total_sentence_count += 1

    @property
    def reset_sentence_count(self):
        # recompute the count from the in-memory sentence list
        # NOTE(review): only valid when in_memory=True (self.sentences exists)
        self.total_sentence_count = len(self.sentences)
        return

    def is_in_memory(self) -> bool:
        return self.in_memory

    def __len__(self):
        return self.total_sentence_count

    def __getitem__(self, index: int = 0) -> Sentence:
        if self.in_memory:
            sentence = self.sentences[index]
        else:
            # lazy mode: seek to the stored byte offset and re-parse one sentence
            # NOTE(review): this path hard-codes utf-8 and startswith("#"),
            # unlike __init__'s encoding probe / comment_symbol — confirm
            with open(str(self.path_to_column_file), encoding="utf-8") as file:
                file.seek(self.indices[index])
                line = file.readline()
                sentence: Sentence = Sentence()
                while line:
                    if self.comment_symbol is not None and line.startswith("#"):
                        line = file.readline()
                        continue
                    # NOTE(review): replace("", "") is a no-op; upstream strips a
                    # BOM ("\ufeff") here — the character may have been lost. Confirm.
                    if line.strip().replace("", "") == "":
                        if len(sentence) > 0:
                            sentence.infer_space_after()
                            if self.tag_to_bioes is not None:
                                sentence.convert_tag_scheme(
                                    tag_type=self.tag_to_bioes, target_scheme="iobes"
                                )
                            break
                    else:
                        fields: List[str] = re.split("\s+", line)
                        token = Token(fields[self.text_column])
                        for column in self.column_name_map:
                            if len(fields) > column:
                                if column != self.text_column:
                                    token.add_tag(
                                        self.column_name_map[column], fields[column]
                                    )
                        sentence.add_token(token)
                    line = file.readline()
        return sentence
class UniversalDependenciesDataset(FlairDataset):
    def __init__(self, path_to_conll_file: Path, in_memory: bool = True, add_root=False, root_tag='<ROOT>', spliter='\t'):
        """
        Instantiates a column dataset in CoNLL-U format.
        :param path_to_conll_file: Path to the CoNLL-U formatted file
        :param in_memory: If set to True, keeps full dataset in memory, otherwise does disk reads
        :param add_root: if True, a synthetic root token is prepended to every sentence
        :param root_tag: surface form used for the synthetic root token
        :param spliter: column separator (used as a regex atom; runs of it are merged)
        """
        assert path_to_conll_file.exists()
        self.in_memory = in_memory
        self.path_to_conll_file = path_to_conll_file
        self.total_sentence_count: int = 0
        if self.in_memory:
            self.sentences: List[Sentence] = []
        else:
            # byte offsets of sentence starts, used by __getitem__ for lazy reads
            self.indices: List[int] = []

        def _root_token() -> Token:
            # synthetic root: attaches to position 0 with the 'root' relation
            token = Token(root_tag, head_id=int(0))
            token.add_tag("lemma", str('_'))
            token.add_tag("upos", str('_'))
            token.add_tag("pos", str('_'))
            token.add_tag("dependency", str('root'))
            token.add_tag("enhancedud", str('0:root'))
            token.add_tag("srl", str('0:root'))
            token.lemma = token.tags['lemma']._value
            token.upos = token.tags['upos']._value
            token.pos = token.tags['pos']._value
            return token

        with open(str(self.path_to_conll_file), encoding="utf-8") as file:
            line = file.readline()
            position = 0
            sentence: Sentence = Sentence()
            if add_root:
                sentence.add_token(_root_token())
            while line:
                line = line.strip()
                fields: List[str] = re.split(spliter + "+", line)
                if line == "":
                    # a blank line terminates the sentence; sentences consisting of
                    # nothing but the synthetic root are discarded.
                    # BUG FIX: the original called pdb.set_trace() here for
                    # root-only sentences — a leftover debug breakpoint that
                    # hangs any non-interactive run.
                    if len(sentence) > 0 and not (len(sentence) == 1 and sentence[0].text == root_tag):
                        self.total_sentence_count += 1
                        if self.in_memory:
                            self.sentences.append(sentence)
                        else:
                            self.indices.append(position)
                            position = file.tell()
                    sentence: Sentence = Sentence()
                    if add_root:
                        sentence.add_token(_root_token())
                elif line.startswith("#"):
                    # comment / metadata line
                    line = file.readline()
                    continue
                elif "." in fields[0] and (len(fields) == 10 or len(fields) == 3):
                    # skip CoNLL-U empty nodes (decimal ids such as 8.1)
                    line = file.readline()
                    continue
                elif "-" in fields[0] and (len(fields) == 10 or len(fields) == 3):
                    # skip multi-word token ranges (ids such as 3-4)
                    line = file.readline()
                    continue
                elif len(fields) == 2:
                    # reading the raw text (no annotation columns)
                    token = Token(fields[0])
                    sentence.add_token(token)
                elif len(fields) == 3:
                    # id + form only
                    token = Token(fields[1])
                    sentence.add_token(token)
                else:
                    # full 10-column CoNLL-U token line
                    token = Token(fields[1], head_id=int(fields[6]))
                    token.add_tag("lemma", str(fields[2]))
                    token.add_tag("upos", str(fields[3]))
                    token.add_tag("pos", str(fields[4]))
                    token.add_tag("dependency", str(fields[7]))
                    token.add_tag("enhancedud", str(fields[8]))
                    # NOTE(review): 'srl' is read from column 8, the same column
                    # as 'enhancedud' — confirm this is intentional
                    token.add_tag("srl", str(fields[8]))
                    for morph in str(fields[5]).split("|"):
                        if not "=" in morph:
                            continue
                        token.add_tag(morph.split("=")[0].lower(), morph.split("=")[1])
                    if len(fields) > 10 and str(fields[10]) == "Y":
                        token.add_tag("frame", str(fields[11]))
                    token.lemma = token.tags['lemma']._value
                    token.upos = token.tags['upos']._value
                    token.pos = token.tags['pos']._value
                    sentence.add_token(token)
                line = file.readline()
        # flush a trailing sentence that is not followed by a blank line
        if len(sentence.tokens) > 0 and not (len(sentence) == 1 and sentence[0].text == root_tag):
            self.total_sentence_count += 1
            if self.in_memory:
                self.sentences.append(sentence)
            else:
                self.indices.append(position)

    def is_in_memory(self) -> bool:
        return self.in_memory

    @property
    def reset_sentence_count(self):
        # recompute the count from the in-memory sentence list
        # NOTE(review): only valid when in_memory=True (self.sentences exists)
        self.total_sentence_count = len(self.sentences)
        return

    def __len__(self):
        return self.total_sentence_count

    def __getitem__(self, index: int = 0) -> Sentence:
        if self.in_memory:
            sentence = self.sentences[index]
        else:
            # lazy mode: seek to the stored byte offset and re-parse one sentence
            # NOTE(review): this path always splits on tabs and never adds the
            # synthetic root, unlike __init__ — confirm add_root is only used
            # together with in_memory=True
            with open(str(self.path_to_conll_file), encoding="utf-8") as file:
                file.seek(self.indices[index])
                line = file.readline()
                sentence: Sentence = Sentence()
                while line:
                    line = line.strip()
                    fields: List[str] = re.split("\t+", line)
                    if line == "":
                        if len(sentence) > 0:
                            break
                    elif line.startswith("#"):
                        line = file.readline()
                        continue
                    elif "." in fields[0]:
                        line = file.readline()
                        continue
                    elif "-" in fields[0]:
                        line = file.readline()
                        continue
                    else:
                        token = Token(fields[1], head_id=int(fields[6]))
                        token.add_tag("lemma", str(fields[2]))
                        token.add_tag("upos", str(fields[3]))
                        token.add_tag("pos", str(fields[4]))
                        token.add_tag("dependency", str(fields[7]))
                        for morph in str(fields[5]).split("|"):
                            if not "=" in morph:
                                continue
                            token.add_tag(
                                morph.split("=")[0].lower(), morph.split("=")[1]
                            )
                        if len(fields) > 10 and str(fields[10]) == "Y":
                            token.add_tag("frame", str(fields[11]))
                        token.lemma = token.tags['lemma']._value
                        token.upos = token.tags['upos']._value
                        token.pos = token.tags['pos']._value
                        sentence.add_token(token)
                    line = file.readline()
        return sentence
class CSVClassificationDataset(FlairDataset):
def __init__(
self,
path_to_file: Union[str, Path],
column_name_map: Dict[int, str],
max_tokens_per_doc: int = -1,
max_chars_per_doc: int = -1,
use_tokenizer=True,
in_memory: bool = True,
skip_header: bool = False,
**fmtparams,
):
"""
Instantiates a Dataset for text classification from CSV column formatted data
:param path_to_file: path to the file with the CSV data
:param column_name_map: a column name map that indicates which column is text and which the label(s)
:param max_tokens_per_doc: If set, truncates each Sentence to a maximum number of Tokens
:param max_chars_per_doc: If set, truncates each Sentence to a maximum number of chars
:param use_tokenizer: If True, tokenizes the dataset, otherwise uses whitespace tokenization
:param in_memory: If True, keeps dataset as Sentences in memory, otherwise only keeps strings
:param skip_header: If True, skips first line because it is header
:param fmtparams: additional parameters for the CSV file reader
:return: a Corpus with annotated train, dev and test data
"""
if type(path_to_file) == str:
path_to_file: Path = Path(path_to_file)
assert path_to_file.exists()
# variables
self.path_to_file = path_to_file
self.in_memory = in_memory
self.use_tokenizer = use_tokenizer
self.column_name_map = column_name_map
self.max_tokens_per_doc = max_tokens_per_doc
self.max_chars_per_doc = max_chars_per_doc
# different handling of in_memory data than streaming data
if self.in_memory:
self.sentences = []
else:
self.raw_data = []
self.total_sentence_count: int = 0
# most data sets have the token text in the first column, if not, pass 'text' as column
self.text_columns: List[int] = []
for column in column_name_map:
if column_name_map[column] == "text":
self.text_columns.append(column)
with open(self.path_to_file) as csv_file:
csv_reader = csv.reader(csv_file, **fmtparams)
if skip_header:
next(csv_reader, None) # skip the headers
for row in csv_reader:
# test if format is OK
wrong_format = False
for text_column in self.text_columns:
if text_column >= len(row):
wrong_format = True
if wrong_format:
continue
# test if at least one label given
has_label = False
for column in self.column_name_map:
if self.column_name_map[column].startswith("label") and row[column]:
has_label = True
break
if not has_label:
continue
if self.in_memory:
text = " ".join(
[row[text_column] for text_column in self.text_columns]
)
if self.max_chars_per_doc > 0:
text = text[: self.max_chars_per_doc]
sentence = Sentence(text, use_tokenizer=self.use_tokenizer)
for column in self.column_name_map:
if (
self.column_name_map[column].startswith("label")
and row[column]
):
sentence.add_label(row[column])
if (
len(sentence) > self.max_tokens_per_doc
and self.max_tokens_per_doc > 0
):
sentence.tokens = sentence.tokens[: self.max_tokens_per_doc]
self.sentences.append(sentence)
else:
self.raw_data.append(row)
self.total_sentence_count += 1
def is_in_memory(self) -> bool:
    """Report whether the dataset holds fully parsed sentences in memory."""
    return self.in_memory
def __len__(self):
    """Return the number of usable sentences discovered during construction."""
    return self.total_sentence_count
def __getitem__(self, index: int = 0) -> Sentence:
    """Return the Sentence at *index*; in streaming mode it is rebuilt from the raw CSV row."""
    if self.in_memory:
        return self.sentences[index]

    # Streaming mode: re-assemble the text from the stored raw row on demand.
    raw_row = self.raw_data[index]
    document = " ".join(raw_row[col] for col in self.text_columns)
    if self.max_chars_per_doc > 0:
        document = document[: self.max_chars_per_doc]

    sentence = Sentence(document, use_tokenizer=self.use_tokenizer)

    # Attach every non-empty value from a column mapped to a 'label*' role.
    for col, role in self.column_name_map.items():
        if role.startswith("label") and raw_row[col]:
            sentence.add_label(raw_row[col])

    # Truncate to the configured token budget, if one is set.
    if 0 < self.max_tokens_per_doc < len(sentence):
        sentence.tokens = sentence.tokens[: self.max_tokens_per_doc]
    return sentence
class ClassificationDataset(FlairDataset):
    # FastText-style text-classification dataset: one document per line,
    # prefixed by one or more '__label__<class>' markers.

    def __init__(
        self,
        path_to_file: Union[str, Path],
        max_tokens_per_doc=-1,
        max_chars_per_doc=-1,
        use_tokenizer=True,
        in_memory: bool = True,
    ):
        """
        Reads a data file for text classification. The file should contain one document/text per line.
        The line should have the following format:
        __label__<class_name> <text>
        If you have a multi class task, you can have as many labels as you want at the beginning of the line, e.g.,
        __label__<class_name_1> __label__<class_name_2> <text>
        :param path_to_file: the path to the data file
        :param max_tokens_per_doc: If set (> 0), truncates each Sentence to a maximum number of Tokens
        :param max_chars_per_doc: If set (> 0), truncates each Sentence to a maximum number of chars
        :param use_tokenizer: If True, tokenizes the dataset, otherwise uses whitespace tokenization
        :param in_memory: If True, keeps dataset as Sentences in memory, otherwise only keeps byte offsets
        :return: list of sentences
        """
        if type(path_to_file) == str:
            path_to_file: Path = Path(path_to_file)
        assert path_to_file.exists()
        self.label_prefix = "__label__"
        self.in_memory = in_memory
        self.use_tokenizer = use_tokenizer
        if self.in_memory:
            # fully parsed Sentence objects, built eagerly below
            self.sentences = []
        else:
            # byte offsets of valid lines, re-read lazily in __getitem__
            self.indices = []
        self.total_sentence_count: int = 0
        self.max_chars_per_doc = max_chars_per_doc
        self.max_tokens_per_doc = max_tokens_per_doc
        self.path_to_file = path_to_file
        with open(str(path_to_file), encoding="utf-8") as f:
            line = f.readline()
            # 'position' tracks the byte offset at which the *current* line starts
            position = 0
            while line:
                # skip lines without a label marker or without any text after it
                # NOTE(review): tests the literal "__label__" rather than self.label_prefix
                if "__label__" not in line or " " not in line:
                    position = f.tell()
                    line = f.readline()
                    continue
                if self.in_memory:
                    sentence = self._parse_line_to_sentence(
                        line, self.label_prefix, use_tokenizer
                    )
                    # only keep sentences that parsed to at least one token
                    if sentence is not None and len(sentence.tokens) > 0:
                        self.sentences.append(sentence)
                        self.total_sentence_count += 1
                else:
                    # store only the offset; the line is parsed on access
                    self.indices.append(position)
                    self.total_sentence_count += 1
                # record where the next line begins before reading it
                position = f.tell()
                line = f.readline()

    def _parse_line_to_sentence(
        self, line: str, label_prefix: str, use_tokenizer: bool = True
    ):
        """Parse one '__label__<c> ... <text>' line into a labeled Sentence, or None if empty."""
        words = line.split()
        labels = []
        # l_len counts the characters consumed by the leading label section
        l_len = 0
        for i in range(len(words)):
            if words[i].startswith(label_prefix):
                # +1 accounts for the whitespace separator after the label
                l_len += len(words[i]) + 1
                label = words[i].replace(label_prefix, "")
                labels.append(label)
            else:
                break
        # everything after the labels is the document text
        text = line[l_len:].strip()
        if self.max_chars_per_doc > 0:
            text = text[: self.max_chars_per_doc]
        if text and labels:
            sentence = Sentence(text, labels=labels, use_tokenizer=use_tokenizer)
            # truncate to the configured token budget, if one is set
            if (
                sentence is not None
                and len(sentence) > self.max_tokens_per_doc
                and self.max_tokens_per_doc > 0
            ):
                sentence.tokens = sentence.tokens[: self.max_tokens_per_doc]
            return sentence
        return None

    def is_in_memory(self) -> bool:
        """Report whether sentences are held fully in memory."""
        return self.in_memory

    def __len__(self):
        """Return the number of usable (labeled, non-empty) documents in the file."""
        return self.total_sentence_count

    def __getitem__(self, index: int = 0) -> Sentence:
        """Return the Sentence at *index*; in streaming mode, re-read and parse it from disk."""
        if self.in_memory:
            return self.sentences[index]
        else:
            # seek to the stored byte offset and parse the line on demand
            with open(str(self.path_to_file), encoding="utf-8") as file:
                file.seek(self.indices[index])
                line = file.readline()
                sentence = self._parse_line_to_sentence(
                    line, self.label_prefix, self.use_tokenizer
                )
                return sentence
class TWITTER(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = None,
        in_memory: bool = True,
    ):
        """
        Initialize the TWITTER POS-tagging corpus. The data must already be present in
        the dataset folder (named 'twitter') under ``base_path`` or, by default, under
        the flair cache root — there is no automatic download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param tag_to_bioes: Accepted for interface compatibility but ignored — the
            corpus is always loaded with tag_to_bioes=None (plain tags, no span scheme).
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token text and universal POS tag.
        columns = {0: "text", 1: "upos"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(TWITTER, self).__init__(
            data_folder, columns, tag_to_bioes=None, in_memory=in_memory
        )
class TWITTER_NEW(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = None,
        in_memory: bool = True,
    ):
        """
        Initialize the TWITTER_NEW POS-tagging corpus. The data must already be present
        in the dataset folder (named 'twitter_new') under ``base_path`` or, by default,
        under the flair cache root — there is no automatic download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param tag_to_bioes: Accepted for interface compatibility but ignored — the
            corpus is always loaded with tag_to_bioes=None (plain tags, no span scheme).
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token text and universal POS tag.
        columns = {0: "text", 1: "upos"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(TWITTER_NEW, self).__init__(
            data_folder, columns, tag_to_bioes=None, in_memory=in_memory
        )
class ARK(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = None,
        in_memory: bool = True,
    ):
        """
        Initialize the ARK Twitter POS-tagging corpus. The data must already be present
        in the dataset folder (named 'ark') under ``base_path`` or, by default, under
        the flair cache root — there is no automatic download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param tag_to_bioes: Accepted for interface compatibility but ignored — the
            corpus is always loaded with tag_to_bioes=None (plain tags, no span scheme).
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token text and universal POS tag.
        columns = {0: "text", 1: "upos"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(ARK, self).__init__(
            data_folder, columns, tag_to_bioes=None, in_memory=in_memory
        )
class RITTER(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = None,
        in_memory: bool = True,
    ):
        """
        Initialize the RITTER Twitter POS-tagging corpus. The data must already be
        present in the dataset folder (named 'ritter') under ``base_path`` or, by
        default, under the flair cache root — there is no automatic download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param tag_to_bioes: Accepted for interface compatibility but ignored — the
            corpus is always loaded with tag_to_bioes=None (plain tags, no span scheme).
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token text and universal POS tag.
        columns = {0: "text", 1: "upos"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(RITTER, self).__init__(
            data_folder, columns, tag_to_bioes=None, in_memory=in_memory
        )
class RITTER_NEW(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = None,
        in_memory: bool = True,
    ):
        """
        Initialize the RITTER_NEW Twitter POS-tagging corpus. The data must already be
        present in the dataset folder (named 'ritter_new') under ``base_path`` or, by
        default, under the flair cache root — there is no automatic download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param tag_to_bioes: Accepted for interface compatibility but ignored — the
            corpus is always loaded with tag_to_bioes=None (plain tags, no span scheme).
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token text and universal POS tag.
        columns = {0: "text", 1: "upos"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(RITTER_NEW, self).__init__(
            data_folder, columns, tag_to_bioes=None, in_memory=in_memory
        )
class TWEEBANK(UniversalDependenciesCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        Initialize the TWEEBANK Universal Dependencies corpus. The data must already
        be present in the dataset folder (named 'tweebank') under ``base_path`` or,
        by default, under the flair cache root — there is no automatic download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(TWEEBANK, self).__init__(data_folder, in_memory=in_memory)
class TWEEBANK_NEW(UniversalDependenciesCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        Initialize the TWEEBANK_NEW Universal Dependencies corpus. The data must
        already be present in the dataset folder (named 'tweebank_new') under
        ``base_path`` or, by default, under the flair cache root — no auto-download.

        :param base_path: Path to the folder that contains the dataset folder;
            defaults to <flair.cache_root>/datasets.
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(TWEEBANK_NEW, self).__init__(data_folder, in_memory=in_memory)
class CONLL_03(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-03 corpus. This is only possible if you've manually downloaded it to your machine.
        Obtain the corpus from https://www.clips.uantwerpen.be/conll2003/ner/ and put it into the
        'conll_03' folder under ``base_path`` (or the flair cache root).

        :param base_path: Path to the folder containing the CoNLL-03 dataset folder on your machine
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'pos' or 'np' to predict
        POS tags or chunks respectively
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, syntactic chunk, NER tag.
        columns = {0: "text", 1: "pos", 2: "chunk", 3: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_NEW(CONLL_03):
    """Variant of CONLL_03 that loads from the 'conll_03_new' dataset folder
    (the parent derives the folder name from the subclass name)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
class CONLL_03_ENGLISH(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the English CoNLL-03 corpus. This is only possible if you've manually downloaded it.
        Obtain the corpus from https://www.clips.uantwerpen.be/conll2003/ner/ and put it into the
        'conll_03_english' folder under ``base_path`` (or the flair cache root).

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'pos' or 'np' to predict
        POS tags or chunks respectively
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, syntactic chunk, NER tag.
        columns = {0: "text", 1: "pos", 2: "chunk", 3: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_ENGLISH, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_ENGLISH_DOC(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the English CoNLL-03 corpus (document-level variant; data must live in the
        'conll_03_english_doc' folder). This is only possible if you've manually downloaded it.
        Obtain the corpus from https://www.clips.uantwerpen.be/conll2003/ner/.

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'pos' or 'np' to predict
        POS tags or chunks respectively
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, syntactic chunk, NER tag.
        columns = {0: "text", 1: "pos", 2: "chunk", 3: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_ENGLISH_DOC, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_ENGLISH_CASED(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the English CoNLL-03 corpus (cased variant; data must live in the
        'conll_03_english_cased' folder). This is only possible if you've manually downloaded it.
        Obtain the corpus from https://www.clips.uantwerpen.be/conll2003/ner/.

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'pos' or 'np' to predict
        POS tags or chunks respectively
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, syntactic chunk, NER tag.
        columns = {0: "text", 1: "pos", 2: "chunk", 3: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_ENGLISH_CASED, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_ENGLISH_DOC_CASED(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the English CoNLL-03 corpus (document-level, cased variant; data must live
        in the 'conll_03_english_doc_cased' folder). This is only possible if you've manually
        downloaded it from https://www.clips.uantwerpen.be/conll2003/ner/.

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'pos' or 'np' to predict
        POS tags or chunks respectively
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, syntactic chunk, NER tag.
        columns = {0: "text", 1: "pos", 2: "chunk", 3: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_ENGLISH_DOC_CASED, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_VIETNAMESE(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "chunk",
        in_memory: bool = True,
    ):
        """
        Initialize a Vietnamese chunking corpus in CoNLL column format. The data must be
        downloaded manually and placed in the 'conll_03_vietnamese' folder under
        ``base_path`` (or the flair cache root) — there is no automatic download.

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: 'chunk' by default; the tag column to convert to a span scheme,
            passed through to ColumnCorpus.
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token and chunk tag only.
        columns = {0: "text", 1: "chunk"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_VIETNAMESE, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CHUNK_CONLL_03_VIETNAMESE(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "chunk",
        in_memory: bool = True,
    ):
        """
        Initialize a Vietnamese chunking corpus in CoNLL column format. The data must be
        downloaded manually and placed in the 'chunk_conll_03_vietnamese' folder under
        ``base_path`` (or the flair cache root) — there is no automatic download.

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: 'chunk' by default; the tag column to convert to a span scheme,
            passed through to ColumnCorpus.
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token and chunk tag only.
        columns = {0: "text", 1: "chunk"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CHUNK_CONLL_03_VIETNAMESE, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_GERMAN(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-03 corpus for German. This is only possible if you've manually downloaded it.
        Obtain the corpus from https://www.clips.uantwerpen.be/conll2003/ner/ and put it into the
        'conll_03_german' folder under ``base_path`` (or the flair cache root).

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'lemma', 'pos' or 'np' to predict
        word lemmas, POS tags or chunks respectively
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, lemma, POS tag, syntactic chunk, NER tag.
        columns = {0: "text", 1: "lemma", 2: "pos", 3: "chunk", 4: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_GERMAN, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_GERMAN_NEW(CONLL_03_GERMAN):
    """Variant of CONLL_03_GERMAN that loads from the 'conll_03_german_new' dataset
    folder (the parent derives the folder name from the subclass name)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
class CONLL_06_GERMAN(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize a German NER corpus in two-column CoNLL format. The data must be
        downloaded manually and placed in the 'conll_06_german' folder under
        ``base_path`` (or the flair cache root) — there is no automatic download.

        :param base_path: Path to the folder containing the dataset folder on your machine
        :param tag_to_bioes: NER by default; the tag column converted to a span scheme,
            passed through to ColumnCorpus.
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token and NER tag only.
        columns = {0: "text", 1: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_06_GERMAN, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_DUTCH(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-02 corpus for Dutch. The first time you call this constructor it will
        automatically download the dataset (ned.train / ned.testa / ned.testb).

        :param base_path: Default is None, meaning that corpus gets auto-downloaded and loaded. You can override this
        to point to a different folder but typically this should not be necessary.
        :param tag_to_bioes: NER by default, need not be changed, but you could also select 'pos' to predict
        POS tags instead
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, NER tag.
        columns = {0: "text", 1: "pos", 2: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # Download the dev/test/train splits if they are not already cached.
        conll_02_path = "https://www.clips.uantwerpen.be/conll2002/ner/data/"
        cached_path(f"{conll_02_path}ned.testa", Path("datasets") / dataset_name)
        cached_path(f"{conll_02_path}ned.testb", Path("datasets") / dataset_name)
        cached_path(f"{conll_02_path}ned.train", Path("datasets") / dataset_name)

        super(CONLL_03_DUTCH, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_DUTCH_NEW(CONLL_03_DUTCH):
    """Variant of CONLL_03_DUTCH that loads from the 'conll_03_dutch_new' dataset
    folder (the parent derives the folder name from the subclass name)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
class CONLL_03_SPANISH(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-02 corpus for Spanish. The first time you call this constructor it will
        automatically download the dataset (esp.train / esp.testa / esp.testb).

        :param base_path: Default is None, meaning that corpus gets auto-downloaded and loaded. You can override this
        to point to a different folder but typically this should not be necessary.
        :param tag_to_bioes: NER by default, should not be changed
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token and NER tag only.
        columns = {0: "text", 1: "ner"}
        self.columns = columns

        # The dataset folder is named after this class.
        dataset_name = self.__class__.__name__.lower()

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # Download the dev/test/train splits if they are not already cached.
        conll_02_path = "https://www.clips.uantwerpen.be/conll2002/ner/data/"
        cached_path(f"{conll_02_path}esp.testa", Path("datasets") / dataset_name)
        cached_path(f"{conll_02_path}esp.testb", Path("datasets") / dataset_name)
        cached_path(f"{conll_02_path}esp.train", Path("datasets") / dataset_name)

        super(CONLL_03_SPANISH, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_SPANISH_NEW(CONLL_03_SPANISH):
    """Variant of CONLL_03_SPANISH that loads from the 'conll_03_spanish_new' dataset
    folder (the parent derives the folder name from the subclass name)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
#------------------------------------------------------------
# Corpora below expose NER formulated as a dependency-parsing task ('ner_dp' tag column).
class CONLL_03_DP(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner_dp",
        in_memory: bool = True,
    ):
        """
        Initialize the English CoNLL-03 corpus with NER cast as dependency parsing
        ('ner_dp' tags). Reuses the data in the 'conll_03_english' folder; this is only
        possible if you've manually downloaded the corpus from
        https://www.clips.uantwerpen.be/conll2003/ner/ and put it into that folder.

        :param base_path: Path to the folder containing the 'conll_03_english' dataset folder
        :param tag_to_bioes: 'ner_dp' by default; passed through to ColumnCorpus
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, chunk, NER-as-dependency-parsing tag.
        columns = {0: "text", 1: "pos", 2: "chunk", 3: "ner_dp"}
        self.columns = columns

        # Deliberately reuse the plain English CoNLL-03 data folder
        # rather than deriving the folder name from this class.
        dataset_name = 'conll_03_english'

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_DP, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
#------------------------KD-------------------------------------------
class CONLL_03_GERMAN_DP(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner_dp",
        in_memory: bool = True,
    ):
        """
        Initialize the German CoNLL-03 corpus with NER cast as dependency parsing
        ('ner_dp' tags). Reuses the data in the 'conll_03_german_new' folder; this is
        only possible if you've manually downloaded the corpus from
        https://www.clips.uantwerpen.be/conll2003/ner/ and put it into that folder.

        :param base_path: Path to the folder containing the 'conll_03_german_new' dataset folder
        :param tag_to_bioes: 'ner_dp' by default; passed through to ColumnCorpus
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, lemma, POS tag, chunk, NER-as-dependency-parsing tag.
        columns = {0: "text", 1: "lemma", 2: "pos", 3: "chunk", 4: "ner_dp"}
        self.columns = columns

        # Deliberately reuse the CONLL_03_GERMAN_NEW data folder
        # rather than deriving the folder name from this class.
        dataset_name = 'conll_03_german_new'

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_03_GERMAN_DP, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_06_GERMAN_DP(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner_dp",
        in_memory: bool = True,
    ):
        """
        Initialize the German CoNLL-06 corpus with NER cast as dependency parsing
        ('ner_dp' tags). Reuses the data in the 'conll_06_german' folder; the data
        must already be present there — there is no automatic download.

        :param base_path: Path to the folder containing the 'conll_06_german' dataset folder
        :param tag_to_bioes: 'ner_dp' by default; passed through to ColumnCorpus
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token and NER-as-dependency-parsing tag only.
        columns = {0: "text", 1: "ner_dp"}
        self.columns = columns

        # Deliberately reuse the CONLL_06_GERMAN data folder
        # rather than deriving the folder name from this class.
        dataset_name = 'conll_06_german'

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # The corpus cannot be downloaded automatically; warn loudly if it is missing.
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: CoNLL-03 dataset not found at "{data_folder}".')
            log.warning(
                "Instructions for obtaining the data can be found here: https://www.clips.uantwerpen.be/conll2003/ner/"
            )
            log.warning("-" * 100)

        super(CONLL_06_GERMAN_DP, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_DUTCH_DP(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner_dp",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-02 corpus for Dutch with NER cast as dependency parsing
        ('ner_dp' tags). The first time you call this constructor it will automatically
        download the dataset into the 'conll_03_dutch_new' folder.

        :param base_path: Default is None, meaning that corpus gets auto-downloaded and loaded. You can override this
        to point to a different folder but typically this should not be necessary.
        :param tag_to_bioes: 'ner_dp' by default; passed through to ColumnCorpus
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        # Accept plain string paths as well as Path objects.
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # Column format: token, POS tag, NER-as-dependency-parsing tag.
        columns = {0: "text", 1: "pos", 2: "ner_dp"}
        self.columns = columns

        # Deliberately reuse the CONLL_03_DUTCH_NEW data folder
        # rather than deriving the folder name from this class.
        dataset_name = 'conll_03_dutch_new'

        # Default dataset folder is the cache root.
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # Download the dev/test/train splits if they are not already cached.
        conll_02_path = "https://www.clips.uantwerpen.be/conll2002/ner/data/"
        cached_path(f"{conll_02_path}ned.testa", Path("datasets") / dataset_name)
        cached_path(f"{conll_02_path}ned.testb", Path("datasets") / dataset_name)
        cached_path(f"{conll_02_path}ned.train", Path("datasets") / dataset_name)

        super(CONLL_03_DUTCH_DP, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_SPANISH_DP(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner_dp",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-03 corpus for Spanish (dependency-parsing-style NER tags).
        The first time you call this constructor it will automatically download the
        three CoNLL-02 Spanish splits.
        :param base_path: Default is None (auto-download into the flair cache); can be
            overridden to point to a different folder.
        :param tag_to_bioes: 'ner_dp' by default, should not be changed
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # token text plus dependency-style NER tag
        columns = {0: "text", 1: "ner_dp"}
        self.columns = columns

        dataset_name = 'conll_03_spanish_new'

        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch the three splits if they are not cached yet
        conll_02_path = "https://www.clips.uantwerpen.be/conll2002/ner/data/"
        for split in ("esp.testa", "esp.testb", "esp.train"):
            cached_path(f"{conll_02_path}{split}", Path("datasets") / dataset_name)

        super(CONLL_03_SPANISH_DP, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CONLL_03_IND(CONLL_03):
    # Behaviorally identical subclass of CONLL_03; presumably the distinct class
    # name lets experiment configs select this variant separately — TODO confirm.
    pass
class CONLL_03_GERMAN_IND(CONLL_03_GERMAN):
    # Behaviorally identical subclass of CONLL_03_GERMAN; presumably the distinct
    # class name lets experiment configs select this variant — TODO confirm.
    pass
class CONLL_03_DUTCH_IND(CONLL_03_DUTCH):
    # Behaviorally identical subclass of CONLL_03_DUTCH; presumably the distinct
    # class name lets experiment configs select this variant — TODO confirm.
    pass
class CONLL_03_SPANISH_IND(CONLL_03_SPANISH):
    # Behaviorally identical subclass of CONLL_03_SPANISH; presumably the distinct
    # class name lets experiment configs select this variant — TODO confirm.
    pass
class CONLL_03_TOY(CONLL_03):
    """Variant of CONLL_03 under a distinct name; the constructor is a pure pass-through."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        # delegate everything to the parent corpus loader
        super().__init__(base_path, tag_to_bioes=tag_to_bioes, in_memory=in_memory)
class CONLL_03_GERMAN_TOY(CONLL_03_GERMAN):
    """Variant of CONLL_03_GERMAN under a distinct name; the constructor is a pure pass-through."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        # delegate everything to the parent corpus loader
        super().__init__(base_path, tag_to_bioes=tag_to_bioes, in_memory=in_memory)
class CONLL_03_FAKE(CONLL_03):
    """Variant of CONLL_03 under a distinct name; the constructor is a pure pass-through."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        # delegate everything to the parent corpus loader
        super().__init__(base_path, tag_to_bioes=tag_to_bioes, in_memory=in_memory)
class CONLL_03_GERMAN_FAKE(CONLL_03_GERMAN):
    """Variant of CONLL_03_GERMAN under a distinct name; the constructor is a pure pass-through."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        # delegate everything to the parent corpus loader
        super().__init__(base_path, tag_to_bioes=tag_to_bioes, in_memory=in_memory)
class CONLL_03_SPANISH_FAKE(CONLL_03_SPANISH):
    """Variant of CONLL_03_SPANISH under a distinct name; the constructor is a pure pass-through."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        # delegate everything to the parent corpus loader
        super().__init__(base_path, tag_to_bioes=tag_to_bioes, in_memory=in_memory)
class CONLL_03_DUTCH_FAKE(CONLL_03_DUTCH):
    """Variant of CONLL_03_DUTCH under a distinct name; the constructor is a pure pass-through."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        # delegate everything to the parent corpus loader
        super().__init__(base_path, tag_to_bioes=tag_to_bioes, in_memory=in_memory)
class CHUNK_CONLL_03_ENGLISH(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "chunk",
        in_memory: bool = True,
    ):
        """
        Initialize an English chunking corpus in CoNLL-03 column layout. The data is
        expected to already be present under the dataset folder (this class performs
        no download).
        :param base_path: override for the dataset folder; defaults to the flair cache root
        :param tag_to_bioes: 'chunk' by default
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # token text in column 0, chunk tag in column 2 (column 1 is not read)
        columns = {0: "text", 2: "chunk"}

        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(CHUNK_CONLL_03_ENGLISH, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class PANX(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        WikiAnn (PAN-X) NER corpus for one language. Expects BIO files named
        wikiann-<lang>_{train,dev,test}.bio under <cache>/datasets/panxdataset/<lang>/.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('panxdataset') / Path(lang)

        def _split(name: str) -> Path:
            # one BIO file per split
            return data_folder / f'wikiann-{lang}_{name}.bio'

        super(PANX, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=_split('train'),
            test_file=_split('test'),
            dev_file=_split('dev'),
            tag_to_bioes=tag_to_bioes,
        )
class PANX_DP(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner_dp'):
        """
        WikiAnn (PAN-X) NER corpus for one language with dependency-parsing-style
        tags. Expects BIO files named wikiann-<lang>_{train,dev,test}.bio under
        <cache>/datasets/panxdataset/<lang>/.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('panxdataset') / Path(lang)

        def _split(name: str) -> Path:
            # one BIO file per split
            return data_folder / f'wikiann-{lang}_{name}.bio'

        super(PANX_DP, self).__init__(
            data_folder,
            {0: "text", 1: "ner_dp"},
            in_memory=in_memory,
            train_file=_split('train'),
            test_file=_split('test'),
            dev_file=_split('dev'),
            tag_to_bioes=tag_to_bioes,
        )
class ATIS(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='atis'):
        """
        ATIS slot-filling corpus. Expects column files under
        <cache>/datasets/atis/<lang>/; the split files are located by the parent
        loader (no explicit paths are passed here).
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('atis') / Path(lang)

        # token text plus ATIS slot tag
        super(ATIS, self).__init__(
            data_folder,
            {0: "text", 1: "atis"},
            in_memory=in_memory,
            tag_to_bioes=tag_to_bioes,
        )
class COMMNER(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        COMMNER corpus for one language. Expects BIO files named
        <lang>.{train,dev,test}.conll.bio under <cache>/datasets/commner/<lang>/.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('commner') / Path(lang)

        def _split(name: str) -> Path:
            # one BIO file per split
            return data_folder / f'{lang}.{name}.conll.bio'

        super(COMMNER, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=_split('train'),
            test_file=_split('test'),
            dev_file=_split('dev'),
            tag_to_bioes=tag_to_bioes,
        )
class FRQUERY(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='fr.annotated.all.clean.conll', tag_to_bioes='ner'):
        """
        French query NER corpus. Despite its name, `lang` is the shared file-name
        prefix of the <prefix>.{train,dev,test} splits under <cache>/datasets/frquery/.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('frquery')

        def _split(name: str) -> Path:
            return data_folder / f'{lang}.{name}'

        super(FRQUERY, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=_split('train'),
            test_file=_split('test'),
            dev_file=_split('dev'),
            tag_to_bioes=tag_to_bioes,
        )
class ICBU(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='all.csv', tag_to_bioes='ner'):
        """
        ICBU NER corpus with fixed split files train.txt/dev.txt/test.txt under
        <cache>/datasets/icbu/.
        NOTE(review): the `lang` parameter is accepted but not used here.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('icbu')

        super(ICBU, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=data_folder / 'train.txt',
            test_file=data_folder / 'test.txt',
            dev_file=data_folder / 'dev.txt',
            tag_to_bioes=tag_to_bioes,
        )
class ONTONOTE_ENG(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        OntoNotes English NER corpus. Expects the column files to already exist under
        <cache>/datasets/ontonote_eng/ (no automatic download here).
        NOTE(review): `tag_to_bioes` is accepted but not forwarded — the parent is
        called with tag_to_bioes=None, so tags are kept as-is; confirm this is intended.
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # token text plus NER tag
        columns = {0: "text", 1: "ner"}
        self.columns = columns

        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(ONTONOTE_ENG, self).__init__(
            data_folder, columns, tag_to_bioes=None, in_memory=in_memory
        )
class UNLABEL(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, modelname='', lang='en', tag_to_bioes='ner', columns = {0: "text", 1: "ner"}, extra=None):
        """
        Self-training corpus over unlabeled data that was auto-tagged by a model.
        The train file name encodes the tagging model, language and optional suffix;
        dev/test point at empty placeholder files.
        NOTE(review): the `columns` and `tag_to_bioes` parameters are accepted but
        ignored — the column map is fixed below and tag_to_bioes=None is forwarded.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        # fixed layout of the auto-tagged files: gold tag, predicted tag, confidence
        columns = {0: "text", 1: "gold_ner", 2: "ner", 3: "score"}
        data_folder = base_path / Path('unlabeled_data')

        suffix = '' if extra is None else f'.{extra}'
        super(UNLABEL, self).__init__(
            data_folder,
            columns,
            in_memory=in_memory,
            train_file=data_folder / f'train.{modelname}.{lang}{suffix}.conllu',
            test_file=data_folder / 'empty_testb.conllu',
            dev_file=data_folder / 'empty_testa.conllu',
            tag_to_bioes=None,
        )
class UNLABEL_DEPENDENCY(UniversalDependenciesCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, modelname='', lang='en', extra=None):
        """
        Self-training dependency corpus over unlabeled data auto-parsed by a model.
        The train file name encodes the parsing model, language and optional suffix;
        dev/test point at empty placeholder files.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('unlabeled_data')

        suffix = '' if extra is None else f'.{extra}'
        super(UNLABEL_DEPENDENCY, self).__init__(
            data_folder,
            in_memory=in_memory,
            train_file=data_folder / f'train.{modelname}.{lang}{suffix}.conllu',
            test_file=data_folder / 'empty_testb.conllu',
            dev_file=data_folder / 'empty_testa.conllu',
            add_root=True,
        )
class MIXED_NER(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        Mixed NER corpus in WikiAnn layout. Expects BIO files named
        wikiann-<lang>_{train,dev,test}.bio under <cache>/datasets/mixed_ner/<lang>/.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('mixed_ner') / Path(lang)

        def _split(name: str) -> Path:
            return data_folder / f'wikiann-{lang}_{name}.bio'

        super(MIXED_NER, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=_split('train'),
            test_file=_split('test'),
            dev_file=_split('dev'),
            tag_to_bioes=tag_to_bioes,
        )
class LOW10_NER(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        Low-resource (10%) NER corpus in WikiAnn layout. Expects BIO files named
        wikiann-<lang>_{train,dev,test}.bio under <cache>/datasets/low10_ner/<lang>/.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('low10_ner') / Path(lang)

        def _split(name: str) -> Path:
            return data_folder / f'wikiann-{lang}_{name}.bio'

        super(LOW10_NER, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=_split('train'),
            test_file=_split('test'),
            dev_file=_split('dev'),
            tag_to_bioes=tag_to_bioes,
        )
class PANXPRED(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        WikiAnn-style corpus whose train split comes from model predictions over
        unlabeled data ('unlabeledpanx'), while dev/test reuse the gold PANX splits.
        NOTE(review): `tag_to_bioes` is accepted but not forwarded (None is passed).
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('unlabeledpanx') / Path(lang)
        gold_folder = base_path / Path('panxdataset') / Path(lang)

        super(PANXPRED, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=data_folder / f'wikiann-{lang}_train.bio',
            test_file=gold_folder / f'wikiann-{lang}_test.bio',
            dev_file=gold_folder / f'wikiann-{lang}_dev.bio',
            tag_to_bioes=None,
        )
class PANXPRED2(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        Second-round variant of PANXPRED: train split comes from model predictions
        under 'unlabeledpanx2', while dev/test reuse the gold PANX splits.
        NOTE(review): `tag_to_bioes` is accepted but not forwarded (None is passed).
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('unlabeledpanx2') / Path(lang)
        gold_folder = base_path / Path('panxdataset') / Path(lang)

        super(PANXPRED2, self).__init__(
            data_folder,
            {0: "text", 1: "ner"},
            in_memory=in_memory,
            train_file=data_folder / f'wikiann-{lang}_train.bio',
            test_file=gold_folder / f'wikiann-{lang}_test.bio',
            dev_file=gold_folder / f'wikiann-{lang}_dev.bio',
            tag_to_bioes=None,
        )
class SEMEVAL16(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        SemEval-2016 restaurant-domain aspect extraction corpus for one language.
        Train/dev files live under 'train/', the gold test file under 'test/'.
        :param lang: two-letter language code; one of tr, es, nl, en, ru
        :param tag_to_bioes: tag column to convert to BIOES, 'ner' by default
        :raises ValueError: for an unsupported language code (previously this
            surfaced as an opaque KeyError from the internal lookup table)
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # token text plus aspect tag
        columns = {0: "text", 1: "ast"}
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / Path('semeval-2016')
        lc_to_lang = {'tr':'Turkish','es':'Spanish','nl':'Dutch','en':'English','ru':'Russian'}
        if lang not in lc_to_lang:
            raise ValueError(
                f"Unsupported language code '{lang}' for SEMEVAL16; expected one of {sorted(lc_to_lang)}"
            )
        language = lc_to_lang[lang]
        # train/dev use the full language name, the gold test file the uppercased code
        train_file = Path('train/' + language+'_semeval2016_restaurants_train.bio')
        dev_file = Path('train/' + language+'_semeval2016_restaurants_dev.bio')
        test_file = Path('test/' + lang.upper()+'_REST_SB1_TEST.xml.gold.bio')
        super(SEMEVAL16, self).__init__(data_folder, columns, in_memory=in_memory,
                      train_file=data_folder/train_file,
                      test_file=data_folder/test_file,
                      dev_file=data_folder/dev_file,
                      tag_to_bioes=tag_to_bioes,
                      )
class SEMEVAL14_LAPTOP(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        SemEval-2014 laptop-domain aspect extraction corpus. Expects column files
        (token text + aspect tag) to already exist under
        <cache>/datasets/semeval14_laptop/ (no automatic download here).
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param tag_to_bioes: tag column to convert to BIOES, 'ner' by default
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # token text plus aspect tag
        columns = {0: "text", 1: "ast"}
        self.columns = columns

        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(SEMEVAL14_LAPTOP, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class SEMEVAL14_RESTAURANT(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        SemEval-2014 restaurant-domain aspect extraction corpus. Expects column files
        (token text + aspect tag) to already exist under
        <cache>/datasets/semeval14_restaurant/ (no automatic download here).
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param tag_to_bioes: tag column to convert to BIOES, 'ner' by default
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # token text plus aspect tag
        columns = {0: "text", 1: "ast"}
        self.columns = columns

        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(SEMEVAL14_RESTAURANT, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class SEMEVAL15_RESTAURANT(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        SemEval-2015 restaurant-domain aspect extraction corpus. Expects column files
        (token text + aspect tag) to already exist under
        <cache>/datasets/semeval15_restaurant/ (no automatic download here).
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param tag_to_bioes: tag column to convert to BIOES, 'ner' by default
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # token text plus aspect tag
        columns = {0: "text", 1: "ast"}
        self.columns = columns

        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        super(SEMEVAL15_RESTAURANT, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class CALCS(ColumnCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True, lang='en', tag_to_bioes='ner'):
        """
        CALCS code-switching corpora. The language code selects the language pair:
        'en'/'es' -> English-Spanish, 'ar'/'eg' -> MSA-Egyptian, 'hi' -> English-Hindi
        (which ships an extra language-id column).
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param tag_to_bioes: tag column to convert to BIOES, 'ner' by default
        :raises ValueError: if `lang` is not a supported code (previously this fell
            through and crashed later with a NameError on `target_path`)
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # default column layout: token text plus code-switch tag
        columns = {0: "text", 1: "cs"}
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        if lang == 'en' or lang == 'es':
            target_path = Path('CALCS_ENG_SPA')
        elif lang == 'ar' or lang == 'eg':
            target_path = Path('CALCS_MSA_EGY')
        elif lang == 'hi':
            # the Hindi pair carries a language-id column before the NER tag
            columns = {0: "text", 1: "lang", 2: "ner"}
            target_path = Path('CALCS_ENG_HIN')
        else:
            raise ValueError(
                f"Unsupported language code '{lang}' for CALCS; expected one of: en, es, ar, eg, hi"
            )
        data_folder = base_path / target_path
        super(CALCS, self).__init__(data_folder, columns, in_memory=in_memory,
                      train_file=data_folder/'calcs_train.conll',
                      test_file=data_folder/'calcs_test.conll',
                      dev_file=data_folder/'calcs_dev.conll',
                      tag_to_bioes=tag_to_bioes,
                      )
class CONLL_2000(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "chunk",
        in_memory: bool = True,
    ):
        """
        Initialize the CoNLL-2000 corpus for English chunking.
        The first time you call this constructor it will automatically download the dataset.
        :param base_path: Default is None, meaning that corpus gets auto-downloaded and loaded. You can override this
        to point to a different folder but typically this should not be necessary.
        :param tag_to_bioes: 'chunk' by default, should not be changed, but you can set 'pos' instead to predict POS tags
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # column format: token text, POS tag, chunk tag
        columns = {0: "text", 1: "pos", 2: "chunk"}
        # this dataset name
        dataset_name = self.__class__.__name__.lower()
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # download data if necessary; presence of train.txt marks a completed setup
        conll_2000_path = "https://www.clips.uantwerpen.be/conll2000/chunking/"
        data_file = Path(flair.cache_root) / "datasets" / dataset_name / "train.txt"
        if not data_file.is_file():
            cached_path(
                f"{conll_2000_path}train.txt.gz", Path("datasets") / dataset_name
            )
            cached_path(
                f"{conll_2000_path}test.txt.gz", Path("datasets") / dataset_name
            )
            import gzip, shutil
            # the splits ship gzipped; decompress each once next to the archive
            with gzip.open(
                Path(flair.cache_root) / "datasets" / dataset_name / "train.txt.gz",
                "rb",
            ) as f_in:
                with open(
                    Path(flair.cache_root) / "datasets" / dataset_name / "train.txt",
                    "wb",
                ) as f_out:
                    shutil.copyfileobj(f_in, f_out)
            with gzip.open(
                Path(flair.cache_root) / "datasets" / dataset_name / "test.txt.gz", "rb"
            ) as f_in:
                with open(
                    Path(flair.cache_root) / "datasets" / dataset_name / "test.txt",
                    "wb",
                ) as f_out:
                    shutil.copyfileobj(f_in, f_out)
        super(CONLL_2000, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class GERMEVAL(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Initialize the GermEval-14 NER corpus for German. The data must be obtained
        manually from https://sites.google.com/site/germeval2014ner/home/ and placed
        in the dataset folder (the flair cache root by default, or base_path).
        :param base_path: Path to the GermEval corpus on your machine
        :param tag_to_bioes: 'ner' by default, should not be changed.
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if isinstance(base_path, str):
            base_path = Path(base_path)

        # GermEval files carry a token index in column 0; text is column 1, NER column 2
        columns = {1: "text", 2: "ner"}

        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # warn (but do not fail) when the manually-downloaded data is missing
        if not data_folder.exists():
            log.warning("-" * 100)
            log.warning(f'ACHTUNG: GermEval-14 dataset not found at "{data_folder}".')
            log.warning(
                'Instructions for obtaining the data can be found here: https://sites.google.com/site/germeval2014ner/home/"'
            )
            log.warning("-" * 100)

        super(GERMEVAL, self).__init__(
            data_folder,
            columns,
            tag_to_bioes=tag_to_bioes,
            comment_symbol="#",
            in_memory=in_memory,
        )
class IMDB(ClassificationCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        IMDb sentiment classification corpus. Downloads the aclImdb tarball once and
        flattens each split into FastText-style '__label__pos/__label__neg <text>'
        lines in train.txt / test.txt.
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param in_memory: If False (default), the corpus is read lazily from disk.
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # this dataset name
        dataset_name = self.__class__.__name__.lower()
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # download data if necessary; presence of train.txt marks a completed setup
        imdb_acl_path = "http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz"
        data_path = Path(flair.cache_root) / "datasets" / dataset_name
        data_file = data_path / "train.txt"
        if not data_file.is_file():
            cached_path(imdb_acl_path, Path("datasets") / dataset_name)
            import tarfile
            with tarfile.open(
                Path(flair.cache_root)
                / "datasets"
                / dataset_name
                / "aclImdb_v1.tar.gz",
                "r:gz",
            ) as f_in:
                datasets = ["train", "test"]
                labels = ["pos", "neg"]
                # NOTE(review): getmembers() is re-scanned for every (label, dataset)
                # pair, and the split files are opened in append mode — a partially
                # completed earlier run would leave duplicate lines behind.
                for label in labels:
                    for dataset in datasets:
                        f_in.extractall(
                            data_path,
                            members=[
                                m
                                for m in f_in.getmembers()
                                if f"{dataset}/{label}" in m.name
                            ],
                        )
                        # append one '__label__<label> <review>' line per review file
                        with open(f"{data_path}/{dataset}.txt", "at") as f_p:
                            current_path = data_path / "aclImdb" / dataset / label
                            for file_name in current_path.iterdir():
                                if file_name.is_file() and file_name.name.endswith(
                                    ".txt"
                                ):
                                    f_p.write(
                                        f"__label__{label} "
                                        + file_name.open("rt", encoding="utf-8").read()
                                        + "\n"
                                    )
        super(IMDB, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class NEWSGROUPS(ClassificationCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        20 Newsgroups topic classification corpus (bydate split). Downloads the
        tarball once and flattens each newsgroup into FastText-style
        '__label__<group> <post>' lines, with newlines replaced by ' <n> '.
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param in_memory: If False (default), the corpus is read lazily from disk.
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # this dataset name
        dataset_name = self.__class__.__name__.lower()
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # download data if necessary; presence of the train file marks completed setup
        twenty_newsgroups_path = (
            "http://qwone.com/~jason/20Newsgroups/20news-bydate.tar.gz"
        )
        data_path = Path(flair.cache_root) / "datasets" / dataset_name
        data_file = data_path / "20news-bydate-train.txt"
        if not data_file.is_file():
            cached_path(
                twenty_newsgroups_path, Path("datasets") / dataset_name / "original"
            )
            import tarfile
            with tarfile.open(
                Path(flair.cache_root)
                / "datasets"
                / dataset_name
                / "original"
                / "20news-bydate.tar.gz",
                "r:gz",
            ) as f_in:
                datasets = ["20news-bydate-test", "20news-bydate-train"]
                labels = [
                    "alt.atheism",
                    "comp.graphics",
                    "comp.os.ms-windows.misc",
                    "comp.sys.ibm.pc.hardware",
                    "comp.sys.mac.hardware",
                    "comp.windows.x",
                    "misc.forsale",
                    "rec.autos",
                    "rec.motorcycles",
                    "rec.sport.baseball",
                    "rec.sport.hockey",
                    "sci.crypt",
                    "sci.electronics",
                    "sci.med",
                    "sci.space",
                    "soc.religion.christian",
                    "talk.politics.guns",
                    "talk.politics.mideast",
                    "talk.politics.misc",
                    "talk.religion.misc",
                ]
                # NOTE(review): getmembers() is re-scanned for every (label, dataset)
                # pair, and the split files are opened in append mode — a partially
                # completed earlier run would leave duplicate lines behind.
                for label in labels:
                    for dataset in datasets:
                        f_in.extractall(
                            data_path / "original",
                            members=[
                                m
                                for m in f_in.getmembers()
                                if f"{dataset}/{label}" in m.name
                            ],
                        )
                        # append one single-line '__label__<group> <post>' per file;
                        # posts are latin1-encoded in the archive
                        with open(f"{data_path}/{dataset}.txt", "at") as f_p:
                            current_path = data_path / "original" / dataset / label
                            for file_name in current_path.iterdir():
                                if file_name.is_file():
                                    f_p.write(
                                        f"__label__{label} "
                                        + file_name.open("rt", encoding="latin1")
                                        .read()
                                        .replace("\n", " <n> ")
                                        + "\n"
                                    )
        super(NEWSGROUPS, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class NER_BASQUE(ColumnCorpus):
    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        """
        Basque EIEC named entity corpus. Downloads eiec_v1.0.tgz from the IXA group
        once, extracts the train/test files and moves them to the dataset folder root.
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param tag_to_bioes: 'ner' by default
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # column format: token text plus NER tag
        columns = {0: "text", 1: "ner"}
        # this dataset name
        dataset_name = self.__class__.__name__.lower()
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # download data if necessary; presence of the train file marks completed setup
        ner_basque_path = "http://ixa2.si.ehu.eus/eiec/"
        data_path = Path(flair.cache_root) / "datasets" / dataset_name
        data_file = data_path / "named_ent_eu.train"
        if not data_file.is_file():
            cached_path(
                f"{ner_basque_path}/eiec_v1.0.tgz", Path("datasets") / dataset_name
            )
            import tarfile, shutil
            with tarfile.open(
                Path(flair.cache_root) / "datasets" / dataset_name / "eiec_v1.0.tgz",
                "r:gz",
            ) as f_in:
                corpus_files = (
                    "eiec_v1.0/named_ent_eu.train",
                    "eiec_v1.0/named_ent_eu.test",
                )
                # flatten: extract into the dataset dir, then move each file out of
                # the eiec_v1.0/ subfolder into the dataset root
                for corpus_file in corpus_files:
                    f_in.extract(corpus_file, data_path)
                    shutil.move(f"{data_path}/{corpus_file}", data_path)
        super(NER_BASQUE, self).__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class TREC_50(ClassificationCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        TREC question-classification corpus with the fine-grained 50-label set.
        Downloads the original label files and converts them once into
        FastText-style '__label__<LABEL:sub> <question>' lines.
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # this dataset name
        dataset_name = self.__class__.__name__.lower()
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # download data if necessary (cached_path is a no-op once files are cached)
        trec_path = "https://cogcomp.seas.upenn.edu/Data/QA/QC/"
        original_filenames = ["train_5500.label", "TREC_10.label"]
        new_filenames = ["train.txt", "test.txt"]
        for original_filename in original_filenames:
            cached_path(
                f"{trec_path}{original_filename}",
                Path("datasets") / dataset_name / "original",
            )
        data_file = data_folder / new_filenames[0]
        # convert once: originals are latin1; outputs are written as utf-8
        if not data_file.is_file():
            for original_filename, new_filename in zip(
                original_filenames, new_filenames
            ):
                with open(
                    data_folder / "original" / original_filename,
                    "rt",
                    encoding="latin1",
                ) as open_fp:
                    with open(
                        data_folder / new_filename, "wt", encoding="utf-8"
                    ) as write_fp:
                        for line in open_fp:
                            line = line.rstrip()
                            fields = line.split()
                            old_label = fields[0]
                            question = " ".join(fields[1:])
                            # Create flair compatible labels
                            # TREC-6 : NUM:dist -> __label__NUM
                            # TREC-50: NUM:dist -> __label__NUM:dist
                            new_label = "__label__"
                            new_label += old_label
                            write_fp.write(f"{new_label} {question}\n")
        super(TREC_50, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class TREC_6(ClassificationCorpus):
    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        TREC question-classification corpus with the coarse 6-label set.
        Downloads the original label files and converts them once into
        FastText-style '__label__<LABEL> <question>' lines (sub-label stripped).
        :param base_path: override for the dataset folder; defaults to the flair cache
        :param in_memory: If True, keeps dataset in memory giving speedups in training.
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # this dataset name
        dataset_name = self.__class__.__name__.lower()
        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # download data if necessary (cached_path is a no-op once files are cached)
        trec_path = "https://cogcomp.seas.upenn.edu/Data/QA/QC/"
        original_filenames = ["train_5500.label", "TREC_10.label"]
        new_filenames = ["train.txt", "test.txt"]
        for original_filename in original_filenames:
            cached_path(
                f"{trec_path}{original_filename}",
                Path("datasets") / dataset_name / "original",
            )
        data_file = data_folder / new_filenames[0]
        # convert once: originals are latin1; outputs are written as utf-8
        if not data_file.is_file():
            for original_filename, new_filename in zip(
                original_filenames, new_filenames
            ):
                with open(
                    data_folder / "original" / original_filename,
                    "rt",
                    encoding="latin1",
                ) as open_fp:
                    with open(
                        data_folder / new_filename, "wt", encoding="utf-8"
                    ) as write_fp:
                        for line in open_fp:
                            line = line.rstrip()
                            fields = line.split()
                            old_label = fields[0]
                            question = " ".join(fields[1:])
                            # Create flair compatible labels
                            # TREC-6 : NUM:dist -> __label__NUM
                            # TREC-50: NUM:dist -> __label__NUM:dist
                            new_label = "__label__"
                            new_label += old_label.split(":")[0]
                            write_fp.write(f"{new_label} {question}\n")
        super(TREC_6, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class UD_ENGLISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for English (EWT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        web_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_English-EWT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{web_path}/en_ewt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_ENGLISH, self).__init__(data_folder, in_memory=in_memory)
class UD_TAMIL(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Tamil (TTB treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        web_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Tamil-TTB/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{web_path}/ta_ttb-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_TAMIL, self).__init__(data_folder, in_memory=in_memory)
class UD_GERMAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for German (GSD treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_German-GSD/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/de_gsd-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_GERMAN, self).__init__(data_folder, in_memory=in_memory)
class UD_GERMAN_HDT(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for German (HDT treebank).

    The HDT training data is published in four parts; on first use the parts
    are downloaded and concatenated into a single training file.
    """

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; False by default because
            this corpus is large
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # this dataset name
        dataset_name = self.__class__.__name__.lower()

        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # download data if necessary (note: served from the 'dev' branch)
        ud_path = (
            "https://raw.githubusercontent.com/UniversalDependencies/UD_German-HDT/dev"
        )
        cached_path(f"{ud_path}/de_hdt-ud-dev.conllu", Path("datasets") / dataset_name)
        cached_path(f"{ud_path}/de_hdt-ud-test.conllu", Path("datasets") / dataset_name)

        train_filenames = [
            "de_hdt-ud-train-a-1.conllu",
            "de_hdt-ud-train-a-2.conllu",
            "de_hdt-ud-train-b-1.conllu",
            "de_hdt-ud-train-b-2.conllu",
        ]
        for train_file in train_filenames:
            cached_path(
                f"{ud_path}/{train_file}", Path("datasets") / dataset_name / "original"
            )

        # merge the four training parts into one file; encoding is pinned to
        # UTF-8 so the merge does not depend on the platform default encoding
        data_path = Path(flair.cache_root) / "datasets" / dataset_name
        new_train_file: Path = data_path / "de_hdt-ud-train-all.conllu"
        if not new_train_file.is_file():
            with open(new_train_file, "wt", encoding="utf-8") as f_out:
                for train_filename in train_filenames:
                    with open(
                        data_path / "original" / train_filename,
                        "rt",
                        encoding="utf-8",
                    ) as f_in:
                        f_out.write(f_in.read())

        super(UD_GERMAN_HDT, self).__init__(data_folder, in_memory=in_memory)
class UD_DUTCH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Dutch (Alpino treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Dutch-Alpino/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/nl_alpino-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_DUTCH, self).__init__(data_folder, in_memory=in_memory)
class UD_FRENCH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for French (GSD treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_French-GSD/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/fr_gsd-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_FRENCH, self).__init__(data_folder, in_memory=in_memory)
class UD_ITALIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Italian (ISDT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Italian-ISDT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/it_isdt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_ITALIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_SPANISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Spanish (GSD treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Spanish-GSD/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/es_gsd-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_SPANISH, self).__init__(data_folder, in_memory=in_memory)
class UD_PORTUGUESE(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Portuguese (Bosque treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Portuguese-Bosque/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/pt_bosque-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_PORTUGUESE, self).__init__(data_folder, in_memory=in_memory)
class UD_ROMANIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Romanian (RRT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Romanian-RRT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/ro_rrt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_ROMANIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_CATALAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Catalan (AnCora treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Catalan-AnCora/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/ca_ancora-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_CATALAN, self).__init__(data_folder, in_memory=in_memory)
class UD_POLISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Polish (LFG treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Polish-LFG/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/pl_lfg-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_POLISH, self).__init__(data_folder, in_memory=in_memory)
class UD_CZECH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Czech (PDT treebank).

    The PDT training data is published in four parts; on first use the parts
    are downloaded and concatenated into a single training file.
    """

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; False by default because
            this corpus is large
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # this dataset name
        dataset_name = self.__class__.__name__.lower()

        # default dataset folder is the cache root
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # download dev/test and the four training parts if necessary
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Czech-PDT/master"
        cached_path(f"{ud_path}/cs_pdt-ud-dev.conllu", Path("datasets") / dataset_name)
        cached_path(f"{ud_path}/cs_pdt-ud-test.conllu", Path("datasets") / dataset_name)

        train_filenames = [
            "cs_pdt-ud-train-c.conllu",
            "cs_pdt-ud-train-l.conllu",
            "cs_pdt-ud-train-m.conllu",
            "cs_pdt-ud-train-v.conllu",
        ]
        for train_filename in train_filenames:
            cached_path(
                f"{ud_path}/{train_filename}",
                Path("datasets") / dataset_name / "original",
            )

        # merge the four training parts into one file; encoding is pinned to
        # UTF-8 so the merge does not depend on the platform default encoding
        data_path = Path(flair.cache_root) / "datasets" / dataset_name
        new_train_file: Path = data_path / "cs_pdt-ud-train-all.conllu"
        if not new_train_file.is_file():
            with open(new_train_file, "wt", encoding="utf-8") as f_out:
                for train_filename in train_filenames:
                    with open(
                        data_path / "original" / train_filename,
                        "rt",
                        encoding="utf-8",
                    ) as f_in:
                        f_out.write(f_in.read())

        super(UD_CZECH, self).__init__(data_folder, in_memory=in_memory)
class UD_SLOVAK(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Slovak (SNK treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Slovak-SNK/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/sk_snk-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_SLOVAK, self).__init__(data_folder, in_memory=in_memory)
class UD_SWEDISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Swedish (Talbanken treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Swedish-Talbanken/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/sv_talbanken-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_SWEDISH, self).__init__(data_folder, in_memory=in_memory)
class UD_DANISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Danish (DDT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Danish-DDT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/da_ddt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_DANISH, self).__init__(data_folder, in_memory=in_memory)
class UD_NORWEGIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Norwegian (Bokmaal treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Norwegian-Bokmaal/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/no_bokmaal-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_NORWEGIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_FINNISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Finnish (TDT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Finnish-TDT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/fi_tdt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_FINNISH, self).__init__(data_folder, in_memory=in_memory)
class UD_SLOVENIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Slovenian (SSJ treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Slovenian-SSJ/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/sl_ssj-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_SLOVENIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_CROATIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Croatian (SET treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Croatian-SET/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/hr_set-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_CROATIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_SERBIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Serbian (SET treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Serbian-SET/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/sr_set-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_SERBIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_BULGARIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Bulgarian (BTB treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Bulgarian-BTB/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/bg_btb-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_BULGARIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_ARABIC(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Arabic (PADT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Arabic-PADT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/ar_padt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_ARABIC, self).__init__(data_folder, in_memory=in_memory)
class UD_HEBREW(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Hebrew (HTB treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Hebrew-HTB/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/he_htb-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_HEBREW, self).__init__(data_folder, in_memory=in_memory)
class UD_TURKISH(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Turkish (IMST treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Turkish-IMST/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/tr_imst-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_TURKISH, self).__init__(data_folder, in_memory=in_memory)
class UD_PERSIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Persian (Seraji treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Persian-Seraji/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/fa_seraji-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_PERSIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_RUSSIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Russian (SynTagRus treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Russian-SynTagRus/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/ru_syntagrus-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_RUSSIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_HINDI(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Hindi (HDTB treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Hindi-HDTB/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/hi_hdtb-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_HINDI, self).__init__(data_folder, in_memory=in_memory)
class UD_INDONESIAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Indonesian (GSD treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Indonesian-GSD/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/id_gsd-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_INDONESIAN, self).__init__(data_folder, in_memory=in_memory)
class UD_JAPANESE(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Japanese (GSD treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Japanese-GSD/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/ja_gsd-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_JAPANESE, self).__init__(data_folder, in_memory=in_memory)
class UD_CHINESE(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Chinese (GSD treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Chinese-GSD/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/zh_gsd-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_CHINESE, self).__init__(data_folder, in_memory=in_memory)
class UD_KOREAN(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Korean (Kaist treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Korean-Kaist/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/ko_kaist-ud-{split}.conllu",
                Path("datasets") / dataset_name,
            )

        super(UD_KOREAN, self).__init__(data_folder, in_memory=in_memory)
class UD_BASQUE(UniversalDependenciesCorpus):
    """Universal Dependencies corpus for Basque (BDT treebank)."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = True):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch each CoNLL-U split from the UD GitHub repository if not cached
        ud_path = "https://raw.githubusercontent.com/UniversalDependencies/UD_Basque-BDT/master"
        for split in ("dev", "test", "train"):
            cached_path(
                f"{ud_path}/eu_bdt-ud-{split}.conllu", Path("datasets") / dataset_name
            )

        super(UD_BASQUE, self).__init__(data_folder, in_memory=in_memory)
def _download_wassa_if_not_there(emotion, data_folder, dataset_name):
    """Download and convert the WASSA-2017 emotion intensity data for one emotion.

    For each split, fetches the original ratings file, rewrites it as a
    FastText-style classification file (``__label__<score> <tweet>``) named
    ``<emotion>-<split>.txt`` in *data_folder*, then removes the raw download.

    :param emotion: emotion name, e.g. "anger", "fear", "joy", "sadness"
    :param data_folder: folder in which the converted files are written
    :param dataset_name: subfolder of the flair dataset cache for raw downloads
    """
    # explicit split -> URL mapping (the original if-chain left `url` unbound
    # for an unexpected split name)
    urls = {
        "train": f"http://saifmohammad.com/WebDocs/EmoInt%20Train%20Data/{emotion}-ratings-0to1.train.txt",
        "dev": f"http://saifmohammad.com/WebDocs/EmoInt%20Dev%20Data%20With%20Gold/{emotion}-ratings-0to1.dev.gold.txt",
        "test": f"http://saifmohammad.com/WebDocs/EmoInt%20Test%20Gold%20Data/{emotion}-ratings-0to1.test.gold.txt",
    }
    for split in ["train", "dev", "test"]:
        data_file = data_folder / f"{emotion}-{split}.txt"
        if data_file.is_file():
            continue

        path = cached_path(urls[split], Path("datasets") / dataset_name)

        # rows are tab-separated; column 1 is the tweet text, column 3 the
        # score used as label. Encoding is pinned to UTF-8 so non-ASCII tweet
        # characters do not depend on the platform default encoding.
        with open(path, "r", encoding="utf-8") as f:
            with open(data_file, "w", encoding="utf-8") as out:
                next(f)  # skip the first line (presumably a header — kept from original)
                for line in f:
                    fields = line.split("\t")
                    out.write(f"__label__{fields[3].rstrip()} {fields[1]}\n")

        # raw download is no longer needed once converted
        os.remove(path)
class WASSA_ANGER(ClassificationCorpus):
    """WASSA-2017 emotion intensity corpus for the 'anger' emotion."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch and convert the WASSA files on first use
        _download_wassa_if_not_there("anger", data_folder, dataset_name)

        super(WASSA_ANGER, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class WASSA_FEAR(ClassificationCorpus):
    """WASSA-2017 emotion intensity corpus for the 'fear' emotion."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch and convert the WASSA files on first use
        _download_wassa_if_not_there("fear", data_folder, dataset_name)

        super(WASSA_FEAR, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class WASSA_JOY(ClassificationCorpus):
    """WASSA-2017 emotion intensity corpus for the 'joy' emotion."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch and convert the WASSA files on first use
        _download_wassa_if_not_there("joy", data_folder, dataset_name)

        super(WASSA_JOY, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
class WASSA_SADNESS(ClassificationCorpus):
    """WASSA-2017 emotion intensity corpus for the 'sadness' emotion."""

    def __init__(self, base_path: Union[str, Path] = None, in_memory: bool = False):
        """
        :param base_path: optional root folder for datasets; defaults to the flair cache root
        :param in_memory: forwarded to the superclass; keep the corpus in memory if True
        """
        if type(base_path) == str:
            base_path: Path = Path(base_path)

        # default dataset folder is <cache_root>/datasets/<lowercased class name>
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name

        # fetch and convert the WASSA files on first use
        _download_wassa_if_not_there("sadness", data_folder, dataset_name)

        super(WASSA_SADNESS, self).__init__(
            data_folder, use_tokenizer=False, in_memory=in_memory
        )
def _download_wikiner(language_code: str, dataset_name: str):
    """Download and unpack a WikiNER corpus for *language_code* into the
    flair dataset cache, converting it to CoNLL-style tab-separated columns.

    Does nothing if the unpacked ``.train`` file already exists.
    """
    # download data if necessary
    wikiner_path = (
        "https://raw.githubusercontent.com/dice-group/FOX/master/input/Wikiner/"
    )
    lc = language_code
    data_file = (
        Path(flair.cache_root)
        / "datasets"
        / dataset_name
        / f"aij-wikiner-{lc}-wp3.train"
    )
    if not data_file.is_file():
        cached_path(
            f"{wikiner_path}aij-wikiner-{lc}-wp3.bz2", Path("datasets") / dataset_name
        )
        # NOTE(review): shutil appears unused in this function
        import bz2, shutil
        # unpack and write out in CoNLL column-like format
        bz_file = bz2.BZ2File(
            Path(flair.cache_root)
            / "datasets"
            / dataset_name
            / f"aij-wikiner-{lc}-wp3.bz2",
            "rb",
        )
        with bz_file as f, open(
            Path(flair.cache_root)
            / "datasets"
            / dataset_name
            / f"aij-wikiner-{lc}-wp3.train",
            "w",
        ) as out:
            for line in f:
                line = line.decode("utf-8")
                # each whitespace-separated token is "word|pos|ner"; write one
                # token per output line. The trailing "\n" kept on the last
                # token of each sentence yields the blank separator line that
                # CoNLL readers expect between sentences.
                words = line.split(" ")
                for word in words:
                    out.write("\t".join(word.split("|")) + "\n")
class WIKINER_ENGLISH(ColumnCorpus):
    """English WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("en", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_GERMAN(ColumnCorpus):
    """German WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("de", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_DUTCH(ColumnCorpus):
    """Dutch WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("nl", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_FRENCH(ColumnCorpus):
    """French WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("fr", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_ITALIAN(ColumnCorpus):
    """Italian WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("it", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_SPANISH(ColumnCorpus):
    """Spanish WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("es", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_PORTUGUESE(ColumnCorpus):
    """Portuguese WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("pt", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_POLISH(ColumnCorpus):
    """Polish WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("pl", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WIKINER_RUSSIAN(ColumnCorpus):
    """Russian WikiNER corpus (silver-standard NER with POS column)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = False,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "pos", 2: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch and unpack the corpus on first use
        _download_wikiner("ru", dataset_name)
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class WNUT_17(ColumnCorpus):
    """WNUT-17 emerging-entities NER corpus (noisy user-generated text)."""

    def __init__(
        self,
        base_path: Union[str, Path] = None,
        tag_to_bioes: str = "ner",
        in_memory: bool = True,
    ):
        if type(base_path) == str:
            base_path: Path = Path(base_path)
        # layout of the CoNLL-style data columns
        columns = {0: "text", 1: "ner"}
        # data lives in <cache_root>/datasets/<classname> unless overridden
        dataset_name = self.__class__.__name__.lower()
        if not base_path:
            base_path = Path(flair.cache_root) / "datasets"
        data_folder = base_path / dataset_name
        # fetch train/dev/test splits on first use
        wnut_path = "https://noisy-text.github.io/2017/files/"
        cached_path(f"{wnut_path}wnut17train.conll", Path("datasets") / dataset_name)
        cached_path(f"{wnut_path}emerging.dev.conll", Path("datasets") / dataset_name)
        cached_path(
            f"{wnut_path}emerging.test.annotated", Path("datasets") / dataset_name
        )
        super().__init__(
            data_folder, columns, tag_to_bioes=tag_to_bioes, in_memory=in_memory
        )
class DataLoader(torch.utils.data.dataloader.DataLoader):
    """Thin wrapper around torch's DataLoader that collates batches as plain
    lists and disables worker processes for in-memory datasets (where
    multi-process loading only adds overhead)."""

    def __init__(
        self,
        dataset,
        batch_size=1,
        shuffle=False,
        sampler=None,
        batch_sampler=None,
        num_workers=4,
        drop_last=False,
        timeout=0,
        worker_init_fn=None,
    ):
        # in certain cases, multi-CPU data loading makes no sense and slows
        # everything down. For this reason, we detect if a dataset is in-memory:
        # if so, num_workers is set to 0 for faster processing
        flair_dataset = dataset
        # unwrap Subset/ConcatDataset layers to reach the underlying dataset
        # (for ConcatDataset only the first part is inspected)
        while True:
            if type(flair_dataset) is Subset:
                flair_dataset = flair_dataset.dataset
            elif type(flair_dataset) is ConcatDataset:
                flair_dataset = flair_dataset.datasets[0]
            else:
                break
        if type(flair_dataset) is list:
            num_workers = 0
        elif isinstance(flair_dataset, FlairDataset) and flair_dataset.is_in_memory():
            num_workers = 0
        # collate_fn=list keeps batches as plain Python lists of data points
        super(DataLoader, self).__init__(
            dataset,
            batch_size=batch_size,
            shuffle=shuffle,
            sampler=sampler,
            batch_sampler=batch_sampler,
            num_workers=num_workers,
            collate_fn=list,
            drop_last=drop_last,
            timeout=timeout,
            worker_init_fn=worker_init_fn,
        )
class CoupleDataset:
    """Pairs two datasets of equal length: index ``i`` yields the tuple
    ``(corpus1[i], corpus2[i])``.

    Length is taken from ``corpus1``; both datasets are expected to be the
    same length.
    """

    def __init__(self, corpus1, corpus2):
        self.corpus1 = corpus1
        self.corpus2 = corpus2

    def __len__(self):
        # length follows the first corpus only
        return len(self.corpus1)

    def __getitem__(self, index):
        return self.corpus1[index], self.corpus2[index]

    def is_in_memory(self) -> bool:
        # NOTE(review): only corpus1 is consulted here; presumably both
        # corpora share the same in_memory setting — confirm against callers
        return self.corpus1.in_memory
| [
"[email protected]"
] | |
a48cc5f99932ad1cb65d9cf20b449ded9b64ec57 | 12972f4d9e7de2c38e79ae911f2e7b125965cac9 | /virtual/lib/python3.6/site-packages/setuptools/_vendor/six.py | f8340b1e3743bbbdaf6b43684502ae46bda8f54b | [
"MIT"
] | permissive | Michellemukami/pitch | b33d0de81cc2a0dfe70ddc1e91affc88af63ff2b | aebb7736d18766343a5a295de0782aa175245c35 | refs/heads/master | 2022-10-22T03:55:33.364628 | 2019-08-07T10:15:10 | 2019-08-07T10:15:10 | 200,673,234 | 0 | 0 | null | 2022-09-16T18:07:53 | 2019-08-05T14:38:26 | Python | UTF-8 | Python | false | false | 30,094 | py | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
# Package metadata. NOTE(review): upstream six names this attribute
# ``__author__``; the identifier and e-mail address were mangled in this
# vendored copy and are restored to the upstream values here.
__author__ = "Benjamin Peterson <benjamin@python.org>"
__user__ = __author__  # mangled alias kept so any existing reference still resolves
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
# Version-dependent aliases for the basic type categories used throughout
# this module.
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes
    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str
    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe the platform's Py_ssize_t width by overflowing __len__.
        class X(object):
            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
    """Descriptor that resolves its value lazily on first attribute access,
    then caches it on the instance and removes itself from the class."""

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        # subclasses define _resolve() to produce the real value
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
class MovedModule(_LazyDescr):
    """Lazy reference to a module that lives under a different name on
    Python 2 (*old*) and Python 3 (*new*)."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                # on Python 3 the new name defaults to the attribute name
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        # import the target module on demand and cache the looked-up value
        _module = self._resolve()
        value = getattr(_module, attr)
        setattr(self, attr, value)
        return value
class _LazyModule(types.ModuleType):
    """Module type whose attributes are lazy-loading descriptors listed in
    ``_moved_attributes``."""

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between modules (and was
    possibly renamed) from Python 2 to Python 3."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # attribute name defaults: new_attr -> old_attr -> name
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        # import the owning module and fetch the attribute from it
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter(object):
    """
    A meta path importer to import six.moves and its submodules.
    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # maps full dotted names -> module (or MovedModule placeholder)
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        # register *mod* under one or more names relative to this package
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: claim only the modules we registered
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        # PEP 302 loader
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            # resolve the placeholder to the real module
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.
        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None
        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code

# the single importer instance used for all six.moves registrations below
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
    """Lazy loading of moved objects"""
    __path__ = []  # mark as package

# Registry of all names that moved between Python 2 and Python 3; each entry
# becomes a lazy attribute of the six.moves module.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]
# install every entry as an attribute of _MovedItems; moved *modules* are
# additionally registered with the meta-path importer
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_parse"""

# names re-exported through six.moves.urllib.parse
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
# register under both the historical flat name and the package-style name
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_error"""

# names re-exported through six.moves.urllib.error
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
# register under both the historical flat name and the package-style name
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_request"""

# names re-exported through six.moves.urllib.request
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
# register under both the historical flat name and the package-style name
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_response"""

# names re-exported through six.moves.urllib.response
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
# register under both the historical flat name and the package-style name
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_robotparser"""

# names re-exported through six.moves.urllib.robotparser
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
# register under both the historical flat name and the package-style name
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # submodules are the already-registered lazy urllib move modules
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
def add_move(move):
    """Add an item to six.moves."""
    # register the MovedAttribute/MovedModule on the six.moves class
    setattr(_MovedItems, move.name, move)
def remove_move(name):
    """Remove item from six.moves."""
    try:
        # first try the class-level descriptor ...
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            # ... then a value already cached on the moves module instance
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
# Names of the function/method introspection attributes, which were renamed
# between Python 2 and Python 3; used by the attrgetters defined below.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"
    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"
    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
# advance_iterator: the builtin next() where available (2.6+), otherwise a
# shim calling the Python 2 .next() protocol method.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator
# callable() was removed in Python 3.0/3.1; fall back to an __mro__ scan.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
# Bound/unbound method helpers: Python 3 has no unbound methods, so these
# are mostly pass-throughs there.
if PY3:
    def get_unbound_function(unbound):
        return unbound
    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func
    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):
        # Python 2 iterator base that forwards .next() to __next__()
        def next(self):
            return type(self).__next__(self)
    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
# Version-neutral accessors for function/method internals, built from the
# attribute names selected above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
# Version-neutral dict iteration helpers: on Python 3 the plain view methods
# are wrapped in iter(); on Python 2 the iter*/view* methods are used.
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))
    viewkeys = operator.methodcaller("keys")
    viewvalues = operator.methodcaller("values")
    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)
    viewkeys = operator.methodcaller("viewkeys")
    viewvalues = operator.methodcaller("viewvalues")
    viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
# Bytes/text literal helpers and byte-wise accessors, plus the names of the
# unittest assertion methods that were renamed across versions.
if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash
    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
# Version-neutral wrappers around the renamed unittest assertion methods;
# *self* is the TestCase instance.
def assertCountEqual(self, *args, **kwargs):
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)
# exec_ and reraise: both need syntax that is illegal on the other major
# version, so the Python 2 variants are compiled from strings.
if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # default to the caller's globals/locals
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
# raise_from: 'raise ... from ...' is a syntax error before Python 3, so the
# Python 3 variants are compiled from strings.
if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        # Python 2 has no exception chaining; just raise the new value
        raise value
# print_: use the builtin print function when available, otherwise emulate
# Python 3 print semantics (sep/end/file keyword arguments) on Python 2.
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # emit unicode output if any argument (or sep/end) is unicode
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # wrap print_ to support the flush= keyword added in Python 3.3
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
    # Before 3.4, functools.wraps did not expose the wrapped callable;
    # emulate the newer behavior by attaching __wrapped__ explicitly.
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Return a dummy base class that applies *meta* to classes derived
    from it.

    Subclassing the returned object creates exactly one intermediate
    class; when that intermediate is itself used as a base, its __new__
    discards the temporary base and builds the real class directly via
    ``meta(name, bases, d)`` -- working identically on Python 2 and 3.
    """
    class _Shim(meta):
        # Invoked when the *user's* class statement executes; substitute
        # the real metaclass and bases in place of the temporary shim.
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    # Build the throwaway base without triggering _Shim.__new__ itself.
    return type.__new__(_Shim, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator that rebuilds the decorated class with *metaclass*."""
    def wrapper(cls):
        body = dict(cls.__dict__)
        # Slot descriptors must not be copied into the rebuilt class or
        # they would shadow the freshly created slots.
        slots = body.get('__slots__')
        if slots is not None:
            for slot_name in ([slots] if isinstance(slots, str) else slots):
                body.pop(slot_name)
        # These two are recreated automatically and would otherwise point
        # at the old, pre-decoration class.
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.
    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Python 3: __str__ already returns text; nothing to patch.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # Move the text-returning __str__ to __unicode__ and make __str__
    # return UTF-8 bytes, as Python 2 expects.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    # Running under an import system that provides a module spec (3.4+).
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| [
"[email protected]"
] | |
4f872c0b06702838fd0288d6dcc363e5aa3b0c8a | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-2/4c735f24ea954a5ced87bd0bd1573fa3eb914c18-<save_inference_model>-fix.py | f762eaaca815726528bd1b1c9533f60f924fc3cf | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,018 | py |
def save_inference_model(self, executor, dirname, feeded_var_names, target_vars, main_program=None, export_for_deployment=True):
    """Prune a program down to an inference graph and persist it.

    When ``main_program`` is omitted, the transpiler's origin program is
    pruned instead (with the extra trailing flag enabled). Afterwards the
    serialized model is re-read so distributed parameter metadata can be
    attached before the persistable variables are saved.
    """
    if main_program is not None:
        io.save_inference_model(dirname, feeded_var_names, target_vars,
                                executor, main_program, None, None,
                                export_for_deployment)
    else:
        io.save_inference_model(dirname, feeded_var_names, target_vars,
                                executor, self._origin_program, None, None,
                                export_for_deployment, True)
    # Reload the just-written model program from disk.
    model_filename = os.path.join(dirname, '__model__')
    with open(model_filename, 'rb') as handle:
        serialized = handle.read()
    rebuilt = Program.parse_from_string(serialized)
    # Copy distributed-training parameter info, then save parameters.
    rebuilt._copy_dist_param_info_from(self.main_program)
    self.save_persistables(executor, dirname, rebuilt)
| [
"[email protected]"
] | |
d360f406e4e83ad61f765f38b25b91b19ee2015a | c1631329d605c04269357db500610468bb5a03e1 | /net2.py | 9323dd0aef0a9afe49fd3f24cc4bb695e3eafa0f | [] | no_license | andygom/bestInd | e164f77d68c5988c8636a377fd3f1cecf443f8c3 | 9fada18de0533b81390850f5d76526d8e3e0f2c4 | refs/heads/main | 2023-03-21T23:11:21.761657 | 2021-03-19T03:46:57 | 2021-03-19T03:46:57 | 349,291,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,813 | py | import numpy as np
from numpy import random
import time
from adafruit_servokit import ServoKit
# Fix: the constructor call lost its parentheses ("ServoKit channels=16"
# is a SyntaxError). 16-channel servo HAT driver instance.
kit = ServoKit(channels=16)
# INDIVIDUO CON experimento p100 g800 m0.5 / 5 ----- F I T N E S S D E 41.59513354301453
# weightsIH = np.random.random 14, 14 * 2 - 1
weightsIH = [-1.54777539e-01, 3.97567849e-02, 4.52048525e+00, -3.17314212e-01,
7.24028023e-03, -2.48570679e-02, 4.26078616e-02, 4.87324695e+00,
3.01411514e-01, 5.87172588e-04, 1.12164266e-02, 2.42976642e-01,
-2.64589355e+00, -3.06214023e-01] , [ 6.04021634e+00, 9.54626637e-02, -1.00867707e+00, -3.47528593e-02,
-9.73591728e-04, 3.18560320e-02, 4.99005519e-01, -8.89079985e-02,
-5.82909081e-01, -2.35788649e-02, 1.99146296e-02, -5.82767748e-02,
-1.74785908e+00, -2.93027024e-02] , [-1.51699724e+00, 4.37666447e-03, -4.89639848e+00, 1.35873539e+00,
3.68365849e-01, 2.48613136e+00, -3.08601962e-01, -2.92232446e-04,
-8.74145558e-02, -2.91851448e-01, -2.05628778e-01, 2.52083014e+00,
-1.28947720e-03, -3.69929251e-02] , [ 0.14579911, -1.44535398, -2.49183407, 0.00758193, -4.43960787,
0.02008601, 1.81110337, 0.02337051, 0.89933651, 0.0151445 ,
0.28026332, 1.27992345, 0.00706883, 0.0094326 ] , [ 1.32951538e+00, -5.73086615e+01, 1.03671386e-02, 1.21570260e-02,
1.96064035e-02, 8.51626100e-01, -8.08900999e-01, -4.86461250e-03,
-2.02892861e-01, -1.71484243e+00, 3.30024518e+00, 1.07100984e+00,
3.17998921e-03, 7.33693948e+00] , [ 6.80707299e-03, -3.67101200e-06, 1.62081557e-03, -1.88921405e-02,
9.15636781e-01, 4.65372421e-01, -8.53342040e+00, -3.06801838e-04,
-1.16764701e-02, 8.06124183e+01, 1.95237981e-03, 2.02982693e-03,
4.31600415e-01, -1.06953622e+00], [ -0.735408 , -4.79959472, -4.88295444, 2.99831233,
-0.23201268, 0.72127274, -0.16440405, 1.56473089,
-0.80455491, -0.15161709, 2.03309716, 4.82994935,
-39.57393979, 0.10524952] , [-6.52889688e-01, -3.65073043e-06, -4.72863396e-02, -6.53288053e-03,
7.40784393e-04, 1.74601675e-07, -4.20163004e+00, 1.87500574e+00,
-4.73172614e-02, 3.27867320e-04, 4.03536358e-01, 3.57936755e-04,
2.15737525e-01, -1.29169651e-03] , [ 0.81003697, 0.06301062, 0.01348914, -0.00239483, 0.09335406,
0.09145969, 0.59258431, -0.10885904, 0.71195307, 0.35145697,
-0.00100081, 0.10498547, 0.03834157, -0.01673424] , [-4.47136142e-01, 1.66017884e-02, -2.68108730e-04, 2.53254091e-02,
-4.22477072e-01, 6.86425278e+00, 4.19579834e-04, -3.13470103e-02,
-7.78813803e-01, -4.84225080e-02, -2.57947551e+00, -2.35209442e+01,
2.82571427e-01, -4.38408094e-01] , [-1.02214097e-01, -8.52361714e-03, -1.36938370e-03, -3.04682745e-02,
2.32272429e-03, 2.67779175e-01, -9.28053379e-03, -1.39751671e+01,
9.69162268e-01, 1.56430954e+01, 3.18741448e+00, 2.13396681e+00,
-1.24319871e-01, -3.31242739e-01] , [-8.31733085e-03, 6.24562395e-01, 2.29482504e-01, -1.29478038e-05,
-4.31860130e-01, -1.33050876e-02, -1.22589111e-01, 3.13219398e-01,
2.24015841e-03, -2.99286762e-01, 2.79616335e+00, 3.87034570e+00,
3.41529477e-03, -2.69008403e-03] , [ 3.01793423e-01, 1.07333295e-03, -9.40753656e-03, 1.52399436e+00,
-5.00238702e-03, 4.19960440e-03, -3.89651445e-01, 3.75576098e-01,
-9.24162139e-01, -6.79210679e-02, 1.82836481e-01, -2.15121976e+00,
2.60675269e-05, -5.81125187e-01] , [-1.25071127e+00, 1.55093670e-03, 9.72488676e-02, -1.81658564e-01,
2.63154709e-01, 1.06059966e+00, 7.55682575e-01, 1.22300220e+01,
-3.82877006e-01, -6.24371026e-01, 5.03832361e-01, -2.30627898e-04,
2.79594823e+00, 4.21651883e-03]
weightsHO = [-4.47886839e+00, -3.31166003e+00, -2.79549289e-03, 3.28100196e-02,
2.81232253e-03, 6.82228809e-02, 2.71481567e-01, -7.63047637e-04] , [ 0.00432984, -0.01551173, -0.0128544 , 0.48185824, 0.01291198,
-0.596858 , 1.91739179, 0.01029896] , [ 0.0203695 , -1.05618309, 0.4392908 , 1.08797459, -0.01392269,
0.71895939, -0.03282281, -0.00311812] , [ 1.24396883e-02, 1.28063624e+00, 4.34206120e-02, 6.35060286e-02,
-3.62655848e-04, 5.64039364e-02, 1.71655858e-01, -2.23238369e+00] , [-1.55987332e-03, -1.51732573e+00, 1.61595016e+00, 5.38726536e+00,
1.06416114e+00, 3.56895445e-01, 8.78055468e-01, -1.31792970e-02] , [ 7.29683476e-02, 3.46833309e-01, -4.38737482e-03, 5.88026465e+00,
-9.78426363e-04, 1.45256560e-01, 1.41301485e-01, -9.50188095e-01] , [-7.61907953e+00, 4.19071036e+00, 2.47617837e-04, 3.10014161e+00,
2.02174375e-01, -5.76716665e-01, 3.46695880e-02, -1.90269065e-01] , [ 1.64662423e-03, -1.86086350e-02, 1.14325565e-03, 1.99699248e-04,
-3.82419161e+00, -2.01987400e+00, -1.98113863e-01, -1.68211285e-02] , [ 2.50639442e+01, -1.37985247e+01, 1.83691975e-02, -1.07348600e-04,
-2.43145020e-02, 2.03147378e-03, 1.80075497e+00, 4.56989101e-01] , [-3.26075392e-04, -1.55227292e-01, -7.87569098e-01, -6.16532092e-03,
6.55208375e-02, 7.93845780e-01, 5.30793630e-04, -4.70779262e-01] , [ 3.99853426e-03, -5.75598838e-01, -1.01445584e+00, -1.77048943e-02,
-8.82624413e-02, -1.42993243e-01, 9.37698017e-01, -8.33155647e+00] , [ 3.12110112e-02, -4.18248831e-01, 1.35245606e-01, 1.62444393e-05,
-3.86799715e-02, -1.01895529e-01, -3.24972460e-02, -1.11278307e-02] , [ 6.00281740e-02, -6.69332304e-01, 4.22212666e+00, 4.74948757e-02,
2.64664699e-04, -3.57835926e-01, -2.26437572e+00, 4.12533257e-02] , [-5.39667493e-04, -1.69793240e-01, 3.76377519e-03, 3.74690477e+00,
-7.73269959e-01, -1.79347299e-01, 7.04338269e-01, -1.38699779e-01]
# Mutable module-level state shared by the functions below.
neuronH = [0 for c in range(14)]  # hidden-layer activations (overwritten each pass)
neuronO = [0 for c in range(8)]  # output-layer activations, fed back as inputs
motor = [0 for c in range(8)]  # servo angles in degrees, range [0, 180]
ultsensor = [0 for c in range(6)]  # scaled ultrasonic readings (see usNorm)
COUNT = 0  # forward-pass counter; 0 means "first tick" in network()
# Motor normalization (translated from Spanish: "normalización de los motores").
def motorNorm(motorArray):
    """Map network outputs in [-1, 1] to servo angles in [0, 180] and apply.

    Writes the angles into the module-level `motor` list and then pushes
    each of the 8 channels to the servo HAT (`kit`).
    """
    print(motorArray)
    for idx in range(len(motorArray)):
        # Linear rescale: -1 -> 0 deg, 0 -> 90 deg, +1 -> 180 deg.
        motor[idx] = ((motorArray[idx] + 1) * 180) / 2
        print(motor)
    # Drive all 8 servo channels with the freshly computed angles.
    for channel in range(8):
        kit.servo[channel].angle = motor[channel]
# Ultrasonic sensor normalization (translated from Spanish).
def usNorm(usArray):
    """Store |reading| * 20 for each ultrasonic channel into `ultsensor`."""
    for idx, reading in enumerate(usArray):
        ultsensor[idx] = abs(reading * 20)
# Sensor inputs (translated from Spanish: "entradas de los sensores").
# NOTE(review): this shadows the builtin input(); consider renaming once the
# caller in network() below is updated to match.
def input():
    # Announce an input refresh. The random 3-vector below is bound locally
    # and never used -- presumably a placeholder for real sensor reads;
    # verify intent before removing.
    print('new inputs')
    sensorInput = np.random.random((3)) * 2 - 1
def network():
    """One forward pass of the hand-wired 14-14-8 net; drives the servos.

    Reads module globals COUNT / neuronO / weightsIH, writes neuronH and
    neuronO in place, then calls motorNorm() and input().
    NOTE(review): three things here look unintended -- confirm against the
    simulator version before changing behavior:
      * anh / anOP are re-created inside the inner loop, so sum(...) only
        ever sees the single latest product;
      * the output layer multiplies by weightsIH although weightsHO is
        defined above and never used;
      * both factors are indexed by the outer variable j, so the inner
        loop over i does not form a dot product.
    """
    sensorInput = np.random.random((14)) * 2 - 1
    print(COUNT)
    if COUNT == 0:
        # First tick: keep the fresh random input vector.
        sensorInput = np.random.random((14)) * 2 - 1
    else:
        # Later ticks: feed outputs back as the first six inputs
        # (i - 6 indexes neuronO from its tail because i < 6).
        for i in range(0, 6):
            sensorInput[i] = neuronO[i - 6]
    # for i in range(6, 14):
    # sensorInput[i] = neuronO[i - 6]
    # print(sensorInput)
    # print(motor)
    # print(sensorInput)
    # print(weightsIH)
    # Hidden layer: squash each (single) product through tanh.
    for j in range(0, 14):
        for i in range(0, 14):
            anh = []
            anh.append(sensorInput[j] * weightsIH[j][i])
            # print(sum(anh))
            # print(anh)
            neuronH[j] = np.tanh(sum(anh))
            # dotN = np.dot(sensorInput, weightsIH[i])
            # neuronH[i] = np.tanh(dotN)
    # print(neuronH)
    # Output layer: same pattern over the 8 outputs.
    for j in range(0, 8):
        for i in range(0, 14):
            anOP = []
            anOP.append(neuronH[j] * weightsIH[j][i])
            neuronO[j] = np.tanh(sum(anOP))
    # print(neuronO)
    # print(neuronO)
    motorNorm(neuronO)
    input()
    # sensorInput = np.random.random((14)) * 2 - 1
    # print(neuronH)
    # print(neuronO)
try:
    # Main control loop: one network tick every 2 seconds until Ctrl-C.
    while True:
        # if COUNT == 0:
        # kit.servo[0].angle = 90
        # kit.servo[1].angle = 90
        # kit.servo[2].angle = 90
        # kit.servo[3].angle = 90
        # kit.servo[4].angle = 90
        # kit.servo[5].angle = 90
        # kit.servo[6].angle = 90
        # kit.servo[7].angle = 90
        network()
        # Module-level code may rebind COUNT without a `global` statement.
        COUNT = COUNT + 1
        # (translated) Try to avoid sleep() and use intervals equal to the
        # simulator's tick instead.
        time.sleep(2)
except KeyboardInterrupt:
    print("Press Ctrl-C to terminate while statement")
    pass
| [
"[email protected]"
] | |
80b3def3345e608e8f51501194c5d23249ed50dc | 634fb5fe10e8f944da44ab31896acc8471ec5f18 | /hq_env/bin/sphinx-autogen | 42f0972c1ff3e67f0b39e10ebd4c8b542116b016 | [] | no_license | dimagi/commcarehq-venv | 277d0b6fada24f2edd54f74850267201153412a7 | 2c52e3fb0f974cae5c5feaea1d5de851fe530c80 | refs/heads/master | 2021-01-18T14:05:47.931306 | 2015-07-20T10:10:41 | 2015-07-20T10:10:41 | 11,513,855 | 1 | 1 | null | 2015-07-20T10:10:41 | 2013-07-18T21:09:22 | Python | UTF-8 | Python | false | false | 331 | #!/home/travis/virtualenv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'Sphinx==1.2b1','console_scripts','sphinx-autogen'
__requires__ = 'Sphinx==1.2b1'
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point('Sphinx==1.2b1', 'console_scripts', 'sphinx-autogen')()
)
| [
"[email protected]"
] | ||
276413737c57b3c74b11ccee0d0df56f0c65692a | d802a0793a4a4af0336912932c35499edac16845 | /Python/Python/Regex and Parsing/regex_substitution.py | c7a8c4e9aca66eb624f877498c6606e23f9b07fa | [] | no_license | butterflylady/hackerrank | fb1ca6801855b3956bbfb72a2f7a95db4513aca4 | c42e2c80c41a95eb10d9a061eb8e7132e52a71ac | refs/heads/master | 2021-08-27T21:02:50.512292 | 2021-08-05T08:40:37 | 2021-08-05T08:40:37 | 167,610,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | import re
def change_symb(match):
    """Translate a matched C-style boolean operator to its Python keyword."""
    # "&&" -> "and", "||" -> "or"; any other text falls through to None,
    # exactly like the original if/elif chain.
    return {"&&": "and", "||": "or"}.get(match.group(0))
# One substitution pass per input line: replace space-delimited C-style
# boolean operators with Python keywords.
n = int(input())
for i in range(n):
    line = input()
    # Lookbehind/lookahead require a space on both sides, so operators at
    # line edges or glued to other tokens are left untouched.
    # NOTE(review): the pattern could be compiled once outside the loop.
    pattern = '(?<= )(&&|\|\|)(?= )'  # Ex. s="A && && && && && && B"
    print(re.sub(pattern, change_symb, line))
| [
"[email protected]"
] | |
3df929b6a508a9d626634464b85f4d50299530ae | e88c152d699cef4af64fa5aa4b9c61631c03c8b6 | /Solutions/0949.Largest-Time-for-Given-Digits.py | 816118cebc673cecb74253fdf254dfeec22a97db | [] | no_license | arnabs542/Leetcode-3 | 062af047b1f828b9def2a6e2a4d906e77090b569 | 7e10ff62981db88053b511c3ef8bd284d728d2fc | refs/heads/master | 2023-02-26T19:21:05.037170 | 2021-01-20T07:04:48 | 2021-01-20T07:04:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,939 | py | """
949. Largest Time for Given Digits
Given an array arr of 4 digits, find the latest 24-hour time that can be made using each digit exactly once.
24-hour times are formatted as "HH:MM", where HH is between 00 and 23, and MM is between 00 and 59.
The earliest 24-hour time is 00:00, and the latest is 23:59.
Return the latest 24-hour time in "HH:MM" format. If no valid time can be made, return an empty string.
Example 1:
Input: A = [1,2,3,4]
Output: "23:41"
Explanation: The valid 24-hour times are "12:34", "12:43", "13:24", "13:42", "14:23", "14:32", "21:34", "21:43", "23:14", and "23:41".
Of these times, "23:41" is the latest.
Example 2:
Input: A = [5,5,5,5]
Output: ""
Explanation: There are no valid 24-hour times as "55:55" is not valid.
Example 3:
Input: A = [0,0,0,0]
Output: "00:00"
Example 4:
Input: A = [0,0,1,0]
Output: "10:00"
"""
"""
step 1: find all possible permutations - O(4!).
step 2: update max_possible time that can be constructed from the permutations.
"""
class Solution:
    def largestTimeFromDigits(self, arr: List[int]) -> str:
        """Return the latest valid "HH:MM" time using each digit of *arr*
        exactly once, or "" when no valid time exists.

        Replaces the hand-rolled backtracking permutation generator with
        itertools.permutations (4! = 24 orderings). For each ordering the
        candidate hour/minute is validated and the latest is kept.

        Args:
            arr: four digits, each 0-9.
        Returns:
            "HH:MM" string, or "" if no permutation forms a valid time.
        """
        from itertools import permutations  # local import: module deps unchanged

        best_minutes = -1  # latest time seen so far, in minutes since 00:00
        best = ""
        for a, b, c, d in permutations(arr):
            hour = a * 10 + b
            minute = c * 10 + d
            if 0 <= hour < 24 and 0 <= minute < 60:
                total = hour * 60 + minute
                if total > best_minutes:
                    best_minutes = total
                    best = "{}{}:{}{}".format(a, b, c, d)
        return best
| [
"[email protected]"
] | |
70e3ea01aca468d440c1dbfa13b939fad9364327 | 67bdebd561b19af9bf759b6ed5de8556b93ea91f | /lower_priority.py | ba5a00b47b30c1d9e0248d2c3e339ba751417911 | [] | no_license | rlowrance/re-avm | 91371ec79f6b6f48e17643da4dfb7a4894d0a0ca | d4cfa62e9f65d325e8ac98caa61d3fb666b8a6a2 | refs/heads/master | 2021-01-17T07:34:16.876133 | 2017-02-06T21:04:59 | 2017-02-06T21:04:59 | 42,865,972 | 31 | 10 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | 'lower priority of current process'
import os
import pdb
def lower_priority():
    """Drop the current process to a lower scheduling priority.

    Windows: BELOW_NORMAL_PRIORITY_CLASS via the win32 API.
    POSIX: bump niceness by one.
    Ref: http://stackoverflow.com/questions/1023038/change-process-priority-in-python-cross-platform
    """
    assert os.name in ('nt', 'posix'), os.name
    if os.name == 'posix':
        os.nice(1)
    else:
        import win32api
        import win32con
        import win32process
        current_pid = win32api.GetCurrentProcessId()
        proc_handle = win32api.OpenProcess(
            win32con.PROCESS_ALL_ACCESS, True, current_pid)
        win32process.SetPriorityClass(
            proc_handle, win32process.BELOW_NORMAL_PRIORITY_CLASS)


if __name__ == '__main__':
    lower_priority()
"[email protected]"
] | |
55340e6faac1c5014b387c237c09e8efb4f223ca | 41a20700b5bb351d20562ac23ec4db06bc96f0d7 | /src/plum/plumr.py | 184a7d811a8acaa3a4afc138963e0ce6e0c62ab0 | [] | no_license | kedz/noiseylg | ee0c54634767e8d3789b4ffb93727988c29c6979 | 17266e1a41e33aecb95dc1c3aca68f6bccee86d5 | refs/heads/master | 2020-07-30T11:22:08.351759 | 2019-10-30T21:33:11 | 2019-10-30T21:33:11 | 210,212,253 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 10,551 | py | import argparse
from pathlib import Path
import os
import random
from pprint import pprint
import json
from collections import OrderedDict
def handle_debug_opts(args, plum_pointers, checkpoints):
    """Dispatch each --pprint-* debug flag to its inspection helper.

    For the list-valued flags, None means "flag absent" while an empty
    list means "show everything". ``checkpoints`` is accepted but unused
    in this body.
    """
    if args.pprint_ds_sample is not None:
        pprint_sample_datasource(args.pprint_ds_sample,
                                 plum_pointers["datasources"],
                                 args.pprint_ds_nsamples)
    if args.pprint_pipeline_sample is not None:
        pprint_sample_pipeline(args.pprint_pipeline_sample,
                               plum_pointers["pipelines"])
    if args.pprint_model is not None:
        pprint_model(args.pprint_model, plum_pointers["models"])
    if args.pprint_params is not None:
        pprint_params(args.pprint_params, plum_pointers["models"])
    if args.pprint_vocab is not None:
        pprint_vocab(args.pprint_vocab, plum_pointers["vocabs"])
def pprint_checkpoints(checkpoints):
    """List every saved checkpoint, flagging the default one with '*'."""
    print("\nShowing saved checkpoints:")
    for ckpt_id, meta in checkpoints.items():
        row = " * {} | {}" if meta.get("default", False) else "   {} | {}"
        print(row.format(ckpt_id, str(meta["criterion"])))
    print("\n * default run is best checkpoint from latest run.")
    print()
def pprint_sample_datasource(datasource_names, datasources, num_samples):
    """Pretty-print random samples drawn from the named datasources.

    An empty name list selects every datasource. Raises ValueError for an
    unknown name. Consumes the module-level ``random`` state (shuffle).
    """
    selected = list(datasources) if datasource_names == [] else datasource_names
    for ds_name in selected:
        if ds_name not in datasources:
            raise ValueError("No datasource with name: {}".format(ds_name))
        ds = datasources[ds_name]
        print("Drawing {} samples from datasource {}".format(
            num_samples, ds_name))
        order = list(range(len(ds)))
        random.shuffle(order)
        for sample_num, item_idx in enumerate(order[:num_samples], 1):
            print(ds_name, "sample {} no. {}".format(sample_num, item_idx))
            pprint(ds[item_idx])
            print()
def pprint_model(names, models):
    """Pretty-print the named models (all of them when *names* is empty)."""
    selected = list(models) if names == [] else names
    print("Pretty printing models:\n")
    for model_name in selected:
        print(model_name)
        pprint(models[model_name])
        print()
    print()
def pprint_sample_pipeline(pipeline_names, pipelines):
    """Print the first batch produced by each named pipeline (all if empty)."""
    selected = list(pipelines) if pipeline_names == [] else pipeline_names
    for name in selected:
        print(name)
        # Pull a single batch; next() on a fresh iterator mirrors the
        # original for/break, and an empty pipeline prints nothing.
        batch_iter = iter(pipelines[name])
        try:
            pprint(next(batch_iter))
        except StopIteration:
            pass
        print()
def pprint_params(model_names, models):
    """Print a per-parameter table (name | dtype | shape | tags) per model.

    An empty ``model_names`` list selects every model. Raises ValueError
    for an unknown model name. Assumes each model exposes
    ``named_parameters()`` (torch-style) and ``parameter_tags(name)``.
    """
    if model_names == []:
        model_names = list(models)
    for name in model_names:
        if name not in models:
            raise ValueError("No model with name: {}".format(name))
        model = models[name]
        print("{} parameters:".format(name))
        # Collect the columns first so each can be padded to the width of
        # its longest entry.
        names = []
        dtypes = []
        dims = []
        tags = []
        for pname, param in model.named_parameters():
            names.append(pname)
            dtypes.append(str(param.dtype))
            dims.append(str(tuple(param.size())))
            tags.append(str(model.parameter_tags(pname)))
        # Build a row template with per-column field widths.
        template = "  {:" + str(max([len(x) for x in names])) + "s}" + \
            " | {:" + str(max([len(x) for x in dtypes])) + "s}" + \
            " | {:" + str(max([len(x) for x in dims])) + "s}" + \
            " | {:" + str(max([len(x) for x in tags])) + "s}"
        for name, dtype, dim, tag in zip(names, dtypes, dims, tags):
            print(template.format(name, dtype, dim, tag))
        print()
def pprint_vocab(vocab_names, vocabs):
    """Dump each named vocab as "index token count" lines (all if empty).

    Raises ValueError for an unknown vocab name. Assumes each vocab
    exposes ``enumerate()`` yielding (index, token) and ``count(token)``.
    """
    selected = list(vocabs) if vocab_names == [] else vocab_names
    for vocab_name in selected:
        if vocab_name not in vocabs:
            raise ValueError("No vocab with name: {}".format(vocab_name))
        vocab = vocabs[vocab_name]
        print(vocab_name)
        for token_idx, token in vocab.enumerate():
            print(token_idx, token, vocab.count(token))
        print()
def get_meta_path():
    """Path of the per-user plumr metadata file (~/.plumr_meta.json)."""
    return Path.home().joinpath(".plumr_meta.json")
def load_plumr_meta(verbose=False):
    """Return the parsed per-user plumr metadata, creating it on first run."""
    meta_path = get_meta_path()
    if not meta_path.exists():
        # First run: seed an empty extension-module registry.
        meta_path.parent.mkdir(exist_ok=True, parents=True)
        meta_path.write_text(json.dumps({"ext_modules": []}))
    if verbose:
        print("Reading meta from: {}".format(meta_path))
    raw = meta_path.read_text()
    return json.loads(raw)
def update_ext_libs(meta, add_libs=None, del_libs=None, verbose=False):
    """Register/unregister external plum modules, then persist the metadata.

    New libraries are import-tested before being added; failures are
    reported and skipped. The (possibly mutated) *meta* dict is written
    back to the user's metadata file unconditionally.
    """
    for lib in (add_libs if add_libs is not None else []):
        if lib in meta["ext_modules"]:
            continue
        try:
            __import__(lib)
            meta["ext_modules"].append(lib)
            if verbose:
                print("Added lib: {}".format(lib))
        except Exception as e:
            print("Could not import: {}".format(lib))
            print("Got exception:")
            print(e)
    for lib in (del_libs if del_libs is not None else []):
        if lib in meta["ext_modules"]:
            if verbose:
                print("Removing lib: {}".format(lib))
            meta["ext_modules"].pop(meta["ext_modules"].index(lib))
    get_meta_path().write_text(json.dumps(meta))
def import_ext_libs(meta):
    """Import every registered external plum module for its side effects."""
    for module_name in meta["ext_modules"]:
        __import__(module_name)
def main():
    """plumr CLI entry point: parse args, manage libs, load config, run."""
    parser = argparse.ArgumentParser()
    parser.add_argument("config", type=Path, nargs="?")
    parser.add_argument("--pprint", action="store_true")
    parser.add_argument("--pprint-ds-sample", default=None, nargs="*",
                        required=False)
    parser.add_argument("--pprint-ds-nsamples", default=4, type=int)
    parser.add_argument("--pprint-model", default=None, nargs="*",
                        required=False)
    parser.add_argument("--pprint-params", default=None, nargs="*",
                        required=False)
    parser.add_argument("--pprint-pipeline-sample", default=None, nargs="*",
                        required=False)
    parser.add_argument("--pprint-vocab", default=None, nargs="*",
                        required=False)
    parser.add_argument("--pprint-ckpts", action="store_true")
    parser.add_argument("--default-ckpt", type=str, default=None)
    parser.add_argument("-P", action="store_true")
    parser.add_argument("--run", type=str, nargs="+", default=None)
    parser.add_argument("--proj", type=Path, required=False, default=None)
    parser.add_argument("--gpu", type=int, default=-1)
    parser.add_argument("--add-libs", nargs="+", default=None)
    parser.add_argument("--del-libs", nargs="+", default=None)
    args = parser.parse_args()
    # -P enables verbose ("pedantic") logging throughout.
    pedantic = args.P
    if pedantic:
        print("\n ** Running in pedantic mode. Expect lots of messages. **\n")
    plumr_meta = load_plumr_meta(verbose=pedantic)
    update_ext_libs(plumr_meta, add_libs=args.add_libs,
                    del_libs=args.del_libs, verbose=pedantic)
    # Library management is a standalone action: stop after updating.
    if args.add_libs is not None or args.del_libs is not None:
        exit()
    # Project directory defaults to wherever the config file lives.
    # NOTE(review): if both --proj and config are omitted this dereferences
    # args.config.parent on None -- confirm intended CLI contract.
    if args.proj is None:
        project_directory = args.config.parent
    else:
        project_directory = args.proj
    # Import plum (and registered extension modules) only after the cheap
    # management-only paths above have had a chance to exit.
    import plum
    import_ext_libs(plumr_meta)
    vocab_cache = project_directory / "vocabs"
    checkpoints = find_checkpoints(project_directory, args.default_ckpt)
    if args.pprint_ckpts:
        pprint_checkpoints(checkpoints)
    # With no config there is nothing further to parse or run.
    if args.config is None:
        return
    if not args.config.exists():
        raise Exception("Config path doesn't exists: {}".format(args.config))
    plum_parser = plum.PlumParser(pprint_parse=args.pprint,
                                  vocab_cache=vocab_cache,
                                  verbose=pedantic)
    plum_object, plum_pointers, config_json = plum_parser.parse_file(
        args.config, return_json=True)
    handle_debug_opts(args, plum_pointers, checkpoints)
    if args.run is not None:
        for program in args.run:
            if program not in plum_pointers["programs"]:
                raise RuntimeError(
                    "Program {} was not found in config {}".format(
                        program, args.config))
            else:
                # Fresh run directory per program; stash runtime context
                # (checkpoints, gpu id) and a copy of the parsed config.
                env = create_environment(project_directory, program)
                env["checkpoints"] = checkpoints
                env["gpu"] = args.gpu
                (env["proj_dir"] / "config.json").write_text(config_json)
                plum_pointers["programs"][program].run(env, verbose=pedantic)
def find_checkpoints(root_dir, user_default):
    """Collect checkpoint metadata found anywhere under *root_dir*.

    Scans for ``ckpt.metadata.json`` files (oldest-modified first) and, for
    each manifest entry, records a ``"<run_dir>:<step>"`` id mapped to its
    criterion value and checkpoint path. Marks one entry as default: the
    user-supplied id when it exists, otherwise the optimal checkpoint of
    the most recently modified metadata file.

    Args:
        root_dir: pathlib.Path searched recursively.
        user_default: checkpoint id string to prefer as default, or None.

    Returns:
        OrderedDict mapping checkpoint id -> {"criterion": {...},
        "path": Path, ["default": True]}; empty when nothing was found.
    """
    checkpoints = OrderedDict()
    default = None
    ckpt_metas = list(root_dir.rglob("ckpt.metadata.json"))
    # Oldest runs first, so `default` ends up pointing at the newest run.
    ckpt_metas.sort(key=lambda x: os.stat(x).st_mtime)
    for path in ckpt_metas:
        meta = json.loads(path.read_text())
        # Id format: "<run dir name>:<step>", step taken from the filename.
        default = ckpt_id = "{}:{}".format(
            path.parent.parent.name, meta["optimal_checkpoint"].split(".")[-2])
        for item in meta["checkpoint_manifest"][::-1]:
            ckpt_id = "{}:{}".format(
                path.parent.parent.name, item["checkpoint"].split(".")[-2])
            checkpoints[ckpt_id] = {
                "criterion": {meta["criterion"]: item["criterion"]},
                "path": path.parent / item["checkpoint"]
            }
    if len(checkpoints) == 0:
        return checkpoints
    if user_default is None:
        checkpoints[default]["default"] = True
    else:
        if user_default not in checkpoints:
            from warnings import warn
            # Typo fix in the user-facing message: "upplied" -> "supplied".
            warn("User supplied default checkpoint not found, using {}".format(
                default))
            checkpoints[default]["default"] = True
        else:
            checkpoints[user_default]["default"] = True
    return checkpoints
#? for ckpt_id, md in checkpoints.items():
#? for crit, val in md["criterion"].items():
#? if ckpt_id == default:
#? print(" * {} | {} = {:6.7f}".format(ckpt_id, crit, val))
#? else:
#? print(" {} | {} = {:6.7f}".format(ckpt_id, crit, val))
def create_environment(proj, prog):
    """Allocate fresh run directories ("runN") for *prog* under *proj*.

    Picks the smallest N such that neither the project run directory nor
    the tensorboard run directory exists yet, creates both, and returns
    their paths.
    """
    base_proj = proj / prog
    base_tb = proj / "tb" / prog
    run_num = 1
    while ((base_proj / "run{}".format(run_num)).exists() or
           (base_tb / "run{}".format(run_num)).exists()):
        run_num += 1
    run_name = "run{}".format(run_num)
    proj_dir = base_proj / run_name
    tb_dir = base_tb / run_name
    proj_dir.mkdir(exist_ok=True, parents=True)
    tb_dir.mkdir(exist_ok=True, parents=True)
    return {"proj_dir": proj_dir, "tensorboard_dir": tb_dir}
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
d839256a8d0dfd778bd501e3df9f2c3f253c6d65 | e23512edf95ea66640eab85adb8ca0c24ae6e3f7 | /tensorflow/contrib/cluster_resolver/python/training/tpu_cluster_resolver.py | 2e75ac226ea74e879edda5e03dff3d53c8a76569 | [
"Apache-2.0"
] | permissive | snuspl/tensorflow | 755ac46c3163adb119de0755ed706b1c960991fb | 212d4e9e5f4093ecb90e5b7837d4e02da7506228 | refs/heads/r1.6 | 2021-06-25T18:03:17.625202 | 2018-12-30T09:35:50 | 2018-12-30T09:35:50 | 134,066,972 | 1 | 3 | Apache-2.0 | 2020-06-10T06:12:19 | 2018-05-19T14:02:25 | C++ | UTF-8 | Python | false | false | 5,508 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of Cluster Resolvers for Cloud TPUs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves.urllib.request import Request
from six.moves.urllib.request import urlopen
from tensorflow.contrib.cluster_resolver.python.training.cluster_resolver import ClusterResolver
from tensorflow.python.training.server_lib import ClusterSpec
_GOOGLE_API_CLIENT_INSTALLED = True
try:
from googleapiclient import discovery # pylint: disable=g-import-not-at-top
from oauth2client.client import GoogleCredentials # pylint: disable=g-import-not-at-top
except ImportError:
_GOOGLE_API_CLIENT_INSTALLED = False
class TPUClusterResolver(ClusterResolver):
  """Cluster Resolver for Google Cloud TPUs.
  This is an implementation of cluster resolvers for the Google Cloud TPU
  service. As Cloud TPUs are in alpha, you will need to specify a API definition
  file for this to consume, in addition to a list of Cloud TPUs in your Google
  Cloud Platform project.
  """
  def _requestComputeMetadata(self, path):
    # Query the GCE instance metadata server for *path*.
    # NOTE(review): urlopen(...).read() returns bytes on Python 3, and the
    # result is later interpolated with '%s' -- confirm the intended Python
    # version before reuse.
    req = Request('http://metadata/computeMetadata/v1/%s' % path,
                  headers={'Metadata-Flavor': 'Google'})
    resp = urlopen(req)
    return resp.read()
  def __init__(self,
               tpu_names,
               zone=None,
               project=None,
               job_name='tpu_worker',
               credentials='default',
               service=None):
    """Creates a new TPUClusterResolver object.
    The ClusterResolver will then use the parameters to query the Cloud TPU APIs
    for the IP addresses and ports of each Cloud TPU listed.
    Args:
      tpu_names: A list of names of the target Cloud TPUs.
      zone: Zone where the TPUs are located. If omitted or empty, we will assume
        that the zone of the TPU is the same as the zone of the GCE VM, which we
        will try to discover from the GCE metadata service.
      project: Name of the GCP project containing Cloud TPUs. If omitted or
        empty, we will try to discover the project name of the GCE VM from the
        GCE metadata service.
      job_name: Name of the TensorFlow job the TPUs belong to.
      credentials: GCE Credentials. If None, then we use default credentials
        from the oauth2client
      service: The GCE API object returned by the googleapiclient.discovery
        function. If you specify a custom service object, then the credentials
        parameter will be ignored.
    Raises:
      ImportError: If the googleapiclient is not installed.
    """
    # Fall back to the metadata service for project/zone when not given.
    if not project:
      project = self._requestComputeMetadata('/project/project-id')
    if not zone:
      zone_path = self._requestComputeMetadata('/instance/zone')
      # Metadata returns "projects/<id>/zones/<zone>"; keep the last part.
      zone = zone_path.split('/')[-1]
    self._project = project
    self._zone = zone
    self._tpu_names = tpu_names
    self._job_name = job_name
    self._credentials = credentials
    if credentials == 'default':
      if _GOOGLE_API_CLIENT_INSTALLED:
        self._credentials = GoogleCredentials.get_application_default()
    if service is None:
      if not _GOOGLE_API_CLIENT_INSTALLED:
        raise ImportError('googleapiclient must be installed before using the '
                          'TPU cluster resolver')
      # Build a Cloud TPU API (v1alpha1) client with the chosen credentials.
      self._service = discovery.build(
          'tpu', 'v1alpha1',
          credentials=self._credentials)
    else:
      self._service = service
  def get_master(self):
    """Get the ClusterSpec grpc master path.
    This returns the grpc path (grpc://1.2.3.4:8470) of first instance in the
    ClusterSpec returned by the cluster_spec function. This is suitable for use
    for the `master` argument in tf.Session() when you are using one TPU.
    Returns:
      string, the grpc path of the first instance in the ClusterSpec.
    Raises:
      ValueError: If none of the TPUs specified exists.
    """
    job_tasks = self.cluster_spec().job_tasks(self._job_name)
    if not job_tasks:
      raise ValueError('No TPUs exists with the specified names exist.')
    return 'grpc://' + job_tasks[0]
  def cluster_spec(self):
    """Returns a ClusterSpec object based on the latest TPU information.
    We retrieve the information from the GCE APIs every time this method is
    called.
    Returns:
      A ClusterSpec containing host information returned from Cloud TPUs.
    """
    worker_list = []
    for tpu_name in self._tpu_names:
      # Fully-qualified node name expected by the Cloud TPU API.
      full_name = 'projects/%s/locations/%s/nodes/%s' % (
          self._project, self._zone, tpu_name)
      request = self._service.projects().locations().nodes().get(name=full_name)
      response = request.execute()
      instance_url = '%s:%s' % (response['ipAddress'], response['port'])
      worker_list.append(instance_url)
    return ClusterSpec({self._job_name: worker_list})
| [
"[email protected]"
] | |
860f1791698bd78cf19dfd6b510dded8bfc3d7e6 | ccbfc7818c0b75929a1dfae41dc061d5e0b78519 | /aliyun-openapi-python-sdk-master/aliyun-python-sdk-dds/aliyunsdkdds/request/v20151201/ModifyDBInstanceSSLRequest.py | fa202b9965c36683da32f4164ac33488935281df | [
"Apache-2.0"
] | permissive | P79N6A/dysms_python | 44b634ffb2856b81d5f79f65889bfd5232a9b546 | f44877b35817e103eed469a637813efffa1be3e4 | refs/heads/master | 2020-04-28T15:25:00.368913 | 2019-03-13T07:52:34 | 2019-03-13T07:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,252 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class ModifyDBInstanceSSLRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dds', '2015-12-01', 'ModifyDBInstanceSSL','dds')
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_SecurityToken(self):
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self,SecurityToken):
self.add_query_param('SecurityToken',SecurityToken)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_SSLAction(self):
return self.get_query_params().get('SSLAction')
def set_SSLAction(self,SSLAction):
self.add_query_param('SSLAction',SSLAction)
def get_DBInstanceId(self):
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self,DBInstanceId):
self.add_query_param('DBInstanceId',DBInstanceId)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId) | [
"[email protected]"
] | |
6fba3e2f19852a48949e376eb639e1e68a6b4b9b | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/aio/operations/_route_tables_operations.py | 49df42e16d5c87c760e0e1d5c64e87e8bdf8319f | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 29,731 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RouteTablesOperations:
"""RouteTablesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
route_table_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
route_table_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
async def get(
self,
resource_group_name: str,
route_table_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.RouteTable":
"""Gets the specified route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteTable, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_02_01.models.RouteTable
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
route_table_name: str,
parameters: "_models.RouteTable",
**kwargs: Any
) -> "_models.RouteTable":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'RouteTable')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteTable', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
route_table_name: str,
parameters: "_models.RouteTable",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteTable"]:
"""Create or updates a route table in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param parameters: Parameters supplied to the create or update route table operation.
:type parameters: ~azure.mgmt.network.v2019_02_01.models.RouteTable
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteTable or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_02_01.models.RouteTable]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
async def _update_tags_initial(
self,
resource_group_name: str,
route_table_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.RouteTable":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
route_table_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteTable"]:
"""Updates a route table tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param parameters: Parameters supplied to update route table tags.
:type parameters: ~azure.mgmt.network.v2019_02_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteTable or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_02_01.models.RouteTable]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.RouteTableListResult"]:
"""Gets all route tables in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteTableListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_02_01.models.RouteTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTableListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('RouteTableListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.RouteTableListResult"]:
"""Gets all route tables in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteTableListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_02_01.models.RouteTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTableListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('RouteTableListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeTables'} # type: ignore
| [
"[email protected]"
] | |
07fb742a3154e4c2d48b95cdd0de4b5caa6e4ef6 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp221_6000.py | c3d96f9d609326fe4bc4a6ffcbb094569ea52cb6 | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,781 | py | ITEM: TIMESTEP
6000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
-1.7251793543927784e+02 2.1971793543932216e+02
-1.7251793543927784e+02 2.1971793543932216e+02
-1.7251793543927784e+02 2.1971793543932216e+02
ITEM: ATOMS id type xs ys zs
1781 1 0.166858 0.0483895 0.47982
2019 1 0.667454 0.0408147 0.327508
1402 1 0.126028 0.107579 0.0222365
946 1 0.41962 0.22531 0.0315073
634 1 0.719169 0.00947924 0.296099
1540 1 0.055396 0.337112 0.0141179
1145 1 0.0944242 0.280701 0.0221489
1790 1 0.379169 0.325649 0.0439541
898 1 0.109468 0.138224 0.492574
1703 1 0.704322 0.0295604 0.292673
721 1 0.601582 0.41114 0.032581
1183 1 0.384061 0.0590666 0.0255187
1080 1 0.785704 0.0109976 0.328347
207 1 0.885959 0.0506229 0.011638
925 1 0.144049 0.0303409 0.0589073
825 1 0.136275 0.494198 0.428881
1788 1 0.16501 0.0904832 0.115858
1690 1 0.513212 0.12866 0.0207592
1843 1 0.292884 0.147836 0.0444932
394 1 0.232533 0.18692 0.050074
1520 1 0.541721 0.263317 0.0339968
1868 1 0.618482 0.390721 0.0311359
83 1 0.743384 0.0275708 0.497587
1404 1 0.133096 0.0350312 0.322459
1903 1 0.893717 0.49823 8.84766e-05
592 1 0.410045 0.461067 0.075258
1379 1 0.411639 0.488746 0.212435
764 1 0.77383 0.293054 0.498866
1797 1 0.610495 0.0461062 0.00408997
307 1 0.576881 0.0917467 0.0212058
1813 1 0.60828 0.0734168 0.101475
1842 1 0.00374821 0.0248614 0.0197789
691 1 0.844695 0.195975 0.0500139
686 1 0.578776 0.0255127 0.376377
1131 1 0.0521193 0.182857 0.0197944
351 1 0.460419 0.212448 0.0675386
969 1 0.570824 0.260742 0.0433312
570 1 0.760345 0.347292 0.0375517
976 1 0.0327571 0.412079 0.00813936
1986 1 0.538568 0.36445 0.0524922
727 1 0.690219 0.479279 0.0940327
480 1 0.774137 0.34948 0.0420485
948 1 0.459241 0.457639 0.0286265
201 1 0.497723 0.44201 0.0311968
387 1 0.726386 0.410315 0.0711661
709 1 0.210419 0.0299075 0.0793622
464 1 0.336401 0.0772408 0.0393659
1386 1 0.750951 0.046251 0.0354531
1607 1 0.395249 0.083767 0.0530909
321 1 0.48847 0.0888926 0.117795
1417 1 0.324243 0.128511 0.0265194
1007 1 0.80308 0.143361 0.048058
961 1 0.395938 0.187154 -0.000419575
268 1 0.788292 0.197928 0.0321338
1899 1 0.931692 0.195189 0.0872727
1730 1 0.36453 0.175592 0.120328
1300 1 0.270395 0.293491 0.0880877
1912 1 0.425274 0.235086 0.0663083
1795 1 0.851236 0.413911 0.0811524
199 1 0.0262205 0.301068 0.0074452
1030 1 0.465944 0.399504 0.498407
1442 1 0.347827 0.481581 0.0915155
195 1 0.775407 0.491774 0.0109908
766 1 0.328225 0.0690112 0.0646283
1280 1 0.72842 0.0824292 0.056527
795 1 0.154517 0.0643204 0.158143
958 1 0.551084 0.121556 0.11704
945 1 0.470528 0.187572 0.093818
331 1 0.601909 0.125784 0.133762
1507 1 0.210145 0.19619 0.0635108
1833 1 0.231959 0.242133 0.0905651
1103 1 0.279148 0.266319 0.0776424
700 1 0.699188 0.259256 0.0663497
1889 1 0.85456 0.237914 0.0866317
553 1 0.139388 0.322844 0.0333711
349 1 0.131933 0.401899 0.0587359
561 1 0.317775 0.391585 0.0604869
15 1 0.331345 0.357285 0.0534612
395 1 0.948468 0.395539 0.0699686
676 1 0.202567 0.393105 0.0877653
1798 1 0.301736 0.384028 0.0679238
823 1 0.531002 0.422638 0.171819
56 1 0.924963 0.500455 0.112976
345 1 0.490481 0.43345 0.0730892
1613 1 0.31939 0.48512 0.242346
745 1 0.979887 0.491948 0.115278
2048 1 0.150499 0.115017 0.00699299
1186 1 0.399534 0.462794 0.0401034
1778 1 0.0940793 0.03016 0.021457
1135 1 0.52588 0.491143 0.172744
500 1 0.258222 0.0606685 0.14339
270 1 0.395985 0.0606663 0.103506
486 1 0.120608 0.174044 0.0955499
1969 1 0.646755 0.107599 0.0456993
794 1 0.0572367 0.148978 0.0793211
1420 1 0.606648 0.155662 0.120642
148 1 0.075439 0.189618 0.0779264
1489 1 0.498427 0.225051 0.0877491
2010 1 0.976133 0.186301 0.179244
503 1 0.306633 0.25491 0.109335
1796 1 0.854387 0.238013 0.103447
939 1 0.962837 0.240319 0.08728
461 1 0.334736 0.288265 0.119762
180 1 0.338223 0.344007 0.101953
874 1 0.0546952 0.479343 0.0546197
1602 1 0.342396 0.414314 0.0751138
1274 1 0.703339 0.38319 0.0893583
1427 1 0.893098 0.422392 0.13
1873 1 0.760964 0.0130742 0.412734
358 1 0.274263 0.0659196 0.112383
1893 1 0.725135 0.0533799 0.0746415
1825 1 0.359953 0.0623279 0.0604033
1921 1 0.554413 0.0987638 0.109301
1687 1 0.117209 0.169204 0.0878894
1205 1 0.43836 0.152358 0.153118
21 1 0.594856 0.176462 0.149325
791 1 0.191494 0.212345 0.0950874
1004 1 0.591274 0.191698 0.0892055
1287 1 0.794841 0.18048 0.0639331
505 1 0.380274 0.208677 0.137314
928 1 0.783338 0.209468 0.103742
812 1 0.195847 0.260956 0.136362
2027 1 0.203472 0.212592 0.0946016
1156 1 0.963582 0.299762 0.110307
653 1 0.852209 0.341568 0.132179
1881 1 0.640486 0.269869 0.114123
336 1 0.214224 0.387228 0.130532
1169 1 0.95098 0.393669 0.120549
935 1 0.788628 0.422562 0.0762794
1037 1 0.12657 0.0544496 0.136823
1393 1 0.231788 0.0456169 0.144554
468 1 0.326578 0.0243191 0.14679
1575 1 0.318539 0.0977072 0.123195
769 1 0.669551 0.104525 0.12232
269 1 0.444044 0.105218 0.154672
1761 1 0.274919 0.0461898 0.138032
1934 1 0.313164 0.091997 0.140763
1119 1 0.371198 0.125371 0.0918696
688 1 0.744515 0.174689 0.141252
203 1 0.186397 0.128377 0.122742
692 1 0.291534 0.199873 0.141121
441 1 0.412924 0.134289 0.161522
206 1 0.957809 0.143149 0.0536539
182 1 0.651514 0.235166 0.151863
539 1 0.836221 0.204986 0.168964
710 1 0.0741149 0.31428 0.112462
245 1 0.248387 0.206391 0.037272
1571 1 0.299139 0.29368 0.0851778
1272 1 0.0352466 0.296968 0.138448
1195 1 0.0700462 0.349698 0.110062
1701 1 0.0266489 0.288903 0.128799
1771 1 0.29191 0.279616 0.194781
1757 1 0.783749 0.39594 0.125452
1539 1 0.766465 0.392988 0.130626
967 1 0.937534 0.384554 0.105665
1985 1 0.0769051 0.453633 0.136026
1134 1 0.449599 0.378997 0.176223
1993 1 0.189036 0.460793 0.106211
1908 1 0.527585 0.4128 0.152751
708 1 0.107271 0.436292 0.140649
1414 1 0.288673 0.488572 0.116483
1576 1 0.925288 0.460064 0.136819
1097 1 0.147279 0.264481 0.487315
1285 1 0.758762 0.116854 0.180824
1557 1 0.404816 0.0255166 0.160108
71 1 0.911907 0.159504 0.178839
959 1 0.120434 0.125872 0.0867949
2016 1 0.276569 0.125107 0.111556
229 1 0.952822 0.0868816 0.143393
943 1 0.203241 0.100404 0.234984
1316 1 0.248319 0.226536 0.121298
783 1 0.431302 0.191046 0.101992
1431 1 0.327485 0.211555 0.180947
30 1 0.978258 0.187986 0.133809
537 1 0.287062 0.203026 0.199555
158 1 0.612945 0.272332 0.112785
1255 1 0.332579 0.238543 0.146609
1188 1 0.175944 0.262804 0.190506
1421 1 0.657771 0.30284 0.232391
1673 1 0.68252 0.280679 0.21798
1022 1 0.868627 0.249634 0.223127
123 1 0.0350804 0.283363 0.126015
1617 1 0.68471 0.31409 0.151041
1942 1 0.727921 0.320898 0.180335
1744 1 0.384846 0.41657 0.159672
1314 1 0.211778 0.174534 0.0197534
2003 1 0.118457 0.459361 0.151553
901 1 0.314477 0.487539 0.148191
127 1 0.278546 0.466454 0.130297
1573 1 0.914679 0.468036 0.150833
1331 1 0.476392 0.49565 0.159002
650 1 0.990175 0.313702 0.018555
241 1 0.554775 0.046374 0.139929
642 1 0.440898 0.00934896 0.179857
1165 1 0.81392 0.0503579 0.151491
787 1 0.698822 0.119966 0.148397
1469 1 0.71842 0.243612 0.192366
1884 1 0.418062 0.232697 0.179091
258 1 0.712333 0.364827 0.182256
476 1 0.960051 0.325189 0.132649
1975 1 0.494321 0.326655 0.131976
482 1 0.210679 0.397592 0.123187
1227 1 0.276247 0.423294 0.164894
910 1 0.355472 0.354712 0.177852
404 1 0.58351 0.440732 0.194472
2032 1 0.944804 0.36128 0.130821
1429 1 0.608869 0.421894 0.180478
1774 1 0.793489 0.390561 0.128559
860 1 0.860291 0.179833 0.490474
1719 1 0.838855 0.480451 0.147896
1325 1 0.810674 0.431194 0.478123
611 1 0.420408 0.0416472 0.149006
25 1 0.694038 0.0796502 0.133555
1178 1 0.460233 0.148466 0.182887
804 1 0.862768 0.0258671 0.222576
1849 1 0.788379 0.121591 0.208976
641 1 0.866415 0.177175 0.243339
1503 1 0.0422309 0.217 0.24351
366 1 0.163276 0.204625 0.156087
102 1 0.688338 0.237879 0.202165
1655 1 0.942003 0.217159 0.182827
281 1 0.493693 0.259401 0.187385
1111 1 0.876146 0.297383 0.206625
717 1 0.409244 0.310412 0.196875
1481 1 0.110049 0.390186 0.134938
790 1 0.242121 0.446452 0.179354
864 1 0.287026 0.441629 0.205626
1663 1 0.421299 0.447286 0.188861
806 1 0.265007 0.475611 0.175096
1601 1 0.363075 0.104897 0.0180199
488 1 0.240878 0.463436 0.431647
842 1 0.935209 0.362426 0.0273071
6 1 0.56913 0.0690293 0.196206
1125 1 0.823446 0.12756 0.236646
1411 1 0.951703 0.17941 0.185027
1081 1 0.00444125 0.227568 0.242799
192 1 0.0865334 0.228981 0.177817
1361 1 0.643824 0.329416 0.193244
175 1 0.737164 0.288619 0.16906
647 1 0.346967 0.310604 0.25741
899 1 0.937944 0.371458 0.165726
1656 1 0.0317115 0.419956 0.206516
1217 1 0.694696 0.351509 0.194636
1466 1 0.473501 0.467877 0.253467
908 1 0.602397 0.385073 0.228016
1064 1 0.837215 0.464984 0.237223
446 1 0.931156 0.479829 0.219301
1249 1 0.281967 0.0472331 0.247073
597 1 0.969018 0.0929954 0.201267
526 1 0.407353 0.130648 0.246795
1137 1 0.6196 0.157445 0.189193
829 1 0.571675 0.181383 0.242731
1932 1 0.74837 0.242016 0.210893
1850 1 0.523122 0.280118 0.214874
421 1 0.128526 0.281774 0.186067
655 1 0.116546 0.33537 0.199742
1084 1 0.135745 0.359565 0.206817
1075 1 0.479739 0.351828 0.236733
879 1 0.916585 0.316764 0.219517
1521 1 0.355578 0.367972 0.262413
1488 1 0.791891 0.419221 0.267932
1283 1 0.865419 0.442631 0.171949
1172 1 0.5326 0.0965701 0.263051
1645 1 0.772981 0.15966 0.19459
1772 1 0.134182 0.262205 0.214572
1115 1 0.689592 0.235856 0.212083
504 1 0.717953 0.197978 0.232875
409 1 0.0289005 0.324873 0.199483
810 1 0.86131 0.386241 0.261083
1387 1 0.90966 0.349969 0.188668
675 1 0.256667 0.473116 0.27018
151 1 0.664053 0.437737 0.271886
538 1 0.541988 0.0283086 0.227298
620 1 0.716687 0.0560541 0.30015
960 1 0.433959 0.0543267 0.255395
97 1 0.251652 0.122983 0.240986
979 1 0.710255 0.126514 0.265107
2028 1 0.362528 0.168081 0.257671
1193 1 0.0801783 0.179854 0.299256
835 1 0.791942 0.246562 0.199626
1979 1 0.586341 0.320812 0.25158
774 1 0.639849 0.316173 0.229745
164 1 0.067186 0.441966 0.169978
65 1 0.149018 0.426603 0.269396
1085 1 0.348846 0.480158 0.236352
1599 1 0.944198 0.340773 0.497746
2002 1 0.427844 0.0923011 0.324203
693 1 0.487266 0.158866 0.259561
1800 1 0.632118 0.114471 0.237281
230 1 0.968307 0.18455 0.300388
1333 1 0.479278 0.124995 0.281764
1708 1 0.593465 0.180399 0.274283
638 1 0.162507 0.214953 0.2366
185 1 0.579465 0.31791 0.262054
469 1 0.223719 0.372781 0.326296
118 1 0.398354 0.396334 0.31552
983 1 0.631071 0.44382 0.239848
856 1 0.0204824 0.413665 0.316948
1203 1 0.164611 0.419212 0.327308
914 1 0.142872 0.223675 0.408827
704 1 0.302718 0.0547875 0.343932
1174 1 0.694005 0.0118132 0.320103
439 1 0.575463 0.054349 0.272777
1624 1 0.885503 0.473618 0.0101461
1943 1 0.460279 0.0435857 0.243948
1866 1 0.90168 0.040637 0.302686
953 1 0.207957 0.0794368 0.325821
1230 1 0.411867 0.147256 0.247217
1840 1 0.593225 0.102699 0.322647
1117 1 0.795081 0.133551 0.249399
750 1 0.577238 0.241657 0.27046
66 1 0.620152 0.334719 0.290288
367 1 0.980991 0.368974 0.292397
630 1 0.822657 0.376997 0.270753
664 1 0.579823 0.337697 0.346044
218 1 0.664631 0.397391 0.301653
1515 1 0.0838226 0.359552 0.271956
485 1 0.381828 0.480356 0.321033
662 1 0.677824 0.456811 0.289335
1105 1 0.787784 0.0125642 0.286778
121 1 0.995952 0.169452 0.253421
974 1 0.0670806 0.178778 0.342985
887 1 0.564764 0.0899046 0.263375
635 1 0.745194 0.108511 0.356709
1605 1 0.754084 0.272736 0.287223
870 1 0.446972 0.233945 0.353294
889 1 0.945254 0.313488 0.360558
2008 1 0.879934 0.377125 0.357449
1922 1 0.524675 0.336654 0.370145
1998 1 0.368871 0.4265 0.327267
125 1 0.286496 0.417175 0.310452
1854 1 0.522503 0.452309 0.357109
378 1 0.133884 0.474196 0.250955
1065 1 0.693728 0.106085 0.480075
751 1 0.210948 0.439798 0.339874
128 1 0.267611 0.0492009 0.328147
1340 1 0.391008 0.0206672 0.0152569
1684 1 0.957412 0.114401 0.320976
26 1 0.291261 0.14092 0.272981
1878 1 0.772362 0.176223 0.35259
1245 1 0.0494199 0.25325 0.369898
566 1 0.229714 0.267698 0.380427
1578 1 0.376597 0.253014 0.335389
1834 1 0.572582 0.442306 0.351735
1595 1 0.659915 0.344803 0.27655
1129 1 0.649129 0.334831 0.327159
926 1 0.973849 0.350638 0.305935
565 1 0.219976 0.434037 0.416083
1974 1 0.641485 0.401507 0.361374
410 1 0.372657 0.493793 0.326111
402 1 0.996236 0.0487621 0.404833
100 1 0.735378 0.088356 0.357728
332 1 0.401197 0.12913 0.383896
1242 1 0.0487311 0.176136 0.339994
1296 1 0.33044 0.138031 0.35089
1822 1 0.0934525 0.198849 0.279904
36 1 0.389962 0.124069 0.354821
728 1 0.299047 0.127068 0.336692
1581 1 0.412072 0.254107 0.355276
305 1 0.496962 0.291654 0.309132
1086 1 0.970283 0.205888 0.324433
484 1 0.574467 0.239934 0.305505
1696 1 0.482903 0.318528 0.343747
310 1 0.270104 0.380708 0.320375
1091 1 0.481017 0.440353 0.333254
1038 1 0.0865621 0.445749 0.375333
272 1 0.329104 0.404494 0.342016
1782 1 0.089088 0.45651 0.373013
1551 1 0.261661 0.0186798 0.388367
362 1 0.503346 0.0784769 0.322844
1341 1 0.153221 0.151396 0.395518
373 1 0.626887 0.0961335 0.362102
105 1 0.717219 0.0899684 0.327054
1552 1 0.716599 0.248573 0.345005
216 1 0.882786 0.254183 0.36611
1871 1 0.0251094 0.300311 0.377474
1092 1 0.319218 0.251254 0.361776
1728 1 0.935613 0.231575 0.384684
1967 1 0.556674 0.264 0.387349
990 1 0.578733 0.296927 0.390793
701 1 0.533802 0.308086 0.331356
552 1 0.187366 0.332233 0.35272
130 1 0.479353 0.363414 0.363811
1149 1 0.912213 0.307472 0.364149
992 1 0.698042 0.348604 0.357031
86 1 0.147172 0.404852 0.364978
1609 1 0.718372 0.486388 0.127739
1209 1 0.658394 0.438827 0.333904
1801 1 0.361499 0.0526158 0.355839
249 1 0.315311 0.114481 0.371481
518 1 0.883564 0.183802 0.394793
720 1 0.60746 0.242984 0.398245
7 1 0.171426 0.258576 0.372403
361 1 0.487614 0.276537 0.404601
92 1 0.642185 0.329649 0.4347
146 1 0.053417 0.367465 0.380637
1732 1 0.554258 0.467361 0.408807
191 1 0.196036 0.487033 0.396974
644 1 0.0891672 0.105246 0.395855
877 1 0.994428 0.0365762 0.418241
1955 1 0.971086 0.0556824 0.315806
1876 1 0.379737 0.0647637 0.358421
99 1 0.835931 0.0854271 0.384963
1548 1 0.00688413 0.110327 0.422248
24 1 0.302195 0.125312 0.404513
1700 1 0.87503 0.098264 0.454416
242 1 0.229883 0.162838 0.429613
205 1 0.201242 0.255936 0.410243
107 1 0.974706 0.336016 0.432342
1301 1 0.656923 0.325825 0.394297
339 1 0.143974 0.191951 0.349031
1867 1 0.342446 0.465659 0.431138
120 1 0.681799 0.384444 0.394406
1819 1 0.927378 0.42301 0.426681
197 1 0.944374 0.370058 0.450649
645 1 0.445439 0.441236 0.38424
1189 1 0.889717 0.332661 0.0117272
364 1 0.603816 0.470585 0.425097
1498 1 0.828638 0.48349 0.405285
98 1 0.160796 0.0161862 0.422856
873 1 0.507082 0.0908428 0.429524
1240 1 0.832608 0.0639593 0.406243
1118 1 0.0296653 0.0656352 0.427175
1525 1 0.159077 0.0866622 0.460018
1270 1 0.128477 0.133628 0.422556
157 1 0.139094 0.150528 0.449498
551 1 0.650293 0.128026 0.329349
1132 1 0.272174 0.232686 0.462351
1858 1 0.502377 0.264788 0.459996
1327 1 0.479179 0.358484 0.478879
414 1 0.173789 0.324988 0.414815
660 1 0.680896 0.404254 0.458677
1588 1 0.949382 0.416437 0.474774
1582 1 0.145238 0.430411 0.42304
17 1 0.895347 0.45706 0.488543
633 1 0.0848332 0.485393 0.362752
612 1 0.787633 0.423511 0.0217916
673 1 0.306228 0.0392485 0.445585
797 1 0.410329 0.0710745 0.482422
1633 1 0.965819 0.0598067 0.411531
297 1 0.644077 0.0486674 0.457169
1055 1 0.531589 0.0867458 0.467843
947 1 0.418507 0.123366 0.436473
643 1 0.602159 0.165532 0.40583
1479 1 0.627794 0.358506 0.00468088
1829 1 0.1717 0.174625 0.386544
1556 1 0.726952 0.297251 0.399611
39 1 0.75996 0.0190059 0.269678
760 1 0.232187 0.36642 0.433817
1628 1 0.0143111 0.369831 0.416945
382 1 0.298082 0.43132 0.426637
1244 1 0.760201 0.478376 0.489974
169 1 0.543485 0.409135 0.49797
233 1 0.310141 0.259933 0.00236609
918 1 0.608981 0.0789279 0.461687
198 1 0.836021 0.124556 0.455136
371 1 0.300921 0.0915934 0.0228171
1251 1 0.348397 0.134757 0.474042
308 1 0.197477 0.498504 0.353003
1591 1 0.194716 0.184716 0.487141
1040 1 0.103783 0.118298 0.460662
540 1 0.152533 0.163922 0.416649
628 1 0.36904 0.137525 0.424383
221 1 0.0832315 0.172223 0.451954
463 1 0.384816 0.309466 0.453942
236 1 0.034321 0.422584 0.44558
671 1 0.320064 0.425511 0.398016
1388 1 0.837845 0.430454 0.415049
1458 1 0.0871006 0.479796 0.456891
1450 1 0.237529 0.48742 0.451331
1051 1 0.859978 0.498041 0.498995
1486 1 0.401063 0.0105164 0.494766
293 1 0.0580882 0.0906149 0.480912
963 1 0.257083 0.153915 0.48171
1273 1 0.749228 0.157973 0.469958
40 1 0.402378 0.224094 0.475632
666 1 0.149584 0.296003 0.422433
1917 1 0.783784 0.37517 0.43402
1902 1 0.738181 0.427824 0.479913
1647 1 0.0361281 0.465039 0.436235
1627 1 0.0298547 0.00460274 0.11833
1424 1 0.494854 0.477003 0.118667
1672 1 0.101886 0.135367 0.481045
1141 1 0.315562 0.0853315 0.409265
177 1 0.859907 0.124757 0.476753
465 1 0.69866 0.176757 0.45715
1635 1 0.123917 0.0741363 0.406582
335 1 0.427651 0.0317 0.220293
1278 1 0.874638 0.221334 0.436386
67 1 0.396343 0.209507 0.0243147
1371 1 0.274692 0.232874 0.481759
76 1 0.512729 0.0147093 0.130035
1448 1 0.526663 0.264479 0.487667
1751 1 0.329286 0.333247 0.461343
70 1 0.404681 0.364451 0.489461
637 1 0.102544 0.368514 0.480135
34 1 0.649663 0.0760307 0.443491
1222 1 0.621079 0.369626 0.474025
328 1 0.701334 0.408069 0.490149
1219 1 0.284096 0.480922 0.256899
718 1 0.991965 0.48827 0.426142
1754 1 0.5672 0.412127 0.0332373
1121 1 0.724023 0.454278 0.246926
1348 1 0.415496 0.233284 0.444061
1572 1 0.00779925 0.407478 0.0409472
45 1 0.210109 0.0614488 0.00106776
1164 1 0.390702 0.047313 0.516356
585 1 0.722716 0.0407539 0.998032
18 1 0.397304 0.0143873 0.735819
1457 1 0.57615 0.0542855 0.524329
165 1 0.721203 0.00733536 0.54049
1896 1 0.780372 0.00821851 0.577992
350 1 0.165957 0.387838 0.929912
1184 1 0.0595236 0.104144 0.52147
1018 1 0.158495 0.0381927 0.725699
330 1 0.0857302 0.00837215 0.858533
1584 1 0.77905 0.107402 0.526147
447 1 0.0534464 0.203155 0.533831
1887 1 0.182159 0.18224 0.609883
1360 1 0.811425 0.00444714 0.977159
1155 1 0.360748 0.477682 0.656332
1305 1 0.854298 0.468061 0.906882
1623 1 0.212663 0.17582 0.507252
1176 1 0.265189 0.455357 0.812232
886 1 0.831269 0.193743 0.564076
594 1 0.713245 0.231855 0.505311
342 1 0.216267 0.224735 0.520139
391 1 0.695167 0.259727 0.547111
1444 1 0.74729 0.259341 0.566547
187 1 0.851177 0.00324435 0.921786
1297 1 0.331669 0.375925 0.557082
968 1 0.471655 0.466414 0.58241
785 1 0.726634 0.367061 0.548612
137 1 0.252962 0.469051 0.751495
1471 1 0.259774 0.475576 0.764571
176 1 0.026506 0.377263 0.989872
434 1 0.586084 0.465212 0.591021
941 1 0.523341 0.417354 0.577742
1449 1 0.549935 0.0635029 0.565566
309 1 0.660709 0.0239652 0.535852
348 1 0.882449 0.0371435 0.736338
1714 1 0.79679 0.138368 0.523759
470 1 0.8925 0.0429817 0.510609
775 1 0.229715 0.151873 0.507285
956 1 0.813321 0.258114 0.53512
1199 1 0.951806 0.289898 0.529992
580 1 0.351056 0.296916 0.540212
1104 1 0.521889 0.481365 0.843136
491 1 0.166996 0.379718 0.522232
569 1 0.779877 0.381706 0.521924
423 1 0.801217 0.309263 0.545998
1346 1 0.0960377 0.343778 0.533274
1824 1 0.922833 0.328862 0.514288
69 1 0.197981 0.4412 0.542431
223 1 0.609356 0.384387 0.550511
325 1 0.607588 0.125228 0.993701
1376 1 0.726595 0.490025 0.64489
1619 1 0.635622 0.427628 0.545832
1029 1 0.256647 0.474702 0.514892
1662 1 0.0974551 0.0172005 0.707109
487 1 0.215926 0.0568052 0.562105
189 1 0.158853 0.101884 0.568073
883 1 0.235416 0.00266352 0.962706
1478 1 0.953399 0.0443375 0.517085
670 1 0.969673 0.130035 0.530352
1963 1 0.0361354 0.0391509 0.535937
697 1 0.632728 0.112497 0.535187
651 1 0.987384 0.116352 0.537674
1112 1 0.318597 0.148153 0.527256
1648 1 0.327613 0.262524 0.599029
2039 1 0.336896 0.196283 0.56575
217 1 0.538073 0.186091 0.530505
588 1 0.347412 0.287616 0.571611
1536 1 0.508136 0.259439 0.597857
711 1 0.665611 0.352606 0.560449
298 1 0.378493 0.374246 0.503688
1190 1 0.940184 0.446646 0.538784
1861 1 0.0529628 0.230857 0.527733
781 1 0.090855 0.0102932 0.553226
1841 1 0.37709 0.119216 0.64682
37 1 0.570988 0.0875976 0.558835
1033 1 0.220352 0.348748 0.504561
567 1 0.282696 0.205122 0.554409
533 1 0.551706 0.128193 0.575872
776 1 0.020182 0.152797 0.601047
1784 1 0.691265 0.135378 0.565287
885 1 0.742543 0.225768 0.575519
60 1 0.262897 0.225942 0.591045
909 1 0.955458 0.220596 0.582254
1159 1 0.150329 0.18162 0.633126
1847 1 0.0334348 0.263174 0.504951
1542 1 0.368398 0.290848 0.55163
587 1 0.667392 0.309374 0.551779
415 1 0.0847652 0.359249 0.588579
1281 1 0.639844 0.309003 0.561305
616 1 0.117345 0.373081 0.546848
1232 1 0.0141232 0.472904 0.584407
938 1 0.428013 0.319183 0.522048
82 1 0.746771 0.400448 0.603218
1182 1 0.254516 0.0523112 0.586496
472 1 0.764441 0.039475 0.539214
1397 1 0.465008 0.0967545 0.610112
1827 1 0.46648 0.0739619 0.567861
832 1 0.842698 0.117387 0.516979
1674 1 0.808737 0.12728 0.610208
116 1 0.626046 0.175858 0.627474
799 1 0.84129 0.388166 0.99674
1213 1 0.9733 0.291721 0.600971
875 1 0.00297264 0.301085 0.553268
138 1 0.472103 0.275818 0.572545
374 1 0.469558 0.26699 0.592556
262 1 0.264671 0.273736 0.637003
1359 1 0.00932443 0.247183 0.57277
1506 1 0.425216 0.408579 0.583487
278 1 0.577917 0.367152 0.617967
252 1 0.127302 0.3939 0.606487
122 1 0.997844 0.392741 0.56232
1363 1 0.636885 0.385036 0.559014
850 1 0.657296 0.499987 0.559149
1806 1 0.181899 0.467796 0.528145
789 1 0.408098 0.480645 0.632375
231 1 0.618997 0.0155081 0.568863
1256 1 0.9281 0.368166 0.996401
560 1 0.821778 0.133328 0.631216
448 1 0.00803125 0.134256 0.623097
1695 1 0.465149 0.147485 0.5906
1087 1 0.686797 0.0884071 0.675227
292 1 0.923945 0.205891 0.571605
179 1 0.455378 0.181852 0.565931
1748 1 0.934888 0.20677 0.609833
323 1 0.435845 0.27402 0.641166
1872 1 0.860653 0.289072 0.572405
412 1 0.763641 0.319053 0.55752
1558 1 0.319823 0.340558 0.542959
872 1 0.689039 0.318093 0.545791
294 1 0.791017 0.417187 0.52636
1221 1 0.78372 0.424254 0.555815
1302 1 0.0220827 -6.43943e-06 0.622333
1658 1 0.478575 0.0523957 0.625423
1753 1 0.865583 0.0637916 0.515366
1260 1 0.760008 0.0477897 0.574814
478 1 0.0691171 0.110827 0.657308
669 1 0.882113 0.0891303 0.651013
623 1 0.432092 0.146398 0.62928
1310 1 0.0261218 0.140322 0.606296
1839 1 0.121149 0.247153 0.597645
1368 1 0.95349 0.323411 0.656386
867 1 0.40296 0.335854 0.565154
1870 1 0.0436205 0.485784 0.660434
1000 1 0.459361 0.416618 0.619372
1158 1 0.609969 0.483655 0.84934
370 1 0.637089 0.497084 0.592004
1021 1 0.951192 0.0719512 0.617414
1154 1 0.287255 0.0567384 0.659156
1939 1 0.621911 0.16025 0.631605
1443 1 0.523897 0.17979 0.657365
1023 1 0.921586 0.241005 0.641049
1050 1 0.326046 0.360517 0.592941
511 1 0.33014 0.393396 0.645461
499 1 0.354062 0.312808 0.657969
1941 1 0.190702 0.376444 0.621303
1692 1 0.597508 0.414903 0.628914
1456 1 0.966874 0.395179 0.688795
1612 1 0.143245 0.440575 0.644551
749 1 0.919178 0.0249237 0.994771
106 1 0.0671925 0.474559 0.986783
1956 1 0.169573 0.282092 0.560548
13 1 0.370405 0.0666846 0.630851
261 1 0.743303 0.0794683 0.643218
788 1 0.624295 0.110875 0.632637
1372 1 0.917091 0.129118 0.617204
183 1 0.908715 0.162218 0.62299
435 1 0.366937 0.12175 0.556237
1812 1 0.559946 0.134043 0.734985
1413 1 0.567864 0.272084 0.579053
400 1 0.173575 0.312484 0.660309
1836 1 0.0667053 0.313307 0.627222
324 1 0.205767 0.425578 0.628914
1224 1 0.950552 0.386367 0.617932
209 1 0.71336 0.425308 0.705797
1237 1 0.229426 0.147774 0.540689
699 1 0.545606 0.433805 0.703708
46 1 0.903664 0.44071 0.629421
801 1 0.240843 0.0603736 0.571005
957 1 0.225802 0.081211 0.654127
1226 1 0.734374 0.0234719 0.662255
1643 1 0.111898 0.0798432 0.639965
629 1 0.249692 0.107589 0.65893
1698 1 0.364519 0.111019 0.689156
1976 1 0.0297366 0.0739732 0.624032
1194 1 0.796627 0.0451663 0.7233
1634 1 0.932635 0.083319 0.699985
1398 1 0.415523 0.166962 0.715359
1618 1 0.661799 0.148121 0.641819
1882 1 0.235422 0.154951 0.72513
1775 1 0.67166 0.147566 0.647034
1785 1 0.905165 0.170438 0.730037
149 1 0.275242 0.185653 0.569721
1041 1 0.0166422 0.302989 0.676221
1445 1 0.845167 0.387313 0.666707
520 1 0.467462 0.0683307 0.971603
300 1 0.375189 0.394816 0.72988
1996 1 0.246017 0.477581 0.709058
493 1 0.985077 0.456006 0.841699
1721 1 0.505258 0.0917753 0.616285
694 1 0.932089 0.15177 0.710115
818 1 0.31585 0.150649 0.624755
1780 1 0.112522 0.220402 0.678804
1682 1 0.235019 0.246741 0.72542
1646 1 0.227733 0.193002 0.729579
432 1 0.967543 0.269956 0.715416
591 1 0.78368 0.245368 0.62184
541 1 0.164934 0.344463 0.670847
1016 1 0.113031 0.234726 0.682184
848 1 0.598975 0.217877 0.669505
722 1 0.321817 0.344952 0.601053
1965 1 0.849105 0.36717 0.722782
220 1 0.153451 0.434772 0.716251
1485 1 0.693341 0.44518 0.692064
1709 1 0.0326285 0.448898 0.630594
188 1 0.458795 0.24855 0.525287
1830 1 0.462249 0.395957 0.510349
136 1 0.577302 0.00976069 0.963079
31 1 0.0070042 0.0730427 0.718552
1390 1 0.480959 0.140736 0.765414
1435 1 0.667742 0.11726 0.695047
1480 1 0.20734 0.121579 0.706484
111 1 0.311085 0.191802 0.661517
1177 1 0.469095 0.208273 0.711254
1078 1 0.296382 0.251387 0.66423
703 1 0.67061 0.327798 0.774827
1362 1 0.600715 0.349098 0.610321
756 1 0.234237 0.370147 0.675457
1197 1 0.848011 0.417969 0.733391
1859 1 0.671315 0.431077 0.696149
170 1 0.101773 0.398211 0.664333
1864 1 0.115341 0.484578 0.751165
316 1 0.888733 0.475874 0.697226
89 1 0.53226 0.0993111 0.737107
48 1 0.639642 0.124383 0.797743
1284 1 0.957258 0.129502 0.788603
1693 1 0.885363 0.275761 0.798429
1338 1 0.518226 0.223932 0.749085
1052 1 0.0105373 0.250573 0.699306
160 1 0.531104 0.328358 0.755026
291 1 0.768472 0.326616 0.753885
311 1 0.179728 0.314786 0.726525
1567 1 0.366596 0.319099 0.784085
1783 1 0.185419 0.430665 0.775216
196 1 0.00960509 0.477452 0.678092
683 1 0.203733 0.476719 0.785087
1337 1 0.110605 0.0145395 0.806864
1793 1 0.714949 0.494875 0.680288
1290 1 0.0225037 0.0253223 0.765455
283 1 0.241908 0.0153705 0.754778
227 1 0.41438 0.184631 0.685072
858 1 0.865072 0.217819 0.741691
1929 1 0.0740446 0.145566 0.711119
302 1 0.880672 0.11536 0.76302
962 1 0.171882 0.183916 0.738465
154 1 0.960669 0.180059 0.773611
1844 1 0.272128 0.289893 0.696228
1268 1 0.0841298 0.342682 0.729826
1888 1 0.346428 0.265604 0.686117
1765 1 0.255499 0.327482 0.748721
826 1 0.561389 0.465373 0.712634
1944 1 0.0564807 0.496147 0.780121
606 1 0.548176 0.400618 0.763344
436 1 0.253665 0.043171 0.686253
1415 1 0.902717 0.0176749 0.817066
49 1 0.107371 0.0427073 0.743815
1538 1 0.507249 0.104091 0.85028
43 1 0.501324 0.126581 0.761675
1354 1 0.836508 0.148758 0.787787
1958 1 0.121836 0.195277 0.721793
363 1 0.179538 0.21583 0.770935
1736 1 0.589229 0.180315 0.813293
819 1 0.651671 0.300069 0.739251
965 1 0.596197 0.229726 0.759769
1914 1 0.686906 0.293692 0.71479
417 1 0.800464 0.260292 0.745953
2045 1 0.307094 0.239885 0.742063
891 1 0.343209 0.268338 0.785404
255 1 0.602924 0.309424 0.756761
1742 1 0.857678 0.361919 0.685432
1940 1 0.330195 0.0609586 0.74999
88 1 0.35459 0.0666683 0.714899
830 1 0.550414 0.0493025 0.738331
68 1 0.610578 0.159438 0.816036
881 1 0.205476 0.22324 0.853641
1704 1 0.715455 0.289942 0.782281
385 1 0.0604435 0.294968 0.786623
2022 1 0.495228 0.329776 0.751027
1320 1 0.970605 0.37454 0.840693
1724 1 0.259505 0.377724 0.709228
1254 1 0.451217 0.391386 0.803247
608 1 0.263605 0.475845 0.780029
1574 1 0.424136 0.468345 0.819045
1045 1 0.780848 0.393442 0.838784
296 1 0.637317 0.430068 0.747622
868 1 0.379477 0.490562 0.776306
984 1 0.326441 0.0476682 0.871229
135 1 0.224351 0.0318824 0.754573
288 1 0.21267 0.0226824 0.846991
2017 1 0.831481 0.157745 0.812263
1298 1 0.662317 0.176522 0.806256
496 1 0.276739 0.219955 0.747831
1710 1 0.832988 0.123731 0.80464
564 1 0.109749 0.255669 0.79246
129 1 0.872598 0.271574 0.868669
33 1 0.296156 0.240233 0.80463
234 1 0.883079 0.346983 0.798682
443 1 0.0293059 0.327008 0.781416
159 1 0.0531053 0.374846 0.816176
845 1 0.722379 0.429313 0.844841
1566 1 0.772502 0.41695 0.836544
1544 1 0.320682 0.426631 0.824764
1913 1 0.359108 0.0364704 0.838576
163 1 0.692909 0.0312458 0.886748
444 1 0.113781 0.111874 0.773329
1487 1 0.160456 0.0942812 0.807428
982 1 0.49649 0.21792 0.836638
1981 1 0.526043 0.234956 0.806757
1181 1 0.591093 0.217106 0.836381
1546 1 0.0174795 0.250141 0.761297
284 1 0.031034 0.266845 0.87872
2033 1 0.948749 0.235805 0.899779
38 1 0.641314 0.267723 0.842782
665 1 0.622445 0.304426 0.806787
1375 1 0.752721 0.356546 0.832222
460 1 0.266911 0.30879 0.786067
1311 1 0.380113 0.414324 0.832182
494 1 0.756493 0.410174 0.84832
355 1 0.0570938 0.452558 0.781075
1263 1 0.8705 0.422872 0.502898
1067 1 0.094071 0.0825869 0.813519
22 1 0.732471 0.0314226 0.833617
87 1 0.155704 0.0564061 0.858833
1756 1 0.963285 0.032748 0.84072
1243 1 0.353147 0.0595036 0.833695
343 1 0.92698 0.094104 0.774834
734 1 0.819135 0.109947 0.898139
742 1 0.400886 0.118925 0.777266
2 1 0.131134 0.170113 0.790193
1638 1 0.282188 0.175273 0.817145
28 1 0.956164 0.320992 0.758262
1983 1 0.595335 0.375742 0.879822
1215 1 0.219447 0.347864 0.793733
614 1 0.763643 0.417591 0.854696
1491 1 0.52618 0.410939 0.859437
987 1 0.320984 0.0257837 0.500887
1717 1 0.962558 0.0377354 0.854386
1358 1 0.0259639 0.0573126 0.77695
674 1 0.552038 0.139017 0.838261
624 1 0.655005 0.114301 0.82712
1541 1 0.694442 0.115056 0.787026
1855 1 0.878127 0.228096 0.900214
1126 1 0.445723 0.236761 0.896649
1128 1 0.125407 0.223253 0.819482
920 1 0.963291 0.224871 0.850251
512 1 0.377189 0.306091 0.876513
1101 1 0.743661 0.38695 0.828537
507 1 0.998123 0.376018 0.786969
1056 1 0.374268 0.429793 0.909363
1432 1 0.77338 0.441121 0.841099
112 1 0.772617 0.448265 0.850888
780 1 0.772941 0.0936886 0.973994
1848 1 0.236249 0.447587 0.853898
161 1 0.405084 0.481763 0.816128
369 1 0.623929 0.494856 0.803114
1436 1 0.276289 0.179359 0.506509
1773 1 0.79081 0.466808 0.851983
72 1 0.656069 0.236244 0.524668
1163 1 0.937985 0.0606255 0.860559
619 1 0.371796 0.162301 0.83103
1365 1 0.915265 0.161879 0.894456
732 1 0.151501 0.232137 0.860063
555 1 0.686266 0.274383 0.865801
936 1 0.757228 0.326109 0.891839
80 1 0.419244 0.348423 0.819431
1005 1 0.65537 0.403394 0.797734
1335 1 0.0085548 0.377636 0.915281
1355 1 0.996371 0.433401 0.818673
532 1 0.633951 0.0615708 0.866846
59 1 0.136896 0.155477 0.856207
438 1 0.769203 0.113557 0.841414
802 1 0.678841 0.014701 0.887057
706 1 0.652312 0.0954723 0.941142
456 1 0.392375 0.160398 0.946797
1438 1 0.458716 0.166437 0.902425
1946 1 0.812088 0.156734 0.871791
1042 1 0.919136 0.230444 0.873962
1583 1 0.21541 0.259151 0.906411
1804 1 0.673647 0.30087 0.817018
41 1 0.176414 0.380637 0.893671
1669 1 0.814694 0.332877 0.948068
457 1 0.597528 0.352024 0.945906
1972 1 0.137439 0.396467 0.813171
805 1 0.515811 0.417218 0.87369
52 1 0.35738 0.479091 0.875195
1110 1 0.336972 0.432479 0.986139
746 1 0.109402 0.0439829 0.876468
1994 1 0.236 0.1453 0.935847
937 1 0.54093 0.164438 0.878647
1678 1 0.517183 0.16333 0.854589
1522 1 0.957175 0.306091 0.882938
1650 1 0.941707 0.266734 0.897943
822 1 0.966268 0.319009 0.869668
568 1 0.327716 0.349915 0.975438
1579 1 0.35765 0.327963 0.857736
1973 1 0.918584 0.321893 0.924833
1718 1 0.965415 0.310702 0.88291
513 1 0.897523 0.372782 0.845318
376 1 0.889743 0.465837 0.87478
314 1 0.595623 0.492352 0.892519
1010 1 0.322975 0.0588235 0.888893
773 1 0.392781 0.138578 0.914345
1168 1 0.568748 0.0944683 0.929118
1621 1 0.545527 0.188741 0.869654
1570 1 0.0658993 0.16092 0.916216
1891 1 0.990739 0.125907 0.920513
1999 1 0.0499344 0.205521 0.891545
401 1 0.776247 0.134886 0.861296
1527 1 0.954561 0.192143 0.875826
1233 1 0.781496 0.243211 0.886417
1048 1 0.887275 0.209792 0.878355
1238 1 0.396437 0.324603 0.911405
1412 1 0.583785 0.292615 0.91936
1632 1 0.429481 0.378348 0.931573
462 1 0.52511 0.380715 0.92138
786 1 0.519802 0.355013 0.946451
1484 1 0.960616 0.379063 0.922179
1228 1 0.972184 0.418839 0.875095
1382 1 0.910931 0.493136 0.937696
1894 1 0.0694281 0.0402078 0.924053
450 1 0.434444 0.0867286 0.852423
752 1 0.586261 0.0916702 0.965112
1735 1 0.776363 0.135135 0.521411
861 1 0.268763 0.189838 0.945105
1905 1 0.747048 0.167157 0.885462
131 1 0.766209 0.168166 0.941746
1547 1 0.713683 0.212688 0.928046
1499 1 0.949428 0.240061 0.909028
1554 1 0.803369 0.333495 0.998329
226 1 0.939494 0.301515 0.954423
405 1 0.557214 0.316562 0.927972
730 1 0.64967 0.316478 0.944046
1094 1 0.272915 0.366515 0.933607
114 1 0.245355 0.0244554 0.512785
1563 1 0.440855 0.493148 0.836939
1454 1 0.590682 0.446652 0.918974
1430 1 0.359996 0.476919 0.949403
1180 1 0.449 0.372671 0.990123
254 1 0.318553 0.244986 0.960297
1962 1 0.434501 0.0829853 0.906137
1818 1 0.640293 0.0153891 0.938025
1319 1 0.68418 0.117012 0.931649
1766 1 0.0757803 0.00507119 0.599956
42 1 0.193463 0.11145 0.994908
663 1 0.0918607 0.107459 0.951843
1437 1 0.0566293 0.405748 0.982281
648 1 0.500694 0.0767559 0.980453
617 1 0.504112 0.163691 0.986105
1667 1 0.740773 0.159798 0.970279
815 1 0.837651 0.125548 0.901689
247 1 0.0413812 0.184738 0.946829
1760 1 0.886595 0.285159 0.958559
841 1 0.205799 0.32551 0.995984
517 1 0.798167 0.263216 0.908144
1513 1 0.255208 0.253612 0.99685
1191 1 0.891152 0.335984 0.962893
656 1 0.0160396 0.298795 0.976485
1568 1 0.646498 0.379494 0.956254
1490 1 0.113388 0.417707 0.95371
1253 1 0.0671871 0.459683 0.945953
35 1 0.526954 0.453587 0.962304
1140 1 0.291298 0.366233 0.995838
530 1 0.512238 0.269604 0.980194
90 1 0.197196 0.0515739 0.955607
1892 1 0.344735 0.0242425 0.602236
678 1 0.832616 0.161505 0.958797
1740 1 0.634268 0.196769 0.957122
1218 1 0.821896 0.12972 0.987856
1856 1 0.963579 0.137212 0.960716
658 1 0.948581 0.283046 0.937361
1061 1 0.900079 0.304158 1.0003
1723 1 0.309252 0.305119 0.985497
1739 1 0.578184 0.452802 0.81985
1550 1 0.773141 0.397263 0.975405
1869 1 0.251986 0.414983 0.968161
923 1 0.426078 0.491578 0.773597
543 1 0.0992666 0.0961073 0.995957
1995 1 0.579024 0.0608711 0.994163
1653 1 0.0160566 0.495358 0.561528
1640 1 0.941396 0.499121 0.789193
621 1 0.0182851 0.551837 0.00216585
1062 1 0.37292 0.996117 0.177631
150 1 0.160893 0.913354 0.0482034
473 1 0.0986632 0.850724 0.481919
2031 1 0.982796 0.589342 0.0261138
1918 1 0.408363 0.601493 0.0515394
152 1 0.00160504 0.535099 0.105179
1845 1 0.699147 0.545651 0.0932117
1705 1 0.75242 0.805039 0.0054765
495 1 0.545589 0.516838 0.0109516
545 1 0.730783 0.779562 0.000265592
1831 1 0.0095057 0.851408 0.015012
1047 1 0.137258 0.769039 0.0346845
1990 1 0.398672 0.828571 0.0181176
2038 1 0.432196 0.828979 0.00781369
1837 1 0.95378 0.578215 0.474976
263 1 0.94409 0.943981 0.0239128
741 1 0.823402 0.82902 0.487902
715 1 0.482886 0.963956 0.0113474
549 1 0.850246 0.542588 0.23769
816 1 0.15292 0.545866 0.000185314
944 1 0.635651 0.561159 0.0127032
836 1 0.159215 0.59683 0.037246
1289 1 0.976615 0.580196 0.0366008
1322 1 0.574938 0.661372 0.00309718
950 1 0.816465 0.702722 0.0405573
406 1 0.743817 0.978663 0.199958
1832 1 0.0451547 0.686507 0.0308881
1569 1 0.284342 0.653203 0.0685767
1157 1 0.748863 0.715466 0.0680354
1707 1 0.200874 0.747432 0.0235448
1231 1 0.865897 0.84513 0.0197301
1911 1 0.693929 0.768125 0.124552
995 1 0.561839 0.781459 0.0460036
1428 1 0.62999 0.809864 0.0243877
422 1 0.894488 0.770112 0.00329252
1313 1 0.365014 0.800085 0.0468476
445 1 0.926774 0.78217 0.0457682
317 1 0.97505 0.884096 0.0194647
1024 1 0.877595 0.807744 0.0494537
1069 1 0.766421 0.884337 0.00369161
1970 1 0.522158 0.995762 0.454709
1949 1 0.106745 0.94336 0.0113178
814 1 0.985971 0.971152 0.0515309
900 1 0.210512 0.958895 0.0554476
398 1 0.0672635 0.547244 0.0257715
1027 1 0.754001 0.5144 0.158202
340 1 0.904211 0.604651 0.0148464
326 1 0.884463 0.641092 0.0390038
2005 1 0.0372166 0.803256 0.0413573
2018 1 0.659095 0.732172 0.0840628
1683 1 0.117583 0.808087 0.0314731
1057 1 0.111608 0.811551 0.0633455
808 1 0.119877 0.814241 0.0575064
1776 1 0.286353 0.839044 0.0620416
2000 1 0.312508 0.940668 0.0226793
590 1 0.645939 0.896349 0.037801
1919 1 0.0356558 0.934853 0.0421645
477 1 0.673635 0.886165 0.0444511
1961 1 0.516003 0.507646 0.0776666
2042 1 0.619302 0.588842 0.0460002
1108 1 0.718299 0.641247 0.0416137
740 1 0.121887 0.699749 0.119005
1247 1 0.725676 0.758964 0.0686329
1003 1 0.0628468 0.738351 0.0632116
1726 1 0.539358 0.734851 0.0307188
256 1 0.924613 0.813239 0.0809302
2009 1 0.10031 0.807805 0.0993398
75 1 0.54111 0.755272 0.065837
94 1 0.0458322 0.762292 0.062461
1950 1 0.621119 0.966844 0.0343671
1475 1 0.00471879 0.966055 0.0668284
601 1 0.400917 0.992183 0.0825939
573 1 0.888094 0.936742 0.0780307
1826 1 0.385922 0.969808 0.14708
1196 1 0.125223 0.610167 0.0246144
251 1 0.328231 0.581297 0.0777589
672 1 0.637688 0.660684 0.0348898
1642 1 0.689166 0.644049 0.0340627
1519 1 0.915344 0.691756 0.0962147
379 1 0.759893 0.709823 0.113444
1266 1 0.710873 0.501221 0.1806
1308 1 0.24601 0.849917 0.0422434
1923 1 0.621644 0.789146 0.139542
346 1 0.978777 0.802005 0.0932376
985 1 0.410746 0.916952 0.0620689
1236 1 0.547449 0.843264 0.0625753
1652 1 0.083664 0.86044 0.102641
589 1 0.968357 0.873984 0.140539
1017 1 0.58363 0.90631 0.0754075
603 1 0.976582 0.991725 0.115025
103 1 0.921743 0.94877 0.435782
211 1 0.290984 0.549298 0.0733491
736 1 0.170348 0.537618 0.489985
556 1 0.304412 0.568353 0.135884
1295 1 0.165804 0.601557 0.143553
2021 1 0.0625968 0.638976 0.0990681
225 1 0.464392 0.668456 0.0712426
1054 1 0.928817 0.66984 0.0686605
84 1 0.14361 0.843132 0.124871
613 1 0.157898 0.786527 0.102309
1838 1 0.939628 0.868018 0.11531
1291 1 0.5569 0.968983 0.0471366
690 1 0.827431 0.91186 0.137289
396 1 0.685856 0.950094 0.101195
1641 1 0.0565847 0.545856 0.139069
609 1 0.477974 0.537026 0.113267
1206 1 0.0379005 0.59794 0.127553
1762 1 0.77578 0.605361 0.101112
905 1 0.585784 0.579228 0.128543
1731 1 0.0399069 0.700165 0.169095
451 1 0.773741 0.680498 0.0568299
202 1 0.644986 0.722465 0.147729
58 1 0.00323276 0.69275 0.152852
527 1 0.032665 0.740273 0.115123
840 1 0.360175 0.763286 0.0347246
426 1 0.41777 0.760693 0.0979037
1143 1 0.568802 0.736012 0.140711
1446 1 0.179395 0.873421 0.163105
1734 1 0.214926 0.790137 0.117488
618 1 0.0156819 0.841863 0.0929263
1835 1 0.618769 0.820274 0.129452
849 1 0.874562 0.917379 0.0933274
915 1 0.43488 0.999373 0.279404
1210 1 0.0256491 0.901054 0.114909
1166 1 0.173203 0.959298 0.0636434
359 1 0.112989 0.937084 0.114934
171 1 0.205817 0.867382 0.00492282
698 1 0.904636 0.571883 0.150525
1746 1 0.739397 0.566104 0.128688
977 1 0.904082 0.994525 0.481675
338 1 0.314488 0.654373 0.0256563
1585 1 0.316231 0.715911 0.200755
1706 1 0.00337629 0.652136 0.119273
1416 1 0.411758 0.761211 0.214079
1686 1 0.976978 0.764024 0.123668
1377 1 0.891023 0.77947 0.150764
859 1 0.754538 0.886494 0.124957
712 1 0.793947 0.816149 0.0926007
1852 1 0.182236 0.888302 0.182329
1461 1 0.842383 0.940654 0.191337
1802 1 0.0330449 0.997927 0.0913175
1366 1 0.302647 0.98881 0.161006
1517 1 0.459374 0.767048 0.0357141
1138 1 0.20732 0.533861 0.114211
246 1 0.774355 0.538725 0.192102
489 1 0.054666 0.650601 0.180834
1223 1 0.681963 0.629835 0.18647
96 1 0.748045 0.686774 0.135245
1577 1 0.852274 0.593817 0.0942627
193 1 0.806903 0.623176 0.143132
299 1 0.183598 0.751156 0.217775
104 1 0.902411 0.726526 0.148892
237 1 0.93293 0.729833 0.179926
145 1 0.354998 0.685187 0.187329
837 1 0.512606 0.746112 0.169129
126 1 0.193465 0.872128 0.152581
1089 1 0.734869 0.789502 0.158321
1006 1 0.777241 0.849899 0.106745
368 1 0.732085 0.75073 0.210337
1357 1 0.0794991 0.935685 0.107725
1626 1 0.478156 0.901092 0.151832
1001 1 0.41797 0.940544 0.188385
383 1 0.461539 0.843141 0.417414
1392 1 0.867041 0.551535 0.0399387
1604 1 0.737841 0.560954 0.167136
897 1 0.314351 0.531302 0.305326
2037 1 0.847463 0.504384 0.129804
1088 1 0.0828002 0.564234 0.157949
295 1 0.45166 0.548001 0.130809
508 1 0.571541 0.542909 0.175757
1453 1 0.80786 0.539736 0.156914
1477 1 0.365745 0.680333 0.21241
869 1 0.534353 0.614433 0.235846
716 1 0.721408 0.647561 0.17724
1562 1 0.19335 0.728893 0.22551
771 1 0.116408 0.686718 0.146289
615 1 0.969303 0.763379 0.184455
893 1 0.967111 0.688044 0.145071
1 1 0.316098 0.831205 0.166675
639 1 0.463196 0.804243 0.136961
20 1 0.116024 0.94054 0.135671
318 1 0.539639 0.976967 0.153694
1815 1 0.612158 0.998742 0.139498
215 1 0.331841 0.532921 0.450745
1286 1 0.826602 0.537353 0.168262
501 1 0.0122798 0.585105 0.194298
1241 1 0.211893 0.58724 0.206949
51 1 0.209441 0.585633 0.21757
134 1 0.983162 0.62614 0.194466
1406 1 0.141854 0.622151 0.143707
214 1 0.876405 0.742152 0.228781
333 1 0.865028 0.830317 0.176469
184 1 0.674073 0.864799 0.200764
1309 1 0.0148951 0.879112 0.0876497
63 1 0.870894 0.927934 0.138752
27 1 0.906545 0.916456 0.13657
1817 1 0.12873 0.979672 0.201537
1356 1 0.716713 0.947945 0.218852
796 1 0.088967 0.528164 0.20377
1594 1 0.864544 0.537535 0.168947
1493 1 0.104717 0.540363 0.218553
725 1 0.760455 0.592521 0.242029
626 1 0.542275 0.684504 0.181785
1679 1 0.678239 0.706404 0.165651
1791 1 0.935346 0.630867 0.143159
765 1 0.790216 0.66971 0.233495
1722 1 0.148819 0.79851 0.206405
1814 1 0.535693 0.849379 0.19236
2024 1 0.496092 0.852104 0.199031
44 1 0.923374 0.829884 0.307701
843 1 0.822686 0.880551 0.239532
1851 1 0.957359 0.926623 0.19892
636 1 0.175369 0.843863 0.207634
1787 1 0.662924 0.930745 0.198762
852 1 0.606284 0.908712 0.207225
695 1 0.953524 0.893142 0.0668396
85 1 0.187214 0.591289 0.222331
554 1 0.442506 0.62269 0.227785
2040 1 0.656147 0.626067 0.242492
784 1 0.548521 0.636388 0.308382
452 1 0.085903 0.713101 0.219807
139 1 0.577869 0.693842 0.238197
954 1 0.212373 0.78449 0.167242
1928 1 0.774385 0.722923 0.235714
424 1 0.527466 0.808691 0.234594
133 1 0.937412 0.837149 0.227305
1123 1 0.316776 0.827333 0.228431
1593 1 0.122781 0.824206 0.235063
357 1 0.124995 0.776373 0.20543
2023 1 0.591552 0.886703 0.203596
1146 1 0.579221 0.819092 0.196872
1380 1 0.990731 0.896079 0.250324
878 1 0.472 0.935115 0.210774
1173 1 0.678858 0.861929 0.243004
839 1 0.288611 0.939946 0.211002
459 1 0.486923 0.927463 0.198063
1419 1 0.656534 0.956092 0.229359
562 1 0.997553 0.986091 0.348303
1968 1 0.602222 0.984214 0.208332
2036 1 0.19849 0.5367 0.292499
1777 1 0.393467 0.542714 0.192534
1745 1 0.546857 0.559277 0.239065
1433 1 0.593791 0.60991 0.24484
803 1 0.127266 0.637291 0.195288
743 1 0.0129902 0.583348 0.234032
1670 1 0.0301046 0.667609 0.230658
1063 1 0.0464173 0.731949 0.270529
779 1 0.91105 0.694248 0.19969
62 1 0.0436433 0.753974 0.284966
1139 1 0.708597 0.7713 0.231627
1523 1 0.980562 0.747125 0.194176
289 1 0.672916 0.816238 0.25262
388 1 0.612675 0.846355 0.242218
1136 1 0.305703 0.884029 0.258978
2011 1 0.422141 0.784124 0.236253
1467 1 0.435263 0.949101 0.229947
1846 1 0.714827 0.58236 0.288896
2012 1 0.0863744 0.708655 0.240142
625 1 0.306168 0.681087 0.310135
1927 1 0.388958 0.734061 0.277813
871 1 0.209931 0.748174 0.219423
1385 1 0.893243 0.760084 0.228124
1276 1 0.350931 0.806124 0.292025
821 1 0.179491 0.767466 0.241858
341 1 0.429251 0.834738 0.241444
91 1 0.783133 0.814723 0.204649
1738 1 0.0981724 0.63835 0.00306437
1044 1 0.206539 0.638921 0.280625
890 1 0.145448 0.57748 0.318223
1150 1 0.773013 0.647103 0.327231
142 1 0.434846 0.639676 0.290675
2044 1 0.534606 0.648791 0.298557
2041 1 0.517025 0.752033 0.28695
1931 1 0.94116 0.655264 0.20285
1039 1 0.413312 0.726178 0.279667
1410 1 0.355265 0.771235 0.281628
1369 1 0.108872 0.837757 0.256957
1002 1 0.146424 0.894411 0.304234
1630 1 0.18094 0.92085 0.300268
101 1 0.109176 0.913833 0.221179
782 1 0.289478 0.923108 0.269279
998 1 0.156114 0.915049 0.288626
282 1 0.456848 0.930016 0.267272
1279 1 0.294169 0.993924 0.0769814
820 1 0.200426 0.526459 0.313066
1589 1 0.598087 0.525917 0.341091
162 1 0.426127 0.61394 0.326539
1559 1 0.158427 0.635026 0.221477
744 1 0.957244 0.726849 0.228864
575 1 0.0245625 0.704033 0.311049
1318 1 0.0693111 0.708158 0.26744
267 1 0.0341527 0.750605 0.260308
1400 1 0.507225 0.821637 0.351887
1964 1 0.754077 0.787861 0.369333
204 1 0.253035 0.891531 0.266296
892 1 0.376483 0.802594 0.304314
1857 1 0.607454 0.964762 0.192975
1529 1 0.998375 0.896472 0.324272
1743 1 0.390055 0.936636 0.283076
748 1 0.549907 0.517293 0.438741
1307 1 0.85703 0.538598 0.411717
156 1 0.578976 0.528996 0.306325
16 1 0.274966 0.505197 0.325926
1293 1 0.860644 0.529312 0.269372
563 1 0.361067 0.569121 0.315643
631 1 0.958918 0.549606 0.29087
1008 1 0.113879 0.590663 0.353514
912 1 0.355424 0.584208 0.358583
12 1 0.289264 0.621414 0.223459
54 1 0.360337 0.719672 0.315106
529 1 0.546715 0.701129 0.309505
1026 1 0.614982 0.709995 0.26804
1323 1 0.275872 0.780741 0.278402
1862 1 0.997232 0.749967 0.308107
1590 1 0.118558 0.77074 0.283061
737 1 0.433958 0.951585 0.254414
1810 1 0.951467 0.916513 0.328982
851 1 0.971432 0.87035 0.326025
1187 1 0.190375 0.897998 0.310694
372 1 0.233396 0.973665 0.315343
702 1 0.227971 0.918965 0.489668
581 1 0.00153492 0.525165 0.189985
113 1 0.0902703 0.507345 0.303995
1201 1 0.721444 0.598206 0.397313
1808 1 0.393431 0.576709 0.335998
1294 1 0.501108 0.672717 0.309564
1071 1 0.871336 0.693607 0.354286
2006 1 0.672819 0.672838 0.383296
1992 1 0.844822 0.725313 0.285074
492 1 0.918758 0.732621 0.355357
679 1 0.15496 0.771424 0.371283
1816 1 0.813964 0.764245 0.355028
1152 1 0.402915 0.775769 0.33118
927 1 0.553737 0.797943 0.31271
1109 1 0.090725 0.805748 0.264915
1043 1 0.191946 0.841534 0.34312
866 1 0.656581 0.837484 0.45799
978 1 0.942535 0.852506 0.278962
235 1 0.125543 0.871558 0.38412
857 1 0.653031 0.893826 0.305889
167 1 0.639644 0.838959 0.337713
1077 1 0.265955 0.942837 0.326777
1336 1 0.582575 0.898433 0.369839
273 1 0.727672 0.969039 0.338356
1364 1 0.0662611 0.938915 0.331688
1947 1 0.190999 0.992346 0.263719
559 1 0.530578 0.944282 0.341856
793 1 0.391174 0.992978 0.38221
955 1 0.0407474 0.575657 0.418184
1394 1 0.86004 0.571896 0.291306
949 1 0.629815 0.604369 0.330834
1505 1 0.636754 0.528007 0.346779
1345 1 0.0372707 0.562043 0.322091
865 1 0.98785 0.573494 0.343902
437 1 0.23058 0.624543 0.376272
677 1 0.377521 0.648655 0.383414
1036 1 0.018357 0.745606 0.324723
831 1 0.617046 0.742819 0.36409
427 1 0.861512 0.686706 0.383928
1907 1 0.568575 0.732313 0.294026
1374 1 0.870892 0.717207 0.295034
1277 1 0.475583 0.835064 0.336915
942 1 0.347101 0.923053 0.291972
381 1 0.544963 0.845528 0.383584
57 1 0.920023 0.905941 0.296723
610 1 0.226921 0.99053 0.262896
1750 1 0.412808 0.959286 0.40699
166 1 0.438444 0.950913 0.463983
1543 1 0.226332 0.572701 0.413786
411 1 0.222448 0.610448 0.433254
1399 1 0.349655 0.614813 0.367295
2020 1 0.400761 0.715567 0.415368
735 1 0.538741 0.667315 0.295826
844 1 0.609127 0.67531 0.360582
1654 1 0.706166 0.775577 0.384554
1096 1 0.838599 0.661755 0.375725
1198 1 0.356834 0.711883 0.301762
855 1 0.476234 0.844215 0.363156
1587 1 0.947284 0.906391 0.361025
680 1 0.200612 0.87812 0.389045
930 1 0.424201 0.941087 0.406894
1592 1 0.194558 0.925983 0.325028
525 1 0.670045 0.956371 0.348326
652 1 0.590937 0.971595 0.359403
384 1 0.0537747 0.981916 0.426705
1530 1 0.213764 0.956506 0.375618
23 1 0.267279 0.514829 0.433739
1470 1 0.383041 0.993025 0.312022
2034 1 0.9922 0.952205 0.390618
813 1 0.533646 0.507054 0.37422
1518 1 0.629193 0.565228 0.387359
186 1 0.162617 0.574451 0.380484
1261 1 0.290151 0.516798 0.417128
458 1 0.638796 0.591994 0.39316
1099 1 0.878815 0.625605 0.388314
168 1 0.430259 0.641405 0.30032
729 1 0.31124 0.741675 0.295507
1620 1 0.996957 0.780996 0.364035
1120 1 0.676639 0.711434 0.341113
1472 1 0.886476 0.742205 0.430897
1395 1 0.427484 0.74378 0.427769
1328 1 0.10238 0.828381 0.449146
1661 1 0.76168 0.744833 0.409269
772 1 0.093162 0.824818 0.389591
1951 1 0.656092 0.820993 0.433901
1494 1 0.589585 0.823285 0.411947
506 1 0.894261 0.926934 0.326815
1737 1 0.761911 0.91392 0.440497
1733 1 0.933152 0.920919 0.443497
1492 1 0.435086 0.900221 0.3512
1759 1 0.0732603 0.947406 0.377179
416 1 0.0218698 0.948347 0.383574
110 1 0.707649 0.938645 0.445226
1510 1 0.558297 0.515704 0.421503
1860 1 0.21256 0.971203 0.14483
257 1 0.933298 0.519823 0.411451
1282 1 0.19297 0.589584 0.351304
1013 1 0.691231 0.670357 0.381113
194 1 0.97456 0.653138 0.385933
77 1 0.923578 0.658896 0.404041
1535 1 0.062777 0.71762 0.394137
1629 1 0.238789 0.87824 0.456063
1500 1 0.73297 0.893131 0.38179
986 1 0.232479 0.904099 0.434185
1720 1 0.572146 0.82431 0.406341
1192 1 0.805737 0.853942 0.425075
1727 1 0.887609 0.856496 0.378771
1533 1 0.587413 0.920969 0.405869
1989 1 0.837835 0.549701 0.359274
1729 1 0.985582 0.52036 0.441136
916 1 0.639384 0.610787 0.417094
1012 1 0.794179 0.54155 0.466796
1216 1 0.904919 0.631152 0.381231
140 1 0.402689 0.571633 0.459448
510 1 0.13751 0.671214 0.325406
1391 1 0.569805 0.675326 0.443257
1259 1 0.815742 0.779273 0.464434
940 1 0.0260698 0.805254 0.433871
1524 1 0.0741825 0.842441 0.421652
1925 1 0.0818286 0.866989 0.410302
208 1 0.763182 0.510495 0.362145
1526 1 0.875073 0.711569 0.487758
895 1 0.316448 0.522716 0.082719
1926 1 0.251535 0.577814 0.425024
497 1 0.614822 0.717517 0.469363
1699 1 0.334067 0.693261 0.447349
1497 1 0.791516 0.750208 0.409297
1560 1 0.605736 0.855753 0.411457
1060 1 0.215269 0.784356 0.483751
1032 1 0.107243 0.89667 0.445612
1660 1 0.841671 0.892999 0.397188
287 1 0.843021 0.839448 0.408007
1204 1 0.664168 0.532649 0.4284
1405 1 0.0289584 0.978924 0.0265596
1971 1 0.336335 0.543261 0.0628317
733 1 0.724244 0.526941 0.435784
1532 1 0.211793 0.504242 0.456093
1987 1 0.212794 0.513917 0.44457
550 1 0.985373 0.568807 0.480775
442 1 0.553455 0.561006 0.46348
1982 1 0.311994 0.501531 0.425491
627 1 0.442026 0.57404 0.422105
315 1 0.233319 0.664757 0.453634
1680 1 0.35371 0.623176 0.450479
2047 1 0.977487 0.554899 0.424541
32 1 0.599434 0.683471 0.386531
1020 1 0.966832 0.605656 0.411727
453 1 0.576359 0.665601 0.443128
119 1 0.321771 0.683565 0.443339
11 1 0.240803 0.758526 0.484671
481 1 0.822124 0.710037 0.459886
1794 1 0.655391 0.820478 0.471231
1153 1 0.329195 0.843646 0.467712
1074 1 0.764644 0.782238 0.447424
1702 1 0.98755 0.900272 0.462594
1160 1 0.880267 0.521339 0.163325
93 1 0.158753 0.974905 0.429058
467 1 0.228058 0.972193 0.469745
1258 1 0.376021 0.616139 0.494535
1770 1 0.544177 0.638315 0.474656
661 1 0.318108 0.606208 0.428269
971 1 0.155092 0.961618 0.0186756
502 1 0.0566831 0.613232 0.476382
1214 1 0.268052 0.982556 0.0297771
932 1 0.559795 0.515962 0.39359
240 1 0.433339 0.804265 0.474651
1046 1 0.0612769 0.6811 0.446029
1769 1 0.361012 0.974213 0.101295
1528 1 0.887714 0.754594 0.488566
1545 1 0.0293836 0.754638 0.464846
687 1 0.0375281 0.773318 0.496443
425 1 0.389039 0.73408 0.441543
253 1 0.476822 0.870263 0.387507
356 1 0.470288 0.520009 0.00447646
894 1 0.669097 0.855832 0.497387
1349 1 0.806779 0.902396 0.45638
1897 1 0.670088 0.931426 0.453846
1596 1 0.676444 0.966785 0.455441
407 1 0.29887 0.894264 0.480232
755 1 0.0254529 0.839875 0.0473898
1713 1 0.405186 0.569748 0.437223
1664 1 0.372663 0.644287 0.488489
577 1 0.272472 0.637155 0.499433
970 1 0.157396 0.706887 0.495231
365 1 0.156857 0.825016 0.432299
952 1 0.0561006 0.835433 0.495019
1161 1 0.0538522 0.83649 0.468963
1334 1 0.942132 0.851145 0.0198512
707 1 0.858728 0.724482 0.0537981
1676 1 0.124283 0.511942 0.333899
1455 1 0.955344 0.518672 0.0914187
754 1 0.00215681 0.993478 0.251164
1514 1 0.213886 0.551505 0.0087336
354 1 0.378142 0.776817 0.0084918
770 1 0.147593 0.609856 0.00360538
172 1 0.709844 0.900765 0.496791
244 1 0.269813 0.523892 0.520099
951 1 0.729859 0.531509 0.503463
605 1 0.32905 0.591364 0.512324
509 1 0.38163 0.878517 0.516988
1906 1 0.936116 0.599837 0.530346
1028 1 0.172756 0.579284 0.56181
966 1 0.374695 0.64086 0.499925
1957 1 0.61218 0.65033 0.542301
903 1 0.940546 0.715397 0.509706
1396 1 0.585135 0.878796 0.53822
1170 1 0.0591391 0.979388 0.504897
1202 1 0.354768 0.950007 0.526197
1463 1 0.362781 0.964925 0.556864
586 1 0.0871609 0.914766 0.61158
1208 1 0.819488 0.934762 0.523232
266 1 0.122456 0.918674 0.529472
576 1 0.256562 0.732509 0.506964
622 1 0.506235 0.574482 0.512576
265 1 0.821193 0.704242 0.568138
1403 1 0.449013 0.778685 0.548927
1049 1 0.80957 0.739532 0.546099
1758 1 0.849782 0.580856 0.516318
248 1 0.293557 0.844419 0.54522
759 1 0.655193 0.76112 0.565205
2004 1 0.352303 0.842959 0.506926
1292 1 0.463975 0.888136 0.586347
1339 1 0.779393 0.899846 0.539486
147 1 0.890201 0.917066 0.540216
1561 1 0.0751325 0.819344 0.994406
1953 1 0.225407 0.880023 0.533277
682 1 0.955686 0.928511 0.54456
1317 1 0.271348 0.607868 0.605025
1688 1 0.58998 0.522032 0.521455
304 1 0.496474 0.571248 0.541496
1076 1 0.651981 0.561532 0.566486
758 1 0.314977 0.602683 0.56715
433 1 0.157419 0.723461 0.512011
1933 1 0.412772 0.669558 0.52119
29 1 0.216842 0.808941 0.52557
303 1 0.617242 0.694294 0.522096
14 1 0.104004 0.765645 0.596158
228 1 0.680886 0.730356 0.515222
264 1 0.781978 0.736271 0.513622
828 1 0.136158 0.86775 0.571721
1265 1 0.34858 0.813055 0.540687
1200 1 0.786845 0.865988 0.537883
911 1 0.914386 0.845634 0.571636
1883 1 0.353521 0.833382 0.534594
1909 1 0.455816 0.926913 0.501592
1725 1 0.650559 0.949676 0.527678
763 1 0.597847 0.91462 0.502622
659 1 0.509468 0.97285 0.614157
1910 1 0.13126 0.626827 0.998378
689 1 0.7592 0.536201 0.554051
1501 1 0.461679 0.509168 0.542782
972 1 0.530502 0.584947 0.55939
181 1 0.012402 0.567946 0.526025
1098 1 0.576574 0.68142 0.513704
1898 1 0.42004 0.631357 0.603866
649 1 0.279937 0.586407 0.537994
1409 1 0.675256 0.629561 0.596483
74 1 0.461711 0.679826 0.589439
1019 1 0.447362 0.957951 0.509964
1649 1 0.622069 0.74593 0.542906
490 1 0.154097 0.75113 0.533142
1439 1 0.187569 0.862837 0.540665
896 1 0.946582 0.876088 0.596946
817 1 0.520719 0.949919 0.568891
1496 1 0.0401954 0.980579 0.509901
5 1 0.426864 0.912208 0.543594
847 1 0.718449 0.526155 0.60082
108 1 0.130028 0.535467 0.533606
713 1 0.127764 0.529161 0.645871
259 1 0.742798 0.763963 0.9927
1504 1 0.798462 0.597659 0.570363
714 1 0.290011 0.612071 0.588972
1378 1 0.767309 0.610238 0.579689
250 1 0.187999 0.74418 0.629134
79 1 0.450762 0.647898 0.589642
1651 1 0.735522 0.694605 0.573971
1144 1 0.0242375 0.778011 0.639466
528 1 0.624756 0.851718 0.639001
524 1 0.871861 0.801494 0.555907
1342 1 0.402897 0.861727 0.585229
1459 1 0.749416 0.797313 0.59676
153 1 0.71231 0.780272 0.554294
353 1 0.509988 0.855466 0.565863
1093 1 0.753009 0.827035 0.62761
999 1 0.681267 0.837925 0.590768
1980 1 0.829805 0.873923 0.545074
557 1 0.315763 0.987919 0.588495
1275 1 0.57146 0.995416 0.563853
1229 1 0.46055 0.542457 0.570432
534 1 0.192588 0.544268 0.620554
800 1 0.940528 0.568501 0.541063
1175 1 0.336584 0.568737 0.640078
1122 1 0.327117 0.628582 0.60433
1031 1 0.175562 0.724703 0.550908
1351 1 0.29939 0.722627 0.634225
719 1 0.711332 0.732291 0.594536
1697 1 0.251191 0.806957 0.581833
1344 1 0.313284 0.799355 0.595006
1665 1 0.479937 0.902317 0.621823
392 1 0.0859924 0.818008 0.589893
1904 1 0.312632 0.873746 0.628678
1079 1 0.752993 0.829888 0.658598
1565 1 0.918489 0.852685 0.633516
640 1 0.605291 0.934274 0.622223
1250 1 0.440169 0.988856 0.613573
1610 1 0.179301 0.969387 0.584316
768 1 0.743946 0.924885 0.554154
344 1 0.279881 0.515854 0.590198
9 1 0.205631 0.551696 0.571122
47 1 0.701758 0.570141 0.586982
389 1 0.520137 0.631102 0.589035
846 1 0.357566 0.646764 0.587992
1425 1 0.397819 0.685146 0.667666
1767 1 0.764583 0.684729 0.629818
173 1 0.647553 0.73631 0.655168
286 1 0.820346 0.858488 0.605366
584 1 0.456263 0.85587 0.632406
1234 1 0.111985 0.831078 0.683049
523 1 0.714154 0.883266 0.616152
1564 1 0.745292 0.925163 0.652852
1694 1 0.385466 0.915057 0.672857
988 1 0.949348 0.936995 0.98352
913 1 0.602347 0.576498 0.632616
595 1 0.885642 0.555256 0.571128
1389 1 0.350144 0.532937 0.574912
548 1 0.362928 0.613203 0.581108
981 1 0.0446679 0.647487 0.641122
1811 1 0.602053 0.68195 0.686737
531 1 0.596753 0.697542 0.645986
997 1 0.28505 0.75084 0.671643
1615 1 0.0724316 0.82876 0.590414
907 1 0.560136 0.744445 0.613061
514 1 0.860074 0.7104 0.620791
78 1 0.656048 0.75621 0.659719
19 1 0.732868 0.830775 0.670272
975 1 0.7205 0.860945 0.643708
274 1 0.625538 0.830987 0.657297
1668 1 0.779002 0.843213 0.619132
290 1 0.160622 0.933375 0.608387
1977 1 0.791677 0.958368 0.701729
544 1 0.323816 0.93218 0.561418
1603 1 0.0852774 0.522001 0.706467
632 1 0.163011 0.985725 0.638793
1468 1 0.105035 0.523084 0.63163
1482 1 0.139891 0.580876 0.612995
1332 1 0.333085 0.520707 0.622048
1935 1 0.788659 0.978862 0.534793
747 1 0.323977 0.558002 0.620417
600 1 0.0537755 0.696361 0.656335
1809 1 0.736835 0.651361 0.607717
1659 1 0.0605567 0.781643 0.613766
1879 1 0.269026 0.732458 0.685162
558 1 0.658879 0.831945 0.659778
95 1 0.141931 0.836807 0.690831
681 1 0.271399 0.881174 0.709002
519 1 0.804989 0.727085 0.969558
8 1 0.936688 0.933066 0.649984
1807 1 0.140503 0.602266 0.518351
1303 1 0.203238 0.606026 0.608961
1014 1 0.590339 0.55711 0.687895
408 1 0.162065 0.590259 0.627878
430 1 0.0960786 0.54962 0.620082
1082 1 0.591335 0.648205 0.716069
657 1 0.129575 0.655512 0.658385
1711 1 0.64196 0.751959 0.67547
1853 1 0.729287 0.811772 0.658711
474 1 0.170823 0.816558 0.789495
10 1 0.33528 0.749437 0.679365
1805 1 0.979328 0.869384 0.701477
1418 1 0.586987 0.87608 0.705649
1779 1 0.854937 0.82762 0.7056
1151 1 0.70247 0.877884 0.721525
602 1 0.79506 0.841081 0.682099
1755 1 0.656091 0.932998 0.664085
1715 1 0.142249 0.951067 0.634616
1329 1 0.656162 0.505331 0.869714
1764 1 0.862468 0.575194 0.728156
1326 1 0.953362 0.54638 0.681618
1343 1 0.281909 0.539457 0.647199
2007 1 0.423297 0.57027 0.656614
329 1 0.551356 0.58343 0.68286
1185 1 0.682586 0.531612 0.659511
399 1 0.535823 0.714646 0.701504
807 1 0.210399 0.701226 0.616783
2043 1 0.706478 0.692337 0.710911
124 1 0.0718783 0.861972 0.6981
1452 1 0.926946 0.853919 0.675128
428 1 0.321041 0.868836 0.629712
761 1 0.456022 0.861375 0.690674
1789 1 0.442237 0.931641 0.727047
1997 1 0.511805 0.920949 0.706034
322 1 0.506267 0.889019 0.658865
1936 1 0.747598 0.960383 0.611712
989 1 0.24693 0.996454 0.723717
1874 1 0.440011 0.962453 0.700699
1058 1 0.237622 0.977823 0.988113
1534 1 0.439662 0.545367 0.722541
798 1 0.182885 0.611635 0.699855
516 1 0.398511 0.611083 0.697516
980 1 0.908234 0.639558 0.743307
403 1 0.226437 0.572902 0.700953
1350 1 0.568609 0.687381 0.76717
582 1 0.893832 0.701797 0.713094
1220 1 0.817583 0.703467 0.778053
55 1 0.462109 0.834008 0.684342
1352 1 0.859057 0.723159 0.753558
685 1 0.87182 0.858134 0.671594
1954 1 0.911887 0.848989 0.738099
1712 1 0.0619258 0.89041 0.732406
1384 1 0.715333 0.983724 0.704689
1142 1 0.486135 0.983727 0.658862
578 1 0.358177 0.511926 0.721772
906 1 0.738542 0.540368 0.782953
4 1 0.266033 0.63287 0.733735
757 1 0.703034 0.773043 0.68924
375 1 0.814688 0.831758 0.713067
1324 1 0.744244 0.880249 0.75212
1347 1 0.664029 0.913413 0.615022
483 1 0.247941 0.897006 0.686907
1330 1 0.841201 0.953113 0.704132
1920 1 0.105276 0.931745 0.727027
1895 1 0.0104522 0.963278 0.957453
1207 1 0.0579211 0.994363 0.76215
213 1 0.493108 0.97806 0.704539
420 1 0.589867 0.992993 0.64403
1685 1 0.184688 0.523147 0.690346
397 1 0.289283 0.54701 0.715106
1009 1 0.359848 0.615055 0.75769
1179 1 0.66171 0.540132 0.757178
1600 1 0.3834 0.545572 0.729646
53 1 0.677295 0.653227 0.78491
1133 1 0.94505 0.603304 0.720794
117 1 0.968227 0.708138 0.761082
2030 1 0.919515 0.648959 0.764935
1915 1 0.441944 0.713848 0.760551
2015 1 0.983366 0.706497 0.787911
1877 1 0.649911 0.751654 0.707816
239 1 0.57887 0.779554 0.761519
1937 1 0.00172147 0.850734 0.746898
115 1 0.440341 0.809975 0.764847
1966 1 0.374021 0.810636 0.789116
279 1 0.0117522 0.932089 0.737136
684 1 0.859815 0.960469 0.694396
81 1 0.903129 0.877744 0.731742
738 1 0.231579 0.918433 0.839328
1124 1 0.737691 0.95116 0.763818
1440 1 0.0640069 0.982899 0.744132
1299 1 0.279865 0.98712 0.649441
1474 1 0.548943 0.88322 0.966093
1267 1 0.484147 0.983613 0.76263
1537 1 0.0871138 0.523054 0.768161
753 1 0.790173 0.553084 0.766199
827 1 0.94882 0.627045 0.764658
1945 1 0.460082 0.562239 0.756526
1100 1 0.586563 0.593481 0.814068
854 1 0.68731 0.662688 0.827248
1107 1 0.868017 0.626521 0.793167
1235 1 0.340766 0.708423 0.723477
921 1 0.700424 0.823097 0.763406
696 1 0.971099 0.789748 0.763707
449 1 0.0982031 0.792387 0.769728
260 1 0.182119 0.836838 0.747213
884 1 0.10807 0.791707 0.749818
1162 1 0.337454 0.8413 0.750097
1423 1 0.0105224 0.855787 0.726416
1741 1 0.583565 0.98845 0.730577
1763 1 0.733038 0.959347 0.755923
1586 1 0.397049 0.846611 0.514541
219 1 0.0924716 0.996523 0.756923
1991 1 0.693229 0.963858 0.77517
285 1 0.751676 0.761206 0.987796
1306 1 0.361083 0.650936 0.826405
1611 1 0.415732 0.54491 0.761394
1434 1 0.0424499 0.585755 0.775395
1476 1 0.963934 0.774417 0.793254
1502 1 0.664379 0.740555 0.839182
1666 1 0.0562212 0.819371 0.756521
1353 1 0.710277 0.841554 0.796365
2001 1 0.227965 0.79666 0.811426
973 1 0.236107 0.87503 0.792753
1509 1 0.350431 0.859195 0.784482
1880 1 0.540517 0.872199 0.802795
1114 1 0.730795 0.948576 0.75131
1068 1 0.740496 0.891761 0.733255
1248 1 0.542275 0.95076 0.806824
327 1 0.37106 0.52593 0.664072
888 1 0.707615 0.501327 0.62329
1622 1 0.326547 0.546833 0.742353
1616 1 0.408304 0.512026 0.788452
917 1 0.819885 0.560096 0.866475
1011 1 0.806284 0.596995 0.820254
1262 1 0.0372317 0.604478 0.827386
1373 1 0.251878 0.615191 0.79827
991 1 0.388839 0.594148 0.728229
2035 1 0.833329 0.614123 0.835907
1441 1 0.152744 0.650303 0.733402
360 1 0.950555 0.670142 0.866242
919 1 0.391038 0.678481 0.804875
1930 1 0.703868 0.757124 0.846644
178 1 0.968999 0.669417 0.754186
1447 1 0.49285 0.746168 0.728125
863 1 0.482489 0.753986 0.727073
1636 1 0.13857 0.729503 0.843763
571 1 0.977008 0.78622 0.793821
1675 1 0.935273 0.824627 0.814361
1212 1 0.291866 0.794678 0.812278
1465 1 0.0898431 0.847698 0.781122
2046 1 0.910076 0.873228 0.816465
1580 1 0.438253 0.845048 0.845088
1512 1 0.712284 0.854444 0.740685
238 1 0.572659 0.90155 0.812377
1211 1 0.671202 0.514447 0.628561
3 1 0.0721539 0.524905 0.813494
1768 1 0.952711 0.560439 0.764869
834 1 0.798662 0.602832 0.845383
1239 1 0.9701 0.502539 0.744853
2025 1 0.530543 0.638221 0.818275
1321 1 0.757155 0.554918 0.806166
1495 1 0.0959336 0.593151 0.80408
1508 1 0.143766 0.634785 0.825771
767 1 0.265036 0.595574 0.795214
668 1 0.701925 0.663457 0.695511
1113 1 0.75136 0.729509 0.859414
377 1 0.9134 0.635283 0.866368
1948 1 0.464637 0.700864 0.794939
1252 1 0.745278 0.734746 0.816675
929 1 0.756818 0.744492 0.852115
73 1 0.770898 0.675242 0.851495
1034 1 0.0151007 0.879796 0.812997
1370 1 0.476214 0.773177 0.790299
224 1 0.0263608 0.739489 0.793748
904 1 0.0666476 0.919545 0.796723
598 1 0.444609 0.838017 0.833199
1555 1 0.767583 0.793641 0.792962
1960 1 0.584827 0.88946 0.802001
212 1 0.259373 0.875001 0.774039
1598 1 0.656416 0.944311 0.845499
1090 1 0.196258 0.927562 0.876935
542 1 0.079433 0.890681 0.792108
498 1 0.375207 0.984741 0.753497
1460 1 0.120434 0.952903 0.754455
1264 1 0.691154 0.702573 0.972143
1531 1 0.470695 0.514923 0.855622
276 1 0.786756 0.550331 0.901329
1885 1 0.860411 0.603476 0.884437
1304 1 0.163407 0.556253 0.877481
1786 1 0.917761 0.54938 0.840123
811 1 0.685588 0.56629 0.856006
1246 1 0.800413 0.590124 0.837061
141 1 0.125802 0.596719 0.894259
1644 1 0.918367 0.591749 0.803531
1426 1 0.643055 0.570727 0.85994
1073 1 0.0225427 0.59757 0.805994
1984 1 0.0654352 0.716467 0.789558
1053 1 0.283869 0.773576 0.80799
1171 1 0.578119 0.830005 0.798769
994 1 0.537725 0.879884 0.832566
1422 1 0.984753 0.835022 0.846567
1637 1 0.269871 0.944425 0.876468
1606 1 0.46026 0.927926 0.779979
1083 1 0.549366 0.905939 0.856071
521 1 0.549376 0.955421 0.821292
1820 1 0.902209 0.949082 0.889167
922 1 0.307911 0.873348 0.830575
222 1 0.900409 0.909665 0.84135
723 1 0.197674 0.677477 0.937928
347 1 0.553886 0.724456 0.9411
1608 1 0.977268 0.714777 0.871805
232 1 0.964534 0.758171 0.854733
667 1 0.690028 0.810796 0.86185
174 1 0.816496 0.763051 0.821245
352 1 0.848987 0.782174 0.834081
1938 1 0.61227 0.899419 0.853464
1959 1 0.307537 0.966354 0.836799
1070 1 0.384517 0.958293 0.851095
479 1 0.276982 0.506407 0.87627
1225 1 0.93304 0.636184 0.836939
862 1 0.201834 0.665605 0.931799
200 1 0.349895 0.641618 0.871635
475 1 0.805034 0.640759 0.938832
454 1 0.209496 0.620074 0.867215
413 1 0.691825 0.706845 0.826433
271 1 0.411784 0.691159 0.874847
1462 1 0.70606 0.751695 0.80594
1792 1 0.746311 0.763464 0.916979
1553 1 0.109605 0.765093 0.832199
824 1 0.219312 0.87709 0.884397
2029 1 0.380182 0.896743 0.828704
934 1 0.642394 0.997104 0.868546
337 1 0.760068 0.908476 0.891288
431 1 0.868036 0.931594 0.876706
931 1 0.845418 0.96334 0.861588
515 1 0.196498 0.930798 0.891615
1516 1 0.207511 0.986232 0.865253
1367 1 0.61891 0.951631 0.879004
277 1 0.934925 0.927779 0.853778
190 1 0.620281 0.999168 0.876475
809 1 0.281184 0.968676 0.529371
1752 1 0.471401 0.558854 0.865935
522 1 0.459391 0.52053 0.955485
1597 1 0.0389816 0.597928 0.860501
1271 1 0.407567 0.614247 0.949758
466 1 0.00407997 0.659925 0.876849
312 1 0.668403 0.69523 0.908426
1716 1 0.738651 0.721589 0.838126
996 1 0.0406493 0.80455 0.865375
1015 1 0.167858 0.756044 0.963775
536 1 0.0548251 0.841346 0.78539
50 1 0.0854333 0.763847 0.923387
1059 1 0.156941 0.845998 0.864972
1381 1 0.442068 0.901151 0.921019
1148 1 0.934968 0.93582 0.872875
724 1 0.148849 0.996969 0.850166
876 1 0.460103 0.995631 0.719843
1401 1 0.78249 0.501185 0.97838
739 1 0.641171 0.557799 0.83558
1025 1 0.936049 0.538584 0.923351
301 1 0.92813 0.558481 0.873765
1127 1 0.488457 0.5614 0.925608
1464 1 0.349555 0.577903 0.92961
882 1 0.324417 0.578101 0.92022
1691 1 0.463031 0.655917 0.89879
1821 1 0.478436 0.601107 0.844123
574 1 0.600465 0.625719 0.915273
583 1 0.618358 0.738822 0.958663
1747 1 0.169343 0.834725 0.912587
1167 1 0.885974 0.817367 0.955951
455 1 0.229853 0.918759 0.882558
1657 1 0.686847 0.85725 0.835613
654 1 0.240999 0.903555 0.871706
1549 1 0.462546 0.937808 0.891802
1799 1 0.575929 0.93259 0.894845
1106 1 0.609412 0.966054 0.887337
1803 1 0.362336 0.969747 0.909739
1116 1 0.641294 0.514687 0.901113
731 1 0.954125 0.588796 0.947082
1147 1 0.0285273 0.652881 0.9212
705 1 0.941222 0.568444 0.954753
471 1 0.302742 0.702622 0.944066
1890 1 0.764737 0.62872 0.914689
1473 1 0.675009 0.711583 0.886339
1315 1 0.727252 0.743259 0.9041
880 1 0.622116 0.755181 0.912482
792 1 0.312761 0.772574 0.852792
390 1 0.727874 0.802973 0.898678
607 1 0.295569 0.812221 0.927937
320 1 0.244894 0.819815 0.852511
572 1 0.315788 0.830203 0.875511
596 1 0.486997 0.894897 0.909731
1095 1 0.677596 0.907458 0.969594
2013 1 0.727036 0.905635 0.905749
275 1 0.801868 0.984288 0.880577
319 1 0.931071 0.933546 0.974529
993 1 0.560336 0.997143 0.99793
429 1 0.996105 0.99845 0.909009
902 1 0.920677 0.971897 0.874752
1130 1 0.89861 0.953372 0.935964
778 1 0.0334881 0.528521 0.979268
61 1 0.388318 0.963072 0.654995
419 1 0.504578 0.582462 0.873893
1671 1 0.701527 0.618032 0.885929
1511 1 0.658473 0.614416 0.923395
393 1 0.985646 0.725956 0.935349
1257 1 0.78642 0.724151 0.92948
306 1 0.239898 0.803111 0.882941
1863 1 0.617305 0.77622 0.932562
604 1 0.993 0.765974 0.929362
1483 1 0.801554 0.8422 0.932939
546 1 0.118449 0.856845 0.989718
2014 1 0.169246 0.965836 0.907884
1408 1 0.991793 0.864911 0.904674
1625 1 0.0855319 0.922489 0.965201
1924 1 0.00402151 0.994189 0.901134
280 1 0.200973 0.997667 0.948038
1689 1 0.162039 0.53416 0.895188
1865 1 0.532572 0.637921 0.945539
1288 1 0.521102 0.546913 0.884878
1312 1 0.846537 0.510632 0.500968
1102 1 0.216088 0.571529 0.945215
1978 1 0.527163 0.85373 0.996356
64 1 0.522002 0.55648 0.910021
1066 1 0.740586 0.53716 0.994502
838 1 0.469331 0.668342 0.978145
1886 1 0.560198 0.658582 0.937854
535 1 0.645695 0.766951 0.997611
1900 1 0.32721 0.804473 0.962404
833 1 0.582312 0.903354 0.984152
1639 1 0.215664 0.89987 0.923774
924 1 0.0394914 0.913325 0.992095
1749 1 0.182327 0.972795 0.928506
1614 1 0.240624 0.972672 0.948838
933 1 0.622017 0.908816 0.971567
593 1 0.771418 0.553092 0.580697
243 1 0.299102 0.504159 0.536356
599 1 0.607751 0.583892 0.976118
1035 1 0.811382 0.647975 0.940476
547 1 0.0689951 0.642115 0.964846
1901 1 0.886483 0.601153 0.974784
1828 1 0.0119612 0.759134 0.949523
1681 1 0.286228 0.992851 0.699027
1269 1 0.406914 0.775732 0.948807
1383 1 0.730133 0.786008 0.96479
143 1 0.48314 0.794385 0.953884
646 1 0.0599305 0.861705 0.985603
2026 1 0.629141 0.859035 0.969392
1407 1 0.840886 0.861973 0.94381
440 1 0.417351 0.916365 0.99545
313 1 0.0901917 0.996088 0.964919
1823 1 0.363142 0.97123 0.893013
1451 1 0.392377 0.951773 0.958755
155 1 0.205308 0.505424 0.973424
1072 1 0.27985 0.981634 0.814936
853 1 0.973694 0.671135 0.972106
210 1 0.254645 0.529601 0.644421
762 1 0.444927 0.751194 0.994618
1952 1 0.444245 0.637896 0.97651
777 1 0.970547 0.775172 0.939379
386 1 0.911534 0.993365 0.516358
1677 1 0.399243 0.826384 0.962332
144 1 0.710277 0.540088 0.533263
132 1 0.311016 0.849641 0.972674
1875 1 0.401178 0.845721 0.969359
1988 1 0.764022 0.983205 0.627141
726 1 0.503813 0.523954 0.805582
1916 1 0.994497 0.510961 0.698105
1631 1 0.38238 0.52588 0.943838
964 1 0.841446 0.519044 0.540236
579 1 0.537937 0.60898 0.50799
380 1 0.195886 0.652179 0.993269
418 1 0.267143 0.893041 0.501173
109 1 0.632811 0.998875 0.538444
334 1 0.492882 0.500197 0.879637
| [
"[email protected]"
] | |
23d627ec0997959cf1212df3ad37627b2530ced6 | c8abf01fb77b526a0a6af1f7ed5b740d8aec65ba | /user_profile/migrations/0001_initial.py | 486a7471308c7ad448471795d084c1ecd6dd824b | [] | no_license | bitapardaz/diabet | f1cc6e039792c91bfb67754f5c7e18141f2573cc | 8a9b38d81c512148be43ea9cf4d09acbd07c3af0 | refs/heads/master | 2021-07-16T17:08:24.502219 | 2017-10-23T21:56:27 | 2017-10-23T21:56:27 | 108,029,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 880 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='UserType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100)),
],
),
migrations.AddField(
model_name='userprofile',
name='title',
field=models.ForeignKey(to='user_profile.UserType'),
),
]
| [
"[email protected]"
] | |
49de933b088b5782f28cb1a4cf8951cf0b947a90 | b59bc650ae07b18757b455e1b5cb7cfde91714a4 | /.env/local/lib/python3.5/site-packages/pip/_vendor/urllib3/util/response.py | 4dc7a11092cdd6afe3b1da5a4c197e42dfea15ff | [] | no_license | jhashubham28/BE_Project_SpeakerRecognition | d07c584359a5ebc6b524b3b4617b072c58724d17 | ede8fd53e79973e4116030e5a36f9deaa61dcc63 | refs/heads/master | 2020-05-29T20:42:15.055659 | 2019-05-30T20:17:30 | 2019-05-30T20:17:30 | 189,354,351 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | /home/pi/Downloads/BE_Project-SpeakerRecognition-master/.env/lib/python3.5/site-packages/pip/_vendor/urllib3/util/response.py | [
"[email protected]"
] | |
f876294b823cd42948d89f02caf02bbee39ddf80 | fab39aa4d1317bb43bc11ce39a3bb53295ad92da | /tests/torch/test_load_model_state.py | 5eda1ed71238d33770b716b7cb5786bd3945a565 | [
"Apache-2.0"
] | permissive | dupeljan/nncf | 8cdce27f25f01ce8e611f15e1dc3036fb8548d6e | 0abfd7103ca212888a946ba4d0fbdb9d436fdaff | refs/heads/develop | 2023-06-22T00:10:46.611884 | 2021-07-22T10:32:11 | 2021-07-22T10:32:11 | 388,719,455 | 0 | 0 | Apache-2.0 | 2021-07-23T07:46:15 | 2021-07-23T07:43:43 | null | UTF-8 | Python | false | false | 27,407 | py | """
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from typing import Dict, List, Set
import pytest
import torch
from examples.torch.common.model_loader import load_model
from nncf.torch.checkpoint_loading import KeyMatcher
from nncf.torch.checkpoint_loading import OPTIONAL_PARAMETERS_REGISTRY
from nncf.torch.checkpoint_loading import ProcessedKeyStatus
from nncf.torch.checkpoint_loading import ProcessedKeys
from nncf.torch.checkpoint_loading import load_state
from nncf.torch.dynamic_graph.transform_graph import replace_modules_by_nncf_modules
from nncf.torch.layers import NNCF_PADDING_VALUE_ATTR_NAME
from nncf.torch.nncf_network import EXTERNAL_QUANTIZERS_STORAGE_NAME
from nncf.torch.nncf_network import LEGACY_ACT_STORAGE_NAME
from tests.torch.helpers import BasicConvTestModel
from tests.torch.helpers import check_equal
def test_export_sq_11_is_ok(tmp_path):
test_path = str(tmp_path.joinpath("test.onnx"))
model = load_model('squeezenet1_1', pretrained=False)
dummy_input = torch.randn(1, 3, 224, 224)
torch.onnx.export(model, dummy_input, test_path, verbose=False)
os.remove(test_path)
def test_load_state_skips_not_matched_params__from_larger_to_smaller():
ref_weights = BasicConvTestModel.default_weight()
ref_bias = BasicConvTestModel.default_bias()
model_save = BasicConvTestModel(out_channels=1, weight_init=2, bias_init=2)
model_load = BasicConvTestModel(out_channels=2)
num_loaded = load_state(model_load, model_save.state_dict())
act_bias = model_load.conv.bias.data
act_weights = model_load.conv.weight.data
assert num_loaded == 0
check_equal(act_bias, ref_bias)
check_equal(act_weights, ref_weights)
def test_can_skip_padding_value():
model = BasicConvTestModel(out_channels=2)
state_dict = ({'conv.weight': model.default_weight(),
'conv.bias': model.default_bias()})
model, _ = replace_modules_by_nncf_modules(model)
num_loaded = load_state(model, state_dict, is_resume=True)
assert num_loaded == 2
def test_can_load_padding_value():
VALUE_TO_SET = 5
model = BasicConvTestModel()
state_dict = ({
'conv.weight': model.default_weight(),
'conv.bias': model.default_bias(),
'.'.join(['conv', NNCF_PADDING_VALUE_ATTR_NAME]): torch.Tensor([VALUE_TO_SET])
})
model, _ = replace_modules_by_nncf_modules(model)
assert model.conv.get_padding_value_ref().item() == 0
num_loaded = load_state(model, state_dict, is_resume=True)
assert num_loaded == 3
assert model.conv.get_padding_value_ref().item() == VALUE_TO_SET
def test_load_state_skips_not_matched_params__from_smaller_to_larger():
ref_weights = torch.tensor([[[[3, 2],
[2, 3]]]])
ref_bias = torch.tensor([2.])
model_save = BasicConvTestModel(out_channels=2)
model_load = BasicConvTestModel(out_channels=1, weight_init=2, bias_init=2)
num_loaded = load_state(model_load, model_save.state_dict())
assert num_loaded == 0
act_bias = model_load.conv.bias.data
act_weights = model_load.conv.weight.data
check_equal(act_bias, ref_bias)
check_equal(act_weights, ref_weights)
class MatchKeyDesc:
MOCKED_VALUE = torch.zeros([1])
def __init__(self, num_loaded=0, is_resume=True, expects_error=False,
state_dict_to_load: Dict[str, torch.Tensor] = None,
model_state_dict: Dict[str, torch.Tensor] = None):
self.state_dict_to_load = state_dict_to_load if state_dict_to_load else {}
self.model_state_dict = model_state_dict if model_state_dict else {}
self.new_dict: Dict[str, torch.Tensor] = {}
self.num_loaded = num_loaded
self.processed_keys = ProcessedKeys()
self.ignored_keys = []
self.is_resume = is_resume
self.expects_error = expects_error
self.has_deprecation_warning = False
def __str__(self):
result = '-'.join(self.state_dict_to_load.keys()) + '__TO__' + '-'.join(self.model_state_dict.keys())
if self.ignored_keys:
result += '__IGNORE__' + '-'.join(self.ignored_keys)
if self.is_resume:
result += '__resume'
return result
def setup_test(self, mocker):
pass
def keys_to_load(self, keys: List[str]):
for k in keys:
self.state_dict_to_load[k] = self.MOCKED_VALUE
return self
def model_keys(self, keys: List[str]):
for k in keys:
self.model_state_dict[k] = self.MOCKED_VALUE
return self
def keys_to_ignore(self, keys: List[str]):
self.ignored_keys = keys
return self
def missing(self, keys: List[str]):
self.processed_keys.extend_keys(keys, ProcessedKeyStatus.MISSING)
return self
def unexpected(self, keys: List[str]):
self.processed_keys.extend_keys(keys, ProcessedKeyStatus.UNEXPECTED)
return self
def size_mismatched(self, keys: List[str]):
self.processed_keys.extend_keys(keys, ProcessedKeyStatus.SIZE_MISMATCHED)
return self
def matched(self, keys: List[str]):
self.processed_keys.extend_keys(keys, ProcessedKeyStatus.MATCHED)
return self
def skipped(self, keys: List[str]):
self.processed_keys.extend_keys(keys, ProcessedKeyStatus.SKIPPED)
return self
def all_not_matched(self):
self.unexpected(list(self.state_dict_to_load))
self.missing(list(self.model_state_dict))
return self
def all_matched(self):
self.matched(list(self.model_state_dict))
return self
def with_deprecation_warning(self):
self.has_deprecation_warning = True
return self
OP1 = 'op1'
OP2 = 'op2'
PREFIX = 'prx'
SUFFIX = 'sfx'
OP1_NOT_PARAM = f'{PREFIX}_{OP1}'
OP1_SUFFIX = f'{PREFIX}.{OP1}'
OP1_PREFIX = f'{OP1}.{SUFFIX}'
OP2_SUFFIX = f'{PREFIX}.{OP2}'
OP2_NOT_PARAM = f'{PREFIX}_{OP2}'
OP2_MIDDLE = f'{PREFIX}.{OP2}.{SUFFIX}'
class OptionalMatchKeyDesc(MatchKeyDesc):
def setup_test(self, mocker):
def fn() -> Set['str']:
return {OP1, OP2}
mocked_registry_get = mocker.patch.object(OPTIONAL_PARAMETERS_REGISTRY, 'get_parameters_names')
mocked_registry_get.side_effect = fn
MATCH_KEY_DESC_LIST = [
# basic errors handling: mismatched size, unexpected and missing
MatchKeyDesc(num_loaded=0, expects_error=True,
state_dict_to_load={'1': torch.zeros(1)},
model_state_dict={'1': torch.zeros(2)})
.size_mismatched(['1']),
MatchKeyDesc(num_loaded=0, is_resume=False,
state_dict_to_load={'1': torch.zeros(1)},
model_state_dict={'1': torch.zeros(2)})
.size_mismatched(['1']),
MatchKeyDesc(num_loaded=1, is_resume=False,
state_dict_to_load={'1': torch.zeros(1)},
model_state_dict={'1': torch.zeros(2)}).keys_to_load(['2']).model_keys(['2', '3'])
.size_mismatched(['1']).missing(['3']).matched(['2']),
MatchKeyDesc(num_loaded=1, is_resume=False,
state_dict_to_load={'1': torch.zeros(1)},
model_state_dict={'1': torch.zeros(2)}).keys_to_load(['2', '4']).model_keys(['2', '3'])
.size_mismatched(['1']).missing(['3']).unexpected(['4']).matched(['2']),
MatchKeyDesc(num_loaded=2).keys_to_load(['1', '2']).model_keys(['1', '2'])
.all_matched(),
MatchKeyDesc(num_loaded=1, expects_error=True).keys_to_load(['1', '2']).model_keys(['1'])
.unexpected(['2']).matched(['1']),
MatchKeyDesc(num_loaded=1, expects_error=True).keys_to_load(['1']).model_keys(['1', '2'])
.missing(['2']).matched(['1']),
MatchKeyDesc(num_loaded=1, is_resume=False).keys_to_load(['1']).model_keys(['1', '2'])
.missing(['2']).matched(['1']),
# wrapping by NNCFNetwork and DataParallel & DistributedDataParallel
MatchKeyDesc(num_loaded=2).keys_to_load(['module.1', 'nncf_module.2']).model_keys(['1', '2'])
.all_matched(),
MatchKeyDesc(num_loaded=2).keys_to_load(['1', '2']).model_keys(['module.1', 'nncf_module.2'])
.all_matched(),
MatchKeyDesc(num_loaded=2).keys_to_load(['module.nncf_module.1', 'module.2']).model_keys(['1', 'nncf_module.2'])
.all_matched(),
MatchKeyDesc(num_loaded=0, expects_error=True)
.keys_to_load(['module.nncf_module.1.1', 'module.2']).model_keys(['1', '2.2'])
.all_not_matched(),
# collisions after normalization of keys
# different order of pre_ops
MatchKeyDesc(num_loaded=2)
.keys_to_load(['pre_ops.0.op.1', 'pre_ops.1.op.2'])
.model_keys(['pre_ops.1.op.1', 'pre_ops.0.op.2'])
.all_matched(),
# binarization of activation and weight may have the identical parameter (e.g. enabled)
MatchKeyDesc(num_loaded=2)
.keys_to_load(['pre_ops.0.op.1', 'pre_ops.1.op.1'])
.model_keys(['pre_ops.0.op.1', 'pre_ops.1.op.1'])
.all_matched(),
MatchKeyDesc(num_loaded=2)
.keys_to_load(['nncf_module.pre_ops.1.op.1', 'nncf_module.pre_ops.0.op.1'])
.model_keys(['module.nncf_module.pre_ops.1.op.1', 'module.nncf_module.pre_ops.0.op.1'])
.all_matched(),
# quantization -> quantization + sparsity: op.1 was first, than
MatchKeyDesc(num_loaded=2)
.keys_to_load(['pre_ops.0.op.1', 'pre_ops.1.op.2'])
.model_keys(['pre_ops.1.op.1', 'pre_ops.1.op.2'])
.all_matched(),
MatchKeyDesc(num_loaded=2)
.keys_to_load(['module.1', '1']).model_keys(['module.1', '1'])
.all_matched(),
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.1', '1']).model_keys(['module.1'])
.matched(['module.1']).unexpected(['module.1']),
MatchKeyDesc(num_loaded=2)
.keys_to_load(['pre_ops.0.op.1', 'module.pre_ops.1.op.2'])
.model_keys(['module.pre_ops.0.op.1|OUTPUT', 'pre_ops.6.op.2'])
.all_matched(),
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.1']).model_keys(['module.1', '1'])
.matched(['1']).missing(['module.1']),
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['1']).model_keys(['module.1', '1'])
.matched(['1']).missing(['module.1']),
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['1', 'module.1']).model_keys(['1'])
.matched(['1']).unexpected(['module.1']),
# can match legacy activation quantizer storage name
MatchKeyDesc(num_loaded=2)
.keys_to_load([LEGACY_ACT_STORAGE_NAME + '.relu_0.' + OP1,
LEGACY_ACT_STORAGE_NAME + '.relu_0.' + OP2])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP2])
.all_matched()
.with_deprecation_warning(),
# can match new format of activation quantizer with |INPUT and |OUTPUT
MatchKeyDesc(num_loaded=2)
.keys_to_load(['relu_0.' + OP1, 'relu_0.' + OP2]).model_keys(['relu_0|OUTPUT.' + OP1, 'relu_0|INPUT.' + OP2])
.all_matched(),
# can match legacy activation quantizer + new format with |INPUT and |OUTPUT
MatchKeyDesc(num_loaded=2)
.keys_to_load([LEGACY_ACT_STORAGE_NAME + '.relu_0.' + OP1,
LEGACY_ACT_STORAGE_NAME + '.relu_0.' + OP2])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|INPUT.' + OP2])
.all_matched()
.with_deprecation_warning(),
# can match version agnostic format with the version_specific format
MatchKeyDesc(num_loaded=4)
.keys_to_load(["conv2d.weight",
"RELUModule.weight",
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP2])
.model_keys(["conv2d.weight",
"RELUModule.weight",
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|INPUT.' + OP2])
.all_matched()
.with_deprecation_warning(),
# can match version agnostic format with the version_specific format + legacy act quant
MatchKeyDesc(num_loaded=4)
.keys_to_load(["conv2d.weight",
"RELUModule.weight",
LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP1,
LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP2])
.model_keys(["conv2d.weight",
"RELUModule.weight",
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu__0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu__0|INPUT.' + OP2])
.all_matched()
.with_deprecation_warning(),
# can match unified FQ
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + LEGACY_ACT_STORAGE_NAME + '.relu_0.' + OP1,
'module.' + LEGACY_ACT_STORAGE_NAME + '.relu_1.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_1.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_1.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_2.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_2|OUTPUT;relu_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
# not matched common operation
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_1.' + OP2,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_2.' + OP1_NOT_PARAM])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_2|OUTPUT;relu_1|OUTPUT.' + OP1])
.matched([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_2|OUTPUT;relu_1|OUTPUT.' + OP1])
.unexpected(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_1.' + OP2,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_2.' + OP1_NOT_PARAM]),
# not all unified scopes are matched: relu_3 vs relu_1
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_3.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_2.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_2|OUTPUT;relu_1|OUTPUT.' + OP1])
.matched([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0|OUTPUT;relu_2|OUTPUT;relu_1|OUTPUT.' + OP1])
.unexpected(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_3.' + OP1]),
# won't match relu_ and relu
MatchKeyDesc(num_loaded=2, expects_error=True)
.keys_to_load(["conv2d.weight",
"RELUModule.weight",
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP2])
.model_keys(["conv2d.weight",
"RELUModule.weight",
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu__0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu__0|INPUT.' + OP2])
.matched(["conv2d.weight", "RELUModule.weight"])
.unexpected([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu_0.' + OP2])
.missing([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu__0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.relu__0|INPUT.' + OP2]),
# can skip ignored parameters
MatchKeyDesc(num_loaded=1).keys_to_load(['1']).model_keys(['1', '2'])
.keys_to_ignore(['2'])
.skipped(['2']).matched(['1']),
MatchKeyDesc(num_loaded=1).keys_to_load(['1', '2']).model_keys(['1'])
.keys_to_ignore(['2'])
.skipped(['2']).matched(['1']),
MatchKeyDesc(num_loaded=0, state_dict_to_load={'1': torch.zeros(1)}, model_state_dict={'1': torch.zeros(2)})
.keys_to_ignore(['1'])
.skipped(['1']),
MatchKeyDesc(num_loaded=0, expects_error=True)
.keys_to_load(['module.nncf_module.1.1', '2.2']).model_keys(['module.1', 'module.2'])
.keys_to_ignore(['1', '2.2'])
.skipped(['module.1', '2.2']).missing(['module.2']).unexpected(['module.nncf_module.1.1']),
# optional parameter - not necessary in checkpoint can be initialized by default in the model
# can match legacy activation quantizer + new format with |INPUT and |OUTPUT
MatchKeyDesc(num_loaded=2)
.keys_to_load([LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP1,
LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP2])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|INPUT.' + OP2])
.all_matched()
.with_deprecation_warning(),
# can match unified FQ
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + LEGACY_ACT_STORAGE_NAME + '.RELU_1.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
# not matched common operation
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP2,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1_NOT_PARAM])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.matched([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.unexpected(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP2,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1_NOT_PARAM]),
# not all unified scopes are matched: RELU_3 vs RELU_1
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_3.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.matched([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.unexpected(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_3.' + OP1]),
# can skip ignored parameters
MatchKeyDesc(num_loaded=1).keys_to_load(['1']).model_keys(['1', '2'])
.keys_to_ignore(['2'])
.skipped(['2']).matched(['1']),
MatchKeyDesc(num_loaded=1).keys_to_load(['1', '2']).model_keys(['1'])
.keys_to_ignore(['2'])
.skipped(['2']).matched(['1']),
MatchKeyDesc(num_loaded=0, state_dict_to_load={'1': torch.zeros(1)}, model_state_dict={'1': torch.zeros(2)})
.keys_to_ignore(['1'])
.skipped(['1']),
MatchKeyDesc(num_loaded=0, expects_error=True)
.keys_to_load(['module.nncf_module.1.1', '2.2']).model_keys(['module.1', 'module.2'])
.keys_to_ignore(['1', '2.2'])
.skipped(['module.1', '2.2']).missing(['module.2']).unexpected(['module.nncf_module.1.1']),
# optional parameter - not necessary in checkpoint can be initialized by default in the model
# can match legacy activation quantizer + new format with |INPUT and |OUTPUT
MatchKeyDesc(num_loaded=2)
.keys_to_load([LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP1,
LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP2])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT.' + OP1,
EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|INPUT.' + OP2])
.all_matched()
.with_deprecation_warning(),
# can match unified FQ
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + LEGACY_ACT_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + LEGACY_ACT_STORAGE_NAME + '.RELU_1.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
MatchKeyDesc(num_loaded=1)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.all_matched()
.with_deprecation_warning(),
# not matched common operation
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP2,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1_NOT_PARAM])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.matched([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.unexpected(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_1.' + OP2,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1_NOT_PARAM]),
# not all unified scopes are matched: RELU_3 vs RELU_1
MatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_3.' + OP1,
'module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_2.' + OP1])
.model_keys([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.matched([EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_0|OUTPUT;RELU_2|OUTPUT;RELU_1|OUTPUT.' + OP1])
.unexpected(['module.' + EXTERNAL_QUANTIZERS_STORAGE_NAME + '.RELU_3.' + OP1]),
OptionalMatchKeyDesc(num_loaded=0)
.keys_to_load([])
.model_keys([OP1])
.skipped([OP1]),
OptionalMatchKeyDesc(num_loaded=1)
.keys_to_load([OP1_PREFIX])
.model_keys([OP1_PREFIX, OP1_SUFFIX, OP2_SUFFIX])
.matched([OP1_PREFIX]).skipped([OP1_SUFFIX, OP2_SUFFIX]),
OptionalMatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load([OP1_PREFIX, OP2_MIDDLE])
.model_keys([OP1_PREFIX, OP1_SUFFIX, OP2_SUFFIX])
.unexpected([OP2_MIDDLE]).matched([OP1_PREFIX]).skipped([OP1_SUFFIX, OP2_SUFFIX]),
OptionalMatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load([OP1_PREFIX])
.model_keys([OP1_PREFIX, OP1_SUFFIX, OP2_SUFFIX, OP2_MIDDLE])
.missing([OP2_MIDDLE]).matched([OP1_PREFIX]).skipped([OP1_SUFFIX, OP2_SUFFIX]),
OptionalMatchKeyDesc(num_loaded=2, expects_error=True)
.keys_to_load([OP1_PREFIX, OP1_SUFFIX, OP2_SUFFIX])
.model_keys([OP1_PREFIX, OP1_SUFFIX, OP2_MIDDLE])
.missing([OP2_MIDDLE]).unexpected([OP2_SUFFIX]).matched([OP1_PREFIX, OP1_SUFFIX]),
OptionalMatchKeyDesc(num_loaded=1, expects_error=True)
.keys_to_load([OP1_PREFIX])
.model_keys([OP1_PREFIX, OP1_NOT_PARAM, OP2_NOT_PARAM])
.matched([OP1_PREFIX]).missing([OP1_NOT_PARAM, OP2_NOT_PARAM]),
OptionalMatchKeyDesc(num_loaded=2, expects_error=True)
.keys_to_load([OP1_PREFIX, OP1_NOT_PARAM, OP2_NOT_PARAM])
.model_keys([OP1_PREFIX, OP1_NOT_PARAM, OP2_MIDDLE])
.missing([OP2_MIDDLE]).unexpected([OP2_NOT_PARAM]).matched([OP1_PREFIX, OP1_NOT_PARAM]),
]
@pytest.mark.parametrize('desc', MATCH_KEY_DESC_LIST, ids=[str(d) for d in MATCH_KEY_DESC_LIST])
def test_match_key(desc: MatchKeyDesc, mocker):
desc.setup_test(mocker)
key_matcher = KeyMatcher(desc.is_resume, desc.state_dict_to_load, desc.model_state_dict, desc.ignored_keys)
if desc.has_deprecation_warning:
with pytest.deprecated_call():
new_dict = key_matcher.run()
else:
new_dict = key_matcher.run()
num_loaded_layers = len(new_dict)
assert num_loaded_layers == desc.num_loaded
# pylint: disable=protected-access
assert key_matcher._processed_keys._keys == desc.processed_keys._keys
if desc.expects_error:
with pytest.raises(RuntimeError):
key_matcher.handle_problematic_keys()
else:
key_matcher.handle_problematic_keys()
| [
"[email protected]"
] | |
b4825e7c09ac027db28bf8dd543d8e729e4955c0 | f34d3948b707e461151ee33296a61fb23a6d3f44 | /month01/day05/exercise04.py | 594a4d052bb6f40071f72eab8c844809892fe23a | [] | no_license | xiao-a-jian/python-study | f9c4e3ee7a2f9ae83bec6afa7c7b5434e8243ed8 | c8e8071277bcea8463bf6f2e8cd9e30ae0f1ddf3 | refs/heads/master | 2022-06-09T17:44:41.804228 | 2020-05-05T07:48:07 | 2020-05-05T07:48:07 | 256,927,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | """
在终端中录入10个疫情省份的确诊人数
最后打印人数最多的、最少的、平均人数.(使用内置函数实现)
"""
list_confirmed = []
for item in range(10):
number = int(input("请输入第%d个省份的疫情人数:" % (item + 1)))
list_confirmed.append(number)
print(max(list_confirmed))
print(min(list_confirmed))
print(sum(list_confirmed) / len(list_confirmed))
| [
"[email protected]"
] | |
a6824d7e85b0264d31b1561314abad8654470d27 | 401fc99cefe615f8ebefb6dd9c2b043c506f5bd0 | /tests/conftest.py | 640a86e0d9e1741295eded78699d2d9877f23fe9 | [
"MIT"
] | permissive | atviriduomenys/spinta | 0f85496860ebbcecfccd8dde2bf219564ee66baa | 1fac5b6b75ec65188d815078fd135bc05d49b31c | refs/heads/master | 2023-09-02T13:22:58.411937 | 2023-08-18T12:59:17 | 2023-08-18T12:59:17 | 168,724,854 | 12 | 4 | MIT | 2023-09-14T13:29:39 | 2019-02-01T16:16:11 | Python | UTF-8 | Python | false | false | 4,119 | py | import builtins
import inspect
import os
import re
import sys
import time as time_module
from itertools import chain
from itertools import islice
from traceback import format_stack
from typing import Any
from typing import Dict
from typing import Iterator
from typing import TextIO
from typing import Type
import objprint
import pprintpp
import sqlparse
from pygments import highlight
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.lexers.python import Python3Lexer
from pygments.lexers.python import Python3TracebackLexer
from pygments.lexers.sql import PostgresLexer
from sqlalchemy.sql import ClauseElement
objprint.config(honor_existing=False, depth=1)
def formatter():
return Terminal256Formatter(style='vim')
def ppsql(qry):
sql = str(qry) % qry.compile().params
sql = sqlparse.format(sql, reindent=True, keyword_case='upper')
sql = highlight(sql, PostgresLexer(), formatter())
print(sql)
na = object()
arg_re = re.compile(r'pp\(([^,)]+)')
def pp(
obj: Any = na,
*args,
v: Any = na,
t: Type = na,
on: bool = True, # print if on condition is true
st: bool = False,
tb: bool = False,
time: bool = False,
file: TextIO = sys.__stderr__,
prefix: str = '\n',
suffix: str = '',
kwargs: Dict[str, Any] = None,
) -> Any:
if obj is na:
ret = None
else:
ret = obj
if not on:
return ret
if obj is Ellipsis:
print(file=file)
print('_' * 72, file=file)
return ret
if time:
start = time_module.time()
ret = obj(*args, **kwargs)
delta = time_module.time() - start
else:
delta = None
if v is not na and obj is not v:
return ret
if t is not na and not isinstance(obj, t):
return ret
if obj is na:
out = ''
lexer = None
elif isinstance(obj, Iterator):
out = list(islice(obj, 10))
ret = chain(out, obj)
out = '<generator> ' + pprintpp.pformat(out)
lexer = Python3Lexer()
elif isinstance(obj, ClauseElement):
out = str(obj.compile(compile_kwargs={"literal_binds": True}))
out = sqlparse.format(out, reindent=True, keyword_case='upper')
out = '\n' + out
lexer = PostgresLexer()
else:
out = pprintpp.pformat(obj)
lexer = Python3Lexer()
if obj is not na:
frame = inspect.currentframe()
frame = inspect.getouterframes(frame)[1]
line = inspect.getframeinfo(frame[0]).code_context[0].strip()
_, line = line.split('pp(', 1)
arg = []
stack = []
term = {
'(': ')',
'[': ']',
'{': '}',
'"': '"',
"'": "'",
}
for c in line:
if (c == '\\' and (not stack or stack[-1] != '\\')) or c in term:
stack.append(c)
elif stack:
if stack[-1] == '\\' or c == term[stack[-1]]:
stack.pop()
elif c in ',)':
break
arg.append(c)
arg = ''.join(arg)
out = f'{arg} = {out}'
if lexer:
out = highlight(out, lexer, formatter())
if prefix:
print(prefix, end='', file=file)
if st:
stack = ["Stack trace (pp):\n"]
cwd = os.getcwd() + '/'
for item in format_stack():
if '/_pytest/' in item:
continue
if '/site-packages/pluggy/' in item:
continue
if '/multipledispatch/dispatcher.py' in item:
continue
item = item.replace(cwd, '')
stack.append(item)
stack = ''.join(stack)
stack = highlight(stack, Python3TracebackLexer(), formatter())
print(stack, end='', file=file)
print(out.strip(), file=file)
if suffix:
print(suffix, end='', file=file)
if time:
print(f'Time: {delta}s', file=file)
if tb:
raise RuntimeError('pp')
return ret
builtins.pp = pp
builtins.op = objprint.op
pytest_plugins = ['spinta.testing.pytest']
| [
"[email protected]"
] | |
c9ce4a22aa785427947d43da377376900804d900 | cedc66fcd0c8e347a2e363a832819cfee3598c8a | /iprPy/record/point_defect/__init__.py | 6fec3f441d280d0a2e752c187abfbbb6c176e1ca | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain"
] | permissive | Aditya-912/iprPy | 1b201c3a62b691249a76dd07d507b2d94813b6b8 | 260bf203baf43a33a1bb3a079d7d5343c131dcd5 | refs/heads/master | 2023-03-11T21:35:17.963425 | 2021-03-05T16:31:27 | 2021-03-05T16:31:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | # iprPy imports
from .PointDefect import PointDefect
__all__ = ['PointDefect'] | [
"[email protected]"
] | |
02a8274f0e85d0ac8c9b03e1f1d001771b15a3b3 | 0a1c2118c631c5b22dbb0906ceef263461377cb7 | /dev/python/proto/sensor_msgs_pb2.py | 9fbfd485cf761db6478399838056dcef1a21c5e4 | [
"MIT"
] | permissive | gecko-robotics/gecko-protobuf | dc9b583babe822aa1965b4b4a0aade9e24355a90 | 70bb1407a25d27588da6c58e4a6bc25dc7975234 | refs/heads/master | 2021-07-17T22:34:03.926951 | 2020-06-14T22:22:13 | 2020-06-14T22:22:13 | 181,314,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 33,355 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/sensor_msgs.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from proto import standard_msgs_pb2 as proto_dot_standard__msgs__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='proto/sensor_msgs.proto',
package='sensor_msgs',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x17proto/sensor_msgs.proto\x12\x0bsensor_msgs\x1a\x19proto/standard_msgs.proto\"\xdc\x01\n\x03Imu\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12\x32\n\x13linear_acceleration\x18\x02 \x01(\x0b\x32\x15.standard_msgs.Vector\x12/\n\x10\x61ngular_velocity\x18\x03 \x01(\x0b\x32\x15.standard_msgs.Vector\x12.\n\x0borientation\x18\x04 \x01(\x0b\x32\x19.standard_msgs.Quaternion\x12-\n\x0emagnetic_field\x18\x05 \x01(\x0b\x32\x15.standard_msgs.Vector\"\x99\x01\n\x07ImuInfo\x12\x1e\n\x16orientation_covariance\x18\x01 \x03(\x01\x12#\n\x1b\x61ngular_velocity_covariance\x18\x02 \x03(\x01\x12&\n\x1elinear_acceleration_covariance\x18\x03 \x03(\x01\x12!\n\x19magnetic_field_covariance\x18\x04 \x03(\x01\"|\n\x05Image\x12\r\n\x05width\x18\x01 \x01(\r\x12\x0e\n\x06height\x18\x02 \x01(\r\x12\r\n\x05\x64\x65pth\x18\x03 \x01(\r\x12\x12\n\ncompressed\x18\x04 \x01(\x08\x12\x10\n\x08\x65ncoding\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x11\n\ttimestamp\x18\x07 \x01(\x01\"z\n\nCameraInfo\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12\x0e\n\x06height\x18\x02 \x01(\r\x12\r\n\x05width\x18\x03 \x01(\r\x12\r\n\x01\x44\x18\x04 \x03(\x01\x42\x02\x10\x01\x12\r\n\x01K\x18\x05 \x03(\x01\x42\x02\x10\x01\x12\r\n\x01R\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\r\n\x01P\x18\x07 \x03(\x01\x42\x02\x10\x01\"\x8a\x01\n\tLaserScan\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12\x17\n\x0f\x61ngle_increment\x18\x02 \x01(\x01\x12\x11\n\trange_min\x18\x03 \x01(\x01\x12\x11\n\trange_max\x18\x04 \x01(\x01\x12\x12\n\x06ranges\x18\x05 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bintensities\x18\x06 \x03(\x01\x42\x02\x10\x01\"\xb5\x02\n\tNavSatFix\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12\x10\n\x08latitude\x18\x02 \x01(\x01\x12\x11\n\tlongitude\x18\x03 \x01(\x01\x12\x10\n\x08\x61ltitude\x18\x04 \x01(\x01\x12-\n\x06status\x18\x05 \x01(\x0e\x32\x1d.sensor_msgs.NavSatFix.Status\x12/\n\x07service\x18\x06 
\x01(\x0e\x32\x1e.sensor_msgs.NavSatFix.Service\"6\n\x06Status\x12\x07\n\x03\x46IX\x10\x00\x12\x0b\n\x07SBASFIX\x10\x01\x12\x0b\n\x07GBASFIX\x10\x02\x12\t\n\x05NOFIX\x10\x03\"F\n\x07Service\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03GPS\x10\x01\x12\x0b\n\x07GLONASS\x10\x02\x12\x0b\n\x07\x43OMPASS\x10\x04\x12\x0b\n\x07GALILEO\x10\x08\"\xed\x02\n\x0c\x42\x61tteryState\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12\x0f\n\x07voltage\x18\x02 \x01(\x02\x12\x0f\n\x07\x63urrent\x18\x03 \x01(\x02\x12\x0e\n\x06\x63harge\x18\x04 \x01(\x02\x12\x10\n\x08\x63\x61pacity\x18\x05 \x01(\x02\x12\x38\n\ntechnology\x18\x06 \x01(\x0e\x32$.sensor_msgs.BatteryState.Technology\x12\x30\n\x06status\x18\x07 \x01(\x0e\x32 .sensor_msgs.BatteryState.Status\"H\n\nTechnology\x12\x08\n\x04NIMH\x10\x00\x12\x08\n\x04LION\x10\x01\x12\x08\n\x04LIPO\x10\x02\x12\x08\n\x04LIFE\x10\x03\x12\x08\n\x04NICD\x10\x04\x12\x08\n\x04LIMN\x10\x05\"P\n\x06Status\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43HARGING\x10\x01\x12\x0f\n\x0b\x44ISCHARGING\x10\x02\x12\x10\n\x0cNOT_CHARGING\x10\x03\x12\x08\n\x04\x46ULL\x10\x04\"\xae\x01\n\x05Range\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12%\n\x04type\x18\x02 \x01(\x0e\x32\x17.sensor_msgs.Range.Type\x12\x0b\n\x03\x66ov\x18\x03 \x01(\x02\x12\x11\n\trange_min\x18\x04 \x01(\x02\x12\x11\n\trange_max\x18\x05 \x01(\x02\x12\r\n\x05range\x18\x06 \x01(\x02\")\n\x04Type\x12\x0e\n\nULTRASOUND\x10\x00\x12\x06\n\x02IR\x10\x01\x12\t\n\x05LIDAR\x10\x02\x62\x06proto3')
,
dependencies=[proto_dot_standard__msgs__pb2.DESCRIPTOR,])
_NAVSATFIX_STATUS = _descriptor.EnumDescriptor(
name='Status',
full_name='sensor_msgs.NavSatFix.Status',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FIX', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SBASFIX', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GBASFIX', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NOFIX', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1021,
serialized_end=1075,
)
_sym_db.RegisterEnumDescriptor(_NAVSATFIX_STATUS)
_NAVSATFIX_SERVICE = _descriptor.EnumDescriptor(
name='Service',
full_name='sensor_msgs.NavSatFix.Service',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GPS', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GLONASS', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COMPASS', index=3, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GALILEO', index=4, number=8,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1077,
serialized_end=1147,
)
_sym_db.RegisterEnumDescriptor(_NAVSATFIX_SERVICE)
_BATTERYSTATE_TECHNOLOGY = _descriptor.EnumDescriptor(
name='Technology',
full_name='sensor_msgs.BatteryState.Technology',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NIMH', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LION', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LIPO', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LIFE', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NICD', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LIMN', index=5, number=5,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1361,
serialized_end=1433,
)
_sym_db.RegisterEnumDescriptor(_BATTERYSTATE_TECHNOLOGY)
_BATTERYSTATE_STATUS = _descriptor.EnumDescriptor(
name='Status',
full_name='sensor_msgs.BatteryState.Status',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHARGING', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DISCHARGING', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NOT_CHARGING', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FULL', index=4, number=4,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1435,
serialized_end=1515,
)
_sym_db.RegisterEnumDescriptor(_BATTERYSTATE_STATUS)
_RANGE_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='sensor_msgs.Range.Type',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ULTRASOUND', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IR', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LIDAR', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1651,
serialized_end=1692,
)
_sym_db.RegisterEnumDescriptor(_RANGE_TYPE)
_IMU = _descriptor.Descriptor(
name='Imu',
full_name='sensor_msgs.Imu',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.Imu.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='linear_acceleration', full_name='sensor_msgs.Imu.linear_acceleration', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='angular_velocity', full_name='sensor_msgs.Imu.angular_velocity', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='orientation', full_name='sensor_msgs.Imu.orientation', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnetic_field', full_name='sensor_msgs.Imu.magnetic_field', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=68,
serialized_end=288,
)
_IMUINFO = _descriptor.Descriptor(
name='ImuInfo',
full_name='sensor_msgs.ImuInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='orientation_covariance', full_name='sensor_msgs.ImuInfo.orientation_covariance', index=0,
number=1, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='angular_velocity_covariance', full_name='sensor_msgs.ImuInfo.angular_velocity_covariance', index=1,
number=2, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='linear_acceleration_covariance', full_name='sensor_msgs.ImuInfo.linear_acceleration_covariance', index=2,
number=3, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnetic_field_covariance', full_name='sensor_msgs.ImuInfo.magnetic_field_covariance', index=3,
number=4, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=291,
serialized_end=444,
)
_IMAGE = _descriptor.Descriptor(
name='Image',
full_name='sensor_msgs.Image',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='width', full_name='sensor_msgs.Image.width', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='sensor_msgs.Image.height', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='depth', full_name='sensor_msgs.Image.depth', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compressed', full_name='sensor_msgs.Image.compressed', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='encoding', full_name='sensor_msgs.Image.encoding', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='sensor_msgs.Image.data', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.Image.timestamp', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=446,
serialized_end=570,
)
_CAMERAINFO = _descriptor.Descriptor(
name='CameraInfo',
full_name='sensor_msgs.CameraInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.CameraInfo.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='sensor_msgs.CameraInfo.height', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='width', full_name='sensor_msgs.CameraInfo.width', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='D', full_name='sensor_msgs.CameraInfo.D', index=3,
number=4, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\020\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='K', full_name='sensor_msgs.CameraInfo.K', index=4,
number=5, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\020\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='R', full_name='sensor_msgs.CameraInfo.R', index=5,
number=6, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\020\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='P', full_name='sensor_msgs.CameraInfo.P', index=6,
number=7, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\020\001'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=572,
serialized_end=694,
)
_LASERSCAN = _descriptor.Descriptor(
name='LaserScan',
full_name='sensor_msgs.LaserScan',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.LaserScan.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='angle_increment', full_name='sensor_msgs.LaserScan.angle_increment', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='range_min', full_name='sensor_msgs.LaserScan.range_min', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='range_max', full_name='sensor_msgs.LaserScan.range_max', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ranges', full_name='sensor_msgs.LaserScan.ranges', index=4,
number=5, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\020\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='intensities', full_name='sensor_msgs.LaserScan.intensities', index=5,
number=6, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\020\001'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=697,
serialized_end=835,
)
_NAVSATFIX = _descriptor.Descriptor(
name='NavSatFix',
full_name='sensor_msgs.NavSatFix',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.NavSatFix.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='latitude', full_name='sensor_msgs.NavSatFix.latitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='longitude', full_name='sensor_msgs.NavSatFix.longitude', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='altitude', full_name='sensor_msgs.NavSatFix.altitude', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='sensor_msgs.NavSatFix.status', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='service', full_name='sensor_msgs.NavSatFix.service', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_NAVSATFIX_STATUS,
_NAVSATFIX_SERVICE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=838,
serialized_end=1147,
)
_BATTERYSTATE = _descriptor.Descriptor(
name='BatteryState',
full_name='sensor_msgs.BatteryState',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.BatteryState.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='voltage', full_name='sensor_msgs.BatteryState.voltage', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='current', full_name='sensor_msgs.BatteryState.current', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='charge', full_name='sensor_msgs.BatteryState.charge', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='capacity', full_name='sensor_msgs.BatteryState.capacity', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='technology', full_name='sensor_msgs.BatteryState.technology', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='sensor_msgs.BatteryState.status', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATTERYSTATE_TECHNOLOGY,
_BATTERYSTATE_STATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1150,
serialized_end=1515,
)
_RANGE = _descriptor.Descriptor(
name='Range',
full_name='sensor_msgs.Range',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='sensor_msgs.Range.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='sensor_msgs.Range.type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fov', full_name='sensor_msgs.Range.fov', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='range_min', full_name='sensor_msgs.Range.range_min', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='range_max', full_name='sensor_msgs.Range.range_max', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='range', full_name='sensor_msgs.Range.range', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_RANGE_TYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1518,
serialized_end=1692,
)
_IMU.fields_by_name['linear_acceleration'].message_type = proto_dot_standard__msgs__pb2._VECTOR
_IMU.fields_by_name['angular_velocity'].message_type = proto_dot_standard__msgs__pb2._VECTOR
_IMU.fields_by_name['orientation'].message_type = proto_dot_standard__msgs__pb2._QUATERNION
_IMU.fields_by_name['magnetic_field'].message_type = proto_dot_standard__msgs__pb2._VECTOR
_NAVSATFIX.fields_by_name['status'].enum_type = _NAVSATFIX_STATUS
_NAVSATFIX.fields_by_name['service'].enum_type = _NAVSATFIX_SERVICE
_NAVSATFIX_STATUS.containing_type = _NAVSATFIX
_NAVSATFIX_SERVICE.containing_type = _NAVSATFIX
_BATTERYSTATE.fields_by_name['technology'].enum_type = _BATTERYSTATE_TECHNOLOGY
_BATTERYSTATE.fields_by_name['status'].enum_type = _BATTERYSTATE_STATUS
_BATTERYSTATE_TECHNOLOGY.containing_type = _BATTERYSTATE
_BATTERYSTATE_STATUS.containing_type = _BATTERYSTATE
_RANGE.fields_by_name['type'].enum_type = _RANGE_TYPE
_RANGE_TYPE.containing_type = _RANGE
DESCRIPTOR.message_types_by_name['Imu'] = _IMU
DESCRIPTOR.message_types_by_name['ImuInfo'] = _IMUINFO
DESCRIPTOR.message_types_by_name['Image'] = _IMAGE
DESCRIPTOR.message_types_by_name['CameraInfo'] = _CAMERAINFO
DESCRIPTOR.message_types_by_name['LaserScan'] = _LASERSCAN
DESCRIPTOR.message_types_by_name['NavSatFix'] = _NAVSATFIX
DESCRIPTOR.message_types_by_name['BatteryState'] = _BATTERYSTATE
DESCRIPTOR.message_types_by_name['Range'] = _RANGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Imu = _reflection.GeneratedProtocolMessageType('Imu', (_message.Message,), {
'DESCRIPTOR' : _IMU,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.Imu)
})
_sym_db.RegisterMessage(Imu)
ImuInfo = _reflection.GeneratedProtocolMessageType('ImuInfo', (_message.Message,), {
'DESCRIPTOR' : _IMUINFO,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.ImuInfo)
})
_sym_db.RegisterMessage(ImuInfo)
Image = _reflection.GeneratedProtocolMessageType('Image', (_message.Message,), {
'DESCRIPTOR' : _IMAGE,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.Image)
})
_sym_db.RegisterMessage(Image)
CameraInfo = _reflection.GeneratedProtocolMessageType('CameraInfo', (_message.Message,), {
'DESCRIPTOR' : _CAMERAINFO,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.CameraInfo)
})
_sym_db.RegisterMessage(CameraInfo)
LaserScan = _reflection.GeneratedProtocolMessageType('LaserScan', (_message.Message,), {
'DESCRIPTOR' : _LASERSCAN,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.LaserScan)
})
_sym_db.RegisterMessage(LaserScan)
NavSatFix = _reflection.GeneratedProtocolMessageType('NavSatFix', (_message.Message,), {
'DESCRIPTOR' : _NAVSATFIX,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.NavSatFix)
})
_sym_db.RegisterMessage(NavSatFix)
BatteryState = _reflection.GeneratedProtocolMessageType('BatteryState', (_message.Message,), {
'DESCRIPTOR' : _BATTERYSTATE,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.BatteryState)
})
_sym_db.RegisterMessage(BatteryState)
Range = _reflection.GeneratedProtocolMessageType('Range', (_message.Message,), {
'DESCRIPTOR' : _RANGE,
'__module__' : 'proto.sensor_msgs_pb2'
# @@protoc_insertion_point(class_scope:sensor_msgs.Range)
})
_sym_db.RegisterMessage(Range)
_CAMERAINFO.fields_by_name['D']._options = None
_CAMERAINFO.fields_by_name['K']._options = None
_CAMERAINFO.fields_by_name['R']._options = None
_CAMERAINFO.fields_by_name['P']._options = None
_LASERSCAN.fields_by_name['ranges']._options = None
_LASERSCAN.fields_by_name['intensities']._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
1ced5c6e0a04b057cee4c476830695a129dc95c5 | 6dd400fec6f302bd0dcf309e2deec5de906d205c | /django_test6maria/myguest/urls.py | 9f02e4a7aca1f6b490e241f6a970eacb04b2142f | [] | no_license | Leo-hw/psou | aa938b7cfaa373a0980649125270c48d816202b0 | 70379156a623257d412bcccbac72986a61226bd4 | refs/heads/master | 2023-02-21T19:00:02.902510 | 2021-01-25T07:03:26 | 2021-01-25T07:03:26 | 332,616,685 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | from django.contrib import admin
from django.urls import path
from myguest import views
urlpatterns = [
path('', views.ListFunc),
path('insert/', views.InsertFunc),
path('insertok/', views.InsertFuncOk),
]
| [
"Bonghwan@DESKTOP-60LSTNL"
] | Bonghwan@DESKTOP-60LSTNL |
b540bb335ba07eb23f656de01bc48f3e0888a51e | 7fc678c2b1a0ef8849364e9c3e272b4509003796 | /py/merge-us-tracts-sql-code-gen.py | 482fce872848f536f179fddb32fdcccd07dcc606 | [] | no_license | nygeog/postgis_reference | ee7a599c2b60d713b5ae67039b5e5f4cfef9d7e8 | d3da41fa91dcd6e667d62cb3cc2439aed99f90a9 | refs/heads/master | 2020-04-05T23:28:14.942786 | 2017-08-12T16:51:29 | 2017-08-12T16:51:29 | 42,251,833 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | mergeTableName = 'tracts_2010_us'
attrList = ['geoid10', 'aland10', 'awater10', 'intptlat10', 'intptlon10', 'shape_leng', 'shape_area', 'geom']
attrListString = ", ".join(attrList)
statesList = ["01","02","04","05","06","08","09","10","11","12","13","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29","30","31","32","33","34","35","36","37","38","39","40","41","42","44","45","46","47","48","49","50","51","53","54","55","56","72"]
e1 = """CREATE TABLE """+mergeTableName+""" AS("""
print e1
e3 = """UNION"""
statesListLen = len(statesList)
for i, item in enumerate(statesList):
e2 = """SELECT """+attrListString+""" FROM tracts_2010_state_"""+item
print e2
if i < (statesListLen - 1):
print e3
e4 = """);"""
e5 = """SELECT Populate_Geometry_Columns('""" + mergeTableName + """'::regclass);"""
print e4
print e5 | [
"[email protected]"
] | |
dd0492a4e1c8d9c5c1695bf08f02984c8d021074 | b71a6e7050b0a4368007350d91ee078288a7318c | /examples/issues/issue189_img.py | cb2817ea11871f73a6164e09d73305c788330037 | [
"Apache-2.0"
] | permissive | jarvisteach/appJar | 2dfd0da6cb85ea3535379ed000efd97fb42fe4f8 | 0b59ce041da2197dcff3410e20f298676f1f7266 | refs/heads/appJar | 2023-08-29T09:42:01.812005 | 2019-09-28T18:34:06 | 2019-09-28T18:34:06 | 39,996,518 | 696 | 103 | NOASSERTION | 2023-02-20T01:01:16 | 2015-07-31T08:59:20 | Python | UTF-8 | Python | false | false | 252 | py | import sys
sys.path.append("../../")
from appJar import gui
with gui(useTtk=False) as app:
# app.addImageButton("button2", None, "Capture 2.PNG", align=None) # Uncomment this
app.addIconButton("button", None, "md-play", align="none") # Or this
| [
"[email protected]"
] | |
a541b63a4a516afe5d07e6e2cb94b6f9ecfbfbca | 897a660b24d0ccc1a25132550dc4c735644efbfb | /048_self_powers.py | 0b745c15bbdfe4cbee42e2c6029e4f76bd87c684 | [
"MIT"
] | permissive | gradam/project-euler | 409b99a542c661cbc3e002bb9b89f5d03ea40cd5 | d939ad54d057fd8ed2db06948b6ccc7833d68ab6 | refs/heads/master | 2021-01-24T17:35:50.870830 | 2017-12-31T09:58:35 | 2017-12-31T09:58:35 | 30,153,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | suma = 0
for x in range(1, 1000):
suma += x**x
print(str(suma)[-10:])
print(str(28433*(2**7830457) + 1)[-10:]) | [
"[email protected]"
] | |
de6921dce3a160ae4dad9ee43b7a29ee9f4d8404 | 95a2568c20993bd423791f6796ecff36d6a71d26 | /utils.py | 46c959bcd0ae74ab2155a51327b036899514e5fe | [
"MIT"
] | permissive | kugooer/nazurin-1 | 15dff321436eaf9ca75c79f3be9e41cc958063a2 | 092da0b77e50e3f81cc99ae7d86523efbf691baf | refs/heads/master | 2023-01-19T08:32:11.973482 | 2020-12-01T15:24:23 | 2020-12-01T15:24:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,979 | py | from requests.adapters import HTTPAdapter
from mimetypes import guess_type
from shutil import copyfileobj
from functools import wraps
from pathlib import Path
from html import escape
from time import sleep
import requests
import logging
import re
import os
from config import DOWNLOAD_DIR, UA, RETRIES
from telegram import ChatAction, InputMediaPhoto
from telegram.error import RetryAfter
# Logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger('bot')
def send_action(action):
"""Sends `action` while processing func command."""
def decorator(func):
@wraps(func)
def command_func(update, context, *args, **kwargs):
context.bot.send_chat_action(chat_id=update.effective_message.chat_id, action=action)
return func(update, context, *args, **kwargs)
return command_func
return decorator
typing = send_action(ChatAction.TYPING)
uploading_video = send_action(ChatAction.UPLOAD_VIDEO)
uploading_photo = send_action(ChatAction.UPLOAD_PHOTO)
uploading_document = send_action(ChatAction.UPLOAD_DOCUMENT)
@uploading_photo
def sendPhotos(update, context, imgs, details=None):
if details is None:
details = dict()
bot = context.bot
message = update.message
chat_id = message.chat_id
message_id = message.message_id
media = list()
if len(imgs) > 10:
imgs = imgs[:10]
message.reply_text('Notice: Too many pages, sending only 10 of them' )
caption = str()
for key, value in details.items():
caption += str(key) + ': ' + str(value) + '\n'
if len(caption) > 1024:
caption = caption[:1024]
message.reply_text('Notice: Caption too long, trimmed')
caption = escape(caption, quote=False)
for img in imgs:
filetype = str(guess_type(img.url)[0])
if filetype.startswith('image'):
media.append(InputMediaPhoto(img.display_url, parse_mode='HTML'))
else:
message.reply_text('File is not image, try download option.')
return
media[0].caption = caption
while True:
try:
bot.sendMediaGroup(chat_id, media, reply_to_message_id=message_id)
except RetryAfter as error:
sleep(error.retry_after)
continue
break
@uploading_document
def sendDocuments(update, context, imgs, chat_id=None):
bot = context.bot
message_id = update.message.message_id
if not chat_id:
chat_id = update.message.chat_id
else:
message_id = None # Sending to channel, no message to reply
for img in imgs:
while True:
try:
bot.sendDocument(chat_id, open(img.path, 'rb'), filename=img.name, reply_to_message_id=message_id)
except RetryAfter as error:
sleep(error.retry_after)
continue
break
def handleBadRequest(update, context, error):
logger.info('BadRequest exception: ' + str(error))
if 'Wrong file identifier/http url' in error.message or 'Failed to get http url content' in error.message:
update.message.reply_text(
'Failed to send image as photo, maybe the size is too big, '
'consider using download option or try again.\n'
f'Error: {error.message}'
)
elif 'Group send failed' in error.message:
update.message.reply_text(
'Failed to send images because one of them is too large, '
'consider using download option or try again.\n'
f'Error: {error.message}'
)
else:
raise error
def downloadImages(imgs, headers=None):
if headers is None:
headers = dict()
if not os.path.exists(DOWNLOAD_DIR):
os.makedirs(DOWNLOAD_DIR)
with requests.Session() as session:
session.headers.update({'User-Agent': UA})
session.mount('https://', HTTPAdapter(max_retries=RETRIES))
for img in imgs:
response = session.get(img.url, stream=True, timeout=5).raw
with open(img.path, 'wb') as f:
copyfileobj(response, f)
def sanitizeFilename(name):
# https://docs.microsoft.com/zh-cn/windows/win32/fileio/naming-a-file
name = re.sub(r"[\"*/:<>?\\|]+", '_', name) # reserved characters
name = re.sub(r"[\t\n\r\f\v]+", ' ', name)
name = re.sub(r"\u202E|\u200E|\u200F", '', name) # RTL marks
filename, ext = os.path.splitext(name)
filename = filename.strip()
if Path(filename).is_reserved():
filename = '_' + filename
name = filename + ext
if len(name) > 255:
name = filename[:255 - len(ext)] + ext
return name
class NazurinError(Exception):
def __init__(self, msg):
"""Initialize with error message."""
super().__init__(msg)
self.msg = str(msg)
def __str__(self):
"""Returns the string representation of this exception."""
return self.msg | [
"[email protected]"
] | |
c0b1823a5549769b3efe0b1033816e1186aca332 | c7e765a9bed33d3bfb21774e3995bf4a09e04add | /adminmgr/media/code/A2/python/task/BD_174_261_754_XxLnqgI.py | e6c151bc96e99f32096cdee7f9cd74743a158637 | [
"Apache-2.0"
] | permissive | IamMayankThakur/test-bigdata | 13dd2ac7fb76c9baed6c3a0aa943057a22e2d237 | 7f507918c7bec31c92eedcd94491a83486623049 | refs/heads/master | 2022-05-03T00:59:44.127494 | 2022-02-10T19:50:16 | 2022-02-10T19:50:16 | 201,585,028 | 10 | 4 | Apache-2.0 | 2022-04-22T23:39:45 | 2019-08-10T05:34:09 | Python | UTF-8 | Python | false | false | 2,960 | py | from __future__ import print_function
import re
import sys
from operator import add
from pyspark.sql import SparkSession
def computeContribs(urls, rank):
"""Calculates URL contributions to the rank of other URLs."""
num_urls = len(urls)
for url in urls:
yield (url, rank / num_urls)
def parseNeighbors(urls):
"""Parses a urls pair string into urls pair."""
parts = re.split(r',', urls)
return parts[0], parts[1]
def getValues(urls):
parts = re.split(r',', urls)
return parts[0], int(parts[2])/int(parts[3])
if __name__ == "__main__":
if len(sys.argv) != 4:
print("Usage: pagerank <file> <iterations> <weights>", file=sys.stderr)
sys.exit(-1)
# print("WARN: This is a naive implementation of PageRank and is given as an example!\n" +
# "Please refer to PageRank implementation provided by graphx",
# file=sys.stderr)
# Initialize the spark context.
spark = SparkSession\
.builder\
.appName("PythonPageRank")\
.getOrCreate()
# Loads in input file. It should be in format of:
# URL neighbor URL
# URL neighbor URL
# URL neighbor URL
# ...
lines = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
# Loads all URLs from input file and initialize their neighbors.
links = lines.map(lambda urls: parseNeighbors(urls)
).distinct().groupByKey().cache()
# Loads all URLs with other URL(s) link to from input file and initialize ranks of them to one.
ranks = lines.map(lambda x: getValues(x)).distinct().reduceByKey(add)
ranks = ranks.mapValues(lambda rank: rank if rank > 1.0 else 1.0)
N = ranks.count()
iterations = int(sys.argv[2])
weight = float(sys.argv[3])/100 if int(sys.argv[3])!=0 else 0.8
if(iterations==0):
while(1):
cnt = 0
oldRanks = ranks
contribs = links.join(ranks).flatMap(
lambda url_urls_rank: computeContribs(url_urls_rank[1][0], url_urls_rank[1][1]))
ranks = contribs.reduceByKey(add).mapValues(
lambda rank: rank * weight + (1-weight))
s = 0
test = oldRanks.join(ranks).map(lambda r: abs(r[1][0]-r[1][1]))
for i in test.collect():
if(i < 0.0001):
cnt += 1
if(cnt == test.count()):
break
else:
for iteration in range(int(sys.argv[2])):
contribs = links.join(ranks).flatMap(
lambda url_urls_rank: computeContribs(url_urls_rank[1][0], url_urls_rank[1][1]))
ranks = contribs.reduceByKey(add).mapValues(
lambda rank: rank * weight + (1-weight))
# Collects all URL ranks and dump them to console.
for (link, rank) in ranks.sortBy(lambda x: (-x[1],x[0])).collect():
print("%s,%s" % (link, round(rank,12)))
spark.stop()
| [
"[email protected]"
] | |
0d9d45d96dd79d9e1c3bc2408a3f391808380dce | e874e3b4312b2beebaa42fa1489b50c618055190 | /Aula 2 Semana - Turtle - Preenchendo as formas.py | 2c20ce02817224506dde0a83bf5b80e026b47a64 | [] | no_license | CarlosDinart/PUC-SP | 611a9acb6a82b7db2174d2d439b5666db48a530e | 5f5f1ea4b9c55c7d20b2dcd92c461b3d8ebbb664 | refs/heads/master | 2023-01-23T06:46:42.492764 | 2020-12-09T19:41:01 | 2020-12-09T19:41:01 | 320,058,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 852 | py | from turtle import *
fillcolor('purple') # fillcolor() - esta funcao retorna ou defina a cor de preenchimento;
pensize(10) #pensize()- esta funcao defina a espessura da linha para a largura ou retorne-a.
# Se resizemode for definido como “auto” e a forma de tartaruga for um polígono,
# esse polígono será desenhado com a mesma espessura de linha. Se nenhum argumento
# for fornecido, o pensize atual é retornado.
pencolor('black') #pencolor() - esta funcao defini a cor da caneta;
forward(100)
begin_fill() # begin_fill() - Para ser chamado antes de desenhar uma forma a ser preenchida;
forward(100)
left(90)
forward(100)
left(90)
forward(100)
left(90)
forward(100)
left(90)
end_fill() #end_fill() -Esta funcao Preenche a forma desenhada após a última chamada para begin_fill();
done()
| [
"[email protected]"
] | |
9bd0e691b2cf9e835167a6fa49536ee50961c4f6 | fcf4b584795dbdbb24bfa5e68028f4c9ac070b69 | /useraccount/models.py | 5dc7dd17acbc76bebc82627ebb3fb0ecb9fbeae0 | [] | no_license | vineethjpalatty/testproject | 3e86ae3f030349f4c633a6ac5ef17814bb373ff6 | 1087ca2ecbd5e2fe72a4a5c628e674eeaa4d2b2f | refs/heads/master | 2022-12-01T14:33:30.155041 | 2020-08-13T17:48:27 | 2020-08-13T17:48:27 | 287,338,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,092 | py | from django.contrib.auth.models import AbstractUser
from django.db import models
import pytz
# Create your models here.
class DateBaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class UserDetail(AbstractUser,DateBaseModel):
TIMEZONES = tuple(zip(pytz.all_timezones, pytz.all_timezones))
user_id = models.CharField(max_length=20,verbose_name="User ID", unique=True)
timezone = models.CharField(max_length=50,verbose_name="Time Zone", choices=TIMEZONES, default='UTC')
password = models.CharField(verbose_name="password", max_length=128, null=True, blank=True)
def __str__(self):
return self.username
class ActivityPeriod(DateBaseModel):
user = models.ForeignKey('UserDetail',on_delete=models.CASCADE,related_name='get_related_activity_period')
start_time = models.DateTimeField(verbose_name='Start Time')
end_time = models.DateTimeField(verbose_name='End Time')
def __str__(self):
return self.user.username
| [
"[email protected]"
] | |
5e3f9f87ef9fec750e839eda115dfd7bb06d500a | 5ee5e19a42417fdfb5248c070d41b61b86465eaf | /abc_243/b.py | 8de66992b8f504c12ab2caefd7c905d55910e476 | [] | no_license | w40141/atcoder | 2e98cfe9fcb33aca8ac4567afecf603084964897 | 3ad74ca71ab77b929a097730047f4cf59ac38604 | refs/heads/master | 2023-08-08T21:37:42.744860 | 2023-08-07T00:50:34 | 2023-08-07T00:50:34 | 179,308,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | py | n = int(input())
a_li = list(map(int, input().split()))
b_li = list(map(int, input().split()))
same_num = 0
for a, b in zip(a_li, b_li):
if a == b:
same_num += 1
a_s = set(a_li)
b_s = set(b_li)
c = a_s & b_s
print(same_num)
print(len(c) - same_num)
| [
"[email protected]"
] | |
cf5fddbf61aeb35918e576d1a1ac9e69f4b6b4c1 | ea5762e8754d6b039963b0125822afb261844cc8 | /src/compas_rhino/geometry/__init__.py | 443b7cf51d3a306b59d12a71d80dd9480b9c6d76 | [
"MIT"
] | permissive | gonzalocasas/compas | 787977a4712fbfb9e230c4f433b6e2be509e4855 | 2fabc7e5c966a02d823fa453564151e1a1e7e3c6 | refs/heads/master | 2020-03-23T20:17:55.126856 | 2018-07-24T22:30:08 | 2018-07-24T22:30:08 | 142,033,431 | 0 | 0 | MIT | 2018-07-31T14:54:52 | 2018-07-23T15:27:19 | Python | UTF-8 | Python | false | false | 597 | py | """
.. _compas_rhino.geometry:
********************************************************************************
geometry
********************************************************************************
.. module:: compas_rhino.geometry
Object-oriented wrappers for native Rhino geometry.
.. autosummary::
:toctree: generated/
RhinoCurve
RhinoMesh
RhinoPoint
RhinoSurface
"""
from .point import RhinoPoint
from .curve import RhinoCurve
from .mesh import RhinoMesh
from .surface import RhinoSurface
__all__ = ['RhinoPoint', 'RhinoCurve', 'RhinoMesh', 'RhinoSurface', ]
| [
"[email protected]"
] | |
f02f0c07768583e5d8cf8ec015a786ade7c11d29 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /LQgpGFMK9t9MELvph_9.py | 6cae0722546debd006fb3cb38f4989346c5cb5e9 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 829 | py | """
Given a square list ( _n_ * _n_ size) implement a function that returns a new
list containing two lists equal to the two diagonals, in the following order:
diagonal 1 = from upper-left to lower-right corner
diagonal 2 = from upper-right to lower-left corner
### Examples
get_diagonals([ [1, 2], [3, 4] ]) ➞ [ [1, 4], [2, 3] ]
get_diagonals([ ["a", "b", "c"], ["d", "e", "f"], ["g", "h", "i"] ]) ➞ [ ["a", "e", "i"], ["c", "e", "g"] ]
get_diagonals([ [True] ]) ➞ [ [True], [True] ]
### Notes
* Your function must also work with single elements or empty lists.
* Try to build both diagonals with a single loop.
"""
def get_diagonals(lst):
ll = []
lr = []
for i in range(1, len(lst) + 1):
ll.append(lst[i - 1][i - 1])
lr.append(lst[i - 1][-i])
return [ll, lr]
| [
"[email protected]"
] | |
abd5014fe7f609414f56f0c5502e2ffe8eb72e7b | bc441bb06b8948288f110af63feda4e798f30225 | /user_service_sdk/model/metadata_center/stream_metric_states_pb2.pyi | 9e2b0174cd0abac74c50c620d020df01b5ca5d95 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,399 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
from user_service_sdk.model.metadata_center.stream_metric_schema_pb2 import (
StreamMetricSchema as user_service_sdk___model___metadata_center___stream_metric_schema_pb2___StreamMetricSchema,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class StreamMetricStates(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
org = ... # type: builtin___int
command = ... # type: typing___Text
@property
def payload(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[user_service_sdk___model___metadata_center___stream_metric_schema_pb2___StreamMetricSchema]: ...
def __init__(self,
*,
org : typing___Optional[builtin___int] = None,
command : typing___Optional[typing___Text] = None,
payload : typing___Optional[typing___Iterable[user_service_sdk___model___metadata_center___stream_metric_schema_pb2___StreamMetricSchema]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> StreamMetricStates: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> StreamMetricStates: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"command",b"command",u"org",b"org",u"payload",b"payload"]) -> None: ...
| [
"[email protected]"
] | |
8ba61262c059e952b15e3587e40bdf3fe82a14b6 | db575f3401a5e25494e30d98ec915158dd7e529b | /BIO_Stocks/BCRX.py | 33bc27bd3c2d590575f2a523774f25c8f3d0e061 | [] | no_license | andisc/StockWebScraping | b10453295b4b16f065064db6a1e3bbcba0d62bad | 41db75e941cfccaa7043a53b0e23ba6e5daa958a | refs/heads/main | 2023-08-08T01:33:33.495541 | 2023-07-22T21:41:08 | 2023-07-22T21:41:08 | 355,332,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,070 | py | import requests
from lxml import html
from bs4 import BeautifulSoup
import os
from datetime import date, datetime
from ValidationTools import validateday
from Database_Connections import InsertData, Insert_Logging
def main(id_control):
try:
url = 'https://ir.biocryst.com/press-releases'
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
result = requests.get(url, headers=headers)
#print(result.content.decode())
html_content = result.content.decode()
soup = BeautifulSoup(html_content, 'html.parser')
#print(soup)
table = soup.find('table', attrs={'class':'nirtable news collapse-table'})
#print(table)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
FIRST_ROW_columns = rows[0].find_all('td')
v_article_date = FIRST_ROW_columns[0].text.lstrip().rstrip()
article_desc = FIRST_ROW_columns[1]
#if the process find any article with the today date
istoday, v_art_date = validateday(v_article_date)
if (istoday == True):
v_ticker = os.path.basename(__file__).replace(".py", "")
v_url = article_desc.a.get('href')
v_description = article_desc.text.lstrip().rstrip()
now = datetime.now()
print("URL: " + v_url)
print("DESCRIPTION: " + v_description)
print("ARTICLE_DATE: " + str(now))
# Insert articles
if "https://" in v_url:
InsertData(v_ticker, v_description, v_url, v_art_date)
else:
InsertData(v_ticker, v_description, url, v_art_date)
except Exception:
error_message = "Entrou na excepção ao tratar " + os.path.basename(__file__) + "..."
print(error_message)
Insert_Logging(id_control, 'Detail', error_message)
pass
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
b2038b79165d9711f194bb613dda99871eb2eb4d | d3b77550a40b860970450e702b6bcd28d5f9b3e4 | /LeetCode/1464_maximum_prod_of_two_elements_in_array.py | 0a66f0faf3a2108ddb4d63c69809049704a12e67 | [] | no_license | CateGitau/Python_programming | 47bc9277544814ad853b44a88f129713f1a40697 | 6ae42b3190134c4588ad785d62e08b0763cf6b3a | refs/heads/master | 2023-07-08T03:08:46.236063 | 2021-08-12T09:38:03 | 2021-08-12T09:38:03 | 228,712,021 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | """
Given the array of integers nums, you will choose two different indices i and j of that array.
Return the maximum value of (nums[i]-1)*(nums[j]-1)
"""
nums = [10,2,5,2]
def maxProduct(nums):
maxim = 0
for i in range(len(nums)):
for j in range(len(nums)):
if i != j:
ans = ((nums[i]-1)*(nums[j]-1))
if ans > maxim:
maxim = ans
return maxim
print(maxProduct(nums))
| [
"[email protected]"
] | |
595f1092a393032fbfe3530084a64011e38ba1be | bbd69601912a3361d788efd03a47f9d4e3bac09e | /demo/agw/HyperLinkCtrl.py | 61df943f9642ce956aea9500436b0dd59655b898 | [] | no_license | wxWidgets/Phoenix | 56929484460a0399a8f1d9582bc77c20aa14748d | a1184286703cf24c4b88e5bc14cf2979c1b1ea00 | refs/heads/master | 2023-09-01T07:10:17.437093 | 2023-08-31T05:38:01 | 2023-08-31T05:38:01 | 5,078,061 | 2,268 | 677 | null | 2023-09-09T17:06:59 | 2012-07-17T06:22:25 | Python | UTF-8 | Python | false | false | 4,894 | py | #!/usr/bin/env python
import wx
import os
import sys
try:
dirName = os.path.dirname(os.path.abspath(__file__))
except:
dirName = os.path.dirname(os.path.abspath(sys.argv[0]))
sys.path.append(os.path.split(dirName)[0])
try:
from agw import hyperlink as hl
except ImportError: # if it's not there locally, try the wxPython lib.
import wx.lib.agw.hyperlink as hl
#----------------------------------------------------------------------
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
self.SetFont(wx.Font(10, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False))
sizer = wx.BoxSizer(wx.VERTICAL)
self.SetSizer(sizer)
# Creator credits
text1 = wx.StaticText(self, -1, "HyperLinkCtrl Example By Andrea Gavana")
text1.SetFont(wx.Font(9, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, False, 'Verdana'))
sizer.Add((0,10))
sizer.Add(text1, 0, wx.LEFT | wx.TOP | wx.BOTTOM, 10)
text2 = wx.StaticText(self, -1, "Latest Revision: 11 May 2005")
text2.SetFont(wx.Font(8, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, 'Verdana'))
sizer.Add(text2, 0, wx.LEFT, 10)
sizer.Add((0,25))
# Default Web links:
self._hyper1 = hl.HyperLinkCtrl(self, wx.ID_ANY, "wxPython Main Page",
URL="http://www.wxpython.org/")
sizer.Add(self._hyper1, 0, wx.ALL, 10)
# Web link with underline rollovers, opens in window
self._hyper2 = hl.HyperLinkCtrl(self, wx.ID_ANY, "My Home Page",
URL="http://xoomer.virgilio.it/infinity77/")
sizer.Add(self._hyper2, 0, wx.ALL, 10)
self._hyper2.Bind(hl.EVT_HYPERLINK_MIDDLE, self.OnMiddleLink)
self._hyper2.AutoBrowse(False)
self._hyper2.SetColours("BLUE", "BLUE", "BLUE")
self._hyper2.EnableRollover(True)
self._hyper2.SetUnderlines(False, False, True)
self._hyper2.SetBold(True)
self._hyper2.OpenInSameWindow(True) # middle click to open in window
self._hyper2.SetToolTip(wx.ToolTip("Middle-click to open in browser window"))
self._hyper2.UpdateLink()
# Intense link examples..
self._hyper3 = hl.HyperLinkCtrl(self, wx.ID_ANY, "wxPython Mail Archive",
URL="http://lists.wxwidgets.org/")
sizer.Add(self._hyper3, 0, wx.ALL, 10)
self._hyper3.Bind(hl.EVT_HYPERLINK_RIGHT, self.OnRightLink)
self._hyper3.SetLinkCursor(wx.CURSOR_QUESTION_ARROW)
self._hyper3.SetColours("DARK GREEN", "RED", "NAVY")
self._hyper3.SetUnderlines(False, False, False)
self._hyper3.EnableRollover(True)
self._hyper3.SetBold(True)
self._hyper3.DoPopup(False)
self._hyper3.UpdateLink()
self._hyper4 = hl.HyperLinkCtrl(self, wx.ID_ANY,
"Open Google In Current Browser Window?",
URL="http://www.google.com")
sizer.Add(self._hyper4, 0, wx.ALL, 10)
self._hyper4.Bind(hl.EVT_HYPERLINK_LEFT, self.OnLink)
self._hyper4.SetToolTip(wx.ToolTip("Click link for yes, no, cancel dialog"))
self._hyper4.AutoBrowse(False)
def OnLink(self, event):
# Goto URL, demonstrates attempt to open link in current window:
strs = "Open Google In Current Browser Window "
strs = strs + "(NO Opens Google In Another Browser Window)?"
nResult = wx.MessageBox(strs, "HyperLinkCtrl", wx.YES_NO |
wx.CANCEL | wx.ICON_QUESTION, self)
if nResult == wx.YES:
self._hyper4.GotoURL("http://www.google.com", True, True)
elif nResult == wx.NO:
self._hyper4.GotoURL("http://www.google.com", True, False)
def OnRightLink(self, event):
pos = self._hyper3.GetPosition() + event.GetPosition()
menuPopUp = wx.Menu("Having a nice day?")
ID_MENU_YES = wx.NewIdRef()
ID_MENU_NO = wx.NewIdRef()
menuPopUp.Append(ID_MENU_YES, "Yes, absolutely!")
menuPopUp.Append(ID_MENU_NO, "I've had better")
self.PopupMenu(menuPopUp)
menuPopUp.Destroy()
def OnMiddleLink(self, event):
self._hyper2.GotoURL("http://xoomer.virgilio.it/infinity77/",
True, True)
#----------------------------------------------------------------------
def runTest(frame, nb, log):
win = TestPanel(nb, log)
return win
#----------------------------------------------------------------------
overview = hl.__doc__
if __name__ == '__main__':
import sys,os
import run
run.main(['', os.path.basename(sys.argv[0])] + sys.argv[1:])
| [
"[email protected]"
] | |
4f54d925f1dd8a37f173fcd6da68ed5f39fd2e46 | 909762751929e2fed02311953e15f8a6316efbd0 | /tests/test_oskar/plot_antpos.py | 6987aa1365419f0706b0b734bae1ec105e2b156a | [] | no_license | telegraphic/interfits | a166258459deaeb831d49787952a3e08d2aaaf40 | 0ee46e94b84d405c8381772be05b42e0b9c41158 | refs/heads/master | 2021-01-19T01:57:32.397157 | 2016-07-02T01:38:06 | 2016-07-02T01:38:06 | 10,858,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py | import numpy as np
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from test_main import *
def plot3d(x,y,z, xl='X', yl='Y', zl='Z', c='#cc0000'):
ax.scatter(x, y, z, c=c)
ax.set_xlabel(xl)
ax.set_ylabel(yl)
ax.set_zlabel(zl)
l = LedaFits('vis_00.uvfits')
xyz = l.d_array_geometry["STABXYZ"]
x,y,z = np.split(xyz, 3, axis=1)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
plot3d(x, y, z, 'X', 'Y', 'Z', c='#00cc00')
plt.show()
bls = coords.computeBaselineVectors(xyz)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
uvw = coords.computeUVW(bls, H=0, d=np.rad2deg(0)) * 1e6
u,v,w = np.split(uvw, 3, axis=1)
plot3d(u, v, w, 'U', 'V', 'W')
uvw = coords.computeUVW(bls, H=0, d=np.deg2rad(34.07)) * 1e6
u,v,w = np.split(uvw, 3, axis=1)
plot3d(u, v, w, 'U', 'V', 'W', c='#00cc00')
plt.show() | [
"[email protected]"
] | |
b7e438493cf19af3ecdba1a85a98ab4eb17912fd | 8dbe574f3b20308d79ef37643570d7dec15e67d9 | /cn.zero/py.ori.fmt/c1110.bin.py | d76d22acac5506f6e347f103aed173e368a82f42 | [] | no_license | xaeingking/ZeroAoVoiceScripts | 62526d004bd02e645970930ecd4b6053809092ab | 512c1fd544954a38c92fc097f5b0c006031ee87d | refs/heads/master | 2020-05-20T17:04:55.028776 | 2019-01-29T10:40:44 | 2019-01-29T10:40:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247,235 | py | from ZeroScenarioHelper import *
def main():
CreateScenaFile(
"c1110.bin", # FileName
"c1110", # MapName
"c1110", # Location
0x0017, # MapIndex
"ed7100",
0x00002000, # Flags
("", "", "", "", "", ""), # include
0x00, # PlaceNameNumber
0x00, # PreInitFunctionIndex
b'\x00\xff\xff', # Unknown_51
# Information
[0, 0, -1000, 0, 0, 2500, 34000, 262, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 23, 0, 3, 0, 4],
)
BuildStringList((
"c1110", # 0
"接待小姐希恩", # 1
"库利普主任", # 2
"阿奈斯特秘书", # 3
"麦克道尔市长", # 4
"弗兰茨巡警", # 5
"市政府职员", # 6
"市政府职员", # 7
"市政府职员", # 8
"雷因兹", # 9
"格蕾丝", # 10
"紫发的女孩", # 11
"运输公司员工", # 12
"运输公司员工", # 13
"哈尔特曼议长", # 14
"议员", # 15
"议员", # 16
))
AddCharChip((
"chr/ch34600.itc", # 00
"chr/ch28000.itc", # 01
"chr/ch00000.itc", # 02
"chr/ch02300.itc", # 03
"chr/ch05802.itc", # 04
"chr/ch30000.itc", # 05
"chr/ch28100.itc", # 06
"chr/ch28200.itc", # 07
"chr/ch27600.itc", # 08
"chr/ch06000.itc", # 09
"chr/ch05200.itc", # 0A
"chr/ch00000.itc", # 0B
"chr/ch00000.itc", # 0C
"chr/ch00000.itc", # 0D
"chr/ch00000.itc", # 0E
"chr/ch00000.itc", # 0F
"chr/ch00000.itc", # 10
"chr/ch00000.itc", # 11
"chr/ch00000.itc", # 12
"chr/ch00000.itc", # 13
"chr/ch00000.itc", # 14
"chr/ch00000.itc", # 15
"chr/ch00000.itc", # 16
"chr/ch00000.itc", # 17
"chr/ch00000.itc", # 18
"chr/ch00000.itc", # 19
"chr/ch00000.itc", # 1A
"chr/ch00000.itc", # 1B
"chr/ch00000.itc", # 1C
"chr/ch00000.itc", # 1D
))
DeclNpc(0, 0, 7400, 180, 261, 0x0, 0, 0, 0, 0, 0, 0, 6, 255, 0)
DeclNpc(3529, 4000, 16209, 315, 261, 0x0, 0, 1, 0, 0, 1, 0, 7, 255, 0)
DeclNpc(0, 0, 0, 0, 389, 0x0, 0, 3, 0, 0, 0, 0, 8, 255, 0)
DeclNpc(-44990, 250, 14710, 180, 469, 0x0, 0, 4, 0, 255, 255, 0, 10, 255, 0)
DeclNpc(-13510, 4000, 14529, 135, 389, 0x0, 0, 5, 0, 0, 0, 0, 11, 255, 0)
DeclNpc(-4429, 0, 4460, 180, 389, 0x0, 0, 6, 0, 0, 0, 0, 12, 255, 0)
DeclNpc(0, 0, 7400, 180, 389, 0x0, 0, 7, 0, 0, 0, 0, 13, 255, 0)
DeclNpc(0, 0, 7400, 180, 389, 0x0, 0, 8, 0, 0, 0, 0, 14, 255, 0)
DeclNpc(2670, 0, -1090, 0, 389, 0x0, 0, 6, 0, 0, 0, 0, 15, 255, 0)
DeclNpc(1730, 0, 5389, 315, 389, 0x0, 0, 9, 0, 0, 0, 0, 16, 255, 0)
DeclNpc(0, 0, 5159, 360, 389, 0x0, 0, 10, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclEvent(0x0000, 0, 18, 0.0, 3.5, 0.0, 517.5625, [0.0714285746216774, -0.0, 0.0, 0.0, -0.0, 0.3076923191547394, -0.0, 0.0, 0.0, -0.0, 0.20000000298023224, 0.0, -0.0, -1.076923131942749, -0.0, 1.0])
DeclEvent(0x0000, 0, 19, -6.400000095367432, 16.75, 4.0, 3136.0, [0.0714285746216774, -0.0, 0.0, 0.0, -0.0, 0.125, -0.0, 0.0, 0.0, -0.0, 0.20000000298023224, 0.0, 0.4571428894996643, -2.09375, -0.800000011920929, 1.0])
DeclActor(0, 0, 6000, 1500, 0, 1500, 7460, 0x007E, 0, 5, 0x0000)
DeclActor(-44940, 0, 13190, 1500, -44990, 1500, 14710, 0x007E, 0, 9, 0x0000)
DeclActor(-8100, 4000, 19780, 1500, -8100, 5500, 19780, 0x007C, 0, 36, 0x0000)
DeclActor(8000, 4120, 19640, 1500, 8000, 5520, 19640, 0x007C, 0, 37, 0x0000)
ScpFunction((
"Function_0_4B4", # 00, 0
"Function_1_56C", # 01, 1
"Function_2_597", # 02, 2
"Function_3_5C2", # 03, 3
"Function_4_8E1", # 04, 4
"Function_5_AE4", # 05, 5
"Function_6_AE8", # 06, 6
"Function_7_279D", # 07, 7
"Function_8_3C98", # 08, 8
"Function_9_3F45", # 09, 9
"Function_10_3F49", # 0A, 10
"Function_11_4F4F", # 0B, 11
"Function_12_4FD0", # 0C, 12
"Function_13_5241", # 0D, 13
"Function_14_52B5", # 0E, 14
"Function_15_530F", # 0F, 15
"Function_16_540F", # 10, 16
"Function_17_5755", # 11, 17
"Function_18_729B", # 12, 18
"Function_19_7C51", # 13, 19
"Function_20_84B5", # 14, 20
"Function_21_8516", # 15, 21
"Function_22_85CC", # 16, 22
"Function_23_864F", # 17, 23
"Function_24_9351", # 18, 24
"Function_25_985D", # 19, 25
"Function_26_9D75", # 1A, 26
"Function_27_A42C", # 1B, 27
"Function_28_A659", # 1C, 28
"Function_29_ABEA", # 1D, 29
"Function_30_B1B6", # 1E, 30
"Function_31_C3DD", # 1F, 31
"Function_32_C41C", # 20, 32
"Function_33_D241", # 21, 33
"Function_34_D264", # 22, 34
"Function_35_D28D", # 23, 35
"Function_36_D306", # 24, 36
"Function_37_D51F", # 25, 37
))
def Function_0_4B4(): pass

label("Function_0_4B4")

# Character idle/animation loop.  Roll a random value 0-7 into work register
# 0x2 and issue one OP_A0 with a duration varying from 1350 to 1650
# (presumably an animation-cycle speed so NPCs don't move in lockstep —
# TODO confirm OP_A0 semantics), then fall into an endless loop repeating
# OP_A0 at the default 1500.
RunExpression(0x2, (scpexpr(EXPR_RAND), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_IMOD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Switch(
    (scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_END)),
    (0, "loc_4F4"),
    (1, "loc_500"),
    (2, "loc_50C"),
    (3, "loc_518"),
    (4, "loc_524"),
    (5, "loc_530"),
    (6, "loc_53C"),
    (SWITCH_DEFAULT, "loc_548"),
)


label("loc_4F4")

OP_A0(0xFE, 1450, 0x0, 0xFB)
Jump("loc_554")

label("loc_500")

OP_A0(0xFE, 1550, 0x0, 0xFB)
Jump("loc_554")

label("loc_50C")

OP_A0(0xFE, 1600, 0x0, 0xFB)
Jump("loc_554")

label("loc_518")

OP_A0(0xFE, 1400, 0x0, 0xFB)
Jump("loc_554")

label("loc_524")

OP_A0(0xFE, 1650, 0x0, 0xFB)
Jump("loc_554")

label("loc_530")

OP_A0(0xFE, 1350, 0x0, 0xFB)
Jump("loc_554")

label("loc_53C")

OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_554")

label("loc_548")

OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_554")

label("loc_554")

# Jc on a constant-1 expression plus a Jump back: in this decompiled form
# this is an endless loop (the exit label loc_56B is only reached if the
# condition evaluates false, which a constant 1 never does — TODO confirm
# Jc branch polarity against the decompiler docs).
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_56B")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_554")

label("loc_56B")

Return()

# Function_0_4B4 end
def Function_1_56C(): pass

label("Function_1_56C")

# NPC wander loop: endlessly (constant-1 Jc, see Function_0) issue OP_94 —
# presumably "walk randomly within the given rectangle" (TODO confirm) —
# then wait 300 ms before the next move.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_596")
OP_94(0xFE, 0x193C, 0x3B1A, 0x672, 0x41BE, 0x3E8)
Sleep(300)
Jump("Function_1_56C")

label("loc_596")

Return()

# Function_1_56C end
def Function_2_597(): pass

label("Function_2_597")

# Same wander loop as Function_1, but over a different (negative-X) area.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5C1")
OP_94(0xFE, 0xFFFFEB74, 0x37FA, 0xFFFFF5F6, 0x43EE, 0x3E8)
Sleep(300)
Jump("Function_2_597")

label("loc_5C1")

Return()

# Function_2_597 end
def Function_3_5C2(): pass

label("Function_3_5C2")

# Scene-entry NPC placement.  A chain of scenario-flag tests (ordered
# latest story state first; only the first match runs) positions NPC 0x9
# and shows/hides the other NPCs for the current chapter.  Flag bit pairs
# like (0xE0, 0) are engine story-progress markers — their story meaning is
# not visible from this file.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xE0, 0)), scpexpr(EXPR_END)), "loc_5EC")
SetChrPos(0x9, -46210, 0, 12030, 0)
BeginChrThread(0x9, 0, 0, 0)
ClearChrFlags(0xB, 0x80)
Jump("loc_8B6")

label("loc_5EC")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC3, 6)), scpexpr(EXPR_END)), "loc_611")
SetChrPos(0x9, -40440, 0, 11040, 90)
BeginChrThread(0x9, 0, 0, 0)
Jump("loc_8B6")

label("loc_611")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC2, 2)), scpexpr(EXPR_END)), "loc_61F")
Jump("loc_8B6")

label("loc_61F")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 0)), scpexpr(EXPR_END)), "loc_632")
ClearChrFlags(0x10, 0x80)
Jump("loc_8B6")

label("loc_632")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_645")
ClearChrFlags(0xB, 0x80)
Jump("loc_8B6")

label("loc_645")

# Chapter flagged by (0xA3, 3): placement further depends on the state of
# quest 0x22 (OP_2A quest-state query — TODO confirm argument meaning).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_END)), "loc_6FC")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_68C")
ClearChrFlags(0xD, 0x80)
SetChrPos(0x9, -1820, 4000, 17030, 225)
SetChrPos(0xD, -3170, 4000, 16140, 45)
BeginChrThread(0x9, 0, 0, 0)
Jump("loc_6F7")

label("loc_68C")

ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
SetChrPos(0x9, 3830, 4000, 18150, 0)
SetChrPos(0xD, -5310, 4000, 15540, 225)
SetChrPos(0xE, -6700, 4000, 14240, 45)
BeginChrThread(0x9, 0, 0, 0)
SetChrFlags(0xD, 0x10)
SetChrFlags(0xE, 0x10)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x1, 0x0)"), scpexpr(EXPR_END)), "loc_6F7")
SetChrPos(0x9, 4870, 4000, 17950, 180)

label("loc_6F7")

Jump("loc_8B6")

label("loc_6FC")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 2)), scpexpr(EXPR_END)), "loc_721")
SetChrPos(0x9, -40440, 0, 11040, 90)
BeginChrThread(0x9, 0, 0, 0)
Jump("loc_8B6")

label("loc_721")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_END)), "loc_74B")
SetChrPos(0x9, -40440, 0, 11040, 90)
BeginChrThread(0x9, 0, 0, 0)
ClearChrFlags(0xB, 0x80)
Jump("loc_8B6")

label("loc_74B")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_7B0")
SetChrPos(0xF, -5160, 4000, 16030, 90)
SetChrPos(0x9, -3740, 4000, 16030, 270)
BeginChrThread(0x9, 0, 0, 0)
SetChrFlags(0x9, 0x10)
ClearChrFlags(0xC, 0x80)
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
SetChrPos(0xE, -6520, 4000, 15440, 90)
ClearChrFlags(0xF, 0x80)
ClearChrFlags(0xB, 0x80)
Jump("loc_8B6")

label("loc_7B0")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_END)), "loc_7DA")
SetChrPos(0x9, -46210, 0, 12030, 0)
BeginChrThread(0x9, 0, 0, 0)
ClearChrFlags(0xB, 0x80)
Jump("loc_8B6")

label("loc_7DA")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 7)), scpexpr(EXPR_END)), "loc_815")
ClearChrFlags(0xA, 0x80)
SetChrPos(0xA, -5160, 4000, 16030, 90)
SetChrPos(0x9, -3740, 4000, 16030, 270)
BeginChrThread(0x9, 0, 0, 0)
Jump("loc_8B6")

label("loc_815")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x82, 0)), scpexpr(EXPR_END)), "loc_823")
Jump("loc_8B6")

label("loc_823")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x80, 7)), scpexpr(EXPR_END)), "loc_842")
ClearChrFlags(0x11, 0x80)
SetChrFlags(0x11, 0x10)
TurnDirection(0x8, 0x11, 0)
Jump("loc_8B6")

label("loc_842")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x80, 0)), scpexpr(EXPR_END)), "loc_850")
Jump("loc_8B6")

label("loc_850")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x64, 1)), scpexpr(EXPR_END)), "loc_85E")
Jump("loc_8B6")

label("loc_85E")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 2)), scpexpr(EXPR_END)), "loc_86C")
Jump("loc_8B6")

label("loc_86C")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 1)), scpexpr(EXPR_END)), "loc_87A")
Jump("loc_8B6")

label("loc_87A")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x60, 0)), scpexpr(EXPR_END)), "loc_89F")
SetChrPos(0x9, -4070, 4000, 16180, 180)
BeginChrThread(0x9, 0, 0, 2)
Jump("loc_8B6")

label("loc_89F")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 5)), scpexpr(EXPR_END)), "loc_8AD")
Jump("loc_8B6")

label("loc_8AD")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x41, 6)), scpexpr(EXPR_END)), "loc_8B6")

label("loc_8B6")

# After placement: auto-start event 25 once (flags (0x60,0) clear, (0x46,4)
# set, (0x53,0) clear), and event 32 when its one-shot flag (0x5C,0) is set
# — that flag is cleared here so it fires only once.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x60, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x53, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8D1")
ClearChrFlags(0x12, 0x80)
Event(0, 25)

label("loc_8D1")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x5C, 0)), scpexpr(EXPR_END)), "loc_8E0")
ClearScenarioFlags(0x5C, 0)
Event(0, 32)

label("loc_8E0")

Return()

# Function_3_5C2 end
def Function_4_8E1(): pass

label("Function_4_8E1")

# Map-entry initialisation: toggle event triggers (OP_65/OP_66 appear as a
# disable/enable pair — TODO confirm), map-object visibility, lighting/BGM
# and entry-event availability according to story-progress flags.  Opcode
# semantics are engine-defined; comments below describe only the visible
# flag structure.
OP_65(0x1, 0x1)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 2)), scpexpr(EXPR_END)), "loc_8F3")
Jump("loc_8FC")

label("loc_8F3")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 1)), scpexpr(EXPR_END)), "loc_8FC")

label("loc_8FC")

# Re-enable trigger 1 for the chapters flagged below.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xE0, 0)), scpexpr(EXPR_END)), "loc_90E")
OP_66(0x1, 0x1)
Jump("loc_96D")

label("loc_90E")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 0)), scpexpr(EXPR_END)), "loc_91C")
Jump("loc_96D")

label("loc_91C")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_92E")
OP_66(0x1, 0x1)
Jump("loc_96D")

label("loc_92E")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 2)), scpexpr(EXPR_END)), "loc_93C")
Jump("loc_96D")

label("loc_93C")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_END)), "loc_94E")
OP_66(0x1, 0x1)
Jump("loc_96D")

label("loc_94E")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_960")
OP_66(0x1, 0x1)
Jump("loc_96D")

label("loc_960")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_END)), "loc_96D")
OP_66(0x1, 0x1)

label("loc_96D")

# During the (0xA3,3) chapter before quest 0x22 reaches its done state,
# show map object "model06".
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_997")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_997")
SetMapObjFrame(0xFF, "model06", 0x0, 0x1)

label("loc_997")

# Scena value index 4 == 3 flips the OP_10 pair (presumably a time-of-day /
# scene-variant toggle — TODO confirm what value index 4 holds).
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_9B0")
OP_10(0x0, 0x0)
OP_10(0x6, 0x1)
Jump("loc_9B6")

label("loc_9B0")

OP_10(0x0, 0x1)
OP_10(0x6, 0x0)

label("loc_9B6")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x81, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x81, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_9D2")
OP_7D(0xFF, 0xD2, 0xC8, 0x0, 0x0)
Jump("loc_9E9")

label("loc_9D2")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xE0, 0)), scpexpr(EXPR_END)), "loc_9E9")
OP_7D(0xFF, 0xD2, 0xC8, 0x0, 0x0)
Jump("loc_9E9")

label("loc_9E9")

OP_1B(0x2, 0xFF, 0xFFFF)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAD, 7)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_A05")
OP_1B(0x2, 0x0, 0x23)

label("loc_A05")

# Default state: trigger 2 disabled, map object 1 flagged 0x10 (presumably
# hidden/non-interactable); re-enabled for the flag combination below.
OP_65(0x2, 0x1)
SetMapObjFlags(0x1, 0x10)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xB8, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_A28")
OP_66(0x2, 0x1)
ClearMapObjFlags(0x1, 0x10)

label("loc_A28")

OP_66(0x3, 0x1)
ClearMapObjFlags(0x2, 0x10)
ClearMapObjFlags(0x3, 0x4)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_A4C")
SetMapObjFlags(0x3, 0x4)

label("loc_A4C")

# Both declared map events start disabled; selectively re-enabled below
# depending on the state of quest 0x26.
ModifyEventFlags(0, 0, 0x80)
ModifyEventFlags(0, 1, 0x80)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 0)), scpexpr(EXPR_END)), "loc_A64")
Jump("loc_AE3")

label("loc_A64")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_AE3")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_A7E")
Jump("loc_AE3")

label("loc_A7E")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xBE, 7)), scpexpr(EXPR_END)), "loc_A91")
OP_1B(0x1, 0x0, 0x17)
Jump("loc_AE3")

label("loc_A91")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x1, 0x3)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x1, 0x4)"), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_AB0")
ModifyEventFlags(1, 1, 0x80)
Jump("loc_AE3")

label("loc_AB0")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x2)"), scpexpr(EXPR_END)), "loc_ACB")
OP_66(0x2, 0x1)
ClearMapObjFlags(0x1, 0x10)
Jump("loc_AE3")

label("loc_ACB")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xBE, 6)), scpexpr(EXPR_END)), "loc_AE3")
ModifyEventFlags(1, 0, 0x80)
OP_66(0x2, 0x1)
ClearMapObjFlags(0x1, 0x10)

label("loc_AE3")

Return()

# Function_4_8E1 end
def Function_5_AE4(): pass

label("Function_5_AE4")

# Talk entry point registered by the first DeclActor above: forwards
# straight to the receptionist dialogue dispatcher (Function_6).
Call(0, 6)
Return()

# Function_5_AE4 end
def Function_6_AE8(): pass

label("Function_6_AE8")

# Dialogue dispatcher for NPC 0x8 (City Hall receptionist, judging from the
# dialogue text).  Structure: a few quest-state fast paths first, then
# TalkBegin and one long chain of scenario-flag tests ordered latest story
# state first — the first match plays its dialogue and jumps to the shared
# exit at loc_2799/loc_279C.  Many branches use flag (0x0, bit 0) as an
# "already talked" marker: first conversation plays the long variant and
# sets the flag, repeats play the short one.

# Flags (0x83,1) set and (0x83,2) clear: play the scene in Function_24
# instead of a normal conversation.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_AFE")
Call(0, 24)
Jump("loc_279C")

label("loc_AFE")

# Quest 0x3 in one OP_2A-queried state: face the NPC and run handler 26 or
# 27 depending on a sub-state (OP_2A argument meanings are engine-defined).
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x1)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_B44")
OP_4B(0x8, 0xFF)
TurnDirection(0x0, 0x8, 0)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x0)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_B3C")
Call(0, 26)
Jump("loc_B3F")

label("loc_B3C")

Call(0, 27)

label("loc_B3F")

Jump("loc_279C")

label("loc_B44")

# Quest 0x3 mid-progress (several sub-flags set): run handler 29.
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x5)"), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x7)"), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_B84")
OP_4B(0x8, 0xFF)
TurnDirection(0x0, 0x8, 0)
Call(0, 29)
Jump("loc_279C")

label("loc_B84")

# Normal conversation path starts here.
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 7)), scpexpr(EXPR_END)), "loc_D43")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_CAC")
#C0001
ChrTalk(
0x8,
(
"今天实在是\x01",
"太感谢了。\x02",
)
)

CloseMessageWindow()
#C0002
ChrTalk(
0x8,
(
"特别任务支援科……吗,\x01",
"据说是为了向市民提供服务\x01",
"而设立的部门……\x02",
)
)

CloseMessageWindow()
#C0003
ChrTalk(
0x8,
(
"不过,连我们都沾光受助了,\x01",
"真是十分感谢啊。\x02",
)
)

CloseMessageWindow()
#C0004
ChrTalk(
0x101,
(
"#0009F哈哈哈……\x01",
"(被这么称赞,还真是不好意思啊。)\x02",
)
)

CloseMessageWindow()
#C0005
ChrTalk(
0x102,
(
"#0100F如果以后再有什么事情,\x01",
"请不用客气,随时提出委托哦。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_D3E")

label("loc_CAC")

#C0006
ChrTalk(
0x8,
(
"今天实在是\x01",
"太感谢了。\x02",
)
)

CloseMessageWindow()
#C0007
ChrTalk(
0x8,
(
"特别任务支援科……吗,\x01",
"据说是为了向市民提供服务\x01",
"而设立的部门……\x02",
)
)

CloseMessageWindow()
#C0008
ChrTalk(
0x8,
(
"不过,连我们都沾光受助了,\x01",
"真是十分感谢啊。\x02",
)
)

CloseMessageWindow()

label("loc_D3E")

Jump("loc_2799")

label("loc_D43")

# Quest 0x3 accepted but rooms not yet all checked: reminder dialogue.
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x1)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_DD2")
#C0009
ChrTalk(
0x8,
(
"三处空房都确认完毕之后,\x01",
"就请来通知我吧。\x02",
)
)

CloseMessageWindow()
#C0010
ChrTalk(
0x8,
(
"文件的内容也许会有\x01",
"一些遗漏或错误之处,\x01",
"还请多加注意啊。\x02",
)
)

CloseMessageWindow()
Jump("loc_2799")

label("loc_DD2")

# Chapter flag (0xE0, 0): post-assembly dialogue.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xE0, 0)), scpexpr(EXPR_END)), "loc_F30")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_EC5")
#C0011
ChrTalk(
0x8,
(
"自治州议会在不久前\x01",
"总算结束了。\x02",
)
)

CloseMessageWindow()
#C0012
ChrTalk(
0x8,
(
"这样一来,财务科的各位\x01",
"也就可以安心回家了。\x02",
)
)

CloseMessageWindow()
#C0013
ChrTalk(
0x8,
(
"不过……市长今天似乎也像往常一样,\x01",
"准备一直加班到深夜。\x02",
)
)

CloseMessageWindow()
#C0014
ChrTalk(
0x8,
(
"好像是急着把预算方面的文件整理完……\x01",
"真希望他不要太勉强自己啊。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_F2B")

label("loc_EC5")

#C0015
ChrTalk(
0x8,
(
"市长今天似乎也像往日一样,\x01",
"准备一直加班到深夜。\x02",
)
)

CloseMessageWindow()
#C0016
ChrTalk(
0x8,
(
"他真是位一丝不苟的人……\x01",
"好担心他的身体呢。\x02",
)
)

CloseMessageWindow()

label("loc_F2B")

Jump("loc_2799")

label("loc_F30")

# Chapter flag (0xC3, 6): airport-lockdown dialogue; the party reaction
# differs on flag (0xC4, 1).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC3, 6)), scpexpr(EXPR_END)), "loc_1185")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1117")
#C0017
ChrTalk(
0x8,
(
"从今天早上开始,就不断有人来询问\x01",
"空港被封锁的事情。\x02",
)
)

CloseMessageWindow()
#C0018
ChrTalk(
0x8,
(
"真是麻烦啊,因为警察局的人\x01",
"说过要暂时保密,稳定事态,\x01",
"所以我只能敷衍说是要临时进行设备检查……\x02",
)
)

CloseMessageWindow()
#C0019
ChrTalk(
0x8,
"这种理由到底能撑到什么时候呢……\x02",
)

CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC4, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_10A6")
OP_63(0x0, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
OP_63(0x1, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
OP_63(0x2, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
OP_63(0x3, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
Sleep(1000)
#C0020
ChrTalk(
0x101,
"#0005F(空港那边似乎是发生了什么事啊。)\x02",
)

CloseMessageWindow()
Jump("loc_110F")

label("loc_10A6")

#C0021
ChrTalk(
0x103,
(
"#0203F(对外公布的官方消息\x01",
" 好像是『对设备进行临时检查』。)\x02",
)
)

CloseMessageWindow()
#C0022
ChrTalk(
0x101,
"#0001F(市政府这边也很难办呢。)\x02",
)

CloseMessageWindow()

label("loc_110F")

SetScenarioFlags(0x0, 0)
Jump("loc_1180")

label("loc_1117")

#C0023
ChrTalk(
0x8,
(
"从今天早上开始,就不断有人来询问\x01",
"空港被封锁的事情。\x02",
)
)

CloseMessageWindow()
#C0024
ChrTalk(
0x8,
"唉,这种敷衍的理由到底能撑到什么时候呢……\x02",
)

CloseMessageWindow()

label("loc_1180")

Jump("loc_2799")

label("loc_1185")

# Chapter flag (0xC2, 2): assembly-in-session dialogue.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC2, 2)), scpexpr(EXPR_END)), "loc_136B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_12FA")
#C0025
ChrTalk(
0x8,
(
"自治州议会今天好像也\x01",
"纠纷不断呢。\x02",
)
)

CloseMessageWindow()
#C0026
ChrTalk(
0x8,
(
"呼……议员先生们那种难看的样子,\x01",
"无论如何也不能被市民们看到啊。\x02",
)
)

CloseMessageWindow()
#C0027
ChrTalk(
0x8,
(
"大家的态度都十分恶劣,\x01",
"互相谩骂的场面此起彼伏。\x02",
)
)

CloseMessageWindow()
OP_63(0x0, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
OP_63(0x1, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
OP_63(0x2, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
OP_63(0x3, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(1000)
#C0028
ChrTalk(
0x104,
"#0303F(……那种场面,确实是不想看见啊。)\x02",
)

CloseMessageWindow()
#C0029
ChrTalk(
0x102,
"#0108F(这就是议会的现实啊……)\x02",
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1366")

label("loc_12FA")

#C0030
ChrTalk(
0x8,
(
"自治州议会今天好像也\x01",
"纠纷不断呢。\x02",
)
)

CloseMessageWindow()
#C0031
ChrTalk(
0x8,
(
"呼……议员先生们那种难看的样子,\x01",
"无论如何也不能被市民们看到啊。\x02",
)
)

CloseMessageWindow()

label("loc_1366")

Jump("loc_2799")

label("loc_136B")

# Chapter flag (0xC0, 0): budget-session dialogue; the greeting varies with
# whether quest 0x3 or 0x26 has been seen.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 0)), scpexpr(EXPR_END)), "loc_1599")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1535")
#C0032
ChrTalk(
0x8,
(
"这里是克洛斯贝尔\x01",
"市政厅的接待处……\x02",
)
)

CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x0, 0x10)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x10)"), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_13E8")
#C0033
ChrTalk(
0x8,
"啊,是各位啊。\x02",
)

CloseMessageWindow()
Jump("loc_140A")

label("loc_13E8")

#C0034
ChrTalk(
0x8,
"啊,各位是……警察局的人吧。\x02",
)

CloseMessageWindow()

label("loc_140A")

#C0035
ChrTalk(
0x8,
(
"呼,我还以为是来\x01",
"抱怨的市民呢。\x02",
)
)

CloseMessageWindow()
#C0036
ChrTalk(
0x102,
"#0105F来抱怨的市民?\x02",
)

CloseMessageWindow()
#C0037
ChrTalk(
0x8,
(
"嗯,按照计划安排,预算会议\x01",
"本来是应该截止到昨天就结束的……\x02",
)
)

CloseMessageWindow()
#C0038
ChrTalk(
0x8,
(
"但和往年一样,辩论过程中纠纷不断,\x01",
"直到现在,还在对预算问题争执不休。\x02",
)
)

CloseMessageWindow()
#C0039
ChrTalk(
0x8,
(
"会期大概又要\x01",
"延长好几天了吧……\x02",
)
)

CloseMessageWindow()
#C0040
ChrTalk(
0x102,
(
"#0106F(唉……自治州议会\x01",
" 也还是老样子呢。)\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1594")

label("loc_1535")

#C0041
ChrTalk(
0x8,
(
"今年的议会也要延长会期……\x01",
"所以预算计划的执行也会随之推迟。\x02",
)
)

CloseMessageWindow()
#C0042
ChrTalk(
0x8,
"给市民们带来了不少麻烦。\x02",
)

CloseMessageWindow()

label("loc_1594")

Jump("loc_2799")

label("loc_1599")

# Chapter flag (0xA7, 6): dialogue tracks the stages of quest 0x26 (the
# mayor's-drink errand) via OP_2A sub-state queries.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_180D")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_15F5")
#C0043
ChrTalk(
0x8,
(
"各位……\x01",
"非常感谢。\x02",
)
)

CloseMessageWindow()
#C0044
ChrTalk(
0x8,
(
"请容我代表市政厅,\x01",
"向各位表示谢意。\x02",
)
)

CloseMessageWindow()
Jump("loc_1808")

label("loc_15F5")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x1, 0x3)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x1, 0x4)"), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_1670")
#C0045
ChrTalk(
0x8,
(
"已经把市长喜欢的饮料\x01",
"买回来了吧?\x02",
)
)

CloseMessageWindow()
#C0046
ChrTalk(
0x8,
(
"我想他一定会很高兴的。\x01",
"那就请各位直接\x01",
"去交给市长吧。\x02",
)
)

CloseMessageWindow()
Jump("loc_1808")

label("loc_1670")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x1, 0x2)"), scpexpr(EXPR_END)), "loc_16FB")
#C0047
ChrTalk(
0x8,
(
"果汁店\x01",
"搬地方了……?\x02",
)
)

CloseMessageWindow()
#C0048
ChrTalk(
0x8,
(
"抱歉,我也不知道\x01",
"搬到了什么地方呢……\x02",
)
)

CloseMessageWindow()
#C0049
ChrTalk(
0x8,
(
"但我觉得,应该还在市内的\x01",
"某个地方正常营业吧。\x02",
)
)

CloseMessageWindow()
Jump("loc_1808")

label("loc_16FB")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x2)"), scpexpr(EXPR_END)), "loc_17A8")
#C0050
ChrTalk(
0x8,
(
"市长喜欢的饮料,\x01",
"可以在喷泉广场的\x01",
"果汁店买到。\x02",
)
)

CloseMessageWindow()
#C0051
ChrTalk(
0x8,
(
"但一般不公开对外出售,\x01",
"算是特别饮料哦。\x02",
)
)

CloseMessageWindow()
#C0052
ChrTalk(
0x8,
(
"虽然有些麻烦,但还是拜托各位\x01",
"去买回来,然后交给市长。\x02",
)
)

CloseMessageWindow()
Jump("loc_1808")

label("loc_17A8")

#C0053
ChrTalk(
0x8,
(
"从明天开始,预算会议\x01",
"就要在议事堂正式召开了。\x02",
)
)

CloseMessageWindow()
#C0054
ChrTalk(
0x8,
(
"媒体的人也会过来……\x01",
"应该又会开始忙了。\x02",
)
)

CloseMessageWindow()

label("loc_1808")

Jump("loc_2799")

label("loc_180D")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_END)), "loc_1905")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_18AB")
#C0055
ChrTalk(
0x8,
(
"今天晚上五点\x01",
"将要召开闭幕式。\x02",
)
)

CloseMessageWindow()
#C0056
ChrTalk(
0x8,
(
"不过,会场大概会\x01",
"非常混乱嘈杂。\x02",
)
)

CloseMessageWindow()
#C0057
ChrTalk(
0x8,
(
"至于闭幕宣言,请通过市政厅前方广场的\x01",
"扩音器来收听吧。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1900")

label("loc_18AB")

#C0058
ChrTalk(
0x8,
(
"会场大概会\x01",
"非常混乱嘈杂。\x02",
)
)

CloseMessageWindow()
#C0059
ChrTalk(
0x8,
(
"请通过市政厅前方\x01",
"广场的扩音器\x01",
"来收听闭幕宣言吧。\x02",
)
)

CloseMessageWindow()

label("loc_1900")

Jump("loc_2799")

label("loc_1905")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 2)), scpexpr(EXPR_END)), "loc_1978")
#C0060
ChrTalk(
0x8,
"游行总算是顺利结束了呢。\x02",
)

CloseMessageWindow()
#C0061
ChrTalk(
0x8,
"呼……太好了。\x02",
)

CloseMessageWindow()
#C0062
ChrTalk(
0x8,
(
"各位市民好像也都玩得很尽兴,\x01",
"这就最好不过了。\x02",
)
)

CloseMessageWindow()
Jump("loc_2799")

label("loc_1978")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_END)), "loc_1A1A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_19DA")
#C0063
ChrTalk(
0x8,
"游行终于要开始了啊……\x02",
)

CloseMessageWindow()
#C0064
ChrTalk(
0x8,
(
"呼,希望今年也能\x01",
"平安无事地展开啊……\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1A15")

label("loc_19DA")

#C0065
ChrTalk(
0x8,
(
"每年的游行活动中都会发生很多事故,\x01",
"真是很令人头疼……\x02",
)
)

CloseMessageWindow()

label("loc_1A15")

Jump("loc_2799")

label("loc_1A1A")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_1A91")
#C0066
ChrTalk(
0x8,
(
"如果想旁听今天的国际研讨会,\x01",
"需要事先提出申请。\x02",
)
)

CloseMessageWindow()
#C0067
ChrTalk(
0x8,
(
"实在非常抱歉,\x01",
"今天的旁听申请名额\x01",
"已经没有了。\x02",
)
)

CloseMessageWindow()
Jump("loc_2799")

label("loc_1A91")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_END)), "loc_1B84")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1B33")
#C0068
ChrTalk(
0x8,
(
"我们这里在配发\x01",
"纪念庆典的宣传手册。\x02",
)
)

CloseMessageWindow()
#C0069
ChrTalk(
0x8,
(
"纪念庆典的日程表,主要店铺,\x01",
"还有游行队伍的经过路线都有\x01",
"详细记录哦。\x02",
)
)

CloseMessageWindow()
#C0070
ChrTalk(
0x8,
"还请多加利用。\x02",
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1B7F")

label("loc_1B33")

#C0071
ChrTalk(
0x8,
(
"我们这里在配发\x01",
"纪念庆典的宣传手册。\x02",
)
)

CloseMessageWindow()
#C0072
ChrTalk(
0x8,
(
"当然是免费的,\x01",
"还请多加使用。\x02",
)
)

CloseMessageWindow()

label("loc_1B7F")

Jump("loc_2799")

label("loc_1B84")

# Chapter flag (0x83, 7): here the one-shot marker is (0x91, 2) instead of
# (0x0, 0) — the first conversation grants permanent use of the underground
# key and sets (0x91, 2) so it never repeats.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 7)), scpexpr(EXPR_END)), "loc_1DDE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x91, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1CED")
#C0073
ChrTalk(
0x8,
(
"啊,各位……\x01",
"事情已经办完了吗?\x02",
)
)

CloseMessageWindow()
#C0074
ChrTalk(
0x102,
"#0100F嗯,托您的福。\x02",
)

CloseMessageWindow()
#C0075
ChrTalk(
0x104,
(
"#0300F虽然事态又变得\x01",
"很棘手了…\x02",
)
)

CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
Sleep(1000)
#C0076
ChrTalk(
0x8,
"这个,虽然不是很明白……\x02",
)

CloseMessageWindow()
#C0077
ChrTalk(
0x8,
(
"不过,以后如果还准备\x01",
"进入地下空间的话,\x01",
"钥匙就放在你们那里也没关系哦。\x02",
)
)

CloseMessageWindow()
#C0078
ChrTalk(
0x8,
"请各位随意使用吧。\x02",
)

CloseMessageWindow()
#C0079
ChrTalk(
0x101,
(
"#0000F是吗,那我们就不客气了。\x02\x03",
"#0003F(果然还是有些\x01",
" 在意约纳呢……)\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x91, 2)
Jump("loc_1DD9")

label("loc_1CED")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1D77")
#C0080
ChrTalk(
0x8,
(
"市政厅今天也有些混乱忙碌呢,\x01",
"一直都人来人往的。\x02",
)
)

CloseMessageWindow()
#C0081
ChrTalk(
0x8,
(
"因为要进行纪念庆典开幕式的预先演习。\x01",
"……真是给各位添麻烦了。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1DD9")

label("loc_1D77")

#C0082
ChrTalk(
0x8,
(
"今天要在大会堂内进行\x01",
"纪念庆典开幕式的预先演习。\x02",
)
)

CloseMessageWindow()
#C0083
ChrTalk(
0x8,
(
"所以暂时谢绝市民们入内,\x01",
"还请多加谅解。\x02",
)
)

CloseMessageWindow()

label("loc_1DD9")

Jump("loc_2799")

label("loc_1DDE")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 2)), scpexpr(EXPR_END)), "loc_1E50")
#C0084
ChrTalk(
0x8,
(
"地下空间B区域的入口\x01",
"在住宅街的水道附近。\x02",
)
)

CloseMessageWindow()
#C0085
ChrTalk(
0x8,
(
"以前就有人把钥匙弄丢过,\x01",
"所以还请各位多加注意。\x02",
)
)

CloseMessageWindow()
Jump("loc_2799")

label("loc_1E50")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x82, 0)), scpexpr(EXPR_END)), "loc_1F46")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1ECD")
#C0086
ChrTalk(
0x8,
(
"今天要在大会堂内进行\x01",
"纪念庆典开幕式的预先演习。\x02",
)
)

CloseMessageWindow()
#C0087
ChrTalk(
0x8,
(
"所以暂时谢绝市民们入内,\x01",
"还请多加谅解。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1F41")

label("loc_1ECD")

#C0088
ChrTalk(
0x8,
(
"说起来,警察们好像\x01",
"也在进行游行活动的预先演习呢。\x02",
)
)

CloseMessageWindow()
#C0089
ChrTalk(
0x8,
(
"呼……随着纪念庆典的临近,\x01",
"各种各样的活动都接踵而来呢。\x02",
)
)

CloseMessageWindow()

label("loc_1F41")

Jump("loc_2799")

label("loc_1F46")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x80, 7)), scpexpr(EXPR_END)), "loc_1F98")
#C0090
ChrTalk(
0x8,
(
"呼……\x01",
"克洛斯贝尔时代周刊的人\x01",
"一直纠缠不休,真是让人头疼啊……\x02",
)
)

CloseMessageWindow()
Jump("loc_2799")

label("loc_1F98")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x80, 0)), scpexpr(EXPR_END)), "loc_20CF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2062")
#C0091
ChrTalk(
0x8,
(
"下个月,庆祝自治州\x01",
"创立七十周年的\x01",
"纪念庆典终于要开幕了。\x02",
)
)

CloseMessageWindow()
#C0092
ChrTalk(
0x8,
(
"为了庆祝七十周年这个大日子,\x01",
"会期要比往年更长,\x01",
"总共将要召开五天。\x02",
)
)

CloseMessageWindow()
#C0093
ChrTalk(
0x8,
(
"至于详细情况,\x01",
"请就参考宣传手册吧。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_20CA")

label("loc_2062")

#C0094
ChrTalk(
0x8,
(
"下个月将要召开克洛斯贝尔自治州\x01",
"建立七十周年的纪念庆典。\x02",
)
)

CloseMessageWindow()
#C0095
ChrTalk(
0x8,
(
"至于详细的情况,\x01",
"请就参考宣传手册吧。\x02",
)
)

CloseMessageWindow()

label("loc_20CA")

Jump("loc_2799")

label("loc_20CF")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x64, 1)), scpexpr(EXPR_END)), "loc_22CF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_225E")
#C0096
ChrTalk(
0x8,
(
"克洛斯贝尔自治州今年将要迎来\x01",
"创立七十周年纪念日。\x02",
)
)

CloseMessageWindow()
#C0097
ChrTalk(
0x8,
(
"在创立纪念庆典中,\x01",
"将会举办各种各样的活动。\x02",
)
)

CloseMessageWindow()
#C0098
ChrTalk(
0x8,
(
"详细情报也会刊登在\x01",
"克洛斯贝尔市刊上,\x01",
"请一定要多留意哦。\x02",
)
)

CloseMessageWindow()
#C0099
ChrTalk(
0x104,
(
"#0305F克洛斯贝尔市刊……?\x01",
"嘿,还有那种刊物啊。\x02",
)
)

CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(1000)
#C0100
ChrTalk(
0x8,
(
"嗯,就是放在接待处柜台旁边\x01",
"的那些宣传手册。\x02",
)
)

CloseMessageWindow()
#C0101
ChrTalk(
0x8,
(
"虽然不太显眼,\x01",
"但其实每个月都会更新的。\x02",
)
)

CloseMessageWindow()
#C0102
ChrTalk(
0x101,
"#0003F(以前都不知道……)\x02",
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_22CA")

label("loc_225E")

#C0103
ChrTalk(
0x8,
(
"读市刊的人果然\x01",
"很少啊……\x02",
)
)

CloseMessageWindow()
#C0104
ChrTalk(
0x8,
(
"要是内容能再精彩\x01",
"一点就好了。\x02",
)
)

CloseMessageWindow()
#C0105
ChrTalk(
0x102,
"#0106F(接待人员也真是很辛苦呢……)\x02",
)

CloseMessageWindow()

label("loc_22CA")

Jump("loc_2799")

label("loc_22CF")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 2)), scpexpr(EXPR_END)), "loc_2391")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2357")
#C0106
ChrTalk(
0x8,
(
"有一辆巴士\x01",
"失去了联络……\x01",
"交通科的人已经前去探查了。\x02",
)
)

CloseMessageWindow()
#C0107
ChrTalk(
0x8,
(
"是发生什么故障了吗……\x01",
"稍微有些让人担心呢……\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_238C")

label("loc_2357")

#C0108
ChrTalk(
0x8,
(
"巴士时不时就会出现故障,\x01",
"但愿别出什么事就好……\x02",
)
)

CloseMessageWindow()

label("loc_238C")

Jump("loc_2799")

label("loc_2391")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 1)), scpexpr(EXPR_END)), "loc_2428")
#C0109
ChrTalk(
0x8,
"欢迎来到克洛斯贝尔市政厅。\x02",
)

CloseMessageWindow()
#C0110
ChrTalk(
0x8,
(
"如果想知道巴士的运行时刻,\x01",
"可以随时来本接待处\x01",
"进行咨询哦。\x02",
)
)

CloseMessageWindow()
#C0111
ChrTalk(
0x8,
(
"导力巴士的运行是由\x01",
"交通科来管辖的。\x02",
)
)

CloseMessageWindow()
Jump("loc_2799")

label("loc_2428")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x60, 0)), scpexpr(EXPR_END)), "loc_2598")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2512")
#C0112
ChrTalk(
0x8,
(
"我来为您介绍一下\x01",
"市政厅内的机构设置吧。\x02",
)
)

CloseMessageWindow()
#C0113
ChrTalk(
0x8,
(
"这座市政厅包括\x01",
"两部分区域。\x02",
)
)

CloseMessageWindow()
#C0114
ChrTalk(
0x8,
(
"上楼之后,左手方向就是大会堂,\x01",
"右手边是克洛斯贝尔议事堂。\x02",
)
)

CloseMessageWindow()
#C0115
ChrTalk(
0x8,
(
"大会堂内经常举办各种活动,\x01",
"各位市民对那个地方\x01",
"应该也很熟悉。\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_2593")

label("loc_2512")

#C0116
ChrTalk(
0x8,
(
"上楼之后,左手方向就是大会堂,\x01",
"右手边是克洛斯贝尔议事堂。\x02",
)
)

CloseMessageWindow()
#C0117
ChrTalk(
0x8,
(
"大会堂内经常举办各种活动,\x01",
"各位市民对那个地方\x01",
"应该也很熟悉。\x02",
)
)

CloseMessageWindow()

label("loc_2593")

Jump("loc_2799")

label("loc_2598")

# Note the inverted test here: this branch runs while flag (0x53, 0) is
# still CLEAR (EQUZ), unlike the set-tests above.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x53, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_26BE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2654")
#C0118
ChrTalk(
0x8,
(
"这里是克洛斯贝尔\x01",
"市政厅的接待处。\x02",
)
)

CloseMessageWindow()
#C0119
ChrTalk(
0x8,
(
"如果想支付各种费用,\x01",
"或是申请迁居,\x01",
"就请来这里办理手续。\x02",
)
)

CloseMessageWindow()
#C0120
ChrTalk(
0x101,
(
"#0000F啊哈哈……\x01",
"(市政厅的接待人员好像也很辛苦呢。)\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_26B9")

label("loc_2654")

#C0121
ChrTalk(
0x8,
(
"这里是克洛斯贝尔\x01",
"市政厅的接待处。\x02",
)
)

CloseMessageWindow()
#C0122
ChrTalk(
0x8,
(
"如果想支付各种费用,\x01",
"或是申请迁居,\x01",
"就请来这里办理手续。\x02",
)
)

CloseMessageWindow()

label("loc_26B9")

Jump("loc_2799")

label("loc_26BE")

# Final fallback dialogue when no chapter flag matched.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_271F")
#C0123
ChrTalk(
0x8,
"刚才那个女孩子,不要紧吧……\x02",
)

CloseMessageWindow()
#C0124
ChrTalk(
0x8,
(
"虽然我向她介绍了\x01",
"旧城区的公寓,不过……\x02",
)
)

CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_2799")

label("loc_271F")

#C0125
ChrTalk(
0x8,
(
"刚才来了一位紫头发的女孩,\x01",
"让我为她介绍房租最便宜\x01",
"的公寓。\x02",
)
)

CloseMessageWindow()
#C0126
ChrTalk(
0x8,
(
"虽然我向她介绍了\x01",
"旧城区的公寓,不过……\x01",
"不要紧吧……\x02",
)
)

CloseMessageWindow()

label("loc_2799")

# Shared exit: loc_2799 closes the talk session; loc_279C skips TalkEnd for
# the cutscene/handler paths that never called TalkBegin.
TalkEnd(0x8)

label("loc_279C")

Return()

# Function_6_AE8 end
# Talk handler for an NPC (auto-generated scenario-script disassembly).
# Dispatches one of many dialogue variants by testing scenario flags
# (story-progress bits) with Jc(...). Statement order encodes bytecode
# layout — do not reorder or edit strings; labels are jump targets.
def Function_7_279D(): pass

label("Function_7_279D")

# Special case: presumably an active quest state — delegate to Function_30.
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x1, 0x0)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_27BE")
Call(0, 30)
Return()

label("loc_27BE")

TalkBegin(0xFE)

# Each Jc below selects a story-chapter-specific dialogue branch;
# flag (0x0, 1) marks "first conversation this visit" within a branch.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xE0, 0)), scpexpr(EXPR_END)), "loc_285C")

#C0127
ChrTalk(
    0xFE,
    (
        "预算会议总算是\x01",
        "结束了啊。\x02",
    )
)

CloseMessageWindow()

#C0128
ChrTalk(
    0xFE,
    (
        "听说市长今天也坚持要继续加班,\x01",
        "处理剩下的文件……\x02",
    )
)

CloseMessageWindow()

#C0129
ChrTalk(
    0xFE,
    (
        "嗯~真是不忍心看下去了。\x01",
        "既然如此,我也来帮忙吧。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_285C")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC3, 6)), scpexpr(EXPR_END)), "loc_291E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_28C2")

#C0130
ChrTalk(
    0xFE,
    "市长最近也一直在加班工作。\x02",
)

CloseMessageWindow()

#C0131
ChrTalk(
    0xFE,
    (
        "希望预算方案能在今天\x01",
        "讨论出个结果啊。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2919")

label("loc_28C2")

#C0132
ChrTalk(
    0xFE,
    "市长最近也一直在加班工作。\x02",
)

CloseMessageWindow()

#C0133
ChrTalk(
    0xFE,
    (
        "他都已经上年纪了……\x01",
        "这样下去,真担心他的身体呢。\x02",
    )
)

CloseMessageWindow()

label("loc_2919")

Jump("loc_3C94")

label("loc_291E")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC2, 2)), scpexpr(EXPR_END)), "loc_2A61")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_29F8")

#C0134
ChrTalk(
    0xFE,
    (
        "在议会召开的期间,职员们也必须要\x01",
        "做好充足的准备,以回答各种问题。\x01",
        "相关部门一直加班加点地工作呢。\x02",
    )
)

CloseMessageWindow()

#C0135
ChrTalk(
    0xFE,
    "大家好像都很忙啊……\x02",
)

CloseMessageWindow()

#C0136
ChrTalk(
    0xFE,
    (
        "不过,我们科倒是很清闲,\x01",
        "唯一的工作就是管理办公用品而已。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2A5C")

label("loc_29F8")

#C0137
ChrTalk(
    0xFE,
    (
        "不过,看着那些忙碌的同事们,\x01",
        "总是觉得有些不好意思呢。\x02",
    )
)

CloseMessageWindow()

#C0138
ChrTalk(
    0xFE,
    (
        "至少也应该替他们\x01",
        "把走廊清扫干净啊。\x02",
    )
)

CloseMessageWindow()

label("loc_2A5C")

Jump("loc_3C94")

label("loc_2A61")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 0)), scpexpr(EXPR_END)), "loc_2B42")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2B08")

#C0139
ChrTalk(
    0xFE,
    (
        "麦克道尔市长\x01",
        "去出席议会了。\x02",
    )
)

CloseMessageWindow()

#C0140
ChrTalk(
    0xFE,
    (
        "昨天的争论也处于白热化状态……\x01",
        "但到了最后,预算方案还是没能定下来。\x02",
    )
)

CloseMessageWindow()

#C0141
ChrTalk(
    0xFE,
    "……唉,市长也真是不容易呢。\x02",
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2B3D")

label("loc_2B08")

#C0142
ChrTalk(
    0xFE,
    (
        "我很担心市长的身体啊,\x01",
        "希望他不要太勉强自己……\x02",
    )
)

CloseMessageWindow()

label("loc_2B3D")

Jump("loc_3C94")

label("loc_2B42")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_2FFB")

# Sub-branch gated on quest/item state via OP_2A — TODO confirm semantics.
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_2BCB")

#C0143
ChrTalk(
    0xFE,
    (
        "市长从以前开始就一直\x01",
        "很喜欢吃苦西红柿。\x02",
    )
)

CloseMessageWindow()

#C0144
ChrTalk(
    0xFE,
    (
        "不过……那种东西毕竟不是人人都能接受的呢。\x01",
        "至少我就受不了……\x02",
    )
)

CloseMessageWindow()
TalkEnd(0xFE)
Return()

label("loc_2BCB")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xBE, 7)), scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_2C21")

#C0145
ChrTalk(
    0xFE,
    (
        "唉……每次都是如此,\x01",
        "一看见哈尔特曼议长,我就紧张得要窒息。\x02",
    )
)

CloseMessageWindow()
TalkEnd(0xFE)
Return()

label("loc_2C21")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2DE1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2D63")

#C0146
ChrTalk(
    0xFE,
    (
        "哦哦,各位,听我说啊。\x01",
        "『圣徒的祈祷』已经找回来了!\x02",
    )
)

CloseMessageWindow()

#C0147
ChrTalk(
    0xFE,
    "那座雕像可是市政厅的象征啊。\x02",
)

CloseMessageWindow()

#C0148
ChrTalk(
    0xFE,
    (
        "其实,在纪念庆典期间,\x01",
        "那个,曾经被人盗走了呢……\x02",
    )
)

CloseMessageWindow()

#C0149
ChrTalk(
    0xFE,
    (
        "不过,就在前几天,搜查一科的人\x01",
        "终于帮忙找回来了。\x02",
    )
)

CloseMessageWindow()

#C0150
ChrTalk(
    0xFE,
    (
        "哈哈哈,太好了……\x01",
        "如果这雕像丢失,克洛斯贝尔注定\x01",
        "会沦为笑料,这耻辱永远都无法洗刷。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2DDC")

label("loc_2D63")

#C0151
ChrTalk(
    0xFE,
    (
        "能找回『圣徒的祈祷』,\x01",
        "真是太好了啊……\x02",
    )
)

CloseMessageWindow()

#C0152
ChrTalk(
    0xFE,
    (
        "不过是在纪念庆典期间被盗走的,\x01",
        "所以还是让各位外国人士\x01",
        "看了个大笑话啊。\x02",
    )
)

CloseMessageWindow()

label("loc_2DDC")

Jump("loc_2FF6")

label("loc_2DE1")

# Flag (0xB2, 5) is a one-shot "thanked the player" marker — set below.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xB2, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2E98")

#C0153
ChrTalk(
    0xFE,
    (
        "这不是各位警察嘛,\x01",
        "前几天真是承蒙相助了。\x02",
    )
)

CloseMessageWindow()

#C0154
ChrTalk(
    0xFE,
    (
        "如你们所见,『圣徒的祈祷』\x01",
        "今日已经气度威严地坐镇于此了。\x02",
    )
)

CloseMessageWindow()

#C0155
ChrTalk(
    0xFE,
    (
        "我一定会倍加小心地看守,\x01",
        "绝对不会让它再次被盗了。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0xB2, 5)
Jump("loc_2FF6")

label("loc_2E98")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2F8E")

#C0156
ChrTalk(
    0xFE,
    (
        "麦克道尔市长是个很亲切的人,\x01",
        "对待我们这些下属职员也都和蔼有礼。\x02",
    )
)

CloseMessageWindow()

#C0157
ChrTalk(
    0xFE,
    (
        "可是……虽然态度亲切和善,\x01",
        "但他绝不会轻易吐露自己的真实想法呢。\x02",
    )
)

CloseMessageWindow()

#C0158
ChrTalk(
    0xFE,
    (
        "身为政治家,以自身的立场来说,\x01",
        "这也是理所当然的行事方式。\x01",
        "……但还是稍微有些失落啊。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2FF6")

label("loc_2F8E")

#C0159
ChrTalk(
    0xFE,
    (
        "麦克道尔市长绝对不会轻易\x01",
        "向我们吐露自己的真实想法。\x02",
    )
)

CloseMessageWindow()

#C0160
ChrTalk(
    0xFE,
    (
        "会不会出现一个能让市长\x01",
        "完全信赖的人呢……\x02",
    )
)

CloseMessageWindow()

label("loc_2FF6")

Jump("loc_3C94")

label("loc_2FFB")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_END)), "loc_328B")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_30C6")

#C0161
ChrTalk(
    0xFE,
    (
        "虽、虽然不太明白,\x01",
        "不过真是得救了啊。\x02",
    )
)

CloseMessageWindow()

#C0162
ChrTalk(
    0xFE,
    (
        "现在正忙着准备闭幕式呢。\x01",
        "……虽然无法正式向你们表示谢意,\x02",
    )
)

CloseMessageWindow()

#C0163
ChrTalk(
    0xFE,
    (
        "但实在是很感谢各位的帮忙。\x01",
        "如果以后再有什么事情,还请继续关照啊。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3286")

label("loc_30C6")

Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x1, 0x0)"), scpexpr(EXPR_END)), "loc_3213")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3177")

#C0164
ChrTalk(
    0xFE,
    (
        "今天可是闭幕式召开的日子,\x01",
        "居然会发生这种事情……\x02",
    )
)

CloseMessageWindow()

#C0165
ChrTalk(
    0xFE,
    (
        "各位,无论如何,也请尽早\x01",
        "把『圣徒的祈祷』找回来吧。\x02",
    )
)

CloseMessageWindow()

#C0166
ChrTalk(
    0xFE,
    "事态真的是十万火急,拜托了!\x02",
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_320E")

label("loc_3177")

#C0167
ChrTalk(
    0xFE,
    (
        "从下午开始,就会有参加\x01",
        "闭幕式的人员入馆了……\x01",
        "如果到那个时候,雕像仍然没找回的话,可就……\x02",
    )
)

CloseMessageWindow()

#C0168
ChrTalk(
    0xFE,
    (
        "各位,无论如何,请尽早\x01",
        "把『圣徒的祈祷』找回来吧。\x02",
    )
)

CloseMessageWindow()

label("loc_320E")

Jump("loc_3286")

label("loc_3213")

#C0169
ChrTalk(
    0xFE,
    (
        "哈,这叫什么事啊……\x01",
        "在市长外出期间,\x01",
        "居然会发生这种事……\x02",
    )
)

CloseMessageWindow()

#C0170
ChrTalk(
    0xFE,
    (
        "如今,也只有拜托他们了啊……\x01",
        "(自言自语)……\x02",
    )
)

CloseMessageWindow()

label("loc_3286")

Jump("loc_3C94")

label("loc_328B")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 2)), scpexpr(EXPR_END)), "loc_3311")

#C0171
ChrTalk(
    0xFE,
    (
        "市长已经前去和\x01",
        "各位大人物进行会谈了。\x02",
    )
)

CloseMessageWindow()

#C0172
ChrTalk(
    0xFE,
    (
        "应该也会见到IBC的\x01",
        "库罗伊斯总裁等人。\x02",
    )
)

CloseMessageWindow()

#C0173
ChrTalk(
    0xFE,
    "大概又要很晚才能回来吧……\x02",
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_3311")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_END)), "loc_3386")

#C0174
ChrTalk(
    0xFE,
    (
        "市长好像又\x01",
        "消瘦了一点……\x02",
    )
)

CloseMessageWindow()

#C0175
ChrTalk(
    0xFE,
    (
        "在这种时期,休息也许\x01",
        "确实是奢望……\x01",
        "但还是希望他不要太勉强自己啊。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_3386")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_33DB")

#C0176
ChrTalk(
    0xFE,
    (
        "啊,媒体人士比想象中的\x01",
        "还要多啊。\x02",
    )
)

CloseMessageWindow()

#C0177
ChrTalk(
    0xFE,
    "必须要增设一些座位才行啊。\x02",
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_33DB")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_END)), "loc_3457")

#C0178
ChrTalk(
    0xFE,
    (
        "……我最近一直都在给\x01",
        "麦克道尔市长帮忙呢。\x02",
    )
)

CloseMessageWindow()

#C0179
ChrTalk(
    0xFE,
    (
        "因为市长毕竟遇到了\x01",
        "那样的事件。\x02",
    )
)

CloseMessageWindow()

#C0180
ChrTalk(
    0xFE,
    "而且我平时都很闲呢。\x02",
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_3457")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 7)), scpexpr(EXPR_END)), "loc_34DE")

#C0181
ChrTalk(
    0xFE,
    (
        "哎呀呀,还好有阿奈斯特先生\x01",
        "帮忙安排,总算得救了。\x01",
        "听证会好像可以顺利召开了。\x02",
    )
)

CloseMessageWindow()

#C0182
ChrTalk(
    0xFE,
    (
        "市长一直都很忙,\x01",
        "平时很难找到他。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_34DE")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x82, 0)), scpexpr(EXPR_END)), "loc_360F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_35AC")

#C0183
ChrTalk(
    0xFE,
    (
        "真是无奈啊,今天就要召开听证会了,\x01",
        "但市长却不见人影。\x02",
    )
)

CloseMessageWindow()

#C0184
ChrTalk(
    0xFE,
    (
        "他平时一直都很忙,所以早就习惯了,\x01",
        "总觉得到时候自然就能找到……\x02",
    )
)

CloseMessageWindow()

#C0185
ChrTalk(
    0xFE,
    (
        "呼,至少也要找到首席秘书\x01",
        "阿奈斯特先生才行啊。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_360A")

label("loc_35AC")

#C0186
ChrTalk(
    0xFE,
    (
        "如果没有市长的印章,\x01",
        "听证会就不能召开了。\x02",
    )
)

CloseMessageWindow()

#C0187
ChrTalk(
    0xFE,
    (
        "至少也要找到首席秘书\x01",
        "阿奈斯特先生才行啊。\x02",
    )
)

CloseMessageWindow()

label("loc_360A")

Jump("loc_3C94")

label("loc_360F")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x80, 7)), scpexpr(EXPR_END)), "loc_36A2")

#C0188
ChrTalk(
    0xFE,
    (
        "说起来,明天好像\x01",
        "要召开听证会吧……\x02",
    )
)

CloseMessageWindow()

#C0189
ChrTalk(
    0xFE,
    (
        "需要布置大会堂,\x01",
        "归纳市民们的请愿……\x02",
    )
)

CloseMessageWindow()

#C0190
ChrTalk(
    0xFE,
    (
        "市长到时也会出席的,\x01",
        "必须要事先谈好才行啊。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_36A2")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x80, 0)), scpexpr(EXPR_END)), "loc_37E9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_37A1")

#C0191
ChrTalk(
    0xFE,
    (
        "……关于明年预算方案的\x01",
        "洽谈会已经开始了。\x02",
    )
)

CloseMessageWindow()

#C0192
ChrTalk(
    0xFE,
    (
        "政府会举办活动,解答各界对预算会议结果提出的质疑,\x01",
        "而媒体界对这项活动的关注度可是相当之高呢。\x02",
    )
)

CloseMessageWindow()

#C0193
ChrTalk(
    0xFE,
    (
        "年轻的议员肯定会被压制,\x01",
        "只有那些重量级的政治家\x01",
        "才能摆出一副架子,侃侃而谈。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_37E4")

label("loc_37A1")

#C0194
ChrTalk(
    0xFE,
    (
        "最近正是议员先生们\x01",
        "意气风发的时期啊。\x02",
    )
)

CloseMessageWindow()

#C0195
ChrTalk(
    0xFE,
    "呼,真让人郁闷啊。\x02",
)

CloseMessageWindow()

label("loc_37E4")

Jump("loc_3C94")

label("loc_37E9")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x64, 1)), scpexpr(EXPR_END)), "loc_3861")

#C0196
ChrTalk(
    0xFE,
    (
        "昨天的巴士故障事件,\x01",
        "原因好像只是因为平时的维护工作不足。\x02",
    )
)

CloseMessageWindow()

#C0197
ChrTalk(
    0xFE,
    (
        "哎呀呀,这种琐碎的事情\x01",
        "还真是够多的。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_3861")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 2)), scpexpr(EXPR_END)), "loc_38C5")

#C0198
ChrTalk(
    0xFE,
    (
        "刚才,交通科的人\x01",
        "急匆匆地跑出去了……\x02",
    )
)

CloseMessageWindow()

#C0199
ChrTalk(
    0xFE,
    (
        "交通科的人总是那么忙,\x01",
        "真令人同情啊。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_38C5")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x61, 1)), scpexpr(EXPR_END)), "loc_39C8")

#C0200
ChrTalk(
    0xFE,
    (
        "克洛斯贝尔市近年来\x01",
        "为了服务市民,一直都在\x01",
        "大力推动导力巴士的发展。\x02",
    )
)

CloseMessageWindow()

#C0201
ChrTalk(
    0xFE,
    (
        "话虽如此……但其实谁都明白,\x01",
        "这只是议员们为了讨好市民,赢得人气,\x01",
        "通过增加投资预算才建造出的产物。\x02",
    )
)

CloseMessageWindow()

#C0202
ChrTalk(
    0xFE,
    (
        "算了,不管动机如何,只要能让\x01",
        "大家的生活越来越方便,那就是好事。\x02",
    )
)

CloseMessageWindow()
Jump("loc_3C94")

label("loc_39C8")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x60, 0)), scpexpr(EXPR_END)), "loc_3AF6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3AA3")

#C0203
ChrTalk(
    0xFE,
    (
        "这边的大门通向市政厅的\x01",
        "左翼区域。\x02",
    )
)

CloseMessageWindow()

#C0204
ChrTalk(
    0xFE,
    "市长的办公室就在里面。\x02",
)

CloseMessageWindow()

#C0205
ChrTalk(
    0xFE,
    (
        "而另一边的大门……\x01",
        "通向市政厅的右翼区域,\x01",
        "里面有议长的办公室。\x02",
    )
)

CloseMessageWindow()

#C0206
ChrTalk(
    0xFE,
    (
        "这两位全是在自治州内外都拥有\x01",
        "相当影响力的大政治家。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_3AF1")

label("loc_3AA3")

#C0207
ChrTalk(
    0xFE,
    (
        "市长和议长\x01",
        "现在都不在哦。\x02",
    )
)

CloseMessageWindow()

#C0208
ChrTalk(
    0xFE,
    (
        "他们两位都是大政治家,\x01",
        "所以平时都很忙的。\x02",
    )
)

CloseMessageWindow()

label("loc_3AF1")

Jump("loc_3C94")

label("loc_3AF6")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 5)), scpexpr(EXPR_END)), "loc_3BE1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3B93")

#C0209
ChrTalk(
    0xFE,
    "我在总务二科工作。\x02",
)

CloseMessageWindow()

#C0210
ChrTalk(
    0xFE,
    (
        "议事堂和大会堂的管理工作\x01",
        "都由我负责,不过……\x02",
    )
)

CloseMessageWindow()

#C0211
ChrTalk(
    0xFE,
    (
        "平时还是很闲啊。\x01",
        "需要做的只有扫除之类的事情而已。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_3BDC")

label("loc_3B93")

#C0212
ChrTalk(
    0xFE,
    (
        "不过,议会一旦召开,\x01",
        "那就又该有得忙了。\x02",
    )
)

CloseMessageWindow()

#C0213
ChrTalk(
    0xFE,
    "平时倒是一直都很闲啊。\x02",
)

CloseMessageWindow()

label("loc_3BDC")

Jump("loc_3C94")

label("loc_3BE1")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x41, 6)), scpexpr(EXPR_END)), "loc_3C94")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3C51")

#C0214
ChrTalk(
    0xFE,
    "好啦,扫除扫除……\x02",
)

CloseMessageWindow()

#C0215
ChrTalk(
    0xFE,
    (
        "今天就把会场给\x01",
        "打扫干净吧。\x02",
    )
)

CloseMessageWindow()

#C0216
ChrTalk(
    0xFE,
    "也没什么其它事情可做。\x02",
)

CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_3C94")

label("loc_3C51")

#C0217
ChrTalk(
    0xFE,
    "哎呀……请问有何贵干?\x02",
)

CloseMessageWindow()

#C0218
ChrTalk(
    0xFE,
    (
        "如果有事,请去找\x01",
        "接待处的希恩吧。\x02",
    )
)

CloseMessageWindow()

label("loc_3C94")

# Common exit for all non-returning branches above.
TalkEnd(0xFE)
Return()

# Function_7_279D end
# Talk handler for NPC 0xA (Ernest scene at the reception, with NPC 0x9).
# First talk plays a two-NPC exchange plus party reactions, then sets
# flag (0x0, 2) so later talks use the shorter repeat branch.
def Function_8_3C98(): pass

label("Function_8_3C98")

TalkBegin(0xFE)

# Branch: first conversation (flag (0x0, 2) not yet set).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3E75")
OP_4B(0x9, 0xFF)
TurnDirection(0xA, 0x9, 0)

#C0219
ChrTalk(
    0x9,
    (
        "那么,阿奈斯特先生,\x01",
        "拜托你了。\x02",
    )
)

CloseMessageWindow()

#C0220
ChrTalk(
    0xA,
    (
        "#2600F谢谢,\x01",
        "我会善加利用的。\x02\x03",
        "#2604F很好,只要有这个,\x01",
        "听证会总算就可以……\x02",
    )
)

CloseMessageWindow()

# Surprise balloon, then face the player before continuing.
OP_63(0xA, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
TurnDirection(0xA, 0x0, 500)

#C0221
ChrTalk(
    0xA,
    (
        "#2600F哈哈,从市民们那里收集来的\x01",
        "意见和请愿,已经整理好了。\x02\x03",
        "如果在听证会中正式提出,\x01",
        "对那些议员们争权夺利的行为,\x01",
        "应该也能起到些警示作用吧。\x02\x03",
        "#2603F那么,在听证会开始之前,\x01",
        "必须要和市长商量一下……\x02",
    )
)

CloseMessageWindow()

#C0222
ChrTalk(
    0x102,
    "#0108F………………………………\x02",
)

CloseMessageWindow()

#C0223
ChrTalk(
    0x101,
    (
        "#0003F(艾莉……果然还是\x01",
        " 对政治之道有所留恋吧?)\x02",
    )
)

CloseMessageWindow()
OP_4C(0x9, 0xFF)
SetScenarioFlags(0x0, 2)
Jump("loc_3F41")

label("loc_3E75")

# Repeat conversation: condensed solo version of the same content.
#C0224
ChrTalk(
    0xA,
    (
        "#2600F从市民们那里收集来的意见和请愿,\x01",
        "已经整理成资料了……\x02\x03",
        "如果在听证会中正式提出,\x01",
        "对那些议员们争权夺利的行为,\x01",
        "应该也能起到些警示作用吧。\x02\x03",
        "#2603F那么,在听证会开始之前,\x01",
        "必须要和市长商量一下……\x02",
    )
)

CloseMessageWindow()

label("loc_3F41")

TalkEnd(0xFE)
Return()

# Function_8_3C98 end
# Thin forwarder: delegates straight to Function_10 (mayor talk handler).
def Function_9_3F45(): pass

label("Function_9_3F45")

Call(0, 10)
Return()

# Function_9_3F45 end
# Talk handler for character 0xB (Mayor McDowell).
# Begins with generated facing-angle math: the OP_52 / Jc sequence
# normalizes the NPC's angle mod 360 and picks a sub-chip (pose) from the
# resulting quadrant — statement order is significant, do not reorder.
# The same angle block is repeated mid-function before a second talk.
def Function_10_3F49(): pass

label("Function_10_3F49")

OP_52(0x0, 0x5, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x5, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
TalkBegin(0xB)
ClearChrFlags(0xB, 0x10)
TurnDirection(0xB, 0x0, 0)
OP_52(0xB, 0x4, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x168), scpexpr(EXPR_ADD), scpexpr(EXPR_PUSH_LONG, 0x168), scpexpr(EXPR_IMOD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x4, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x168), scpexpr(EXPR_ADD), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_SUB), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x5, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0xB4), scpexpr(EXPR_IDIV), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2D), scpexpr(EXPR_LEQ), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x13B), scpexpr(EXPR_GE), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x195), scpexpr(EXPR_LEQ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_OR), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2A3), scpexpr(EXPR_GE), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_3FDD")
Jump("loc_4027")

label("loc_3FDD")

Jc((scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_3FFD")
OP_52(0xB, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_4027")

label("loc_3FFD")

Jc((scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_401D")
OP_52(0xB, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_4027")

label("loc_401D")

OP_52(0xB, 0x8, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))

label("loc_4027")

OP_52(0xB, 0x4, (scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x0, 0x5, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x5, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrFlags(0xB, 0x10)

# Chapter branches below, selected by scenario flags (story progress).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xE0, 0)), scpexpr(EXPR_END)), "loc_445F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xED, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_43CC")
SetChrSubChip(0xB, 0x0)

#C0225
ChrTalk(
    0xB,
    (
        "#2501F嗯,马上准备好吧,\x01",
        "我来检查那些文件。\x02",
    )
)

CloseMessageWindow()
OP_63(0xB, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)

# Second copy of the facing-angle / sub-chip selection block (generated).
OP_52(0x0, 0x5, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x5, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
TalkBegin(0xB)
ClearChrFlags(0xB, 0x10)
TurnDirection(0xB, 0x0, 0)
OP_52(0xB, 0x4, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x168), scpexpr(EXPR_ADD), scpexpr(EXPR_PUSH_LONG, 0x168), scpexpr(EXPR_IMOD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x4, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x168), scpexpr(EXPR_ADD), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_SUB), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x5, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0xB4), scpexpr(EXPR_IDIV), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2D), scpexpr(EXPR_LEQ), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x13B), scpexpr(EXPR_GE), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x195), scpexpr(EXPR_LEQ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_OR), scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2A3), scpexpr(EXPR_GE), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_413D")
Jump("loc_4187")

label("loc_413D")

Jc((scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_415D")
OP_52(0xB, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_4187")

label("loc_415D")

Jc((scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_417D")
OP_52(0xB, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_4187")

label("loc_417D")

OP_52(0xB, 0x8, (scpexpr(EXPR_GET_CHR_WORK, 0xB, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))

label("loc_4187")

OP_52(0xB, 0x4, (scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x0, 0x5, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x5, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrFlags(0xB, 0x10)

#C0226
ChrTalk(
    0xB,
    (
        "#2500F哦,是你们啊。\x01",
        "……有什么事吗?\x02",
    )
)

CloseMessageWindow()

#C0227
ChrTalk(
    0x101,
    (
        "#0000F啊,不……只是在调查的过程中\x01",
        "正好路过,就顺便过来看看。\x02",
    )
)

CloseMessageWindow()

#C0228
ChrTalk(
    0x102,
    (
        "#0100F外公好像\x01",
        "相当忙啊……\x02",
    )
)

CloseMessageWindow()

#C0229
ChrTalk(
    0xB,
    (
        "#2503F嗯,会期延长了\x01",
        "整整三天啊。\x02\x03",
        "#2500F各方面的预算安排,\x01",
        "都必须要加急处理好。\x02",
    )
)

CloseMessageWindow()

#C0230
ChrTalk(
    0x102,
    "#0101F………………………………\x02",
)

CloseMessageWindow()

#C0231
ChrTalk(
    0xB,
    (
        "#2500F哈哈,你们好像在担心\x01",
        "我加班会不会累坏身体吧。\x02\x03",
        "#2503F用不着替我担心,这种繁忙的\x01",
        "状态大概也只会持续到今天为止。\x02\x03",
        "#2500F艾莉,如果你正在工作,\x01",
        "就要集中精力处理自己那边的正事。\x02",
    )
)

CloseMessageWindow()

#C0232
ChrTalk(
    0x102,
    "#0103F……是的,我知道了。\x02",
)

CloseMessageWindow()

#C0233
ChrTalk(
    0x103,
    "#0200F那么,我们就快去医院吧。\x02",
)

CloseMessageWindow()

#C0234
ChrTalk(
    0x104,
    "#0300F是啊。\x02",
)

CloseMessageWindow()
SetScenarioFlags(0xED, 5)
Jump("loc_445A")

label("loc_43CC")

# Repeat branch: mayor instructs NPC 0x9 (Culip) instead.
OP_4B(0x9, 0xFF)
SetChrSubChip(0xB, 0x0)

#C0235
ChrTalk(
    0xB,
    (
        "#2503F库利普,不好意思啊,\x01",
        "联络财务科的事就交给你了。\x02\x03",
        "#2500F至于哈尔特曼议长那边,\x01",
        "就由我把文件送给他。\x02",
    )
)

CloseMessageWindow()

#C0236
ChrTalk(
    0x9,
    "是的,明白了。\x02",
)

CloseMessageWindow()
OP_4C(0x9, 0xFF)

label("loc_445A")

Jump("loc_4F47")

label("loc_445F")

# These chapter flags have no mayor dialogue here — fall through to exit.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC3, 6)), scpexpr(EXPR_END)), "loc_446D")
Jump("loc_4F47")

label("loc_446D")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC2, 2)), scpexpr(EXPR_END)), "loc_447B")
Jump("loc_4F47")

label("loc_447B")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xC0, 0)), scpexpr(EXPR_END)), "loc_4489")
Jump("loc_4F47")

label("loc_4489")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_4826")

# Branch: drink-delivery quest done (OP_2A check) — thanks + party reactions.
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_481E")

#C0237
ChrTalk(
    0xB,
    (
        "#2509F多亏你们给我送来的饮料,\x01",
        "让我下午也能干劲十足地继续工作。\x02\x03",
        "#2503F(啜饮)……\x01",
        "嗯,很美味!\x02",
    )
)

CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4819")

# Reaction variant depends on which drink was delivered (OP_2A item check).
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x26, 0x1, 0x3)"), scpexpr(EXPR_END)), "loc_471E")
OP_63(0x101, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)

# Show the sweat-drop balloon on whichever party member is present.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4560")
OP_63(0x102, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Jump("loc_45B5")

label("loc_4560")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_458D")
OP_63(0x103, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Jump("loc_45B5")

label("loc_458D")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_45B5")
OP_63(0x104, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)

label("loc_45B5")

Sleep(1000)

#C0238
ChrTalk(
    0x101,
    "#0006F(好像真是很喜欢啊……)\x02",
)

CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_463B")

#C0239
ChrTalk(
    0x102,
    (
        "#0103F(真不愧是外公……\x01",
        " 在动荡的时代中崛起的\x01",
        " 著名政治家啊……)\x02",
    )
)

CloseMessageWindow()
Jump("loc_46EA")

label("loc_463B")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_468F")

#C0240
ChrTalk(
    0x103,
    (
        "#0206F(竟然能把味道那么苦的奶昔……\x01",
        " 真是值得尊敬呢。)\x02",
    )
)

CloseMessageWindow()
Jump("loc_46EA")

label("loc_468F")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_46EA")

#C0241
ChrTalk(
    0x104,
    (
        "#0303F(竟然能把味道那么苦的奶昔……\x01",
        " 身为男人,他真是值得尊敬啊!)\x02",
    )
)

CloseMessageWindow()

label("loc_46EA")

#C0242
ChrTalk(
    0x153,
    (
        "#1105F(哎~怎么了?\x01",
        " 本来就很美味啊~)\x02",
    )
)

CloseMessageWindow()
Jump("loc_4816")

label("loc_471E")

OP_63(0x101, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4763")
OP_63(0x102, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Jump("loc_47B8")

label("loc_4763")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4790")
OP_63(0x103, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Jump("loc_47B8")

label("loc_4790")

Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_47B8")
OP_63(0x104, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)

label("loc_47B8")

Sleep(1000)

#C0243
ChrTalk(
    0x153,
    "#1111F(不错啊,好像很美味呢~)\x02",
)

CloseMessageWindow()

#C0244
ChrTalk(
    0x101,
    (
        "#0006F(当、当时是不是\x01",
        " 应该稍微尝一下呢……)\x02",
    )
)

CloseMessageWindow()

label("loc_4816")

SetScenarioFlags(0x0, 3)

label("loc_4819")

Jump("loc_4821")

label("loc_481E")

# Quest not completed yet — hand off to the drink-delivery event.
Call(0, 17)

label("loc_4821")

Jump("loc_4F47")

label("loc_4826")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_END)), "loc_4834")
Jump("loc_4F47")

label("loc_4834")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xAA, 2)), scpexpr(EXPR_END)), "loc_4842")
Jump("loc_4F47")

label("loc_4842")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA1, 4)), scpexpr(EXPR_END)), "loc_4A7B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_49FA")

#C0245
ChrTalk(
    0xB,
    (
        "#2503F嗯,今天的游行活动,\x01",
        "参与人数好像是至今为止最多的一次呢。\x02\x03",
        "#2500F在游行开始之前,\x01",
        "必须要去和大家打个招呼才行。\x01",
        "……但还真是有点紧张啊。\x02",
    )
)

CloseMessageWindow()

#C0246
ChrTalk(
    0x101,
    "#0005F哎……?\x02",
)

CloseMessageWindow()

#C0247
ChrTalk(
    0x104,
    "#0300F那个……您是在开玩笑吧?\x02",
)

CloseMessageWindow()

#C0248
ChrTalk(
    0x102,
    (
        "#0106F呼……\x01",
        "对外公来说,这种程度的小场面,\x01",
        "根本没理由会紧张吧?\x02",
    )
)

CloseMessageWindow()

#C0249
ChrTalk(
    0xB,
    (
        "#2509F呵呵……\x02\x03",
        "#2500F游行结束之后,纪念庆典\x01",
        "的主要节目也就算是彻底落幕了。\x02\x03",
        "你们的工作大概也能恢复轻松了吧,\x01",
        "希望一切都能进展顺利啊。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_4A76")

label("loc_49FA")

#C0250
ChrTalk(
    0xB,
    (
        "#2500F游行结束之后,纪念庆典\x01",
        "的主要节目也就算是彻底落幕了。\x02\x03",
        "你们的工作大概也能恢复轻松了吧,\x01",
        "希望一切都能进展顺利啊。\x02",
    )
)

CloseMessageWindow()

label("loc_4A76")

Jump("loc_4F47")

label("loc_4A7B")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_4C3D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4BB1")

#C0251
ChrTalk(
    0xB,
    (
        "#2500F今天召开的这场研讨会\x01",
        "是由我提议的。\x02\x03",
        "针对克洛斯贝尔的现状与将来,\x01",
        "以完全公开的形式展开讨论。\x02\x03",
        "#2503F对政治方面的影响也许是微不足道的,\x01",
        "但来自各国的有识之士都齐聚一堂。\x02\x03",
        "#2500F在自治州建立七十周年的这个日子里,\x01",
        "通过这种活动来仔细审视一下克洛斯贝尔,\x01",
        "也算是个不算的契机吧。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_4C38")

label("loc_4BB1")

#C0252
ChrTalk(
    0xB,
    (
        "#2500F今天召开的这场研讨会\x01",
        "是由我提议的。\x02\x03",
        "针对克洛斯贝尔的现状与将来,\x01",
        "以完全公开的形式展开讨论。\x01",
        "一定会是一场很有意义的讨论吧。\x02",
    )
)

CloseMessageWindow()

label("loc_4C38")

Jump("loc_4F47")

label("loc_4C3D")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_END)), "loc_4F47")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4EAD")

#C0253
ChrTalk(
    0xB,
    (
        "#2500F哦,艾莉。\x01",
        "还有支援科的诸位也来了啊。\x02\x03",
        "#2503F不好意思啊,在纪念庆典时期,\x01",
        "也有一大堆事情等着我去处理。\x02\x03",
        "本来还打算和你们一起吃顿饭,\x01",
        "结果也一直没有机会。\x02",
    )
)

CloseMessageWindow()

#C0254
ChrTalk(
    0x101,
    (
        "#0005F哪、哪里……\x01",
        "请您不必在意。\x02",
    )
)

CloseMessageWindow()

#C0255
ChrTalk(
    0x103,
    (
        "#0200F比起这些,您的身体\x01",
        "已经不要紧了吗?\x02",
    )
)

CloseMessageWindow()

#C0256
ChrTalk(
    0xB,
    (
        "#2503F不要紧了,只不过是那种程度的小事而已,\x01",
        "怎么可能这么轻易就让我倒下啊。\x02\x03",
        "#2501F帝国与共和国之间的纷争还在持续,\x01",
        "与三十年前比起来……\x02",
    )
)

CloseMessageWindow()

#C0257
ChrTalk(
    0x102,
    (
        "#0103F外公……先别说这些了。\x01",
        "那种话题,只要一旦开始说,就会没完没了的。\x02\x03",
        "#0100F至于吃饭,\x01",
        "以后总会有机会的。\x02\x03",
        "您的身体才是最重要的,\x01",
        "请一定不要太勉强自己啊。\x02",
    )
)

CloseMessageWindow()

#C0258
ChrTalk(
    0xB,
    (
        "#2500F嗯,我知道了。\x01",
        "谢谢你,艾莉。\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_4F47")

label("loc_4EAD")

#C0259
ChrTalk(
    0xB,
    (
        "#2500F在纪念庆典期间,\x01",
        "也有一大堆事情等着我去办呢。\x02\x03",
        "#2503F本来还想找个机会,和你们\x01",
        "轻轻松松闲谈一次的,可是……\x02\x03",
        "#2500F嗯,还是等以后有机会再说吧。\x02",
    )
)

CloseMessageWindow()

label("loc_4F47")

# Common exit: reset the mayor's pose and end the talk.
SetChrSubChip(0xB, 0x0)
TalkEnd(0xB)
Return()

# Function_10_3F49 end
# Talk handler: single unconditional dialogue (no flag branches).
def Function_11_4F4F(): pass

label("Function_11_4F4F")

TalkBegin(0xFE)

#C0260
ChrTalk(
    0xFE,
    "啊,是罗伊德还有各位吗。\x02",
)

CloseMessageWindow()

#C0261
ChrTalk(
    0xFE,
    (
        "在今天的研讨会上,\x01",
        "麦克道尔市长和外国的\x01",
        "VIP都会出席哦。\x02",
    )
)

CloseMessageWindow()

#C0262
ChrTalk(
    0xFE,
    "那阵容真是相当壮观啊。\x02",
)

CloseMessageWindow()
TalkEnd(0xFE)
Return()

# Function_11_4F4F end
# Talk handler: chapter-flag branches; the (0xA3, 3) branch also switches
# on quest state (OP_2A) and temporarily animates NPCs 0xD/0xE.
def Function_12_4FD0(): pass

label("Function_12_4FD0")

TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_END)), "loc_5145")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x22, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_509D")

#C0263
ChrTalk(
    0xFE,
    (
        "『圣者的祈祷』已经找回来了啊。\x01",
        "太好了……\x02",
    )
)

CloseMessageWindow()

#C0264
ChrTalk(
    0xFE,
    (
        "不对不对,现在可不是高兴的时候。\x01",
        "闭幕式的准备工作\x01",
        "都快来不及做了。\x02",
    )
)

CloseMessageWindow()

#C0265
ChrTalk(
    0xFE,
    (
        "距离闭幕式的召开还有三个小时……\x01",
        "必须要抓紧时间啊。\x02",
    )
)

CloseMessageWindow()
Jump("loc_5140")

label("loc_509D")

# Statue still missing: two staff NPCs (0xD, 0xE) panic together.
OP_4B(0xD, 0xFF)
OP_4B(0xE, 0xFF)

#C0266
ChrTalk(
    0xD,
    (
        "哇啊啊……\x01",
        "竟然在市长不在的时候……!\x02",
    )
)

CloseMessageWindow()
OP_63(0xE, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)

#C0267
ChrTalk(
    0xE,
    (
        "总、总之,必须要尽快\x01",
        "做好闭幕式的准备工作……\x02",
    )
)

CloseMessageWindow()

#C0268
ChrTalk(
    0xE,
    (
        "我这就去准备会场中\x01",
        "的椅子哦!\x02",
    )
)

CloseMessageWindow()
OP_4C(0xD, 0xFF)
OP_4C(0xE, 0xFF)

label("loc_5140")

Jump("loc_523D")

label("loc_5145")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_523D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_51BE")

#C0269
ChrTalk(
    0xFE,
    (
        "准备旁听国际研讨会的各位,\x01",
        "还请稍等一下~\x02",
    )
)

CloseMessageWindow()

#C0270
ChrTalk(
    0xFE,
    (
        "到了开场时间之后,\x01",
        "我会立即引领您入场的~\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 4)
Jump("loc_523D")

label("loc_51BE")

#C0271
ChrTalk(
    0xFE,
    (
        "呼,国际研讨会总算是\x01",
        "可以顺利召开了啊。\x02",
    )
)

CloseMessageWindow()

#C0272
ChrTalk(
    0xFE,
    (
        "顺便一说,亚里欧斯·马克莱因\x01",
        "也来到了会场呢。\x01",
        "会场中的警备工作已经没问题了。\x02",
    )
)

CloseMessageWindow()

label("loc_523D")

TalkEnd(0xFE)
Return()

# Function_12_4FD0 end
# Talk handler: in the (0xA3, 3) chapter it reuses Function_12's dialogue;
# in the (0xA0, 2) chapter it turns the NPC (OP_93) and speaks directly.
def Function_13_5241(): pass

label("Function_13_5241")

TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA3, 3)), scpexpr(EXPR_END)), "loc_5255")
Call(0, 12)
Jump("loc_52B1")

label("loc_5255")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 2)), scpexpr(EXPR_END)), "loc_52B1")
OP_93(0xFE, 0x5A, 0x0)

#C0273
ChrTalk(
    0xFE,
    (
        "那个,已经\x01",
        "没有时间了……\x02",
    )
)

CloseMessageWindow()

#C0274
ChrTalk(
    0xFE,
    (
        "我想,差不多也该请\x01",
        "各位出席者入场了……\x02",
    )
)

CloseMessageWindow()

label("loc_52B1")

TalkEnd(0xFE)
Return()

# Function_13_5241 end
# Talk handler: single unconditional dialogue (no flag branches).
def Function_14_52B5(): pass

label("Function_14_52B5")

TalkBegin(0xFE)

#C0275
ChrTalk(
    0xFE,
    (
        "今天也有活动召开,\x01",
        "我们总务二科也是很忙的。\x02",
    )
)

CloseMessageWindow()

#C0276
ChrTalk(
    0xFE,
    "……虽然平时是很清闲的部门。\x02",
)

CloseMessageWindow()
TalkEnd(0xFE)
Return()

# Function_14_52B5 end
# Talk handler: flag (0x0, 6) flips between the first-time line (which sets
# the flag) and the repeat line. Note the branch order: the flag-SET case
# is tested first, so the second branch is the initial conversation.
def Function_15_530F(): pass

label("Function_15_530F")

TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 6)), scpexpr(EXPR_END)), "loc_539D")

#C0277
ChrTalk(
    0xFE,
    (
        "最近,在总编的命令下,\x01",
        "我暂时要给格蕾丝前辈\x01",
        "当助手……\x02",
    )
)

CloseMessageWindow()

#C0278
ChrTalk(
    0xFE,
    (
        "……前辈经常要去现场采访,\x01",
        "每到这种时候就把我丢在一边不管。\x02",
    )
)

CloseMessageWindow()
Jump("loc_540B")

label("loc_539D")

#C0279
ChrTalk(
    0xFE,
    (
        "格蕾丝前辈去\x01",
        "报道议会的情况了。\x02",
    )
)

CloseMessageWindow()

#C0280
ChrTalk(
    0xFE,
    (
        "可是,不要紧吗……\x01",
        "她又是在没有许可证的情况下\x01",
        "偷偷潜入进去的……\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 6)

label("loc_540B")

TalkEnd(0xFE)
Return()

# Function_15_530F end
# Talk handler: Grace (0x11) badgering the receptionist (0x8). NPC 0x8 is
# frozen with OP_4B for the scene and released with OP_4C at the end.
# First talk plays the long exchange + party sweat-drop reactions and sets
# flag (0x0, 5); later talks play the short follow-up.
def Function_16_540F(): pass

label("Function_16_540F")

TalkBegin(0xFE)
OP_4B(0x8, 0xFF)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_END)), "loc_54E9")

#C0281
ChrTalk(
    0x11,
    (
        "#2103F再怎么说,我也是克洛斯贝尔的市民,\x01",
        "平时都有依法纳税的啊~\x02\x03",
        "#2100F那些人可都是靠我们交的税金发薪过活的,\x01",
        "把他们的联络方式告诉我有什么不可以的~!?\x02",
    )
)

CloseMessageWindow()

#C0282
ChrTalk(
    0x8,
    (
        "啊~那个,\x01",
        "听你这么一说,确实……\x02",
    )
)

CloseMessageWindow()
Jump("loc_574D")

label("loc_54E9")

#C0283
ChrTalk(
    0x11,
    (
        "#2104F嗯~很好,针对这一点,\x01",
        "继续乘胜追击、不断紧逼,\x01",
        "她应该就会告诉我了吧?\x02\x03",
        "#2109F议员们的紧急联络方式,\x01",
        "你应该都有记录吧?\x02",
    )
)

CloseMessageWindow()

#C0284
ChrTalk(
    0x8,
    (
        "事务所之外的联络方式是绝对\x01",
        "不能透露的,这可是硬性规定。\x02",
    )
)

CloseMessageWindow()

#C0285
ChrTalk(
    0x8,
    "还请您多加理解啊。\x02",
)

CloseMessageWindow()

#C0286
ChrTalk(
    0x11,
    (
        "#2106F啊啊~!\x01",
        "不要这么说嘛~\x02\x03",
        "#2100F那么,至少把\x01",
        "秘书们的联络方式告诉我啊,\x01",
        "这总可以了吧?\x02\x03",
        "#2109F那些官方机构的秘书,\x01",
        "应该都登记在册的吧?\x02",
    )
)

CloseMessageWindow()

#C0287
ChrTalk(
    0x8,
    "就、就算你这么说,我也……\x02",
)

CloseMessageWindow()

# Sweat-drop balloons + sound on the four party slots, then reactions.
OP_63(0x0, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
OP_63(0x1, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
OP_63(0x2, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
OP_63(0x3, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(1000)

#C0288
ChrTalk(
    0x101,
    (
        "#0006F(格蕾丝小姐……\x01",
        " 可真是喜欢强人所难啊。)\x02",
    )
)

CloseMessageWindow()

#C0289
ChrTalk(
    0x102,
    (
        "#0100F(趁她的矛头还没有转向我们,\x01",
        " 还是赶快逃跑吧。)\x02",
    )
)

CloseMessageWindow()
SetScenarioFlags(0x0, 5)

label("loc_574D")

OP_4C(0x8, 0xFF)
TalkEnd(0xFE)
Return()

# Function_16_540F end
# Event scene: Lloyd (0x101), KeA (0x153) and exactly one partner —
# Elie (0x102), Tio (0x103) or Randy (0x104), selected by the repeated
# "GetPartyIndex(n) > -1" checks — visit Mayor MacDowell (0xB) in his
# office.  Chairman Hartmann (0x15) and two aide characters (0x16, 0x17)
# interrupt; the party withdraws and talks the scene over in the hallway.
# Ends by re-enabling the map exit and setting scenario flag (0xBE, 6).
def Function_17_5755(): pass

label("Function_17_5755")

EventBegin(0x1)
FadeToDark(500, 0, -1)
OP_0D()
EventBegin(0x0)
# Load Hartmann's and his companions' models into chip slots 0x1E-0x20;
# bu02700.itp is presumably Hartmann's talk portrait.
LoadChrToIndex("chr/ch06500.itc", 0x1E)
LoadChrToIndex("chr/ch27800.itc", 0x1F)
LoadChrToIndex("chr/ch27400.itc", 0x20)
CreatePortrait(0, 234, 0, 490, 256, 0, 0, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x1, "bu02700.itp")
# Camera + party placement in front of the mayor's desk.
OP_68(-45850, 1600, 12420, 0)
MoveCamera(44, 18, 0, 0)
OP_6E(350, 0)
SetCameraDistance(21760, 0)
SetChrPos(0x101, -45750, 0, 10750, 0)
SetChrPos(0x153, -45000, 0, 12250, 0)
# Third slot: whichever partner is in the party stands beside Lloyd.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5817")
SetChrPos(0x102, -44250, 0, 11500, 0)
Jump("loc_585E")
label("loc_5817")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_583D")
SetChrPos(0x103, -44250, 0, 11500, 0)
Jump("loc_585E")
label("loc_583D")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_585E")
SetChrPos(0x104, -44250, 0, 11500, 0)
label("loc_585E")
# Stage Hartmann (0x15) and aides (0x16, 0x17) south of the group,
# fully transparent (OP_A7 with 0 alpha, presumably) for a later walk-in.
SetChrChipByIndex(0x15, 0x1E)
SetChrSubChip(0x15, 0x0)
SetChrPos(0x15, -45000, 120, 3000, 0)
OP_A7(0x15, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0x15, 0x80)
ClearChrBattleFlags(0x15, 0x8000)
SetChrChipByIndex(0x16, 0x1F)
SetChrChipByIndex(0x17, 0x20)
SetChrSubChip(0x16, 0x0)
SetChrSubChip(0x17, 0x0)
SetChrPos(0x16, -45750, 120, 1750, 0)
SetChrPos(0x17, -44250, 120, 2500, 0)
OP_A7(0x16, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x17, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0x16, 0x80)
ClearChrBattleFlags(0x16, 0x8000)
ClearChrFlags(0x17, 0x80)
ClearChrBattleFlags(0x17, 0x8000)
SetChrSubChip(0xB, 0x0)
FadeToBright(1000, 0)
OP_0D()
# Mayor's greeting — wording varies with which partner accompanies Lloyd.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5939")
#C0290
ChrTalk(
0xB,
(
"#5P#2505F哦,艾莉,\x01",
"还有罗伊德也来了啊。\x02",
)
)
CloseMessageWindow()
Jump("loc_59BE")
label("loc_5939")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_597E")
#C0291
ChrTalk(
0xB,
(
"#5P#2505F哦,罗伊德……\x01",
"还有缇欧也来了啊。\x02",
)
)
CloseMessageWindow()
Jump("loc_59BE")
label("loc_597E")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_59BE")
#C0292
ChrTalk(
0xB,
(
"#5P#2505F哦,罗伊德……\x01",
"还有兰迪也来了啊。\x02",
)
)
CloseMessageWindow()
label("loc_59BE")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5A64")
#C0293
ChrTalk(
0x102,
(
"#12P#0103F外公……\x01",
"那个,最近一直都没能和您联络,\x01",
"实在是对不起。\x02",
)
)
CloseMessageWindow()
#C0294
ChrTalk(
0x101,
(
"#6P#0003F之前那件事情,\x01",
"承蒙您的多方关照……\x01",
"真是不知该如何感谢才好。\x02",
)
)
CloseMessageWindow()
Jump("loc_5ACB")
label("loc_5A64")
#C0295
ChrTalk(
0x101,
(
"#6P#0000F市长……好久不见了。\x02\x03",
"#0003F之前那件事情,\x01",
"承蒙您的多方关照……\x01",
"真是不知该如何感谢才好。\x02",
)
)
CloseMessageWindow()
label("loc_5ACB")
#C0296
ChrTalk(
0xB,
(
"#5P#2503F哪里,没什么大不了的。\x02\x03",
"#2500F我只不过是去拜托那些参加了『竞拍会』\x01",
"的议员,让他们对详细情况做个说明而已。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5B93")
#C0297
ChrTalk(
0x102,
(
"#12P#0100F不,正是因为有您的帮忙,\x01",
"才能够牵制住议长。\x02",
)
)
CloseMessageWindow()
Jump("loc_5C32")
label("loc_5B93")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5BE5")
#C0298
ChrTalk(
0x103,
(
"#12P#0200F不过,正是因为您的帮忙,\x01",
"才能将议长牵制住……\x02",
)
)
CloseMessageWindow()
Jump("loc_5C32")
label("loc_5BE5")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5C32")
#C0299
ChrTalk(
0x104,
(
"#12P#0300F哈,但正是因为有您帮忙,\x01",
"才能够牵制住议长啊。\x02",
)
)
CloseMessageWindow()
label("loc_5C32")
#C0300
ChrTalk(
0x101,
(
"#6P#0000F总而言之……\x01",
"实在是太感谢您了。\x02",
)
)
CloseMessageWindow()
#C0301
ChrTalk(
0xB,
(
"#5P#2509F哈哈,如果连这点小事也值得如此在意,\x01",
"那么蒙你们相救的我,\x01",
"可该如何感谢才好啊。\x02\x03",
"#2503F──嗯,先不说这些,\x01",
"那位小姑娘莫非就是……\x02",
)
)
CloseMessageWindow()
#C0302
ChrTalk(
0x101,
(
"#6P#0000F啊,是的,\x01",
"她的名字叫琪雅。\x02",
)
)
CloseMessageWindow()
#C0303
ChrTalk(
0x153,
(
"#6P#1110F初次见面~\x02\x03",
"#1109F老爷爷,你的胡子全都白了,\x01",
"真是好帅气啊~!\x02",
)
)
CloseMessageWindow()
#C0304
ChrTalk(
0xB,
(
"#5P#2509F帅气……\x01",
"哈哈,真是个有趣的小姑娘啊。\x02\x03",
"#2500F嗯,性格阳光又开朗,\x01",
"而且好像有种不可思议的魅力……\x02\x03",
"#2501F……听说她丧失了记忆,\x01",
"到现在还没有查明身份来历吗?\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5E47")
#C0305
ChrTalk(
0x102,
"#12P#0103F嗯,现在还没有……\x02",
)
CloseMessageWindow()
Jump("loc_5EAC")
label("loc_5E47")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5E7E")
#C0306
ChrTalk(
0x103,
"#12P#0203F是的,目前还没有。\x02",
)
CloseMessageWindow()
Jump("loc_5EAC")
label("loc_5E7E")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5EAC")
#C0307
ChrTalk(
0x104,
"#12P#0306F嗯,暂时还没。\x02",
)
CloseMessageWindow()
label("loc_5EAC")
# Wording depends on scenario flag (0xA8, 0) — whether the guild request
# about KeA's background has already been filed or is still pending.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA8, 0)), scpexpr(EXPR_END)), "loc_5EFC")
#C0308
ChrTalk(
0x101,
(
"#6P#0001F不过,我们已经委托游击士协会\x01",
"帮忙调查她的身世了……\x02",
)
)
CloseMessageWindow()
Jump("loc_5F41")
label("loc_5EFC")
#C0309
ChrTalk(
0x101,
(
"#6P#0001F不过,我们正准备\x01",
"去委托游击士协会\x01",
"帮忙调查她的身世……\x02",
)
)
CloseMessageWindow()
label("loc_5F41")
#C0310
ChrTalk(
0xB,
(
"#5P#2500F嗯,交给他们确实是最合适的。\x01",
"现在也只能尽量\x01",
"使用一切能够想到的手段了。\x02\x03",
"#2503F──不管怎么说,\x01",
"以后如果遇到什么困难,\x01",
"随时都可以来找我商量。\x02\x03",
"虽然我只是个行事受限,\x01",
"没什么用的市长……\x02\x03",
"#2500F不过,我至少还没有窝囊到那种程度,\x01",
"会对这种连小小幼女都企图\x01",
"加害的愚蠢恶徒视而不见。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6090")
#C0311
ChrTalk(
0x102,
"#12P#0102F外公……\x02",
)
CloseMessageWindow()
Jump("loc_60F3")
label("loc_6090")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_60C1")
#C0312
ChrTalk(
0x103,
"#12P#0202F市长先生……\x02",
)
CloseMessageWindow()
Jump("loc_60F3")
label("loc_60C1")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_60F3")
#C0313
ChrTalk(
0x104,
"#12P#0302F噢噢,市长先生……\x02",
)
CloseMessageWindow()
label("loc_60F3")
#C0314
ChrTalk(
0x101,
(
"#6P#0004F……光凭您的这一番话,\x01",
"就能给我们带来莫大勇气呢。\x02",
)
)
CloseMessageWindow()
#C0315
ChrTalk(
0x153,
"#6P#1109F嘿嘿嘿,老爷爷果然很帅!\x02",
)
CloseMessageWindow()
# Knock/interruption: an off-screen voice (Hartmann) breaks in; the whole
# party reacts with surprise balloons (OP_63 + Sound 28, presumably).
Sound(811, 0, 100, 0)
Sleep(500)
#N0316
NpcTalk(
0x15,
"男人的声音",
(
"──麦克道尔市长,\x01",
"可以稍微打扰一下吗?\x02",
)
)
CloseMessageWindow()
OP_63(0xB, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x153, 0x0, 1700, 0x26, 0x26, 0xFA, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_61FE")
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Jump("loc_6247")
label("loc_61FE")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6225")
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Jump("loc_6247")
label("loc_6225")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6247")
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
label("loc_6247")
Sleep(1000)
#C0317
ChrTalk(
0xB,
"#5P#2501F嗯……\x02",
)
CloseMessageWindow()
# Door sound; party turns around while Hartmann's group walks in from the
# south, fading from transparent to opaque (paired OP_97 move + OP_A7
# alpha lambdas on worker threads).
Sound(103, 0, 100, 0)
OP_68(-45000, 1500, 9000, 3000)

def lambda_627B():
    OP_93(0x101, 0xB4, 0x1F4)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_627B)
Sleep(50)

def lambda_628B():
    OP_93(0x153, 0xB4, 0x1F4)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_628B)
Sleep(50)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_62B8")

def lambda_62AB():
    OP_93(0x102, 0xB4, 0x1F4)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_62AB)
Jump("loc_62F7")
label("loc_62B8")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_62DA")

def lambda_62CD():
    OP_93(0x103, 0xB4, 0x1F4)
    ExitThread()

QueueWorkItem(0x103, 1, lambda_62CD)
Jump("loc_62F7")
label("loc_62DA")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_62F7")

def lambda_62EF():
    OP_93(0x104, 0xB4, 0x1F4)
    ExitThread()

QueueWorkItem(0x104, 1, lambda_62EF)
label("loc_62F7")

def lambda_62FC():
    OP_97(0x15, 0x0, 0x0, 0xFA0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x15, 1, lambda_62FC)

def lambda_6316():
    OP_A7(0x15, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
    ExitThread()

QueueWorkItem(0x15, 2, lambda_6316)
Sleep(50)

def lambda_632A():
    OP_97(0x16, 0x0, 0x0, 0xFA0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x16, 1, lambda_632A)

def lambda_6344():
    OP_A7(0x16, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
    ExitThread()

QueueWorkItem(0x16, 2, lambda_6344)
Sleep(50)

def lambda_6358():
    OP_97(0x17, 0x0, 0x0, 0xFA0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x17, 1, lambda_6358)

def lambda_6372():
    OP_A7(0x17, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
    ExitThread()

QueueWorkItem(0x17, 2, lambda_6372)
WaitChrThread(0x101, 1)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_63CB")
WaitChrThread(0x102, 1)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Jump("loc_6428")
label("loc_63CB")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_63FC")
WaitChrThread(0x103, 1)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Jump("loc_6428")
label("loc_63FC")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6428")
WaitChrThread(0x104, 1)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
label("loc_6428")
Sleep(1000)
WaitChrThread(0x15, 1)
WaitChrThread(0x16, 1)
WaitChrThread(0x17, 1)
OP_6F(0x79)
#C0318
ChrTalk(
0x101,
"#6P#0005F(什么……!)\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6491")
#C0319
ChrTalk(
0x102,
"#11P#0105F(哈尔特曼议长……!)\x02",
)
CloseMessageWindow()
Jump("loc_64FC")
label("loc_6491")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_64C6")
#C0320
ChrTalk(
0x103,
"#11P#0205F(帝国派的……)\x02",
)
CloseMessageWindow()
Jump("loc_64FC")
label("loc_64C6")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_64FC")
#C0321
ChrTalk(
0x104,
"#11P#0301F(帝国派的首领吗……)\x02",
)
CloseMessageWindow()
label("loc_64FC")
# Party steps aside (east) while Hartmann's group continues north, then
# everyone faces angle 0x10E to watch the exchange.
OP_68(-45850, 1600, 12420, 2000)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_653C")

def lambda_6522():
    OP_97(0x102, 0xBB8, 0x0, 0x0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_6522)
Jump("loc_6595")
label("loc_653C")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_656B")

def lambda_6551():
    OP_97(0x103, 0xBB8, 0x0, 0x0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x103, 1, lambda_6551)
Jump("loc_6595")
label("loc_656B")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6595")

def lambda_6580():
    OP_97(0x104, 0xBB8, 0x0, 0x0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x104, 1, lambda_6580)
label("loc_6595")
Sleep(50)

def lambda_659D():
    OP_97(0x101, 0xBB8, 0x0, 0x0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_659D)
Sleep(50)

def lambda_65BA():
    OP_97(0x153, 0xBB8, 0x0, 0x0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_65BA)

def lambda_65D4():
    OP_97(0x15, 0x0, 0x0, 0xFA0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x15, 1, lambda_65D4)
Sleep(50)

def lambda_65F1():
    OP_97(0x16, 0x0, 0x0, 0xFA0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x16, 1, lambda_65F1)
Sleep(50)

def lambda_660E():
    OP_97(0x17, 0x0, 0x0, 0xFA0, 0x7D0, 0x0)
    ExitThread()

QueueWorkItem(0x17, 1, lambda_660E)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6649")
WaitChrThread(0x102, 1)

def lambda_663C():
    OP_93(0x102, 0x10E, 0x1F4)
    ExitThread()

QueueWorkItem(0x102, 1, lambda_663C)
Jump("loc_6690")
label("loc_6649")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_666F")
WaitChrThread(0x103, 1)

def lambda_6662():
    OP_93(0x103, 0x10E, 0x1F4)
    ExitThread()

QueueWorkItem(0x103, 1, lambda_6662)
Jump("loc_6690")
label("loc_666F")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6690")
WaitChrThread(0x104, 1)

def lambda_6688():
    OP_93(0x104, 0x10E, 0x1F4)
    ExitThread()

QueueWorkItem(0x104, 1, lambda_6688)
label("loc_6690")
WaitChrThread(0x101, 1)

def lambda_6699():
    OP_93(0x101, 0x10E, 0x1F4)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_6699)
WaitChrThread(0x153, 1)

def lambda_66AA():
    OP_93(0x153, 0x10E, 0x1F4)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_66AA)
WaitChrThread(0x101, 1)
WaitChrThread(0x153, 1)
#C0322
ChrTalk(
0xB,
(
"#5P#2500F哈尔特曼议长,有何贵干呢?\x02\x03",
"如您所见,我正在接待客人。\x02",
)
)
CloseMessageWindow()
# Temporary message-window recolor/reposition (OP_C9/OP_CA, presumably)
# for Hartmann's portrait line, restored right after.
OP_C9(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0)
OP_C9(0x0, 0x0, 0xFFFFD8F0, 0x0, 0x1F4)
OP_CA(0x0, 0x0, 0x3)
OP_CA(0x0, 0x0, 0x0)
SetMessageWindowPos(14, 280, 35, 3)
#A0323
AnonymousTalk(
0x15,
(
"这可真是失礼了──不过,\x01",
"我也稍微有点急事要找您。\x02\x03",
"关于帝国政府于前日提出的建议,\x01",
"希望能与您再次商讨一下。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_C9(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0)
OP_C9(0x0, 0x0, 0x0, 0x0, 0x1F4)
OP_CA(0x0, 0x0, 0x3)
OP_CA(0x0, 0x0, 0x0)
OP_6F(0x79)
#C0324
ChrTalk(
0xB,
"#5P#2503F可是,那个……\x02",
)
CloseMessageWindow()
#C0325
ChrTalk(
0x15,
(
"#12P#2703F我明白市长您的意思。\x01",
"不过,希望您也能考虑一下我的立场。\x02\x03",
"#2702F还是说……\x01",
"您想借此机会,同坎贝尔议员\x01",
"他们联盟呢?\x02",
)
)
CloseMessageWindow()
#C0326
ChrTalk(
0xB,
(
"#5P#2503F……我并没打算\x01",
"与共和国派的人联成一线。\x02\x03",
"#2501F当然,关于此点,对你们帝国派也是一样的。\x02",
)
)
CloseMessageWindow()
#C0327
ChrTalk(
0x15,
(
"#12P#2702F既然如此,就请您把那种\x01",
"平衡调控的能力充分发挥出来吧。\x02\x03",
"#2703F因为我那小小的聚会\x01",
"被搞得不欢而散,\x01",
"而给整个自治州带来了负面影响……\x02\x03",
"#2700F就算是为了把这笔账结清,您也该……\x02",
)
)
CloseMessageWindow()
#C0328
ChrTalk(
0xB,
(
"#5P#2503F……看来,接下来确实有必要\x01",
"与您再谈一次啊……\x02",
)
)
CloseMessageWindow()
SetChrSubChip(0xB, 0x1)
#C0329
ChrTalk(
0xB,
(
"#5P#2500F……诸位,\x01",
"你们难得前来拜访,却遇到这种事,\x01",
"实在是不好意思……\x02",
)
)
CloseMessageWindow()
# Party turns back to the mayor to say goodbye.

def lambda_6A23():
    TurnDirection(0x101, 0xB, 500)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_6A23)
Sleep(50)

def lambda_6A33():
    TurnDirection(0x153, 0xB, 500)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_6A33)
Sleep(50)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6A5A")
TurnDirection(0x102, 0xB, 500)
Jump("loc_6A8D")
label("loc_6A5A")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6A76")
TurnDirection(0x103, 0xB, 500)
Jump("loc_6A8D")
label("loc_6A76")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6A8D")
TurnDirection(0x104, 0xB, 500)
label("loc_6A8D")
#C0330
ChrTalk(
0x101,
(
"#11P#0005F哪、哪里,\x01",
"请您不要在意。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6AF3")
#C0331
ChrTalk(
0x102,
"#11P#0103F那我们这就告辞了,外公。\x02",
)
CloseMessageWindow()
Jump("loc_6B56")
label("loc_6AF3")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6B24")
#C0332
ChrTalk(
0x103,
"#11P#0203F……告辞了。\x02",
)
CloseMessageWindow()
Jump("loc_6B56")
label("loc_6B24")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6B56")
#C0333
ChrTalk(
0x104,
"#11P#0300F那我们就先失陪啦。\x02",
)
CloseMessageWindow()
label("loc_6B56")
#C0334
ChrTalk(
0x153,
(
"#5P#1109F再见啦~!\x01",
"大胡子的老爷爷!\x02",
)
)
CloseMessageWindow()
# Scene change: hide the office NPCs, unload models, relocate the party
# to the hallway outside for the follow-up conversation.
FadeToDark(1000, 0, -1)
OP_0D()
SetChrFlags(0xB, 0x80)
SetChrBattleFlags(0xB, 0x8000)
SetChrFlags(0x15, 0x80)
SetChrBattleFlags(0x15, 0x8000)
SetChrFlags(0x16, 0x80)
SetChrBattleFlags(0x16, 0x8000)
SetChrFlags(0x17, 0x80)
SetChrBattleFlags(0x17, 0x8000)
OP_49()
OP_CA(0x1, 0xFF, 0x0)
OP_D5(0x1E)
OP_D5(0x1F)
OP_D5(0x20)
OP_68(-7040, 5500, 17890, 0)
MoveCamera(0, 24, 0, 0)
OP_6E(440, 0)
SetCameraDistance(19810, 0)
SetChrPos(0x101, -7590, 4000, 19290, 135)
SetChrPos(0x153, -7450, 4000, 17650, 0)
# NOTE(review): the CloseMessageWindow() calls right after these
# SetChrPos lines look like decompiler artifacts — no message is open.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6C35")
SetChrPos(0x102, -6220, 4000, 18950, 270)
CloseMessageWindow()
Jump("loc_6C7E")
label("loc_6C35")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6C5C")
SetChrPos(0x103, -6220, 4000, 18950, 270)
CloseMessageWindow()
Jump("loc_6C7E")
label("loc_6C5C")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6C7E")
SetChrPos(0x104, -6220, 4000, 18950, 270)
CloseMessageWindow()
label("loc_6C7E")
FadeToBright(1000, 0)
OP_0D()
#C0335
ChrTalk(
0x101,
"#5P#0001F市长……可真是不容易呢。\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6D65")
#C0336
ChrTalk(
0x102,
(
"#11P#0101F……嗯……\x02\x03",
"类似的场面,我以前\x01",
"也曾见过不知多少次了……\x02\x03",
"#0108F不过,自己明明就是\x01",
"举办违法竞拍会的的幕后首脑,\x01",
"竟然还能如此理直气壮地提出要求……\x02",
)
)
CloseMessageWindow()
Jump("loc_6E59")
label("loc_6D65")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6DD8")
#C0337
ChrTalk(
0x103,
(
"#11P#0203F那位议长……\x01",
"还真是恬不知耻呢。\x02\x03",
"#0200F那场违法竞拍会\x01",
"明明就是他自己举办的……\x02",
)
)
CloseMessageWindow()
Jump("loc_6E59")
label("loc_6DD8")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6E59")
#C0338
ChrTalk(
0x104,
(
"#11P#0306F话说回来,那个议长\x01",
"可真是不知羞耻啊。\x02\x03",
"#0301F明明举办了那种\x01",
"违法的竞拍会,态度竟然还能\x01",
"如此坦然。\x02",
)
)
CloseMessageWindow()
label("loc_6E59")
#C0339
ChrTalk(
0x101,
(
"#5P#0001F像这种程度的事情,\x01",
"他大概完全有自信能控制好,\x01",
"不使其成为丑闻吧……\x02\x03",
"#0006F从某种意义上来说,他远比\x01",
"那些黑手党还要坏得多啊……\x02",
)
)
CloseMessageWindow()
#C0340
ChrTalk(
0x153,
(
"#6P#1110F喂喂,罗伊德~\x02\x03",
"那个老爷爷\x01",
"不只是帅,\x01",
"而且也很强吧~?\x02",
)
)
CloseMessageWindow()
# Everyone reacts to KeA's remark and turns toward her.
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)

def lambda_6F41():
    TurnDirection(0x101, 0x153, 500)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_6F41)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6F77")
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
TurnDirection(0x102, 0x153, 500)
Jump("loc_6FCE")
label("loc_6F77")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6FA5")
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
TurnDirection(0x103, 0x153, 500)
Jump("loc_6FCE")
label("loc_6FA5")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6FCE")
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
TurnDirection(0x104, 0x153, 500)
label("loc_6FCE")
Sleep(500)
WaitChrThread(0x101, 1)
#C0341
ChrTalk(
0x101,
(
"#5P#0005F很强……\x01",
"你为什么会这么想呢?\x02",
)
)
CloseMessageWindow()
#C0342
ChrTalk(
0x153,
(
"#6P#1111F因为,那个看上去好像很嚣张的大叔,\x01",
"不是和同伴们一起来的吗?\x02\x03",
"#1110F他肯定是因为知道自己\x01",
"一个人不是老爷爷的对手,\x01",
"所以才带着别人一起来的吧~?\x02",
)
)
CloseMessageWindow()
#C0343
ChrTalk(
0x101,
"#5P#0005F啊……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7139")
#C0344
ChrTalk(
0x102,
(
"#11P#0105F虽然议长带去的人\x01",
"都是帝国派的重要议员……\x02\x03",
"#0104F呵呵,不过,你这种看问题\x01",
"的角度确实也有些道理呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_71F8")
label("loc_7139")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7189")
#C0345
ChrTalk(
0x103,
(
"#11P#0205F原来如此……\x01",
"这种看问题的角度也颇有道理呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_71F8")
label("loc_7189")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_71F8")
#C0346
ChrTalk(
0x104,
(
"#11P#0304F哈哈,原来如此啊。\x02\x03",
"#0300F没错,确实可以这么说,\x01",
"他根本就没有和市长单挑的自信。\x02",
)
)
CloseMessageWindow()
label("loc_71F8")
#C0347
ChrTalk(
0x101,
(
"#5P#0003F嗯~是啊,像我们这种后辈,\x01",
"为市长担心根本就是不自量力吧。\x02\x03",
"(话虽如此,不过,\x01",
" 要是能为他做些什么就好了……)\x02",
)
)
CloseMessageWindow()
# Cleanup: re-enable the exit/event trigger, park the player where Lloyd
# stands, and record that this event has played.
OP_66(0x2, 0x1)
ClearMapObjFlags(0x1, 0x10)
ModifyEventFlags(1, 0, 0x80)
SetChrPos(0x0, -7590, 4000, 19290, 135)
SetScenarioFlags(0xBE, 6)
EventEnd(0x5)
Return()
# Function_17_5755 end
# Event: quest start for 【给市长的慰问品】 ("care package for the mayor").
# The city-hall receptionist (0x8) tells the party that Chairman Hartmann
# barged into the mayor's office; KeA (0x153) suggests bringing the mayor
# his favorite drink, and the receptionist asks them to buy it at the
# juice stand on the fountain plaza.  Registers quest progress via
# OP_29(0x26, 0x4, 0x2).  The initial Jc on EXPR_GET_CHR_WORK(0x0, 0x1)
# picks one of two mirrored layouts — presumably based on which side of
# the lobby the player approached from (chr work 0x1 looks like the
# x-coordinate; TODO confirm).  0xEF is the accompanying partner slot.
def Function_18_729B(): pass

label("Function_18_729B")
EventBegin(0x0)
Fade(500)
OP_4B(0x8, 0xFF)
SetChrSubChip(0x8, 0x0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x1), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_7320")
OP_68(-3140, 1500, 3700, 0)
MoveCamera(0, 16, 0, 0)
OP_6E(440, 0)
SetCameraDistance(19330, 0)
SetChrPos(0x101, -4500, 130, 2900, 90)
SetChrPos(0x153, -5250, 130, 3650, 90)
SetChrPos(0xEF, -6000, 130, 2150, 90)
Jump("loc_7381")
label("loc_7320")
OP_68(3140, 1500, 3700, 0)
MoveCamera(0, 16, 0, 0)
OP_6E(440, 0)
SetCameraDistance(19330, 0)
SetChrPos(0x101, 4500, 130, 2900, 270)
SetChrPos(0x153, 5250, 130, 3650, 270)
SetChrPos(0xEF, 6000, 130, 2150, 270)
label("loc_7381")
TurnDirection(0x8, 0x101, 0)
OP_0D()
#C0348
ChrTalk(
0x8,
"啊,各位……\x02",
)
CloseMessageWindow()
#C0349
ChrTalk(
0x8,
(
"那个,市长室的情况\x01",
"怎么样了……?\x02",
)
)
CloseMessageWindow()

def lambda_73C7():
    TurnDirection(0x101, 0x8, 500)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_73C7)
Sleep(50)

def lambda_73D7():
    TurnDirection(0x153, 0x8, 500)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_73D7)
Sleep(50)
TurnDirection(0xEF, 0x8, 500)
WaitChrThread(0x101, 1)
WaitChrThread(0x153, 1)
#C0350
ChrTalk(
0x101,
"#0005F哎……\x02",
)
CloseMessageWindow()
# Party walks up to the reception desk (mirrored coordinates per layout).
Jc((scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x1), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_7482")
OP_68(-1290, 1500, 4900, 2500)
SetChrFlags(0xEF, 0x40)

def lambda_742E():
    OP_95(0xFE, -2360, 0, 4980, 2000, 0x0)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_742E)
Sleep(50)

def lambda_744B():
    OP_95(0xFE, -3140, 0, 5670, 2000, 0x0)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_744B)
Sleep(50)

def lambda_7468():
    OP_95(0xFE, -3710, 0, 4340, 2000, 0x0)
    ExitThread()

QueueWorkItem(0xEF, 1, lambda_7468)
Jump("loc_74EC")
label("loc_7482")
OP_68(1290, 1500, 4900, 2500)
SetChrFlags(0xEF, 0x40)

def lambda_749D():
    OP_95(0xFE, 2360, 0, 4980, 2000, 0x0)
    ExitThread()

QueueWorkItem(0x101, 1, lambda_749D)
Sleep(50)

def lambda_74BA():
    OP_95(0xFE, 3140, 0, 5670, 2000, 0x0)
    ExitThread()

QueueWorkItem(0x153, 1, lambda_74BA)
Sleep(50)

def lambda_74D7():
    OP_95(0xFE, 3710, 0, 4340, 2000, 0x0)
    ExitThread()

QueueWorkItem(0xEF, 1, lambda_74D7)
label("loc_74EC")
WaitChrThread(0x101, 1)
WaitChrThread(0x153, 1)
WaitChrThread(0xEF, 1)
ClearChrFlags(0xEF, 0x40)
# Partner's reply varies by who is in the party (Elie/Tio/Randy).
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_752A")
#C0351
ChrTalk(
0x102,
"#0105F您是说……?\x02",
)
CloseMessageWindow()
Jump("loc_7581")
label("loc_752A")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_755B")
#C0352
ChrTalk(
0x103,
"#0205F您的意思是……?\x02",
)
CloseMessageWindow()
Jump("loc_7581")
label("loc_755B")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7581")
#C0353
ChrTalk(
0x104,
"#0305F你是指…?\x02",
)
CloseMessageWindow()
label("loc_7581")
#C0354
ChrTalk(
0x8,
(
"那个,哈尔特曼议长\x01",
"刚才带着几个跟班议员,\x01",
"气势汹汹地进去了……\x02",
)
)
CloseMessageWindow()
#C0355
ChrTalk(
0x8,
(
"虽然对这种事早就司空见惯了,\x01",
"但还是有些担心呢……\x02",
)
)
CloseMessageWindow()
#C0356
ChrTalk(
0x101,
"#0000F啊,是说那件事啊。\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7672")
#C0357
ChrTalk(
0x102,
(
"#0100F虽然稍微有点担心……\x01",
"不过,我想市长应该是不会有问题的。\x02",
)
)
CloseMessageWindow()
Jump("loc_7717")
label("loc_7672")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_76CA")
#C0358
ChrTalk(
0x103,
(
"#0200F虽然稍微有些担心……\x01",
"不过,市长先生的话,应该没问题的。\x02",
)
)
CloseMessageWindow()
Jump("loc_7717")
label("loc_76CA")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7717")
#C0359
ChrTalk(
0x104,
(
"#0300F虽然稍微有点担心,\x01",
"不过,市长先生他肯定应付得了。\x02",
)
)
CloseMessageWindow()
label("loc_7717")
#C0360
ChrTalk(
0x8,
"是吗……\x02",
)
CloseMessageWindow()
#C0361
ChrTalk(
0x8,
(
"……在这种时候,\x01",
"我们市政厅的职员总是无能为力啊。\x02",
)
)
CloseMessageWindow()
#C0362
ChrTalk(
0x8,
(
"至少也应该去给\x01",
"百忙之中的市长\x01",
"送些慰问品才对……\x02",
)
)
CloseMessageWindow()
#C0363
ChrTalk(
0x101,
(
"#0006F……是啊,\x01",
"我也是这么想的……\x02",
)
)
CloseMessageWindow()
OP_63(0x153, 0x0, 1700, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0364
ChrTalk(
0x153,
(
"#1105F要给老爷爷\x01",
"送东西吗~?\x02\x03",
"#1110F那就给老爷爷\x01",
"送些他喜欢吃的东西,\x01",
"好不好~?\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0xEF, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
TurnDirection(0x101, 0x153, 500)
Sleep(500)
#C0365
ChrTalk(
0x101,
"#0005F有、有道理。\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_78CD")
#C0366
ChrTalk(
0x102,
(
"#0100F确实,要送慰问品的话,\x01",
"这应该是个很好的选择。\x02",
)
)
CloseMessageWindow()
Jump("loc_7964")
label("loc_78CD")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_791B")
#C0367
ChrTalk(
0x103,
(
"#0203F确实,如果要送慰问品的话,\x01",
"这个选择应该不错。\x02",
)
)
CloseMessageWindow()
Jump("loc_7964")
label("loc_791B")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7964")
#C0368
ChrTalk(
0x104,
(
"#0304F确实,送慰问品的话,\x01",
"这应该是个不错的主意呢。\x02",
)
)
CloseMessageWindow()
label("loc_7964")
TurnDirection(0x101, 0x8, 500)
#C0369
ChrTalk(
0x8,
"呵呵,是啊。\x02",
)
CloseMessageWindow()
#C0370
ChrTalk(
0x8,
"……那么,各位。\x02",
)
CloseMessageWindow()
#C0371
ChrTalk(
0x8,
(
"如果方便的话,能不能帮忙\x01",
"去买些市长喜欢的饮料呢?\x02",
)
)
CloseMessageWindow()
#C0372
ChrTalk(
0x8,
(
"在喷泉广场的果汁店\x01",
"应该就可以买到了,\x01",
"就在这附近……\x02",
)
)
CloseMessageWindow()
#C0373
ChrTalk(
0x101,
(
"#0000F明白了,\x01",
"这种小事当然没问题。\x02\x03",
"那个,您知道市长喜欢的是\x01",
"哪种饮料吗?\x02",
)
)
CloseMessageWindow()
#C0374
ChrTalk(
0x8,
(
"具体名称我也不太清楚,\x01",
"不过好像是平时不会公开销售\x01",
"的特别饮料呢。\x02",
)
)
CloseMessageWindow()
#C0375
ChrTalk(
0x8,
(
"只要去问问店主,\x01",
"应该就能知道了吧。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7B0E")
#C0376
ChrTalk(
0x102,
(
"#0100F呵呵,那我们就\x01",
"快点去喷泉广场\x01",
"的果汁店吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_7B97")
label("loc_7B0E")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7B56")
#C0377
ChrTalk(
0x103,
(
"#0200F那么,我们就快些去\x01",
"喷泉广场的果汁店吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_7B97")
label("loc_7B56")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7B97")
#C0378
ChrTalk(
0x104,
(
"#0300F那好,我们赶快去\x01",
"喷泉广场的果汁店吧。\x02",
)
)
CloseMessageWindow()
label("loc_7B97")
#C0379
ChrTalk(
0x153,
"#1109F出发啦~!\x02",
)
CloseMessageWindow()
# Quest-start banner (centered system message, colored via scpstr).
FadeToDark(300, 0, 100)
Sound(80, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0380
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"任务【给市长的慰问品】\x07\x00",
"开始!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
# Park the player at the mirrored desk position matching the layout used.
Jc((scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x1), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_7C28")
SetChrPos(0x0, -2360, 0, 4980, 45)
Jump("loc_7C39")
label("loc_7C28")
SetChrPos(0x0, 2360, 0, 4980, 315)
label("loc_7C39")
OP_93(0x8, 0xB4, 0x0)
OP_4C(0x8, 0xFF)
ModifyEventFlags(0, 0, 0x80)
# Record quest 0x26 state (presumably "quest accepted" in the log).
OP_29(0x26, 0x4, 0x2)
EventEnd(0x5)
Return()
# Function_18_729B end
# Event: the party overhears the Chairman (chr 0x15, addressed as 议长 in the
# dialogue) and his two aides (0x16, 0x17) leaving the mayor's office.
# Camera/party placement branches on which side of the hall the player stands
# (chr-work X coordinate vs 0x1900) and on which third party member is present
# (GetPartyIndex 0x1/0x2/0x3 -> 0x102/0x103/0x104). 0x153 is Key (琪雅).
def Function_19_7C51(): pass
label("Function_19_7C51")
EventBegin(0x1)
FadeToDark(500, 0, -1)
OP_0D()
EventBegin(0x0)
# Load the three event-only character models into chip slots 0x1E-0x20.
LoadChrToIndex("chr/ch06500.itc", 0x1E)
LoadChrToIndex("chr/ch27800.itc", 0x1F)
LoadChrToIndex("chr/ch27400.itc", 0x20)
OP_4B(0x9, 0xFF)
SetChrPos(0x9, 8119, 4000, 14200, 315)
# Branch: party stood left (X < -0x1900) or right of the office door.
Jc((scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x1), scpexpr(EXPR_PUSH_LONG, 0x1900), scpexpr(EXPR_NEG), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_7D5A")
OP_68(-7680, 5500, 15150, 0)
MoveCamera(0, 23, 0, 0)
OP_6E(500, 0)
SetCameraDistance(17000, 0)
SetChrPos(0x101, -9250, 4000, 14500, 45)
SetChrPos(0x153, -10350, 4000, 14450, 45)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7D0E")
SetChrPos(0x102, -9600, 4000, 13150, 45)
Jump("loc_7D55")
label("loc_7D0E")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7D34")
SetChrPos(0x103, -9600, 4000, 13150, 45)
Jump("loc_7D55")
label("loc_7D34")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7D55")
SetChrPos(0x104, -9600, 4000, 13150, 45)
label("loc_7D55")
Jump("loc_7E17")
label("loc_7D5A")
# Right-side variant of the same camera/party setup.
OP_68(-4800, 5500, 16000, 0)
MoveCamera(0, 23, 0, 0)
OP_6E(500, 0)
SetCameraDistance(17000, 0)
SetChrPos(0x101, -1500, 4000, 14500, 270)
SetChrPos(0x153, -750, 4000, 15250, 270)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7DD0")
SetChrPos(0x102, -500, 4000, 13750, 270)
Jump("loc_7E17")
label("loc_7DD0")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7DF6")
SetChrPos(0x103, -500, 4000, 13750, 270)
Jump("loc_7E17")
label("loc_7DF6")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7E17")
SetChrPos(0x104, -500, 4000, 13750, 270)
label("loc_7E17")
# Place the chairman and aides (initially fully transparent via OP_A7 alpha 0).
SetChrChipByIndex(0x15, 0x1E)
SetChrSubChip(0x15, 0x0)
SetChrPos(0x15, -8400, 4000, 20200, 135)
OP_A7(0x15, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0x15, 0x80)
ClearChrBattleFlags(0x15, 0x8000)
SetChrChipByIndex(0x16, 0x1F)
SetChrChipByIndex(0x17, 0x20)
SetChrSubChip(0x16, 0x0)
SetChrSubChip(0x17, 0x0)
SetChrPos(0x16, -8800, 4000, 21500, 135)
SetChrPos(0x17, -9750, 4000, 21050, 135)
OP_A7(0x16, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x17, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0x16, 0x80)
ClearChrBattleFlags(0x16, 0x8000)
ClearChrFlags(0x17, 0x80)
ClearChrBattleFlags(0x17, 0x8000)
FadeToBright(1000, 0)
OP_0D()
OP_0D()
#N0381
NpcTalk(
0x15,
"声音",
"#2P──哼,告辞了。\x02",
)
CloseMessageWindow()
# Open door object 0x1 (anim frames 0->0xA) with door-open SFX 103.
ClearMapObjFlags(0x1, 0x10)
OP_71(0x1, 0x0, 0xA, 0x0, 0x0)
Sound(103, 0, 100, 0)
OP_79(0x1)
# The three walk out of the doorway while fading in, staggered 50ms apart.
def lambda_7EEA():
OP_97(0x15, 0xDAC, 0x0, 0xFFFFF254, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x15, 1, lambda_7EEA)
def lambda_7F04():
OP_A7(0x15, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x15, 2, lambda_7F04)
Sleep(50)
def lambda_7F18():
OP_97(0x16, 0xDAC, 0x0, 0xFFFFF254, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x16, 1, lambda_7F18)
def lambda_7F32():
OP_A7(0x16, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x16, 2, lambda_7F32)
Sleep(50)
def lambda_7F46():
OP_97(0x17, 0xDAC, 0x0, 0xFFFFF254, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_7F46)
def lambda_7F60():
OP_A7(0x17, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x17, 2, lambda_7F60)
WaitChrThread(0x15, 1)
WaitChrThread(0x16, 1)
WaitChrThread(0x17, 1)
# Close the door behind them (anim 0xA->0, SFX 104).
ClearMapObjFlags(0x1, 0x10)
OP_71(0x1, 0xA, 0x0, 0x0, 0x0)
Sound(104, 0, 100, 0)
OP_79(0x1)
SetMapObjFlags(0x1, 0x10)
#C0382
ChrTalk(
0x16,
(
"#11P真是的!\x01",
"怎么会有如此顽固的老人!\x02",
)
)
CloseMessageWindow()
#C0383
ChrTalk(
0x17,
(
"#5P竟然回绝了议长阁下\x01",
"难得提出的建议……!\x02",
)
)
CloseMessageWindow()
#C0384
ChrTalk(
0x15,
(
"#11P#2702F哼,也罢,\x01",
"至少也算是给了他一个警告。\x02\x03",
"#2703F……坎贝尔他们那边,\x01",
"就像往常一样处理就可以了……\x02\x03",
"#2701F问题还是鲁巴彻那边,\x01",
"真是一群没用的东西。\x02",
)
)
CloseMessageWindow()
#C0385
ChrTalk(
0x16,
"#11P说、说得也是呢!\x02",
)
CloseMessageWindow()
#C0386
ChrTalk(
0x17,
(
"#5P全靠议长的支持,他们才能风光下去,\x01",
"看来有必要再次提醒那些家伙一下,\x01",
"免得他们忘掉这个事实……\x02",
)
)
CloseMessageWindow()
#C0387
ChrTalk(
0x15,
(
"#11P#2703F嗯,暂时就先\x01",
"不和马尔克尼会面了。\x02\x03",
"#2702F还有,已经有很久没开会了,\x01",
"今天晚上让大家集合一下吧。\x02\x03",
"给我通知帝国派的所有人。\x02",
)
)
CloseMessageWindow()
#C0388
ChrTalk(
0x16,
"#11P明白了!\x02",
)
CloseMessageWindow()
#C0389
ChrTalk(
0x17,
"#5P这就向他们发出紧急召集令!\x02",
)
CloseMessageWindow()
# The three walk off via departure threads (Functions 20-22); the party turns
# to watch, timing/angle again depending on which side they stood.
OP_68(6200, 5500, 17600, 6000)
BeginChrThread(0x15, 3, 0, 20)
BeginChrThread(0x16, 3, 0, 21)
BeginChrThread(0x17, 3, 0, 22)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x1), scpexpr(EXPR_PUSH_LONG, 0x1900), scpexpr(EXPR_NEG), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_8238")
Sleep(1000)
def lambda_820B():
OP_93(0xFE, 0x5A, 0x12C)
ExitThread()
QueueWorkItem(0x101, 1, lambda_820B)
Sleep(50)
def lambda_821B():
OP_93(0xFE, 0x5A, 0x12C)
ExitThread()
QueueWorkItem(0xEF, 1, lambda_821B)
Sleep(50)
def lambda_822B():
OP_93(0xFE, 0x5A, 0x12C)
ExitThread()
QueueWorkItem(0x153, 1, lambda_822B)
Jump("loc_8268")
label("loc_8238")
Sleep(2500)
def lambda_8240():
OP_93(0xFE, 0x2D, 0x12C)
ExitThread()
QueueWorkItem(0x101, 1, lambda_8240)
Sleep(50)
def lambda_8250():
OP_93(0xFE, 0x2D, 0x12C)
ExitThread()
QueueWorkItem(0xEF, 1, lambda_8250)
Sleep(50)
def lambda_8260():
OP_93(0xFE, 0x2D, 0x12C)
ExitThread()
QueueWorkItem(0x153, 1, lambda_8260)
label("loc_8268")
WaitChrThread(0x15, 3)
WaitChrThread(0x17, 3)
WaitChrThread(0x16, 3)
OP_71(0x2, 0xA, 0x0, 0x0, 0x0)
Sound(104, 0, 100, 0)
Sleep(1000)
# Re-frame the camera on the party for their reaction lines.
Jc((scpexpr(EXPR_GET_CHR_WORK, 0x0, 0x1), scpexpr(EXPR_PUSH_LONG, 0x1900), scpexpr(EXPR_NEG), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_82D3")
Fade(500)
OP_68(-8100, 5500, 13450, 0)
MoveCamera(0, 23, 0, 0)
OP_6E(500, 0)
SetCameraDistance(17000, 0)
OP_0D()
Jump("loc_8307")
label("loc_82D3")
Fade(500)
OP_68(-820, 4500, 15700, 0)
MoveCamera(11, 27, 0, 0)
OP_6E(500, 0)
SetCameraDistance(17000, 0)
OP_0D()
label("loc_8307")
#C0390
ChrTalk(
0x101,
(
"#5P#0003F呼……\x01",
"事情好像已经谈完了啊。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8379")
#C0391
ChrTalk(
0x102,
(
"#12P#0100F就趁现在,把慰问品\x01",
"给外公送去吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_8406")
label("loc_8379")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_83C1")
#C0392
ChrTalk(
0x103,
(
"#12P#0200F就趁现在,把慰问品\x01",
"送给市长先生吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_8406")
label("loc_83C1")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8406")
#C0393
ChrTalk(
0x104,
(
"#12P#0300F就趁现在,把慰问品\x01",
"给市长先生送去吧。\x02",
)
)
CloseMessageWindow()
label("loc_8406")
# Cleanup: hide event NPCs, free chip slots, restore free-roam positions,
# set quest progress flag 0xBE bit 7.
FadeToDark(1000, 0, -1)
OP_0D()
SetChrFlags(0x15, 0x80)
SetChrBattleFlags(0x15, 0x8000)
SetChrFlags(0x16, 0x80)
SetChrBattleFlags(0x16, 0x8000)
SetChrFlags(0x17, 0x80)
SetChrBattleFlags(0x17, 0x8000)
OP_49()
OP_D5(0x1E)
OP_D5(0x1F)
OP_D5(0x20)
OP_68(-6390, 5500, 17440, 0)
MoveCamera(0, 25, 0, 0)
OP_6E(440, 0)
SetCameraDistance(24500, 0)
SetChrPos(0x0, -6390, 4000, 17440, 315)
SetChrPos(0x1, -6390, 4000, 17440, 315)
SetChrPos(0x153, -6390, 4000, 17440, 315)
OP_4C(0x9, 0xFF)
ModifyEventFlags(0, 1, 0x80)
OP_1B(0x1, 0x0, 0x17)
SetScenarioFlags(0xBE, 7)
FadeToBright(1000, 0)
OP_0D()
EventEnd(0x5)
Return()
# Function_19_7C51 end
# Departure thread for the chairman (started by Function_19 via
# BeginChrThread(0x15, 3, 0, 20)): walk forward, turn, walk diagonally
# toward the exit, then fade out (OP_A7 alpha -> 0).  0xFE is the
# thread-owning character.
def Function_20_84B5(): pass
label("Function_20_84B5")
def lambda_84BA():
OP_97(0xFE, 0x2710, 0x0, 0x0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_84BA)
WaitChrThread(0xFE, 1)
def lambda_84D8():
OP_93(0xFE, 0x2D, 0x12C)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_84D8)
Sleep(1000)
def lambda_84E8():
OP_97(0xFE, 0xFA0, 0x0, 0xFA0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_84E8)
Sleep(2000)
def lambda_8505():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0xFE, 2, lambda_8505)
WaitChrThread(0xFE, 1)
Return()
# Function_20_84B5 end
# Departure thread for aide 0x16 (BeginChrThread(0x16, 3, 0, 21)): walk out,
# open door object 0x2 (anim 0->0xA, SFX 103), pause, step through, then
# fade out.  Note: Function_19 closes door 0x2 afterwards.
def Function_21_8516(): pass
label("Function_21_8516")
def lambda_851B():
OP_97(0xFE, 0x2AF8, 0x0, 0x0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_851B)
WaitChrThread(0xFE, 1)
def lambda_8539():
OP_95(0xFE, 6720, 4000, 19320, 2000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_8539)
WaitChrThread(0xFE, 1)
OP_71(0x2, 0x0, 0xA, 0x0, 0x0)
Sound(103, 0, 100, 0)
OP_79(0x2)
def lambda_856C():
OP_93(0xFE, 0x87, 0x12C)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_856C)
WaitChrThread(0xFE, 1)
Sleep(2000)
def lambda_8580():
OP_95(0xFE, 7210, 4000, 18830, 2000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_8580)
WaitChrThread(0xFE, 1)
def lambda_859E():
OP_97(0xFE, 0xBB8, 0x0, 0xBB8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_859E)
Sleep(1000)
def lambda_85BB():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0xFE, 2, lambda_85BB)
WaitChrThread(0xFE, 1)
Return()
# Function_21_8516 end
# Departure thread for aide 0x17 (BeginChrThread(0x17, 3, 0, 22)): a slightly
# offset walk path (negative Z first leg), turn, two forward legs, then fade
# out.  0xFE is the thread-owning character.
def Function_22_85CC(): pass
label("Function_22_85CC")
def lambda_85D1():
OP_97(0xFE, 0x2904, 0x0, 0xFFFFFE0C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_85D1)
WaitChrThread(0xFE, 1)
def lambda_85EF():
OP_93(0xFE, 0x2D, 0x12C)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_85EF)
WaitChrThread(0xFE, 1)
Sleep(1000)
def lambda_8603():
OP_97(0xFE, 0x3E8, 0x0, 0x0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_8603)
WaitChrThread(0xFE, 1)
def lambda_8621():
OP_97(0xFE, 0xFA0, 0x0, 0xFA0, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_8621)
Sleep(2000)
def lambda_863E():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0xFE, 2, lambda_863E)
WaitChrThread(0xFE, 1)
Return()
# Function_22_85CC end
# Event: completion of side quest 【给市长的慰问品】 (quest id 0x26).
# The party delivers the mayor's favourite drink to Mayor McDowell (chr 0xB)
# in his office; he rewards them with a 还魂粉 item.  Dialogue branches on the
# third party member (GetPartyIndex 0x1/0x2/0x3 -> Elie/Tio/Randy).
def Function_23_864F(): pass
label("Function_23_864F")
EventBegin(0x0)
FadeToDark(0, 0, -1)
LoadChrToIndex("chr/ch05800.itc", 0x1E)
OP_68(-45000, 1500, 11000, 0)
MoveCamera(45, 25, 0, 0)
OP_6E(350, 0)
SetCameraDistance(25500, 0)
SetChrPos(0x101, -45750, 0, 1500, 0)
SetChrPos(0x153, -45000, 0, 2750, 0)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_86D7")
SetChrPos(0x102, -44250, 0, 2000, 0)
Jump("loc_871E")
label("loc_86D7")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_86FD")
SetChrPos(0x103, -44250, 0, 2000, 0)
Jump("loc_871E")
label("loc_86FD")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_871E")
SetChrPos(0x104, -44250, 0, 2000, 0)
label("loc_871E")
SetChrChipByIndex(0xB, 0x1E)
SetChrSubChip(0xB, 0x0)
SetChrPos(0xB, -45000, 0, 11500, 0)
FadeToBright(1000, 0)
OP_0D()
#C0394
ChrTalk(
0x101,
(
"#0000F对不起,\x01",
"我们又来打扰了。\x02",
)
)
CloseMessageWindow()
OP_63(0xB, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_93(0xB, 0xB4, 0x1F4)
Sleep(500)
#C0395
ChrTalk(
0xB,
(
"#5P#2500F哦,是你们啊。\x01",
"刚才真是不好意思。\x02",
)
)
CloseMessageWindow()
# The party walks up to the desk (staggered 50ms per member).
def lambda_87B7():
OP_98(0x153, 0x0, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x153, 1, lambda_87B7)
Sleep(50)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_87FE")
def lambda_87E4():
OP_98(0x102, 0x0, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_87E4)
Jump("loc_8857")
label("loc_87FE")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_882D")
def lambda_8813():
OP_98(0x103, 0x0, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_8813)
Jump("loc_8857")
label("loc_882D")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8857")
def lambda_8842():
OP_98(0x104, 0x0, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_8842)
label("loc_8857")
Sleep(50)
def lambda_885F():
OP_98(0x101, 0x0, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_885F)
OP_68(-45000, 1100, 10500, 3000)
OP_6F(0x79)
WaitChrThread(0x153, 1)
WaitChrThread(0x101, 1)
#C0396
ChrTalk(
0x101,
(
"#12P#0001F哪里……\x01",
"刚才事态好像很棘手呢,辛苦您了。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8908")
#C0397
ChrTalk(
0x102,
(
"#11P#0108F外公……\x01",
"那个,您不要紧吧?\x02",
)
)
CloseMessageWindow()
Jump("loc_89A3")
label("loc_8908")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8962")
#C0398
ChrTalk(
0x103,
(
"#11P#0200F那些人的态度好像\x01",
"很不友好,提出的要求也很让您为难……\x02",
)
)
CloseMessageWindow()
Jump("loc_89A3")
label("loc_8962")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_89A3")
#C0399
ChrTalk(
0x104,
(
"#11P#0301F您没事吧?\x01",
"那些人好像来势不善啊。\x02",
)
)
CloseMessageWindow()
label("loc_89A3")
#C0400
ChrTalk(
0xB,
(
"#5P#2509F哦,那伙人还是很好打发的。\x01",
"那种程度的威胁,\x01",
"对我来说不过是家常便饭而已。\x02\x03",
"#2500F话说回来,你们还有什么事吗?\x01",
"是不是落下什么东西了?\x02",
)
)
CloseMessageWindow()
#C0401
ChrTalk(
0x101,
(
"#12P#0005F啊,没有……\x02\x03",
"#0002F……去吧,琪雅。\x02",
)
)
CloseMessageWindow()
#C0402
ChrTalk(
0x153,
"#6P#1100F嗯。\x02",
)
CloseMessageWindow()
# Key steps forward and hands over the drink.
OP_98(0x153, 0x0, 0x0, 0x3E8, 0x3E8, 0x0)
Sleep(500)
SetChrName("")
#A0403
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"琪雅把特制苦西红柿奶昔\x01",
"交给了麦克道尔市长。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
# NOTE(review): the item removed here is 'ZWEI2企鹅', while the narration
# above describes handing over a tomato shake — looks like an item-id/name
# mapping mismatch in the decompiled data; verify against the item table.
SubItemNumber('ZWEI2企鹅', 1)
OP_98(0x153, 0x0, 0x0, 0xFFFFFC18, 0x3E8, 0x0)
#C0404
ChrTalk(
0xB,
(
"#5P#2505F这是……我喜欢喝的饮料啊。\x02\x03",
"难道说……\x01",
"你们是特意为我买来的吗?\x02",
)
)
CloseMessageWindow()
#C0405
ChrTalk(
0x153,
(
"#6P#1110F这是大家送给老爷爷的慰问品~\x02\x03",
"#1109F因为老爷爷看上去一直都\x01",
"非常努力呢~\x02",
)
)
CloseMessageWindow()
#C0406
ChrTalk(
0xB,
"#5P#2509F哦……\x02",
)
CloseMessageWindow()
#C0407
ChrTalk(
0x101,
(
"#12P#0003F那个,是市政厅的接待员\x01",
"委托我们给您送慰问品的……\x02\x03",
"#0000F而且,果汁店的店长\x01",
"也免费将它赠送给我们了。\x02",
)
)
CloseMessageWindow()
#C0408
ChrTalk(
0xB,
(
"#5P#2503F是吗……\x01",
"确实,这可真是大家\x01",
"送给我的慰劳品啊。\x02",
)
)
CloseMessageWindow()
# Thank-you line varies with the third party member.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8D0F")
#C0409
ChrTalk(
0xB,
(
"#5P#2500F谢谢你们,罗伊德、艾莉,\x01",
"还有琪雅。\x02\x03",
"#2509F托你们的福,我已经精神百倍了,\x01",
"下午也可以干劲十足地工作了。\x02",
)
)
CloseMessageWindow()
#C0410
ChrTalk(
0x102,
(
"#11P#0102F呵呵,不过还是请您不要\x01",
"太勉强自己啊。\x02",
)
)
CloseMessageWindow()
Jump("loc_8E88")
label("loc_8D0F")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8DC8")
#C0411
ChrTalk(
0xB,
(
"#5P#2500F谢谢你们,罗伊德、缇欧,\x01",
"还有琪雅。\x02\x03",
"#2509F托你们的福,我已经精神百倍了,\x01",
"下午也可以干劲十足地工作了。\x02",
)
)
CloseMessageWindow()
#C0412
ChrTalk(
0x103,
(
"#11P#0200F但最好还是不要\x01",
"太勉强自己啊……\x02",
)
)
CloseMessageWindow()
Jump("loc_8E88")
label("loc_8DC8")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8E88")
#C0413
ChrTalk(
0xB,
(
"#5P#2500F谢谢你们,罗伊德、兰迪,\x01",
"还有琪雅。\x02\x03",
"#2509F托你们的福,我已经精神百倍了,\x01",
"下午也可以干劲十足地工作了。\x02",
)
)
CloseMessageWindow()
#C0414
ChrTalk(
0x104,
(
"#11P#0300F哈哈,那就好啊,\x01",
"不过您还是别太勉强自己呀。\x02",
)
)
CloseMessageWindow()
label("loc_8E88")
#C0415
ChrTalk(
0xB,
(
"#5P#2503F嗯,对了。\x02\x03",
"既然收到了让我如此\x01",
"开心的慰劳品……\x02",
)
)
CloseMessageWindow()
# The mayor walks to the cabinet and fetches a packet; party reacts
# (surprise balloon OP_63 + SFX 29 for whichever third member is present).
OP_97(0xB, 0x157C, 0x0, 0x0, 0x7D0, 0x0)
Sleep(500)
SetChrName("")
#A0416
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"麦克道尔市长从下面的柜子中\x01",
"取出了一个小纸包。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_63(0x101, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8F5E")
OP_63(0x102, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
Jump("loc_8FB3")
label("loc_8F5E")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8F8B")
OP_63(0x103, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
Jump("loc_8FB3")
label("loc_8F8B")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_8FB3")
OP_63(0x104, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
label("loc_8FB3")
OP_97(0xB, 0xFFFFEA84, 0x0, 0x0, 0x7D0, 0x0)
OP_93(0xB, 0xB4, 0x0)
#C0417
ChrTalk(
0xB,
(
"#5P#2500F这是共和国的朋友以前\x01",
"送给我的贵重东方药品。\x02\x03",
"希望你们能收下。\x02",
)
)
CloseMessageWindow()
OP_98(0xB, 0x0, 0x0, 0xFFFFFC18, 0x3E8, 0x0)
Sleep(500)
OP_98(0xB, 0x0, 0x0, 0x3E8, 0x3E8, 0x0)
# Reward: item 还魂粉 x1.
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
Sound(17, 0, 100, 0)
#A0418
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_ITEM, '还魂粉'),
scpstr(SCPSTR_CODE_COLOR, 0x0),
"收下了。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
AddItemNumber('还魂粉', 1)
#C0419
ChrTalk(
0x101,
(
"#12P#0005F这、这个……\x01",
"我们不能收这么贵重的东西啊。\x02",
)
)
CloseMessageWindow()
#C0420
ChrTalk(
0xB,
(
"#5P#2503F不必客气啊,这东西留在我这里也没有用。\x02\x03",
"而你们这些年轻警察经常会\x01",
"遭遇危险,正需要它。\x01",
"我只是希望能物尽其用而已。\x02\x03",
"#2500F总之,请你们收下吧。\x02",
)
)
CloseMessageWindow()
#C0421
ChrTalk(
0x101,
"#12P#0000F……明白了。\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_91BF")
#C0422
ChrTalk(
0x102,
(
"#11P#0102F呵呵……\x01",
"那我们就不再客气了。\x02",
)
)
CloseMessageWindow()
Jump("loc_923A")
label("loc_91BF")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x2)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_91FA")
#C0423
ChrTalk(
0x103,
"#11P#0202F那么,我们就不客气了。\x02",
)
CloseMessageWindow()
Jump("loc_923A")
label("loc_91FA")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x3)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_923A")
#C0424
ChrTalk(
0x104,
"#11P#0309F既然您都这么说了,那就不客气啦!\x02",
)
CloseMessageWindow()
label("loc_923A")
#C0425
ChrTalk(
0x153,
"#6P#1109F老爷爷~谢谢啦~!\x02",
)
CloseMessageWindow()
# Quest-complete jingle and banner.
FadeToDark(300, 0, 100)
Sound(9, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0426
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"任务【给市长的慰问品】\x07\x00",
"完成!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
# Cleanup: restore the mayor's default chip, free slot 0x1E, reset positions,
# and mark quest 0x26 complete (OP_29 progress updates).
FadeToDark(1000, 0, -1)
OP_0D()
SetChrChipByIndex(0xB, 0x4)
OP_49()
OP_D5(0x1E)
OP_68(-45000, 1500, 12000, 0)
MoveCamera(45, 25, 0, 0)
OP_6E(350, 0)
SetCameraDistance(25500, 0)
SetChrPos(0x0, -45000, 0, 12000, 0)
SetChrPos(0x1, -45000, 0, 12000, 0)
SetChrPos(0x153, -45000, 0, 12000, 0)
SetChrPos(0xB, -45000, 250, 14700, 180)
OP_1B(0x1, 0xFF, 0xFFFF)
OP_29(0x26, 0x4, 0x10)
OP_29(0x26, 0x1, 0x6)
FadeToBright(500, 0)
OP_0D()
EventEnd(0x5)
Return()
# Function_23_864F end
# Event: the full four-member party asks the city-hall receptionist (chr 0x8)
# for the key to underground area B; she fetches it from the back and hands
# it over.  Grants the key item and sets scenario flag 0x83 bit 2 plus quest
# progress OP_29(0x43, 0x1, 0x5).
def Function_24_9351(): pass
label("Function_24_9351")
EventBegin(0x0)
OP_4B(0x8, 0xFF)
Fade(1000)
OP_68(0, 1100, 6000, 0)
MoveCamera(35, 18, 0, 0)
OP_6E(400, 0)
SetCameraDistance(24000, 0)
SetChrPos(0x101, -600, 0, 4700, 0)
SetChrPos(0x102, 600, 0, 4700, 0)
SetChrPos(0x103, -700, 0, 3600, 0)
SetChrPos(0x104, 700, 0, 3600, 0)
OP_0D()
#C0427
ChrTalk(
0x8,
(
"#5P欢迎。\x01",
"欢迎来到克洛斯贝尔市政厅。\x02",
)
)
CloseMessageWindow()
# If the vacant-house quest (0x3) was already taken, she recognizes the
# party as police and skips the ID-check exchange.
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x3, 0x1, 0x0)"), scpexpr(EXPR_END)), "loc_9465")
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0428
ChrTalk(
0x8,
(
"#5P啊……\x01",
"各位好像是警察吧。\x02",
)
)
CloseMessageWindow()
#C0429
ChrTalk(
0x8,
"#5P是要与哪位工作人员会面吗?\x02",
)
CloseMessageWindow()
Jump("loc_9524")
label("loc_9465")
#C0430
ChrTalk(
0x8,
"#5P来此有何贵干呢?\x02",
)
CloseMessageWindow()
#C0431
ChrTalk(
0x101,
(
"#12P#0000F是的,我们是克洛斯贝尔\x01",
"警察局的人……\x02",
)
)
CloseMessageWindow()
Sound(804, 0, 100, 0)
SetChrName("")
#A0432
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"罗伊德将调查手册出示给对方。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
#C0433
ChrTalk(
0x8,
"#5P啊,是这样啊。\x02",
)
CloseMessageWindow()
#C0434
ChrTalk(
0x8,
"#5P那么,是要与哪位工作人员会面吗?\x02",
)
CloseMessageWindow()
label("loc_9524")
#C0435
ChrTalk(
0x102,
"#0103F不,其实是……\x02",
)
CloseMessageWindow()
SetChrName("")
#A0436
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"提出了为进行调查工作,\x01",
"希望能借用一下地下空间B区域的钥匙。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
#C0437
ChrTalk(
0x8,
(
"#5P地下空间B区域……\x01",
"应该是城市西北部的那个区域吧。\x02",
)
)
CloseMessageWindow()
#C0438
ChrTalk(
0x8,
"#5P请稍等一下。\x02",
)
CloseMessageWindow()
# She walks to the back office (off-screen during the fade) and returns.
FadeToDark(1000, 0, -1)
OP_93(0x8, 0x0, 0x1F4)
def lambda_95ED():
OP_95(0xFE, 0, 0, 9400, 1500, 0x0)
ExitThread()
QueueWorkItem(0x8, 1, lambda_95ED)
OP_0D()
WaitChrThread(0x8, 1)
Sleep(500)
OP_93(0x8, 0x0, 0x0)
def lambda_9616():
OP_95(0xFE, 0, 0, 7400, 1500, 0x0)
ExitThread()
QueueWorkItem(0x8, 1, lambda_9616)
FadeToBright(1000, 0)
OP_0D()
WaitChrThread(0x8, 1)
#C0439
ChrTalk(
0x8,
"#5P就是这把。\x02",
)
CloseMessageWindow()
# Grant the key item.
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
Sound(17, 0, 100, 0)
#A0440
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_ITEM, '地下空间B区域的钥匙'),
scpstr(SCPSTR_CODE_COLOR, 0x0),
"得到了。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
AddItemNumber('地下空间B区域的钥匙', 1)
#C0441
ChrTalk(
0x101,
"#12P#0002F多谢,那我们就暂时借用一下了。\x02",
)
CloseMessageWindow()
#C0442
ChrTalk(
0x103,
(
"#12P#0200F不过,竟然这么爽快\x01",
"就借给我们了啊……?\x02",
)
)
CloseMessageWindow()
#C0443
ChrTalk(
0x8,
(
"#5P嗯,因为上面吩咐我们要尽量\x01",
"协助克洛斯贝尔警察局的工作。\x02",
)
)
CloseMessageWindow()
#C0444
ChrTalk(
0x8,
(
"#5P顺便一说,我们也将同样的钥匙\x01",
"借给游击士协会了。\x02",
)
)
CloseMessageWindow()
#C0445
ChrTalk(
0x101,
"#12P#0012F是、是这样啊……\x02",
)
CloseMessageWindow()
#C0446
ChrTalk(
0x8,
(
"#5P地下空间B区域的入口\x01",
"就在住宅街的水道附近。\x02",
)
)
CloseMessageWindow()
#C0447
ChrTalk(
0x8,
(
"#5P以前就曾有人把钥匙弄丢过,\x01",
"所以还请各位多加小心。\x02",
)
)
CloseMessageWindow()
#C0448
ChrTalk(
0x102,
"#0100F嗯,明白了。\x02",
)
CloseMessageWindow()
#C0449
ChrTalk(
0x104,
"#12P#0309F这位姐姐,多谢啦。\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetChrPos(0x0, 0, 0, 4000, 180)
OP_4C(0x8, 0xFF)
SetScenarioFlags(0x83, 2)
OP_29(0x43, 0x1, 0x5)
EventEnd(0x5)
Return()
# Function_24_9351 end
# Event: the party walks in on a girl NPC (chr 0x12) asking the receptionist
# (chr 0x8) about cheap apartments; she bows and hurries out, and the party
# remarks on running into her again.  Sets scenario flag 0x53 bit 0.
def Function_25_985D(): pass
label("Function_25_985D")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_4B(0x12, 0xFF)
OP_4B(0x8, 0xFF)
OP_68(0, 900, 6560, 0)
MoveCamera(54, 25, 0, 0)
OP_6E(440, 0)
SetCameraDistance(21000, 0)
SetChrPos(0x101, -650, 0, -3500, 0)
SetChrPos(0x102, 650, 0, -3500, 0)
SetChrPos(0x103, -900, 0, -4800, 0)
SetChrPos(0x104, 900, 0, -4800, 0)
FadeToBright(2000, 0)
OP_0D()
#C0450
ChrTalk(
0x8,
(
"#5P最便宜的出租公寓……\x01",
"……是吗?\x02",
)
)
CloseMessageWindow()
#C0451
ChrTalk(
0x8,
(
"#5P我们这里有些资料,\x01",
"应该可以作为您的参考……\x02",
)
)
CloseMessageWindow()
#C0452
ChrTalk(
0x12,
(
"#12P#1805F啊,原来还有这种租房指南呀!\x02\x03",
"#1809F太好了……\x01",
"………这本手册,\x01",
"我可不可以借去看看呢?\x02",
)
)
CloseMessageWindow()
#C0453
ChrTalk(
0x8,
(
"#5P嗯,这是可以随意领取的,\x01",
"直接拿回去看也没问题。\x02",
)
)
CloseMessageWindow()
#C0454
ChrTalk(
0x8,
"#5P不过,那个,太便宜的地方其实有些……\x02",
)
CloseMessageWindow()
#C0455
ChrTalk(
0x12,
(
"#12P#1806F呼,必须要早点找到住所,\x01",
"集中精神来练习啊……\x02\x03",
"#1802F#3S……啊,不好意思。\x01",
"非常感谢你为我介绍。\x02",
)
)
CloseMessageWindow()
# She turns to leave while the party walks forward; they nearly collide.
OP_93(0x12, 0xB4, 0x1F4)
Sleep(300)
OP_68(0, 1500, -1030, 3000)
def lambda_9AA3():
OP_98(0x101, 0x0, 0x0, 0x7D0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_9AA3)
Sleep(50)
def lambda_9AC0():
OP_95(0xFE, 0, 0, 1140, 2000, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_9AC0)
def lambda_9ADA():
OP_98(0x102, 0x0, 0x0, 0x7D0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_9ADA)
Sleep(50)
def lambda_9AF7():
OP_98(0x103, 0x0, 0x0, 0x7D0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_9AF7)
Sleep(50)
def lambda_9B14():
OP_98(0x104, 0x0, 0x0, 0x7D0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_9B14)
Sleep(350)
WaitChrThread(0x101, 1)
WaitChrThread(0x102, 1)
WaitChrThread(0x103, 1)
WaitChrThread(0x104, 1)
OP_63(0x12, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0456
ChrTalk(
0x12,
"#5P#1805F啊,对不起!\x02",
)
CloseMessageWindow()
#C0457
ChrTalk(
0x101,
"#12P#0005F哪里,应该是我们道歉才对……\x02",
)
CloseMessageWindow()
# Party members step aside to let her pass.
def lambda_9B9D():
OP_98(0x101, 0xFFFFFE0C, 0x0, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_9B9D)
Sleep(50)
def lambda_9BBA():
OP_98(0x102, 0x1F4, 0x0, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_9BBA)
Sleep(25)
def lambda_9BD7():
OP_98(0x103, 0xFFFFFE0C, 0x0, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_9BD7)
Sleep(25)
def lambda_9BF4():
OP_98(0x104, 0x1F4, 0x0, 0x0, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_9BF4)
WaitChrThread(0x101, 1)
OP_93(0x101, 0x2D, 0x0)
WaitChrThread(0x102, 1)
OP_93(0x102, 0x13B, 0x0)
WaitChrThread(0x103, 1)
OP_93(0x103, 0x2D, 0x0)
WaitChrThread(0x104, 1)
OP_93(0x104, 0x13B, 0x0)
#C0458
ChrTalk(
0x12,
"#5P#1804F………………(低头行礼)\x02",
)
CloseMessageWindow()
# She runs out; each party member keeps turning to face her (thread 2 loops)
# until she fades out, then the watch-loops are terminated.
def lambda_9C61():
OP_97(0xFE, 0x0, 0x0, 0xFFFFDF94, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x12, 1, lambda_9C61)
Sleep(500)
def lambda_9C7E():
label("loc_9C7E")
TurnDirection(0x101, 0x12, 500)
Yield()
Jump("loc_9C7E")
QueueWorkItem2(0x101, 2, lambda_9C7E)
def lambda_9C90():
label("loc_9C90")
TurnDirection(0x102, 0x12, 500)
Yield()
Jump("loc_9C90")
QueueWorkItem2(0x102, 2, lambda_9C90)
def lambda_9CA2():
label("loc_9CA2")
TurnDirection(0x103, 0x12, 500)
Yield()
Jump("loc_9CA2")
QueueWorkItem2(0x103, 2, lambda_9CA2)
def lambda_9CB4():
label("loc_9CB4")
TurnDirection(0x104, 0x12, 500)
Yield()
Jump("loc_9CB4")
QueueWorkItem2(0x104, 2, lambda_9CB4)
Sleep(1200)
def lambda_9CC9():
OP_A7(0x12, 0xFF, 0xFF, 0xFF, 0x0, 0x3E8)
ExitThread()
QueueWorkItem(0x12, 2, lambda_9CC9)
WaitChrThread(0x12, 1)
WaitChrThread(0x12, 2)
EndChrThread(0x101, 0x2)
EndChrThread(0x102, 0x2)
EndChrThread(0x103, 0x2)
EndChrThread(0x104, 0x2)
SetChrFlags(0x12, 0x80)
SetChrBattleFlags(0x12, 0x8000)
Sleep(300)
#C0459
ChrTalk(
0x101,
"#5P#0000F那个女孩,好像是刚才见过的那个……\x02",
)
CloseMessageWindow()
#C0460
ChrTalk(
0x102,
(
"#5P#0102F呵呵,好像总能\x01",
"和她巧遇啊。\x02",
)
)
CloseMessageWindow()
OP_5A()
SetChrPos(0x0, 0, 0, -1720, 0)
OP_4C(0x12, 0xFF)
OP_4C(0x8, 0xFF)
SetScenarioFlags(0x53, 0)
EventEnd(0x5)
Return()
# Function_25_985D end
# Event: first-time offer of the vacant-house survey request (quest 0x3) by
# the receptionist (chr 0x8).  Full explanation of the resident-registry
# problem, then an accept/decline menu: accept -> Call(0, 28) hands over the
# document list; decline -> quest can be retried later (flag via OP_29).
def Function_26_9D75(): pass
label("Function_26_9D75")
EventBegin(0x0)
FadeToDark(1000, 0, -1)
OP_0D()
OP_68(0, 1200, 6000, 0)
MoveCamera(35, 18, 0, 0)
OP_6E(400, 0)
SetCameraDistance(24500, 0)
SetChrPos(0x101, -600, 0, 4700, 0)
SetChrPos(0x102, 600, 0, 4700, 0)
SetChrPos(0x103, -700, 0, 3600, 0)
SetChrPos(0x104, 700, 0, 3600, 0)
FadeToBright(1000, 0)
OP_0D()
#C0461
ChrTalk(
0x8,
(
"#5P这里是克洛斯贝尔\x01",
"市政厅的接待处。\x02",
)
)
CloseMessageWindow()
#C0462
ChrTalk(
0x8,
(
"#5P如果想支付各种费用,\x01",
"或是申请迁居,\x01",
"就请来这里办理手续吧。\x02",
)
)
CloseMessageWindow()
#C0463
ChrTalk(
0x101,
(
"#12P#0000F那个,不好意思。\x01",
"我们是克洛斯贝尔警察局的人。\x02\x03",
"前来处理市政厅\x01",
"发来的支援请求……\x02",
)
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(500)
#C0464
ChrTalk(
0x8,
"#5P啊,原来各位是警察局的人啊!\x02",
)
CloseMessageWindow()
#C0465
ChrTalk(
0x8,
(
"#5P太好了,来得这么早,\x01",
"真是出乎意料呢。\x02",
)
)
CloseMessageWindow()
#C0466
ChrTalk(
0x102,
(
"#11P#0100F呵呵,可以请您尽快\x01",
"将委托的内容交代给我们吗?\x02\x03",
"好像是关于确认\x01",
"无人住所之类的吧……\x02",
)
)
CloseMessageWindow()
#C0467
ChrTalk(
0x8,
"#5P嗯,我立刻进行说明。\x02",
)
CloseMessageWindow()
#C0468
ChrTalk(
0x8,
(
"#5P……各位应该知道\x01",
"什么是『住户登记』吧。\x02",
)
)
CloseMessageWindow()
#C0469
ChrTalk(
0x8,
(
"#5P市民在搬到克洛斯贝尔定居\x01",
"的时候,都要来这里\x01",
"进行住户登记申请。\x02",
)
)
CloseMessageWindow()
#C0470
ChrTalk(
0x8,
(
"#5P可是……实际上,也有很多人在没有\x01",
"经过申请的情况下就直接入住或离开。\x02",
)
)
CloseMessageWindow()
#C0471
ChrTalk(
0x8,
(
"#5P我们市政厅也无法\x01",
"完全掌握到具体情况。\x02",
)
)
CloseMessageWindow()
#C0472
ChrTalk(
0x103,
"#12P#0200F是吗……\x02",
)
CloseMessageWindow()
#C0473
ChrTalk(
0x8,
(
"#5P所以,希望各位能\x01",
"帮忙确认一下空房。\x02",
)
)
CloseMessageWindow()
#C0474
ChrTalk(
0x8,
(
"#5P特别是,在登记为空房的住所中,\x01",
"可能也会存在一些不准确的记录。\x02",
)
)
CloseMessageWindow()
#C0475
ChrTalk(
0x8,
(
"#5P居民科的人好像也\x01",
"没空来处理……\x01",
"所以希望各位能帮忙进行确认。\x02",
)
)
CloseMessageWindow()
#C0476
ChrTalk(
0x104,
(
"#0300F总而言之,也就是让我们\x01",
"替政府机关的大人物们跑一趟嘛。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(1200)
#C0477
ChrTalk(
0x101,
(
"#12P#0006F兰迪,这么说也太失礼了。\x02\x03",
"#0001F说起空房,从防止犯罪的角度来说,\x01",
"也绝对是一项不容忽视的问题。\x01",
"所以确认工作还是必不可少的。\x02",
)
)
CloseMessageWindow()
#C0478
ChrTalk(
0x104,
"#0305F哦,原来还有这种说法啊。\x02",
)
CloseMessageWindow()
#C0479
ChrTalk(
0x102,
(
"#11P#0100F算了,只不过是在市内转一圈而已,\x01",
"也不是什么坏事嘛。\x02",
)
)
CloseMessageWindow()
#C0480
ChrTalk(
0x8,
(
"#5P……那个,既然如此,\x01",
"各位愿意接受这项工作吗?\x02",
)
)
CloseMessageWindow()
# Accept / decline menu; result lands in expression register 0x0.
FadeToDark(300, 0, 100)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Menu(
0,
-1,
-1,
0,
(
"【接受】\x01", # 0
"【拒绝】\x01", # 1
)
)
MenuEnd(0x0)
OP_60(0x0)
FadeToBright(300, 0)
OP_5A()
OP_29(0x3, 0x1, 0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_A352")
Call(0, 28)
Return()
label("loc_A352")
# Declined: the receptionist notes the offer stays open for the day.
#C0481
ChrTalk(
0x101,
(
"#12P#0006F抱歉,\x01",
"虽然大致情况已经了解了,\x01",
"但我们现在还有其它工作……\x02",
)
)
CloseMessageWindow()
#C0482
ChrTalk(
0x8,
(
"#5P是吗……那就麻烦各位\x01",
"有空的时候再来帮忙吧。\x02",
)
)
CloseMessageWindow()
#C0483
ChrTalk(
0x8,
(
"#5P只要是在今天之内,\x01",
"随便什么时候都可以的。\x02",
)
)
CloseMessageWindow()
OP_5A()
SetChrPos(0x0, 0, 0, 4000, 180)
OP_4C(0x8, 0xFF)
OP_29(0x3, 0x1, 0x1F)
EventEnd(0x5)
Return()
# Function_26_9D75 end
# Event: abbreviated re-offer of the vacant-house survey request (quest 0x3),
# shown when the party returns after previously declining.  Same accept
# (Call(0, 28)) / decline branches as Function_26, minus the long briefing.
def Function_27_A42C(): pass
label("Function_27_A42C")
EventBegin(0x0)
FadeToDark(1000, 0, -1)
OP_0D()
OP_68(0, 1200, 6000, 0)
MoveCamera(35, 18, 0, 0)
OP_6E(400, 0)
SetCameraDistance(24500, 0)
SetChrPos(0x101, -600, 0, 4700, 0)
SetChrPos(0x102, 600, 0, 4700, 0)
SetChrPos(0x103, -700, 0, 3600, 0)
SetChrPos(0x104, 700, 0, 3600, 0)
FadeToBright(1000, 0)
OP_0D()
#C0484
ChrTalk(
0x8,
(
"#5P希望各位能帮忙\x01",
"确认一下市内的空房。\x02",
)
)
CloseMessageWindow()
#C0485
ChrTalk(
0x8,
(
"#5P居民科的人好像也\x01",
"没空来处理……\x01",
"各位愿意接受这项工作吗?\x02",
)
)
CloseMessageWindow()
# Accept / decline menu; result in expression register 0x0.
FadeToDark(300, 0, 100)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Menu(
0,
-1,
-1,
0,
(
"【接受】\x01", # 0
"【拒绝】\x01", # 1
)
)
MenuEnd(0x0)
OP_60(0x0)
FadeToBright(300, 0)
OP_5A()
OP_29(0x3, 0x1, 0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_A57F")
Call(0, 28)
Return()
label("loc_A57F")
#C0486
ChrTalk(
0x101,
(
"#12P#0006F抱歉,\x01",
"虽然大致情况已经了解了,\x01",
"但我们现在还有其它工作……\x02",
)
)
CloseMessageWindow()
#C0487
ChrTalk(
0x8,
(
"#5P是吗……那就麻烦各位\x01",
"有空的时候再来帮忙吧。\x02",
)
)
CloseMessageWindow()
#C0488
ChrTalk(
0x8,
(
"#5P只要是在今天之内,\x01",
"随便什么时候都可以的。\x02",
)
)
CloseMessageWindow()
OP_5A()
SetChrPos(0x0, 0, 0, 4000, 180)
OP_4C(0x8, 0xFF)
OP_29(0x3, 0x1, 0x1F)
EventEnd(0x5)
Return()
# Function_27_A42C end
def Function_28_A659(): pass
label("Function_28_A659")
OP_29(0x3, 0x2, 0x1F)
#C0489
ChrTalk(
0x8,
(
"#5P非常感谢,\x01",
"那我这就把文件交给你们。\x02",
)
)
CloseMessageWindow()
#C0490
ChrTalk(
0x8,
(
"#5P……这就是此次工作\x01",
"的相关文件。\x02",
)
)
CloseMessageWindow()
#C0491
ChrTalk(
0x101,
"#12P#0000F嗯,那就暂时由我们保管了。\x02",
)
CloseMessageWindow()
Sleep(200)
OP_95(0x101, -70, 0, 5240, 1000, 0x0)
OP_93(0x101, 0x0, 0x1F4)
Sleep(500)
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
Sound(17, 0, 100, 0)
#A0492
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_ITEM, 0x35A),
scpstr(SCPSTR_CODE_COLOR, 0x0),
"收下了。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
AddItemNumber(0x35A, 1)
OP_96(0x101, 0xFFFFFDA8, 0x0, 0x125C, 0x3E8, 0x0)
Sleep(400)
def lambda_A762():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_A762)
Sleep(200)
def lambda_A772():
OP_93(0xFE, 0xE1, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_A772)
def lambda_A77F():
OP_93(0xFE, 0x2D, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_A77F)
def lambda_A78C():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_A78C)
Sleep(500)
#C0493
ChrTalk(
0x101,
(
"#6P#0005F嗯,一共有三处啊。\x02\x03",
"#0003F住宅街有一处。\x01",
"看起来,好像在接近出口的位置呢。\x02\x03",
"东街也有一处……\x01",
"从地址来看,好像紧挨着\x01",
"游击士协会的右侧啊。\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x45, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x42, 0)), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_A8FA")
#C0494
ChrTalk(
0x104,
(
"#0300F挨着协会的右边吗?\x01",
"那倒是很好记啊。\x02\x03",
"#0305F最后的一处……好像在旧城区啊?\x02",
)
)
CloseMessageWindow()
#C0495
ChrTalk(
0x102,
(
"#11P#0103F旧城区的公寓\x01",
"『莲花公馆』有三处空房……\x02\x03",
"#0105F稍等一下,\x01",
"我先把这些记录在调查手册里。\x02",
)
)
CloseMessageWindow()
Jump("loc_A9D6")
label("loc_A8FA")
#C0496
ChrTalk(
0x104,
(
"#0303F挨着协会的右边吗?\x01",
"那倒是很好记啊。\x02\x03",
"#0305F最后的这个地方……\x01",
"是在哪里啊?\x02",
)
)
CloseMessageWindow()
#C0497
ChrTalk(
0x102,
(
"#11P#0100F这个地方,如今被称作旧城区哦。\x02\x03",
"#0103F『莲花公馆』公寓中\x01",
"有三处空房……\x02\x03",
"#0105F稍等一下,\x01",
"我先把这些记录在调查手册里。\x02",
)
)
CloseMessageWindow()
label("loc_A9D6")
#C0498
ChrTalk(
0x102,
"#11P#0103F(认真记录……)\x02",
)
CloseMessageWindow()
Sleep(500)
#C0499
ChrTalk(
0x102,
"#11P#0100F嗯,记好了。\x02",
)
CloseMessageWindow()
#C0500
ChrTalk(
0x101,
"#6P#0000F谢啦,艾莉。\x02",
)
CloseMessageWindow()
Sleep(200)
def lambda_AA38():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_AA38)
def lambda_AA45():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_AA45)
def lambda_AA52():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_AA52)
def lambda_AA5F():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_AA5F)
Sleep(500)
#C0501
ChrTalk(
0x101,
(
"#12P#0000F那我们现在就出发,\x01",
"去确认那些空房吧。\x02\x03",
"完成之后,来这里报告就可以了吧?\x02",
)
)
CloseMessageWindow()
#C0502
ChrTalk(
0x8,
(
"#5P嗯,三处空房都确认完毕之后,\x01",
"过来通知我就可以了。\x01",
"拜托各位了。\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
Sound(80, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0503
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"任务【无人住所的确认】\x07\x00",
"开始!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
FadeToDark(1000, 0, -1)
OP_0D()
OP_68(0, 1500, 4000, 0)
MoveCamera(0, 25, 0, 0)
OP_6E(440, 0)
SetCameraDistance(24500, 0)
SetChrPos(0x0, 0, 0, 4000, 0)
SetChrPos(0x1, 0, 0, 4000, 0)
SetChrPos(0x2, 0, 0, 4000, 0)
SetChrPos(0x3, 0, 0, 4000, 0)
OP_4C(0x8, 0xFF)
OP_29(0x3, 0x1, 0x1)
Sleep(500)
EventEnd(0x5)
Return()
# Function_28_A659 end
def Function_29_ABEA(): pass
label("Function_29_ABEA")
EventBegin(0x0)
FadeToDark(1000, 0, -1)
OP_0D()
OP_68(0, 1200, 6000, 0)
MoveCamera(35, 18, 0, 0)
OP_6E(400, 0)
SetCameraDistance(24500, 0)
SetChrPos(0x101, -600, 0, 4700, 0)
SetChrPos(0x102, 600, 0, 4700, 0)
SetChrPos(0x103, -700, 0, 3600, 0)
SetChrPos(0x104, 700, 0, 3600, 0)
FadeToBright(1000, 0)
OP_0D()
#C0504
ChrTalk(
0x8,
(
"#5P啊,各位,\x01",
"确认空房的工作还顺利吗?\x02",
)
)
CloseMessageWindow()
#C0505
ChrTalk(
0x101,
(
"#12P#0000F嗯,已经全部\x01",
"确认完毕了,\x01",
"现在就来报告。\x02",
)
)
CloseMessageWindow()
Sleep(200)
OP_95(0x101, -70, 0, 5240, 1000, 0x0)
OP_93(0x101, 0x0, 0x1F4)
Sleep(500)
FadeToDark(300, 0, 100)
Sound(17, 0, 100, 0)
Sleep(400)
SetChrName("")
#A0506
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"提交了修正过错误的文件。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
OP_5A()
Sleep(300)
OP_96(0x101, 0xFFFFFDA8, 0x0, 0x125C, 0x3E8, 0x0)
Sleep(400)
SubItemNumber(0x35A, 1)
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(500)
#C0507
ChrTalk(
0x8,
(
"#5P啊,标记了房间号,\x01",
"旁边还加上了批注呢……\x02",
)
)
CloseMessageWindow()
#C0508
ChrTalk(
0x8,
(
"#5P真是太感谢了,\x01",
"居民科的人一定也会很高兴的。\x02",
)
)
CloseMessageWindow()
#C0509
ChrTalk(
0x104,
(
"#0306F哈哈,不过,\x01",
"如果可以的话,希望他们能把\x01",
"列表整理得稍微像样一点啊。\x02\x03",
"错漏百出,害得我们\x01",
"四处乱跑,走了不少冤枉路呢。\x02",
)
)
CloseMessageWindow()
#C0510
ChrTalk(
0x103,
(
"#12P#0200F身为警察,我们也许\x01",
"无权指责这些,\x01",
"但管理未免也太混乱了。\x02",
)
)
CloseMessageWindow()
#C0511
ChrTalk(
0x8,
(
"#5P真、真抱歉……\x01",
"居民科的人其实也在努力,\x01",
"希望能确保资料的正确性。\x02",
)
)
CloseMessageWindow()
#C0512
ChrTalk(
0x8,
(
"#5P……但他们那边经常会承受\x01",
"很多来自议员们的压力……\x01",
"所以,实在是非常抱歉啊。\x02",
)
)
CloseMessageWindow()
#C0513
ChrTalk(
0x101,
(
"#12P#0005F哪、哪里……原来还有这种事啊。\x01",
"(看来,在政府机关里也会有很多麻烦事呢。)\x02",
)
)
CloseMessageWindow()
#C0514
ChrTalk(
0x102,
(
"#11P#0108F(嗯……在克洛斯贝尔,\x01",
" 议员的力量是相当强大的……)\x02\x03",
"#0100F总之,能帮上忙就好。\x02\x03",
"毕竟市政厅的人也不容易……\x01",
"如果以后再有什么事情,欢迎来找我们支援科。\x02",
)
)
CloseMessageWindow()
#C0515
ChrTalk(
0x103,
(
"#12P#0200F是啊,像这种程度的事情,\x01",
"我们应该能帮上一点忙。\x02",
)
)
CloseMessageWindow()
#C0516
ChrTalk(
0x104,
"#0300F哈,总之不用客气,有事就随时开口吧。\x02",
)
CloseMessageWindow()
#C0517
ChrTalk(
0x8,
(
"#5P好的……非常感谢。\x01",
"那到时候就麻烦各位了。\x02",
)
)
CloseMessageWindow()
OP_5A()
FadeToDark(1000, 0, -1)
OP_0D()
Sound(9, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0518
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"任务【无人住所的确认】\x07\x00",
"完成!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
SetMessageWindowPos(14, 280, 60, 3)
OP_68(0, 1500, 4000, 0)
MoveCamera(0, 25, 0, 0)
OP_6E(440, 0)
SetCameraDistance(24500, 0)
SetChrPos(0x0, 0, 0, 4000, 0)
SetChrPos(0x1, 0, 0, 4000, 0)
SetChrPos(0x2, 0, 0, 4000, 0)
SetChrPos(0x3, 0, 0, 4000, 0)
OP_4C(0x8, 0xFF)
OP_29(0x3, 0x2, 0x1E)
OP_29(0x3, 0x1, 0x8)
OP_29(0x3, 0x4, 0x10)
SetScenarioFlags(0x0, 7)
Sleep(500)
EventEnd(0x5)
Return()
# Function_29_ABEA end
def Function_30_B1B6(): pass
label("Function_30_B1B6")
EventBegin(0x0)
OP_4B(0x9, 0xFF)
FadeToDark(1000, 0, -1)
OP_0D()
OP_68(4650, 5200, 16390, 0)
MoveCamera(38, 25, 0, 0)
OP_6E(540, 0)
SetCameraDistance(18000, 0)
SetChrPos(0x101, 4160, 4000, 16219, 0)
SetChrPos(0x102, 5480, 4000, 16219, 0)
SetChrPos(0x103, 4160, 4000, 14800, 0)
SetChrPos(0x104, 5480, 4000, 14800, 0)
SetChrPos(0x9, 4870, 4000, 18550, 0)
CreatePortrait(0, 112, 8, 368, 264, 0, 0, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x1, "c_vis030.itp")
FadeToBright(1000, 0)
OP_0D()
#C0519
ChrTalk(
0x9,
(
"#5P呼啊啊,真是的,\x01",
"怎么会有这种事……\x02",
)
)
CloseMessageWindow()
OP_63(0x9, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(500)
OP_93(0x9, 0xB4, 0x1F4)
#C0520
ChrTalk(
0x9,
(
"#5P啊,你们!\x01",
"是警察局·特别任务支援科的人,没错吧!?\x02",
)
)
CloseMessageWindow()
OP_95(0x9, 4870, 4000, 17950, 2000, 0x0)
#C0521
ChrTalk(
0x101,
(
"#6P#0000F是的,没错。\x01",
"我们是为了接受支援请求而来的。\x02\x03",
"那个,听说是出了\x01",
"很严重的事情……\x02",
)
)
CloseMessageWindow()
#C0522
ChrTalk(
0x102,
"#12P#0100F莫非是什么东西被盗了吗?\x02",
)
CloseMessageWindow()
#C0523
ChrTalk(
0x9,
"#5P那、那个……\x02",
)
CloseMessageWindow()
OP_82(0x0, 0x64, 0xBB8, 0x1F4)
#C0524
ChrTalk(
0x9,
(
"#5P#4S正是如此啊!!\x01",
"有个非常重要的东西被盗走了!!\x02",
)
)
CloseMessageWindow()
OP_63(0x0, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x1, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x2, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x3, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(500)
#C0525
ChrTalk(
0x103,
"#12P#0205F原来还真的是盗窃案啊。\x02",
)
CloseMessageWindow()
#C0526
ChrTalk(
0x104,
(
"#12P#0305F竟然能从市政厅里\x01",
"把东西偷走?\x02\x03",
"#0306F虽然早就知道纪念庆典中应该会有很多风波,\x01",
"但真没想到会出现这么胆大妄为的家伙啊……\x02",
)
)
CloseMessageWindow()
#C0527
ChrTalk(
0x101,
"#6P#0001F那么,被盗走的东西是……\x02",
)
CloseMessageWindow()
#C0528
ChrTalk(
0x9,
(
"#5P啊啊……就是原本放置在那里的\x01",
"巨大雕像啊。\x02",
)
)
CloseMessageWindow()
OP_68(2790, 4200, 19050, 2000)
MoveCamera(26, 25, 0, 2000)
def lambda_B56D():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x0, 1, lambda_B56D)
def lambda_B57A():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x1, 1, lambda_B57A)
def lambda_B587():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x2, 1, lambda_B587)
def lambda_B594():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x3, 1, lambda_B594)
def lambda_B5A1():
OP_93(0xFE, 0x10E, 0x190)
ExitThread()
QueueWorkItem(0x9, 1, lambda_B5A1)
Sleep(2000)
OP_63(0x0, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x1)
OP_63(0x1, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x1)
OP_63(0x2, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x1)
OP_63(0x3, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x1)
Sleep(1200)
#C0529
ChrTalk(
0x101,
(
"#6P#0003F这、这么一说……\x01",
"好像确实能感觉到\x01",
"那里缺了点什么东西啊。\x02",
)
)
CloseMessageWindow()
#C0530
ChrTalk(
0x103,
(
"#12P#0205F真的被盗了呢。\x01",
"……稍微有些难以置信。\x02",
)
)
CloseMessageWindow()
#C0531
ChrTalk(
0x9,
(
"#5P我也是一样\x01",
"难以置信啊……\x02",
)
)
CloseMessageWindow()
OP_5A()
Fade(500)
OP_68(4650, 5200, 16390, 0)
MoveCamera(38, 25, 0, 0)
OP_6E(540, 0)
SetCameraDistance(18000, 0)
OP_0D()
OP_93(0x9, 0xB4, 0x1F4)
def lambda_B6D6():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x0, 1, lambda_B6D6)
Sleep(10)
def lambda_B6E6():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x1, 1, lambda_B6E6)
def lambda_B6F3():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x2, 1, lambda_B6F3)
Sleep(18)
def lambda_B703():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x3, 1, lambda_B703)
Sleep(300)
#C0532
ChrTalk(
0x9,
(
"#5P那座雕像名叫『圣徒的祈祷』,\x01",
"是克洛斯贝尔自治州刚刚成立之际,\x01",
"由著名的雕刻家创作的雕像作品。\x02",
)
)
CloseMessageWindow()
#C0533
ChrTalk(
0x102,
(
"#12P#0100F马格纳斯·海克特创作的\x01",
"『圣徒的祈祷』啊。\x02\x03",
"#0103F出生于克洛斯贝尔的稀世雕刻家,\x01",
"为了歌颂自治州的诞生所雕刻出的杰作……\x02\x03",
"#0100F据我所知,正因为它有着那样的历史背景,\x01",
"所以一直都被视为市政厅的象征呢。\x02\x03",
"#0108F但在这座雕像的面前,召开那种\x01",
"让帝国派与共和国派谋取私利的议会,\x01",
"倒真是很具有讽刺意义呢。\x02",
)
)
CloseMessageWindow()
#C0534
ChrTalk(
0x9,
"#5P哈哈,我完全赞同。\x02",
)
CloseMessageWindow()
#C0535
ChrTalk(
0x9,
(
"#5P算了,先不管这些。\x01",
"总之,那座雕像可以说是克洛斯贝尔\x01",
"的骄傲,绝对是十分重要的东西。\x02",
)
)
CloseMessageWindow()
#C0536
ChrTalk(
0x9,
(
"#5P更何况,今天下午还要举办\x01",
"闭幕式与宾客的招待会……\x02",
)
)
CloseMessageWindow()
#C0537
ChrTalk(
0x9,
(
"#5P呼,在这种难得的庆祝日,\x01",
"实在是不能让大家看到\x01",
"这种丢脸的情景啊……\x02",
)
)
CloseMessageWindow()
#C0538
ChrTalk(
0x101,
(
"#6P#0003F确实如此,再这么下去,\x01",
"克洛斯贝尔将会成为各国的笑料。\x02",
)
)
CloseMessageWindow()
#C0539
ChrTalk(
0x104,
(
"#12P#0301F至少克洛斯贝尔警察的\x01",
"形象与公信力会一落千丈吧。\x02\x03",
"#0306F克洛斯贝尔时代周刊之类的媒体\x01",
"肯定也会再写一堆恐怖的讽刺文章。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(12)
OP_63(0x102, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(8)
OP_63(0x103, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(12)
OP_63(0x9, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(1400)
#C0540
ChrTalk(
0x103,
(
"#12P#0203F兰迪前辈……\x01",
"请不要说那些多余的话了。\x02",
)
)
CloseMessageWindow()
#C0541
ChrTalk(
0x104,
(
"#12P#0306F不好意思啊,只是我的\x01",
"脑子里一下就想到这些了。\x02",
)
)
CloseMessageWindow()
#C0542
ChrTalk(
0x101,
(
"#6P#0003F这些情况都很有可能成真,\x01",
"就这点来说,也完全让人笑不出来啊……\x02",
)
)
CloseMessageWindow()
#C0543
ChrTalk(
0x9,
(
"#5P呼……还是别再说这种\x01",
"不吉利的话了……\x02",
)
)
CloseMessageWindow()
#C0544
ChrTalk(
0x9,
(
"#5P市政厅本来就已经\x01",
"丑闻不断,屡遭曝光了,\x01",
"所以经不起这么大的折腾了……\x02",
)
)
CloseMessageWindow()
#C0545
ChrTalk(
0x102,
(
"#12P#0103F看来,我们还是尽早展开调查为好啊……\x02\x03",
"#0105F不过,库利普先生,\x01",
"您有什么关于犯人的线索吗?\x02",
)
)
CloseMessageWindow()
#C0546
ChrTalk(
0x9,
(
"#5P啊……这个嘛,\x01",
"在现场倒是发现了这种东西。\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
SetChrName("")
#A0547
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"库利普掏出了一张卡片。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Sound(18, 0, 100, 0)
OP_C9(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0)
OP_C9(0x0, 0x0, 0x0, 0xFFFFD8F0, 0x1F4)
OP_CA(0x0, 0x0, 0x3)
OP_CA(0x0, 0x0, 0x0)
Sleep(1000)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0548
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"警察局特别任务支援科的诸位,\x01",
"挑战我的谜题,以展现你们的智慧吧。\x02",
)
)
CloseMessageWindow()
#A0549
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
" 最初的钥匙\x01",
" 在不夜之城的象征处\x01",
"抬头仰望那昏暗的天空吧\x01",
" ──怪盗B\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
SetMessageWindowPos(14, 280, 60, 3)
FadeToBright(300, 0)
OP_C9(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0)
OP_C9(0x0, 0x0, 0x0, 0x0, 0x1F4)
OP_CA(0x0, 0x0, 0x3)
OP_CA(0x0, 0x0, 0x0)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1000)
OP_64(0xFFFF)
#C0550
ChrTalk(
0x101,
"#6P#0005F怪……怪盗B……!?\x02",
)
CloseMessageWindow()
#C0551
ChrTalk(
0x102,
"#12P#0105F……他竟然会现身于克洛斯贝尔……\x02",
)
CloseMessageWindow()
#C0552
ChrTalk(
0x104,
(
"#12P#0300F哈,只是一个偷东西的贼而已,\x01",
"竟然还给自己取个\x01",
"那么拉风帅气的外号。\x02\x03",
"#0303F……嗯?等一下,那个名字……\x01",
"我以前好像在什么地方听到过呢……\x02",
)
)
CloseMessageWindow()
#C0553
ChrTalk(
0x103,
(
"#12P#0203F我也感觉好像\x01",
"有所耳闻呢。\x02\x03",
"#0200F……你们两位\x01",
"知道些什么吗?\x02",
)
)
CloseMessageWindow()
#C0554
ChrTalk(
0x101,
(
"#6P#0003F我也只是稍有耳闻而已……\x02\x03",
"#0001F好像是个在外国不断犯案,\x01",
"超级有名的犯罪者。\x02",
)
)
CloseMessageWindow()
#C0555
ChrTalk(
0x102,
(
"#12P#0103F……以帝国为中心进行活动,\x01",
"曾经盗走过无数美术珍品,\x01",
"是个神出鬼没的怪人……\x02\x03",
"#0100F这就是被称为『怪盗B』的人物。\x02\x03",
"他的行动非常胆大妄为,在行动之前,\x01",
"总要事先送出记录着犯罪预告的卡片。\x01",
"但从来都没有被逮捕过。\x02\x03",
"他会使用多种不可思议的神奇秘术,\x01",
"轻而易举地将目标完美盗走。\x01",
"所以也有一些人视他为英雄……\x02",
)
)
CloseMessageWindow()
#C0556
ChrTalk(
0x103,
(
"#12P#0203F也就是说,那个怪盗……\x01",
"这次是将挑战书\x01",
"发给了我们吗?\x02",
)
)
CloseMessageWindow()
#C0557
ChrTalk(
0x9,
(
"#5P嗯,所以这次没有找游击士,\x01",
"而是叫你们来帮忙,原因就在这里了。\x02",
)
)
CloseMessageWindow()
#C0558
ChrTalk(
0x9,
(
"#5P因为他清清楚楚地指名\x01",
"要你们『特别任务支援科』来处理。\x02",
)
)
CloseMessageWindow()
#C0559
ChrTalk(
0x104,
"#12P#0304F……嘿,还挺有意思的嘛。\x02",
)
CloseMessageWindow()
TurnDirection(0x104, 0x101, 500)
Sleep(200)
#C0560
ChrTalk(
0x104,
(
"#12P#0300F喂,罗伊德,\x01",
"虽然不清楚这怪盗究竟是何方神圣,\x01",
"但不管怎么样,我们赶快去抓住他吧。\x02",
)
)
CloseMessageWindow()
def lambda_C245():
TurnDirection(0xFE, 0x104, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_C245)
def lambda_C252():
TurnDirection(0xFE, 0x102, 400)
ExitThread()
QueueWorkItem(0x103, 1, lambda_C252)
def lambda_C25F():
TurnDirection(0xFE, 0x103, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_C25F)
Sleep(300)
#C0561
ChrTalk(
0x101,
(
"#6P#0001F嗯,被他盗走的雕像\x01",
"也必须要夺回来……\x01",
"他的挑战,我们接受了!\x02",
)
)
CloseMessageWindow()
#C0562
ChrTalk(
0x102,
"#12P#0100F说得对,我们行动吧。\x02",
)
CloseMessageWindow()
#C0563
ChrTalk(
0x103,
(
"#12P#0200F谜题的提示是\x01",
"『不夜之城的象征』啊……\x01",
"看样子,应该就在市内吧。\x02",
)
)
CloseMessageWindow()
#C0564
ChrTalk(
0x104,
(
"#12P#0300F好,那我们就在市内\x01",
"寻找线索吧!\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
Sound(80, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0565
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"任务【市政厅的紧急请求】\x07\x00",
"开始!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
FadeToDark(1000, 0, -1)
OP_0D()
SetChrPos(0x0, 5080, 4000, 14410, 180)
OP_4C(0x9, 0xFF)
OP_CA(0x1, 0xFF, 0x0)
OP_29(0x22, 0x1, 0x0)
Sleep(500)
EventEnd(0x5)
Return()
# Function_30_B1B6 end
# Background pacing loop: the invoking character (0xFE) walks back and forth
# between two points on the same row (z = 18550), pausing 200ms at each end.
# The loop condition is the constant 1, so the exit label is effectively
# unreachable; the thread is presumably stopped externally (EndChrThread) —
# TODO confirm against the script engine's semantics.
def Function_31_C3DD(): pass

label("Function_31_C3DD")

Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_C41B")
OP_95(0xFE, 3550, 4000, 18550, 1000, 0x0)  # walk to the left-hand point
Sleep(200)
OP_95(0xFE, 5980, 4000, 18550, 1000, 0x0)  # walk to the right-hand point
Sleep(200)
Jump("Function_31_C3DD")

label("loc_C41B")

Return()

# Function_31_C3DD end
def Function_32_C41C(): pass
label("Function_32_C41C")
EventBegin(0x0)
FadeToDark(0, 0, -1)
LoadChrToIndex("chr/ch26000.itc", 0x1E)
OP_4B(0x9, 0xFF)
OP_68(-40, 5500, 20590, 0)
MoveCamera(1, 40, 0, 0)
OP_6E(500, 0)
SetCameraDistance(25270, 0)
SetChrPos(0x101, -580, 4000, 15800, 0)
SetChrPos(0x102, 550, 4000, 15800, 0)
SetChrPos(0x103, -580, 4000, 14500, 0)
SetChrPos(0x104, 550, 4000, 14500, 0)
SetChrPos(0x9, -1850, 4000, 16820, 45)
SetChrPos(0x13, -1790, 4000, 19380, 90)
SetChrPos(0x14, 470, 4000, 18500, 0)
SetChrChipByIndex(0x13, 0x1E)
SetChrSubChip(0x13, 0x1)
SetChrChipByIndex(0x14, 0x1E)
SetChrSubChip(0x14, 0x1)
BeginChrThread(0x13, 0, 0, 0)
BeginChrThread(0x14, 0, 0, 0)
ClearChrFlags(0x13, 0x80)
ClearChrBattleFlags(0x13, 0x8000)
ClearChrFlags(0x14, 0x80)
ClearChrBattleFlags(0x14, 0x8000)
SetChrFlags(0xD, 0x80)
SetChrBattleFlags(0xD, 0x8000)
SetChrFlags(0xE, 0x80)
SetChrBattleFlags(0xE, 0x8000)
SetMapObjFrame(0xFF, "model06", 0x1, 0x1)
MoveCamera(1, 19, 0, 4000)
FadeToBright(2000, 0)
OP_0D()
OP_6F(0x40)
Fade(800)
OP_68(30, 4500, 17170, 0)
MoveCamera(39, 25, 0, 0)
OP_6E(520, 0)
SetCameraDistance(19220, 0)
OP_0D()
#C0566
ChrTalk(
0x9,
(
"#6P真没想到,竟然这么快\x01",
"就找回来了……\x02",
)
)
CloseMessageWindow()
def lambda_C5A6():
OP_93(0xFE, 0x87, 0x190)
ExitThread()
QueueWorkItem(0x9, 1, lambda_C5A6)
Sleep(20)
def lambda_C5B6():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x0, 1, lambda_C5B6)
Sleep(12)
def lambda_C5C6():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x1, 1, lambda_C5C6)
def lambda_C5D3():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x2, 1, lambda_C5D3)
Sleep(15)
def lambda_C5E3():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x3, 1, lambda_C5E3)
Sleep(400)
#C0567
ChrTalk(
0x9,
(
"#6P支援科的各位,真是多谢了。\x01",
"我发自内心地感谢你们啊。\x02",
)
)
CloseMessageWindow()
#C0568
ChrTalk(
0x101,
(
"#11P#0012F哈哈哈……虽然费了不少周折,\x01",
"但最终能顺利解决,真是太好了。\x02",
)
)
CloseMessageWindow()
BeginChrThread(0x14, 1, 0, 33)
#C0569
ChrTalk(
0x102,
(
"#11P#0100F这样一来,闭幕式和招待会\x01",
"也就可以顺利进行了吧。\x02",
)
)
CloseMessageWindow()
#C0570
ChrTalk(
0x9,
(
"#6P嗯,是啊。\x01",
"如此一来,我也就可以放心了。\x02",
)
)
CloseMessageWindow()
WaitChrThread(0x14, 1)
OP_4B(0x13, 0xFF)
OP_4B(0x14, 0xFF)
def lambda_C6FB():
OP_95(0xFE, -1570, 4000, 18500, 1000, 0x0)
ExitThread()
QueueWorkItem(0x13, 1, lambda_C6FB)
OP_95(0x14, -1570, 4000, 18500, 1000, 0x0)
OP_95(0x14, -490, 4000, 18500, 1000, 0x0)
OP_93(0x14, 0xB4, 0x1F4)
#C0571
ChrTalk(
0x13,
"#5P好,安放完毕了!\x02",
)
CloseMessageWindow()
#C0572
ChrTalk(
0x14,
"#11P感谢您的惠顾~!\x02",
)
CloseMessageWindow()
def lambda_C777():
OP_93(0xFE, 0x2D, 0x190)
ExitThread()
QueueWorkItem(0x9, 1, lambda_C777)
Sleep(20)
def lambda_C787():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x0, 1, lambda_C787)
Sleep(12)
def lambda_C797():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x1, 1, lambda_C797)
def lambda_C7A4():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x2, 1, lambda_C7A4)
Sleep(15)
def lambda_C7B4():
OP_93(0xFE, 0x0, 0x190)
ExitThread()
QueueWorkItem(0x3, 1, lambda_C7B4)
Sleep(300)
#C0573
ChrTalk(
0x9,
(
"#6P噢,好的,\x01",
"真是辛苦了。\x02",
)
)
CloseMessageWindow()
#C0574
ChrTalk(
0x104,
"#12P#0300F多谢,帮大忙了啊。\x02",
)
CloseMessageWindow()
#C0575
ChrTalk(
0x13,
"#5P哈哈,可真是件不得了的大行李啊。\x02",
)
CloseMessageWindow()
#C0576
ChrTalk(
0x14,
(
"#11P那么,我就先\x01",
"失陪啦~!\x02",
)
)
CloseMessageWindow()
def lambda_C850():
label("loc_C850")
TurnDirection(0xFE, 0x13, 300)
Yield()
Jump("loc_C850")
QueueWorkItem2(0x0, 1, lambda_C850)
def lambda_C862():
label("loc_C862")
TurnDirection(0xFE, 0x13, 300)
Yield()
Jump("loc_C862")
QueueWorkItem2(0x1, 1, lambda_C862)
def lambda_C874():
label("loc_C874")
TurnDirection(0xFE, 0x13, 300)
Yield()
Jump("loc_C874")
QueueWorkItem2(0x2, 1, lambda_C874)
def lambda_C886():
label("loc_C886")
TurnDirection(0xFE, 0x13, 300)
Yield()
Jump("loc_C886")
QueueWorkItem2(0x3, 1, lambda_C886)
def lambda_C898():
label("loc_C898")
TurnDirection(0xFE, 0x13, 300)
Yield()
Jump("loc_C898")
QueueWorkItem2(0x9, 1, lambda_C898)
BeginChrThread(0x13, 0, 0, 34)
BeginChrThread(0x14, 0, 0, 34)
Sleep(3500)
#C0577
ChrTalk(
0x9,
(
"#6P呼……也给他们\x01",
"添了不少麻烦啊。\x01",
"稍后可得多给人家些小费才行。\x02",
)
)
CloseMessageWindow()
OP_63(0x9, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(500)
EndChrThread(0x0, 0x1)
EndChrThread(0x1, 0x1)
EndChrThread(0x2, 0x1)
EndChrThread(0x3, 0x1)
def lambda_C923():
OP_93(0xFE, 0x87, 0x190)
ExitThread()
QueueWorkItem(0x9, 1, lambda_C923)
def lambda_C930():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x0, 1, lambda_C930)
def lambda_C93D():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x1, 1, lambda_C93D)
def lambda_C94A():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x2, 1, lambda_C94A)
def lambda_C957():
OP_93(0xFE, 0x13B, 0x190)
ExitThread()
QueueWorkItem(0x3, 1, lambda_C957)
Sleep(300)
#C0578
ChrTalk(
0x9,
(
"#6P对了对了,作为我个人的褒奖,\x01",
"也要送给你们一些薄礼。\x02",
)
)
CloseMessageWindow()
OP_9B(0x0, 0x9, 0x0, 0x1F4, 0x3E8, 0x0)
Sleep(200)
SetChrName("")
Sound(17, 0, 100, 0)
FadeToDark(300, 0, 100)
#A0579
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"从库利普主任处收下了包裹。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
OP_5A()
OP_9B(0x1, 0x9, 0xB4, 0x1F4, 0x3E8, 0x0)
#C0580
ChrTalk(
0x101,
(
"#11P#0005F这可真是……\x01",
"多谢您了。\x02",
)
)
CloseMessageWindow()
#C0581
ChrTalk(
0x9,
(
"#6P那么,我就先失陪了,\x01",
"还得去送送搬运公司\x01",
"的员工呢。\x02",
)
)
CloseMessageWindow()
def lambda_CA66():
label("loc_CA66")
TurnDirection(0xFE, 0x9, 300)
Yield()
Jump("loc_CA66")
QueueWorkItem2(0x0, 1, lambda_CA66)
def lambda_CA78():
label("loc_CA78")
TurnDirection(0xFE, 0x9, 300)
Yield()
Jump("loc_CA78")
QueueWorkItem2(0x1, 1, lambda_CA78)
def lambda_CA8A():
label("loc_CA8A")
TurnDirection(0xFE, 0x9, 300)
Yield()
Jump("loc_CA8A")
QueueWorkItem2(0x2, 1, lambda_CA8A)
def lambda_CA9C():
label("loc_CA9C")
TurnDirection(0xFE, 0x9, 300)
Yield()
Jump("loc_CA9C")
QueueWorkItem2(0x3, 1, lambda_CA9C)
SetChrFlags(0x9, 0x40)
SetChrFlags(0x13, 0x80)
SetChrBattleFlags(0x13, 0x8000)
SetChrFlags(0x14, 0x80)
SetChrBattleFlags(0x14, 0x8000)
OP_95(0x9, -7660, 4000, 12460, 2000, 0x0)
Sleep(300)
#C0582
ChrTalk(
0x104,
"#12P#0303F呼,一块石头落了地啊。\x02",
)
CloseMessageWindow()
#C0583
ChrTalk(
0x102,
(
"#11P#0100F虽然四处奔波、不断碰壁,\x01",
"但总算顺利解决了。\x02",
)
)
CloseMessageWindow()
EndChrThread(0x103, 0x1)
TurnDirection(0x103, 0x101, 400)
Sleep(200)
#C0584
ChrTalk(
0x103,
(
"#12P#0205F罗伊德前辈,包裹里\x01",
"是什么东西呢?\x02",
)
)
CloseMessageWindow()
#C0585
ChrTalk(
0x101,
"#5P#0005F啊,对了,我看看。\x02",
)
CloseMessageWindow()
EndChrThread(0x0, 0x1)
EndChrThread(0x1, 0x1)
EndChrThread(0x2, 0x1)
EndChrThread(0x3, 0x1)
def lambda_CBAF():
TurnDirection(0xFE, 0x104, 400)
ExitThread()
QueueWorkItem(0x101, 1, lambda_CBAF)
def lambda_CBBC():
TurnDirection(0xFE, 0x101, 400)
ExitThread()
QueueWorkItem(0x104, 1, lambda_CBBC)
def lambda_CBC9():
TurnDirection(0xFE, 0x102, 400)
ExitThread()
QueueWorkItem(0x103, 1, lambda_CBC9)
def lambda_CBD6():
TurnDirection(0xFE, 0x103, 400)
ExitThread()
QueueWorkItem(0x102, 1, lambda_CBD6)
Sleep(400)
SetChrName("")
FadeToDark(300, 0, 100)
#A0586
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"包裹中装有『射手珠』。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
Sound(17, 0, 100, 0)
#A0587
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_ITEM, '破言之牙'),
scpstr(SCPSTR_CODE_COLOR, 0x0),
"获得了\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
AddItemNumber('破言之牙', 1)
#C0588
ChrTalk(
0x103,
(
"#12P#0200F这是……结晶回路啊。\x01",
"看起来,好像是无法通过一般渠道\x01",
"购得的种类呢……\x02",
)
)
CloseMessageWindow()
#C0589
ChrTalk(
0x104,
(
"#12P#0305F市政厅的职员会有这种东西,\x01",
"还真有些令人意外啊。\x02",
)
)
CloseMessageWindow()
#C0590
ChrTalk(
0x101,
(
"#5P#0005F是啊,他为什么会有\x01",
"这种东西……\x02",
)
)
CloseMessageWindow()
OP_A7(0x9, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
SetChrPos(0x9, -15910, 4000, 10130, 39)
#N0591
NpcTalk(
0x9,
"男性的声音",
"啊啊,各位……!\x02",
)
CloseMessageWindow()
SetMapObjFlags(0x3, 0x4)
def lambda_CD53():
label("loc_CD53")
TurnDirection(0xFE, 0x9, 400)
Yield()
Jump("loc_CD53")
QueueWorkItem2(0x0, 1, lambda_CD53)
def lambda_CD65():
label("loc_CD65")
TurnDirection(0xFE, 0x9, 400)
Yield()
Jump("loc_CD65")
QueueWorkItem2(0x1, 1, lambda_CD65)
def lambda_CD77():
label("loc_CD77")
TurnDirection(0xFE, 0x9, 400)
Yield()
Jump("loc_CD77")
QueueWorkItem2(0x2, 1, lambda_CD77)
def lambda_CD89():
label("loc_CD89")
TurnDirection(0xFE, 0x9, 400)
Yield()
Jump("loc_CD89")
QueueWorkItem2(0x3, 1, lambda_CD89)
OP_68(-12890, 5500, 12540, 2200)
Sleep(2200)
def lambda_CDAF():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x9, 1, lambda_CDAF)
OP_68(-1650, 5500, 16000, 3500)
OP_95(0x9, -9550, 4000, 14890, 4000, 0x0)
OP_95(0x9, -4010, 4000, 16140, 4000, 0x0)
OP_6F(0x1)
EndChrThread(0x0, 0x1)
EndChrThread(0x1, 0x1)
EndChrThread(0x2, 0x1)
EndChrThread(0x3, 0x1)
#C0592
ChrTalk(
0x9,
(
"#6P听说雕像已经找回来了,\x01",
"是真的吗!?\x02",
)
)
CloseMessageWindow()
OP_93(0x9, 0x2D, 0x1F4)
Sleep(100)
OP_63(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
#C0593
ChrTalk(
0x9,
(
"#6P哦,是真的!\x01",
"是你们帮忙找到的吧!\x02",
)
)
CloseMessageWindow()
#C0594
ChrTalk(
0x9,
(
"#6P竟然都已经搬回来了,\x01",
"哎呀,真是了不起啊。\x02",
)
)
CloseMessageWindow()
OP_93(0x9, 0x5A, 0x1F4)
Sleep(200)
#C0595
ChrTalk(
0x9,
"#6P太感谢了,特别任务支援科的诸位!\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x104, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x103, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1000)
OP_64(0xFFFF)
#C0596
ChrTalk(
0x101,
(
"#11P#0005F那、那个,库利普先生……\x01",
"您不是刚刚才\x01",
"从市政厅出去了吗?\x02",
)
)
CloseMessageWindow()
#C0597
ChrTalk(
0x9,
"#6P哎,你在说什么啊?\x02",
)
CloseMessageWindow()
#C0598
ChrTalk(
0x9,
(
"#6P我刚才一直在对面的大厅里\x01",
"商讨闭幕式的相关事宜。\x02",
)
)
CloseMessageWindow()
#C0599
ChrTalk(
0x102,
(
"#11P#0105F难、难道说……\x01",
"……怎么会…………\x02",
)
)
CloseMessageWindow()
#C0600
ChrTalk(
0x103,
(
"#12P#0203F那就是怪盗B的易容术吗……\x01",
"实在是太惊人了……\x02",
)
)
CloseMessageWindow()
#C0601
ChrTalk(
0x104,
(
"#12P#0301F真不愧是国际有名的怪盗……\x01",
"好像不是那么轻易就能抓到的呢……\x02",
)
)
CloseMessageWindow()
#C0602
ChrTalk(
0x9,
"#6P那个,怎么了吗?\x02",
)
CloseMessageWindow()
#C0603
ChrTalk(
0x9,
(
"#6P不管怎么说,真是多谢各位了!\x01",
"这样一来,就能安心召开闭幕式啦!\x02",
)
)
CloseMessageWindow()
#C0604
ChrTalk(
0x101,
(
"#11P#0000F哈哈哈,是啊。\x02\x03",
"#0003F(怪盗B吗……\x01",
" 虽然看起来好像没有什么恶意,\x01",
" 但毫无疑问是个犯罪者呢。)\x02\x03",
"#0001F(真希望有朝一日能与他\x01",
" 做个彻底的了断啊……)\x02",
)
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
Sound(9, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
#A0605
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"任务【市政厅的紧急请求】\x07\x00",
"完成!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
SetMessageWindowPos(14, 280, 60, 3)
SetChrPos(0x0, 0, 0, -2210, 180)
OP_4C(0x9, 0xFF)
OP_D5(0x1E)
ClearMapObjFlags(0x3, 0x4)
ClearChrFlags(0xD, 0x80)
ClearChrBattleFlags(0xD, 0x8000)
SetChrPos(0x9, -1820, 4000, 17030, 225)
SetChrPos(0xD, -3170, 4000, 16140, 45)
ClearChrFlags(0x9, 0x40)
ClearChrFlags(0xD, 0x10)
OP_29(0x22, 0x4, 0x10)
OP_29(0x22, 0x1, 0x7)
Sleep(500)
EventEnd(0x5)
Return()
# Function_32_C41C end
# Helper thread for the statue-delivery scene (started from Function_32 via
# BeginChrThread(0x14, 1, 0, 33)): mover 0x14 steps into position, then both
# delivery workers (0x14, 0x13) turn — OP_93 presumably means face-angle
# (0x0 = north, 0xB4 = 180° = south) over 0x1F4 = 500ms.
def Function_33_D241(): pass

label("Function_33_D241")

OP_95(0x14, -1610, 4000, 18500, 1000, 0x0)
OP_93(0x14, 0x0, 0x1F4)
OP_93(0x13, 0xB4, 0x1F4)
Return()

# Function_33_D241 end
# Exit path for the two delivery workers (run on chr threads 0x13 and 0x14
# via BeginChrThread(..., 0, 0, 34) in Function_32): the character walks
# through two waypoints toward the hall exit.
def Function_34_D264(): pass

label("Function_34_D264")

OP_95(0xFE, -4770, 4000, 17860, 2000, 0x0)
OP_95(0xFE, -8119, 4000, 12220, 2000, 0x0)
Return()

# Function_34_D264 end
# Soft barrier: when the player tries to approach the seminar hall, Lloyd
# (0x101) thinks better of disturbing Section 1's security detail, and the
# party leader (0x0) is pushed back to a safe position.
def Function_35_D28D(): pass

label("Function_35_D28D")

EventBegin(0x1)

#C0606
ChrTalk(
0x101,
(
"#0000F(研讨会会场的警备工作\x01",
" 有一科和亚里欧斯先生在负责……)\x02\x03",
"(还是不要过去打扰他们了。)\x02",
)
)

CloseMessageWindow()
SetChrPos(0x0, -13650, 4000, 12700, 45)  # step the party back from the doorway
EventEnd(0x4)
Return()

# Function_35_D28D end
def Function_36_D306(): pass
label("Function_36_D306")
TalkBegin(0xFF)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA7, 6)), scpexpr(EXPR_END)), "loc_D384")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xBE, 6)), scpexpr(EXPR_END)), "loc_D37F")
SetChrName("")
#A0607
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"门内传来了市长和议员们的声音,\x01",
"好像正在谈论沉闷严肃的话题……\x02\x03",
"还是不要打扰他们为好。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
label("loc_D37F")
Jump("loc_D51B")
label("loc_D384")
FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
#A0608
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"克洛斯贝尔自治州政府\x01",
" 市长办公室 \x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xB8, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_D4FA")
#C0609
ChrTalk(
0x101,
(
"#0000F这里好像就是……麦克道尔市长\x01",
"的办公室啊。\x02",
)
)
CloseMessageWindow()
#C0610
ChrTalk(
0x102,
(
"#0100F说起来,\x01",
"外公之前也说过\x01",
"让我们随时过来拜访他呢……\x02\x03",
"但他平时非常忙碌,\x01",
"不知道现在是否\x01",
"在房间里。\x02",
)
)
CloseMessageWindow()
#C0611
ChrTalk(
0x101,
(
"#0003F是吗……\x02\x03",
"#0000F不过,既然如此,\x01",
"我们还是应该找个时间去与他\x01",
"打声招呼比较好呢。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0xB8, 6)
OP_65(0x2, 0x1)
SetMapObjFlags(0x1, 0x10)
Jump("loc_D51B")
label("loc_D4FA")
Sound(810, 0, 100, 0)
#A0612
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"大门好像上着锁。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
label("loc_D51B")
TalkEnd(0xFF)
Return()
# Function_36_D306 end
def Function_37_D51F(): pass
label("Function_37_D51F")
TalkBegin(0xFF)
FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
#A0613
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"克洛斯贝尔自治州政府\x01",
" 市政厅行政区域\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_D5A8")
#C0614
ChrTalk(
0x101,
(
"#0003F好像没有必要\x01",
"进去啊。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x1, 0)
label("loc_D5A8")
TalkEnd(0xFF)
Return()
# Function_37_D51F end
# Standard decompiler epilogue: serialize the rebuilt scenario data, then run
# main under the compiler's error-trapping wrapper.
SaveToFile()

Try(main)
| [
"[email protected]"
] | |
0a0481107bc945c99cf9933d981d631dccb91e5d | 3fc01457951a956d62f5e8cc0a8067f6796ee200 | /misago/threads/api/postingendpoint/subscribe.py | c158eccbf462feee7a1f7aa449d37b93a4b7ce7f | [] | no_license | kinsney/education | 8bfa00d699a7e84701a8d49af06db22c384e0e8d | 48f832f17c2df7b64647b3db288abccf65868fe6 | refs/heads/master | 2021-05-04T01:15:03.078130 | 2016-12-04T03:18:20 | 2016-12-04T03:18:20 | 71,164,542 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,599 | py | from misago.users.models import (
AUTO_SUBSCRIBE_NONE, AUTO_SUBSCRIBE_NOTIFY, AUTO_SUBSCRIBE_NOTIFY_AND_EMAIL)
from ...models import Subscription
from . import PostingEndpoint, PostingMiddleware
class SubscribeMiddleware(PostingMiddleware):
    """Auto-subscribe the poster to a thread per their profile preferences.

    Preferences are tri-state: AUTO_SUBSCRIBE_NONE (do nothing),
    AUTO_SUBSCRIBE_NOTIFY (subscribe without e-mail) and
    AUTO_SUBSCRIBE_NOTIFY_AND_EMAIL (subscribe with e-mail).
    """

    def use_this_middleware(self):
        # Subscription state is only touched when content is created;
        # editing an existing post never (re)subscribes anyone.
        return self.mode != PostingEndpoint.EDIT

    def post_save(self, serializer):
        self.subscribe_new_thread()
        self.subscribe_replied_thread()

    def subscribe_new_thread(self):
        """Subscribe the starter of a brand-new thread."""
        if self.mode != PostingEndpoint.START:
            return

        if self.user.subscribe_to_started_threads == AUTO_SUBSCRIBE_NONE:
            return

        self.user.subscription_set.create(
            category=self.thread.category,
            thread=self.thread,
            send_email=self.user.subscribe_to_started_threads == AUTO_SUBSCRIBE_NOTIFY_AND_EMAIL
        )

    def subscribe_replied_thread(self):
        """Subscribe a replier, but only on their first reply to the thread."""
        if self.mode != PostingEndpoint.REPLY:
            return

        if self.user.subscribe_to_replied_threads == AUTO_SUBSCRIBE_NONE:
            return

        # A subscription row already exists (whatever its state): leave it
        # alone. (Replaces a try/except Subscription.DoesNotExist that bound
        # an unused variable just to probe for existence.)
        if self.user.subscription_set.filter(thread=self.thread).exists():
            return

        # The post being saved is already persisted, so a count above one
        # means the user has replied to this thread before.
        if self.user.post_set.filter(thread=self.thread).count() > 1:
            return

        self.user.subscription_set.create(
            category=self.thread.category,
            thread=self.thread,
            send_email=self.user.subscribe_to_replied_threads == AUTO_SUBSCRIBE_NOTIFY_AND_EMAIL
        )
| [
"[email protected]"
] | |
e7880e161d34d1259ef590b01aac6ce92b5c121b | ff0b9fad40af0f7a792033884db728756ea83756 | /forge/apps/OptiML/src/NeuralNetwork/examples/mnist/visualize.py | f98383826111dc0c413d51ed09d67edf53958b49 | [] | no_license | das-projects/Grothendieck | 721d7ca60c4838385c43bdc17894cb154507c302 | 4effc374f8050655db4820db3a7deaf63effb2a4 | refs/heads/master | 2020-04-02T06:16:56.494997 | 2016-08-12T10:05:55 | 2016-08-12T10:05:55 | 65,486,230 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | import numpy
import scipy.misc
import sys
# Get the image number to display.
# Usage: python visualize.py <train_image_index>
if len(sys.argv) < 2:
    print 'Specify the training image number to show as an input argument'
    print 'Requires that train_data.txt be generated.'
    print 'Example: >>> python visualize.py 100'
    sys.exit(0)
img_num = int(sys.argv[1])
# Read the image from file
# NOTE(review): the message names 'training_data.txt' but the file opened
# below is 'train_data.txt' -- likely a stale message; confirm before fixing.
print 'Loading training_data.txt...'
train_data_file = open('train_data.txt')
img_str = ''
for i,l in enumerate(train_data_file):
    # NOTE(review): (i-1) == img_num means line 0 is skipped, i.e. the
    # first line is treated as a header/label row -- confirm against the
    # generator of train_data.txt.
    if (i-1) == img_num:
        img_str = l
        break
# Write the image to a file.
# Pixels are tab-separated floats (presumably in [0,1] -- verify); scale
# to [0,256) and reshape the flat 784-vector into a 28x28 MNIST image.
img_1D = numpy.fromstring(img_str, dtype=float, sep="\t") * 256
img = numpy.reshape(img_1D, (28,28))
name = 'img_' + str(img_num) + '.png'
scipy.misc.imsave(name, img)
print 'Saved ' + name
| [
"[email protected]"
] | |
3068a726127a93c4aa99d3ed3646918aec29708e | 4389d3bfa4ded480caf5083f410bdd2253fae767 | /20_Project/01_simple_linear_model/test2.py | 74871ccda42383f1a4616b8ad17191981aa2a1ef | [] | no_license | Freshield/LEARN_TENSORFLOW | 4fb7fec0bc7929697549ee52e453b137a24c3383 | 87be0362d24b748c841e5c9e185d2061ffae9272 | refs/heads/master | 2021-01-17T08:06:14.186041 | 2018-06-03T12:42:46 | 2018-06-03T12:42:46 | 83,846,167 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | import numpy as np
import tensorflow as tf
import pandas as pd
# Scratch script: compares NumPy vs TensorFlow shuffling, then loads a CSV.
sess = tf.InteractiveSession()
a = np.arange(10)
print a
# In-place shuffle: 'a' itself is reordered; nothing is returned.
np.random.shuffle(a)
print a
# TF equivalent: random_shuffle returns a shuffled copy of the range tensor;
# 'ta' itself stays ordered.
ta = tf.range(0, 10)
tb = tf.random_shuffle(ta)
print ta.eval()
print tb.eval()
# header=None: the CSV has no header row, so every line is data.
dataset = pd.read_csv('ciena_test.csv', header=None)
print dataset.shape
| [
"[email protected]"
] | |
400886929db0a77432f8a35f412a27327f231b80 | 3784495ba55d26e22302a803861c4ba197fd82c7 | /venv/lib/python3.6/site-packages/tensorflow_core/python/keras/api/_v2/keras/applications/__init__.py | dad149e01c0fae0a0684161a8a1002e405429ccc | [
"MIT"
] | permissive | databill86/HyperFoods | cf7c31f5a6eb5c0d0ddb250fd045ca68eb5e0789 | 9267937c8c70fd84017c0f153c241d2686a356dd | refs/heads/master | 2021-01-06T17:08:48.736498 | 2020-02-11T05:02:18 | 2020-02-11T05:02:18 | 241,407,659 | 3 | 0 | MIT | 2020-02-18T16:15:48 | 2020-02-18T16:15:47 | null | UTF-8 | Python | false | false | 1,872 | py | # This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Keras Applications are canned architectures with pre-trained weights.
"""
from __future__ import print_function as _print_function
import sys as _sys
from . import densenet
from . import imagenet_utils
from . import inception_resnet_v2
from . import inception_v3
from . import mobilenet
from . import mobilenet_v2
from . import nasnet
from . import resnet
from . import resnet50
from . import resnet_v2
from . import vgg16
from . import vgg19
from . import xception
from tensorflow.python.keras.applications.densenet import DenseNet121
from tensorflow.python.keras.applications.densenet import DenseNet169
from tensorflow.python.keras.applications.densenet import DenseNet201
from tensorflow.python.keras.applications.inception_resnet_v2 import InceptionResNetV2
from tensorflow.python.keras.applications.inception_v3 import InceptionV3
from tensorflow.python.keras.applications.mobilenet import MobileNet
from tensorflow.python.keras.applications.mobilenet_v2 import MobileNetV2
from tensorflow.python.keras.applications.nasnet import NASNetLarge
from tensorflow.python.keras.applications.nasnet import NASNetMobile
from tensorflow.python.keras.applications.resnet import ResNet101
from tensorflow.python.keras.applications.resnet import ResNet152
from tensorflow.python.keras.applications.resnet import ResNet50
from tensorflow.python.keras.applications.resnet_v2 import ResNet101V2
from tensorflow.python.keras.applications.resnet_v2 import ResNet152V2
from tensorflow.python.keras.applications.resnet_v2 import ResNet50V2
from tensorflow.python.keras.applications.vgg16 import VGG16
from tensorflow.python.keras.applications.vgg19 import VGG19
from tensorflow.python.keras.applications.xception import Xception
del _print_function
| [
"[email protected]"
] | |
42e0505d50651f07009cd94e32945d6b14075fcd | 3354e6bdd4aeb2ddec84e6a8036c90cd24b6577a | /(구)자료구조와 알고리즘/(구)Quizes/backjoon/back_5397.py | 86476955d445acfe52b6e2d808c0b39f9700d356 | [] | no_license | hchayan/Data-Structure-and-Algorithms | 1125d7073b099d8c6aae4b14fbdb5e557dcb9412 | be060447e42235e94f93a0b2f94f84d2fd560ffe | refs/heads/master | 2023-01-05T10:15:02.862700 | 2020-11-04T08:16:56 | 2020-11-04T08:16:56 | 209,513,516 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 982 | py | import sys
'''
for n in range(int(sys.stdin.readline().rstrip())):
word=[]
cursor = 0
for i in sys.stdin.readline().rstrip():
if i == "<":
if cursor != 0:
cursor-=1
elif i == ">":
if cursor != len(word):
cursor+=1
elif i =="-" and len(word) !=0:
word.pop()
else:
word = word[:cursor]+[i]+word[cursor:]
cursor+=1
print(''.join(word))
'''
# BOJ 5397 "keylogger": reconstruct each typed password.
# Two stacks model the cursor: left_stack holds the characters left of the
# cursor (in order); right_stack holds the characters right of it,
# reversed (its top is the character immediately after the cursor).
for _ in range(int(input())):
    left_stack = []
    right_stack = []
    for i in input():
        if i == '-':
            # Backspace: remove the character just left of the cursor.
            if left_stack:
                left_stack.pop()
        elif i == '<':
            # Cursor left: move one character onto the right stack.
            if left_stack:
                right_stack.append(left_stack.pop())
        elif i == '>':
            # Cursor right: move one character back to the left stack.
            if right_stack:
                left_stack.append(right_stack.pop())
        else:
            # Ordinary character: insert at the cursor position.
            left_stack.append(i)
    # right_stack is stored reversed, so un-reverse it when joining.
    left_stack.extend(reversed(right_stack))
    print(''.join(left_stack))
| [
"[email protected]"
] | |
9cfcc62db3f7835c694bd3946670e5016afa364f | c1080a13ad19d70429fb3f640989117315ef90bf | /layers/box_utils.py | 22e7dbca5a7e4208d44198c8fc666332ee2ae7a4 | [
"MIT"
] | permissive | PoisonBOx/Active_Learning_Object_Detection | ddc573d289216bee96307c7c68097fb884e37688 | 3d9ad367aa872cbf3e9d71c566042c78fe2d0e76 | refs/heads/master | 2022-04-14T18:35:30.331799 | 2020-03-25T12:48:22 | 2020-03-25T12:48:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,005 | py | # -*- coding: utf-8 -*-
import torch
def point_form(boxes):
    """Convert boxes from center-size (cx, cy, w, h) form to corner
    (xmin, ymin, xmax, ymax) form for comparison with point-form
    ground-truth data.

    Args:
        boxes: (tensor) center-size default boxes, Shape: [n, 4].
    Return:
        (tensor) the same boxes in corner form, Shape: [n, 4].
    """
    centers = boxes[:, :2]
    half_sizes = boxes[:, 2:] / 2
    return torch.cat((centers - half_sizes, centers + half_sizes), 1)
def center_size(boxes):
    """Convert boxes from corner (xmin, ymin, xmax, ymax) form to
    center-size (cx, cy, w, h) form for comparison with center-size
    ground-truth data.

    Args:
        boxes: (tensor) point-form boxes, Shape: [n, 4].
    Return:
        (tensor) the same boxes in center-size form, Shape: [n, 4].
    """
    centers = (boxes[:, :2] + boxes[:, 2:]) / 2
    sizes = boxes[:, 2:] - boxes[:, :2]
    return torch.cat((centers, sizes), 1)
# def center_size2(boxes):
# """
#
# :param boxes:
# :return:
# """
#
# output_boxes[:, 0] = (input_boxes[:, 2] + input_boxes[:, 0]) / 2
# output_boxes[:, 1] = (input_boxes[:, 3] + input_boxes[:, 1]) / 2
# output_boxes[:, 2] = input_boxes[:, 2] - input_boxes[:, 0]
# output_boxes[:, 3] = input_boxes[:, 3] - input_boxes[:, 1]
# return torch.cat((boxes[:, 2:] + (torch.abs(boxes[:, :2]))/2, # cx, cy
# boxes[:, 2:] - boxes[:, :2]), 1) # w, h
def intersect(box_a, box_b):
    """Compute the pairwise intersection area of two sets of
    corner-form boxes.

    Broadcasting [A,1,2] against [1,B,2] yields the [A,B,2] pairwise
    corner bounds without an explicit expand.

    Args:
        box_a: (tensor) bounding boxes, Shape: [A, 4].
        box_b: (tensor) bounding boxes, Shape: [B, 4].
    Return:
        (tensor) intersection areas, Shape: [A, B].
    """
    lower = torch.max(box_a[:, None, :2], box_b[None, :, :2])
    upper = torch.min(box_a[:, None, 2:], box_b[None, :, 2:])
    # Clamp at zero so disjoint boxes contribute no negative area.
    wh = (upper - lower).clamp(min=0)
    return wh[..., 0] * wh[..., 1]
def jaccard(box_a, box_b):
    """Compute the pairwise jaccard overlap (IoU) of two sets of
    corner-form boxes, i.e. intersection over union:

        IoU(A, B) = |A n B| / (|A| + |B| - |A n B|)

    Here we operate on ground-truth boxes and default boxes.

    Args:
        box_a: (tensor) ground-truth bounding boxes, Shape: [A, 4].
        box_b: (tensor) prior boxes in point form, Shape: [B, 4].
    Return:
        (tensor) jaccard overlaps, Shape: [A, B].
    """
    inter = intersect(box_a, box_b)
    wh_a = box_a[:, 2:] - box_a[:, :2]
    wh_b = box_b[:, 2:] - box_b[:, :2]
    area_a = (wh_a[:, 0] * wh_a[:, 1]).unsqueeze(1).expand_as(inter)
    area_b = (wh_b[:, 0] * wh_b[:, 1]).unsqueeze(0).expand_as(inter)
    return inter / (area_a + area_b - inter)
def match(threshold, truths, priors, variances, labels, loc_t, conf_t, idx,
          modeltype = 'SSD300'):
    """Match each prior box with the ground truth box of the highest jaccard
    overlap, encode the bounding boxes, then write the matched location and
    confidence targets for batch item `idx` into loc_t / conf_t in place.
    For KL Loss, everything is predicted in x1y1x2y2 format, so the matching
    should not transform to center form (handled inside encode()).
    Args:
        threshold: (float) The overlap threshold used when matching boxes.
        truths: (tensor) Ground truth boxes, Shape: [num_obj, 4].
        priors: (tensor) Prior boxes from priorbox layers (center-size
            form), Shape: [n_priors, 4].
        variances: (tensor) Variances corresponding to each prior coord,
            Shape: [num_priors, 4].
        labels: (tensor) All the class labels for the image, Shape: [num_obj].
        loc_t: (tensor) Tensor to be filled w/ encoded location targets
            (mutated in place at index idx).
        conf_t: (tensor) Tensor to be filled w/ matched class indices for
            conf preds (mutated in place at index idx).
        idx: (int) current batch index
    Return:
        None. Results are written into loc_t[idx] and conf_t[idx].
    """
    # jaccard index: priors are converted to corner form to match truths
    # if modeltype != 'SSD300KL':
    overlaps = jaccard(
        truths,
        point_form(priors)
    )
    # else:
    #     overlaps = jaccard(
    #         truths,
    #         priors # already in point form if KL-Loss is used
    #     )
    # (Bipartite Matching)
    # [1,num_objects] best prior for each ground truth
    best_prior_overlap, best_prior_idx = overlaps.max(1, keepdim=True)
    # [1,num_priors] best ground truth for each prior
    best_truth_overlap, best_truth_idx = overlaps.max(0, keepdim=True)
    best_truth_idx.squeeze_(0)
    best_truth_overlap.squeeze_(0)
    best_prior_idx.squeeze_(1)
    best_prior_overlap.squeeze_(1)
    # Force each ground truth's best prior to survive the threshold test
    # below by giving it an overlap of 2 (> any real IoU).
    best_truth_overlap.index_fill_(0, best_prior_idx, 2)  # ensure best prior
    # TODO refactor: index best_prior_idx with long tensor
    # ensure every gt matches with its prior of max overlap
    for j in range(best_prior_idx.size(0)):
        best_truth_idx[best_prior_idx[j]] = j
    matches = truths[best_truth_idx]          # Shape: [num_priors,4]
    # +1 because class 0 is reserved for background.
    conf = labels[best_truth_idx] + 1         # Shape: [num_priors]
    conf[best_truth_overlap < threshold] = 0  # label as background
    # If KL Loss, encoding shouldnt use center offset form
    loc = encode(matches, priors, variances, modeltype)
    loc_t[idx] = loc    # [num_priors,4] encoded offsets to learn
    conf_t[idx] = conf  # [num_priors] top class label for each prior
def encode(matched, priors, variances, modeltype='SSD300'):
    """Encode matched ground-truth boxes (corner form) as regression
    targets relative to the prior boxes (center-size form).

    Use of 'variance' is not discussed in the paper; see
    https://leimao.github.io/blog/Bounding-Box-Encoding-Decoding/ and
    https://github.com/weiliu89/caffe/issues/155#issuecomment-243541464

    Args:
        matched: (tensor) ground-truth coords per prior, point form,
            Shape: [num_priors, 4].
        priors: (tensor) prior boxes in center-offset form,
            Shape: [num_priors, 4].
        variances: (list[float]) variances of the prior boxes.
        modeltype: (str) 'SSD300KL' returns targets in x1y1x2y2 form.
    Return:
        encoded target boxes (tensor), Shape: [num_priors, 4].
    """
    # Offset of the gt center from the prior center, normalised by the
    # prior size and the center variance.
    gt_centers = (matched[:, :2] + matched[:, 2:]) / 2
    delta_xy = (gt_centers - priors[:, :2]) / (variances[0] * priors[:, 2:])
    # Log-space size ratio, scaled by the size variance.
    size_ratio = (matched[:, 2:] - matched[:, :2]) / priors[:, 2:]
    delta_wh = torch.log(size_ratio) / variances[1]
    targets = torch.cat([delta_xy, delta_wh], 1)  # [num_priors, 4]
    if modeltype == 'SSD300KL':
        # KL-Loss regresses corners directly, so transform the targets
        # back to x1y1x2y2 form.
        targets = point_form(targets)
    # target for smooth_l1_loss
    return targets
# Adapted from https://github.com/Hakuyume/chainer-ssd
def decode(loc, priors, variances, modeltype='SSD300'):
    """Decode location predictions using the priors, undoing the
    encoding applied for offset regression at train time.

    Use of 'variance' is not discussed in the paper; see
    https://leimao.github.io/blog/Bounding-Box-Encoding-Decoding/

    Args:
        loc (tensor): location predictions for loc layers,
            Shape: [num_priors, 4].
        priors (tensor): prior boxes in center-offset form,
            Shape: [num_priors, 4].
        variances: (list[float]) variances of the prior boxes.
        modeltype: (str) 'SSD300KL' means loc is predicted in
            x1y1x2y2 form.
    Return:
        decoded boxes in x1y1x2y2 form (x1y1 upper-left, x2y2
        lower-right), values in [0, 1]; Shape: [num_priors, 4].
    """
    if modeltype == 'SSD300KL':
        # KL-Loss predicts corners; convert to center-size first so the
        # shared decoding below applies. Variances are cx,cy,w,h variances.
        loc = center_size(loc)
    centers = priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:]
    sizes = priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])
    mins = centers - sizes / 2
    maxs = mins + sizes
    return torch.cat((mins, maxs), 1)
# def batch_decode(loc, priors, variances):
# """
# Same as decode, but adjusted to work for batches
#
# Decode locations from predictions using priors to undo
# the encoding we did for offset regression at train time.
# Args:
# loc (tensor): location predictions for loc layers,
# Shape: [ensemble_size,batch,num_priors,4]
# priors (tensor): Prior boxes in center-offset form.
# Shape: [ensemble_size,batch, num_priors,4].
# variances: (list[float]) Variances of priorboxes
# Return:
# decoded bounding box predictions
# """
# boxes = torch.cat((
# priors[:,:, :2] + loc[:,:, :2] * variances[0] * priors[:,:, 2:],
# priors[:,:, 2:] * torch.exp(loc[:,:, 2:] * variances[1])), 1)
# boxes[:,:, :2] -= boxes[:,:, 2:] / 2
# boxes[:,:, 2:] += boxes[:,:, :2]
# return boxes
def log_sum_exp(x):
    """Numerically stable log(sum(exp(x))) over dim 1, keepdim=True.

    Used to compute the unaveraged confidence loss across all examples
    in a batch.

    Args:
        x (Variable(tensor)): conf_preds from the conf layers.
    """
    # Subtract the global max before exponentiating so exp() cannot
    # overflow, then add it back after the log.
    shift = x.data.max()
    summed = torch.exp(x - shift).sum(1, keepdim=True)
    return summed.log() + shift
# Original author: Francisco Massa:
# https://github.com/fmassa/object-detection.torch
# Ported to PyTorch by Max deGroot (02/01/2017)
def nms(boxes, scores, overlap=0.5, top_k=200): # todo: overlap default in paper 0.45
    """Apply non-maximum suppression at test time to avoid detecting too many
    overlapping bounding boxes for a given object.
    Greedily keeps the highest-scoring box, suppresses all remaining boxes
    whose IoU with it exceeds `overlap`, and repeats.
    Args:
        boxes: (tensor) The location preds for the img in corner form,
            Shape: [num_priors, 4].
        scores: (tensor) The class pred scores for the img, Shape: [num_priors].
        overlap: (float) The overlap thresh for suppressing unnecessary boxes.
        top_k: (int) The maximum number of box preds to consider (paper: 200).
    Return:
        (keep, count): `keep` holds indices of kept boxes (first `count`
        entries valid), `count` is how many were kept.
        NOTE(review): the empty-input early return below yields only `keep`
        (a single tensor), not the (keep, count) tuple -- callers that
        unpack two values would fail on empty input; confirm intent.
    todo: pure numpy implementation might be faster according to the issues on github
    possible implementation https://www.pyimagesearch.com/2015/02/16/faster-non-maximum-suppression-python/
    """
    keep = scores.new(scores.size(0)).zero_().long()
    if boxes.numel() == 0: #number of elements
        return keep # for a class, there are no bounding boxes
    x1 = boxes[:, 0]
    y1 = boxes[:, 1]
    x2 = boxes[:, 2]
    y2 = boxes[:, 3]
    area = torch.mul(x2 - x1, y2 - y1)
    v, idx = scores.sort(0)  # sort in ascending order
    # I = I[v >= 0.01]
    idx = idx[-top_k:]  # indices of the top-k largest vals
    # Scratch buffers reused each iteration via out= / resize_as_
    # (legacy torch API) to avoid reallocating.
    xx1 = boxes.new()
    yy1 = boxes.new()
    xx2 = boxes.new()
    yy2 = boxes.new()
    w = boxes.new()
    h = boxes.new()
    # keep = torch.Tensor()
    count = 0
    while idx.numel() > 0:
        i = idx[-1]  # index of current largest val
        # keep.append(i)
        keep[count] = i
        count += 1
        if idx.size(0) == 1:
            break
        idx = idx[:-1]  # remove kept element from view
        # load bboxes of next highest vals
        torch.index_select(x1, 0, idx, out=xx1)
        torch.index_select(y1, 0, idx, out=yy1)
        torch.index_select(x2, 0, idx, out=xx2)
        torch.index_select(y2, 0, idx, out=yy2)
        # store element-wise max with next highest score:
        # clamp the remaining corners against box i's corners to get the
        # pairwise intersection rectangle.
        xx1 = torch.clamp(xx1, min=x1[i])
        yy1 = torch.clamp(yy1, min=y1[i])
        xx2 = torch.clamp(xx2, max=x2[i])
        yy2 = torch.clamp(yy2, max=y2[i])
        w.resize_as_(xx2)
        h.resize_as_(yy2)
        w = xx2 - xx1
        h = yy2 - yy1
        # check sizes of xx1 and xx2.. after each iteration; negative
        # width/height means no overlap, so clamp to zero area.
        w = torch.clamp(w, min=0.0)
        h = torch.clamp(h, min=0.0)
        inter = w*h
        # IoU = i / (area(a) + area(b) - i)
        rem_areas = torch.index_select(area, 0, idx)  # load remaining areas)
        union = (rem_areas - inter) + area[i]
        IoU = inter/union  # store result in iou
        # keep only elements with an IoU <= overlap
        # (le = less or equal, creates a binary mask)
        idx = idx[IoU.le(overlap)]
    return keep, count
#
# def nms_uncertainty_sampling(boxes, scores, overlap=0.5, top_k=200, object_treshold = None):
# """
# This function takes (un)certainty scores and bounding boxes, and returns the top b
#
#
# """
#
# keep = scores.new(scores.size(0)).zero_().long()
# if boxes.numel() == 0: #number of elements
# return keep # for a class, there are no bounding boxes
# x1 = boxes[:, 0]
# y1 = boxes[:, 1]
# x2 = boxes[:, 2]
# y2 = boxes[:, 3]
# area = torch.mul(x2 - x1, y2 - y1)
# v, idx = scores.sort(0) # sort in ascending order
# # I = I[v >= 0.01]
# idx = idx[-top_k:] # indices of the top-k largest vals
# xx1 = boxes.new()
# yy1 = boxes.new()
# xx2 = boxes.new()
# yy2 = boxes.new()
# w = boxes.new()
# h = boxes.new()
#
# # keep = torch.Tensor()
# count = 0
# while idx.numel() > 0:
# i = idx[-1] # index of current largest val
# # keep.append(i)
# keep[count] = i
# count += 1
# if idx.size(0) == 1:
# break
# idx = idx[:-1] # remove kept element from view
# # load bboxes of next highest vals
# torch.index_select(x1, 0, idx, out=xx1)
# torch.index_select(y1, 0, idx, out=yy1)
# torch.index_select(x2, 0, idx, out=xx2)
# torch.index_select(y2, 0, idx, out=yy2)
# # store element-wise max with next highest score
# xx1 = torch.clamp(xx1, min=x1[i])
# yy1 = torch.clamp(yy1, min=y1[i])
# xx2 = torch.clamp(xx2, max=x2[i])
# yy2 = torch.clamp(yy2, max=y2[i])
# w.resize_as_(xx2)
# h.resize_as_(yy2)
# w = xx2 - xx1
# h = yy2 - yy1
# # check sizes of xx1 and xx2.. after each iteration
# w = torch.clamp(w, min=0.0)
# h = torch.clamp(h, min=0.0)
# inter = w*h
# # IoU = i / (area(a) + area(b) - i)
# rem_areas = torch.index_select(area, 0, idx) # load remaining areas)
# union = (rem_areas - inter) + area[i]
# IoU = inter/union # store result in iou
#
# # keep only elements with an IoU <= overlap
# print(IoU.le(overlap)) #le = less or equal, creates a binary mask
# idx = idx[IoU.le(overlap)]
# return keep, count
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.