Dataset column schema (the rows that follow are pipe-separated values in this column order):
blob_id: stringlengths 40 to 40
directory_id: stringlengths 40 to 40
path: stringlengths 3 to 616
content_id: stringlengths 40 to 40
detected_licenses: sequencelengths 0 to 112
license_type: stringclasses, 2 values
repo_name: stringlengths 5 to 115
snapshot_id: stringlengths 40 to 40
revision_id: stringlengths 40 to 40
branch_name: stringclasses, 777 values
visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id: int64, 4.92k to 681M (contains nulls ⌀)
star_events_count: int64, 0 to 209k
fork_events_count: int64, 0 to 110k
gha_license_id: stringclasses, 22 values
gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50 (contains nulls ⌀)
gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19 (contains nulls ⌀)
gha_language: stringclasses, 149 values
src_encoding: stringclasses, 26 values
language: stringclasses, 1 value
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 3 to 10.2M
extension: stringclasses, 188 values
content: stringlengths 3 to 10.2M
authors: sequencelengths 1 to 1
author_id: stringlengths 1 to 132
774d14b2179139ab271f99c788c217d85202583e | f61db5940e29773aba8fc342a21de00e91a5ab2e | /base/day9/02python操作文件.py | a7f6d0023d40ea4faa5dadda9bbcdb01e1cb4462 | [] | no_license | liyaozr/project | c17a9dcbcda38fe9a15ec4c41a01242a13695991 | 0b0fc10e267ceb19f6792b490fede177035459fe | refs/heads/master | 2020-11-29T18:38:03.297369 | 2020-03-10T01:11:00 | 2020-03-10T01:11:00 | 230,190,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,339 | py | """
============================
Author:柠檬班-木森
Time:2020/1/13 20:28
E-mail:[email protected]
Company:湖南零檬信息技术有限公司
============================
"""
"""
python操作文件
open的常用参数:
第一个:要打开的文件名字或者文件路径
第二个参数:文件打开的模式
r:只读模式
rb:只读模式,以二进制的编码格式去打开文件
第三个参数:
encoding:用来指定打开文件的编码格式(使用rb的时候,不需要加该参数)
"""
# To read a file in the same directory, you can pass just the file name
# Open the file
# f = open("01内置函数的补充.py", "r", encoding="utf8")
# # To read a file that is not in the same directory, write out its full path
# f = open(r"C:\project\py26_project\py26_01day\02python中的数值.py", "r", encoding="utf8")
#
# # Read the contents
# content = f.read()
#
# # Print the contents that were read
# print("Contents read from the file:", content)
#
# # Close the file
# f.close()
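# A sketch of the same text-file read using a context manager (assumption: the
# file name is the illustrative one from the example above; "with" closes the
# file automatically, so no explicit f.close() is needed):
# with open("01内置函数的补充.py", "r", encoding="utf8") as f:
#     print("Contents read from the file:", f.read())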
# ------------------- How to read images, videos and other binary files ----------------
# To read a file that is not in the same directory, write out its full path
f = open("bj2.png", "rb")
# Read the contents
content = f.read()
# Print the contents that were read
print("Contents read from the file:", content)
# Close the file
f.close() | [
"[email protected]"
] | |
de1275ebc2f6aa4b9161b36c637abba3cfb8339b | 055b7c4c2118e6e862cfae344d722e8e90534cb4 | /config.py | 5aa53ff4827b52082755f58b81f4fb855ebf1ae7 | [] | no_license | Omulosi/iReporter | 745b3194f5a06371ca01c4d790cac763a09cf89f | db80d76b84d786330fb389d94c2623cbbad13be9 | refs/heads/develop | 2022-12-09T13:42:32.856875 | 2019-04-23T04:14:27 | 2019-04-23T04:14:27 | 158,638,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,162 | py |
'''
instance.config
------------------
This module provides default configuration values.
'''
import os
from datetime import timedelta
from dotenv import load_dotenv
basedir = os.path.abspath(os.path.dirname(__file__))
load_dotenv(os.path.join(basedir, '.env'))
class Config:
'''
Base configuration values
'''
SECRET_KEY = os.environ.get('SECRET_KEY')
JWT_SECRET_KEY = os.environ.get('JWT_SECRET_KEY')
JWT_ACCESS_TOKEN_EXPIRES = timedelta(minutes=60)
JWT_BLACKLIST_ENABLED = True
JWT_BLACKLIST_TOKEN_CHECKS = ['access', 'refresh']
PROPAGATE_EXCEPTIONS = True
#: Database url
DATABASE = os.environ.get('DATABASE_URL')
#: Mail server configuration values
MAIL_SERVER=os.environ.get('MAIL_SERVER')
MAIL_PORT=os.environ.get('MAIL_PORT')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
MAIL_USE_TLS=os.environ.get('MAIL_USE_TLS')
MAIL_USERNAME=os.environ.get('MAIL_USERNAME')
class TestConfig(Config):
'''
configuration values for testing
'''
TESTING = True
DEBUG = True
PROPAGATE_EXCEPTIONS = True
DATABASE = os.environ.get('TEST_DB_URL')
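
# Usage sketch (assumption: this module is consumed by a Flask-style app; the
# import path and app object below are illustrative only):
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.config.from_object('config.TestConfig')  # or 'config.Config' in production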
| [
"[email protected]"
] | |
2355da4fe0a15ebbd2427a4c7f7b891e2e2ad149 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/pose_estimation/Hourglass_for_PyTorch/mmpose-master/demo/mmdetection_cfg/faster_rcnn_r50_fpn_1x_coco.py | 4fb90266f00299d6ac45e49f928e81c2c3eb7535 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 6,403 | py | # -*- coding: utf-8 -*-
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[8, 11])
total_epochs = 12
model = dict(
type='FasterRCNN',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0)),
roi_head=dict(
type='StandardRoIHead',
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0))))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=-1,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False))
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100)
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05)
)
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
| [
"[email protected]"
] | |
dabffd515b7dd2a0abf3bf15380ace94082f2145 | ed2a234be16e5ac95496cd959b531542a087faf6 | /Functions Advanced - Exercise/10. Keyword Arguments Length.py | 1b03e732297da99ed3703c06b09f393e7c4587db | [] | no_license | Spas52/Python_Advanced | efc73eda5d10707f1f1a7407cc697448a985f014 | 7082c8947abba9b348f8372f68d0fc10ffa57fc1 | refs/heads/main | 2023-06-04T13:05:46.394482 | 2021-06-24T00:01:37 | 2021-06-24T00:01:37 | 379,756,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | def kwargs_length(**kwargs):
return len(kwargs) | [
"[email protected]"
] | |
bc3798906716aa41be8beb4ecc2a2c58459a8f86 | dc767b48d46e2f6b9851ce61914e880fc95fe520 | /myshop/shop/migrations/0001_initial.py | cca6008d38c1c259f458a69c4f61f46f334c2252 | [] | no_license | EdmilsonSantana/django-by-example | c06081a1a3915aaf3996d017fea91c8273cbe2e0 | 7c895b55b8f6fcc05a2d5cd2181bf207dc9256fc | refs/heads/master | 2021-01-12T02:58:49.261515 | 2017-02-28T20:05:29 | 2017-02-28T20:05:29 | 78,144,090 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,044 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-16 23:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('slug', models.SlugField(max_length=200, unique=True)),
],
options={
'verbose_name': 'category',
'verbose_name_plural': 'categories',
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('slug', models.SlugField(max_length=200)),
('image', models.ImageField(blank=True, upload_to='products/%Y/%m/%d')),
('description', models.TextField(blank=True)),
('price', models.DecimalField(decimal_places=2, max_digits=10)),
('stock', models.PositiveIntegerField()),
('available', models.BooleanField(default=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='shop.Category')),
],
options={
'ordering': ('-created',),
},
),
migrations.AlterIndexTogether(
name='product',
index_together=set([('id', 'slug')]),
),
]
| [
"[email protected]"
] | |
78a758b50b7c3ecb4bb6e5761d61565d2eb317a5 | 2c5b25d0b5d6ba66d013251f93ebf4c642fd787b | /wrong_answer_codes/Contiguous_Array/Contiguous Array_324757576.py | 1c620fdc45f25037006caf70d00f3c54a4797b19 | [] | no_license | abhinay-b/Leetcode-Submissions | da8099ac54b5d36ae23db42580064d0f9d9bc63b | d034705813f3f908f555f1d1677b827af751bf42 | refs/heads/master | 2022-10-15T22:09:36.328967 | 2020-06-14T15:39:17 | 2020-06-14T15:39:17 | 259,984,100 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 787 | py | class Solution:
def findMaxLength(self, nums: List[int]) -> int:
count = [0]*2
start = end = 0
maxVal = 0
for idx,num in enumerate(nums):
count[num] += 1
diff = abs(count[0] - count[1])
# print(diff,start,end)
if diff > 1:
count[nums[start]] -= 1
start += 1
            elif diff == 1 and start > 0 and (count[nums[start-1]] + 1 == count[1-nums[start-1]]):
start -= 1
count[nums[start]] += 1
end = idx
maxVal = max(maxVal, end - start+1)
elif not diff:
end = idx
maxVal = max(maxVal, end - start+1)
return maxVal
| [
"[email protected]"
] | |
edcb724454b921fe8dc091a316470e10f89459df | 6cea6b8cfeef78b433e296c38ef11f4637609f20 | /src/collectors/ipmisensor/test/testipmisensor.py | 66a79164c5d9b0f45141583e0676c31a4b5b8902 | [
"MIT"
] | permissive | philipcristiano/Diamond | b659d577ec054c06ab99308d6c2ba3163de84e1a | 577270ea820af597458aa5d3325367608cd37845 | refs/heads/master | 2021-01-18T10:04:59.057835 | 2012-08-02T04:08:02 | 2012-08-02T04:08:02 | 3,140,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,392 | py | #!/usr/bin/python
################################################################################
from test import *
from diamond.collector import Collector
from ipmisensor import IPMISensorCollector
################################################################################
class TestIPMISensorCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('IPMISensorCollector', {
'interval': 10,
'bin' : 'true',
'use_sudo' : False
})
self.collector = IPMISensorCollector(config, None)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
with patch('subprocess.Popen.communicate', Mock(return_value =
( self.getFixture('ipmitool.out').getvalue() , '')
)):
self.collector.collect()
self.assertPublishedMany(publish_mock, {
'System.Temp' : 32.000000,
'CPU1.Vcore' : 1.080000,
'CPU2.Vcore' : 1.000000,
'CPU1.VTT' : 1.120000,
'CPU2.VTT' : 1.176000,
'CPU1.DIMM' : 1.512000,
'CPU2.DIMM' : 1.512000,
'+1_5V' : 1.512000,
'+1_8V' : 1.824000,
'+5V' : 4.992000,
'+12V' : 12.031000,
'+1_1V' : 1.112000,
'+3_3V' : 3.288000,
'+3_3VSB' : 3.240000,
'VBAT' : 3.240000,
'Fan1' : 4185.000000,
'Fan2' : 4185.000000,
'Fan3' : 4185.000000,
'Fan7' : 3915.000000,
'Fan8' : 3915.000000,
'Intrusion' : 0.000000,
'PS.Status' : 0.000000,
'P1-DIMM1A.Temp' : 41.000000,
'P1-DIMM1B.Temp' : 39.000000,
'P1-DIMM2A.Temp' : 38.000000,
'P1-DIMM2B.Temp' : 40.000000,
'P1-DIMM3A.Temp' : 37.000000,
'P1-DIMM3B.Temp' : 38.000000,
'P2-DIMM1A.Temp' : 39.000000,
'P2-DIMM1B.Temp' : 38.000000,
'P2-DIMM2A.Temp' : 39.000000,
'P2-DIMM2B.Temp' : 39.000000,
'P2-DIMM3A.Temp' : 39.000000,
'P2-DIMM3B.Temp' : 40.000000,
})
################################################################################
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
5dfe38fc03c0375b3b51d023a6dd2aa1cca6b25d | ac42f1d918bdbd229968cea0954ed75250acd55c | /admin/dashboard/openstack_dashboard/dashboards/physical/hosts/compute/tests.py | 47aa906803025be9db313abb19823b19ec492fcc | [
"Apache-2.0"
] | permissive | naanal/product | 016e18fd2f35608a0d8b8e5d2f75b653bac7111a | bbaa4cd60d4f2cdda6ce4ba3d36312c1757deac7 | refs/heads/master | 2020-04-03T22:40:48.712243 | 2016-11-15T11:22:00 | 2016-11-15T11:22:00 | 57,004,514 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,056 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class EvacuateHostViewTest(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_list')})
def test_index(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_list(IsA(http.HttpRequest),
binary='nova-compute').AndReturn(services)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:evacuate_host',
args=[hypervisor])
res = self.client.get(url)
self.assertTemplateUsed(res,
'physical/hosts/compute/evacuate_host.html')
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_list',
'evacuate_host')})
def test_successful_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_list(IsA(http.HttpRequest),
binary='nova-compute').AndReturn(services)
api.nova.evacuate_host(IsA(http.HttpRequest),
services[1].host,
services[0].host,
False).AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:evacuate_host',
args=[hypervisor])
form_data = {'current_host': services[1].host,
'target_host': services[0].host,
'on_shared_storage': False}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_list',
'evacuate_host')})
def test_failing_nova_call_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_list(IsA(http.HttpRequest),
binary='nova-compute').AndReturn(services)
api.nova.evacuate_host(IsA(http.HttpRequest),
services[1].host,
services[0].host,
False).AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:evacuate_host',
args=[hypervisor])
form_data = {'current_host': services[1].host,
'target_host': services[0].host,
'on_shared_storage': False}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertMessageCount(error=1)
self.assertRedirectsNoFollow(res, dest_url)
class MigrateHostViewTest(test.BaseAdminViewTests):
def test_index(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
res = self.client.get(url)
self.assertNoMessages()
self.assertTemplateUsed(res,
'physical/hosts/compute/migrate_host.html')
@test.create_stubs({api.nova: ('migrate_host',)})
def test_maintenance_host_cold_migration_succeed(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
api.nova.migrate_host(
IsA(http.HttpRequest),
disabled_service.host,
live_migrate=False,
disk_over_commit=False,
block_migration=False
).AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
form_data = {'current_host': disabled_service.host,
'migrate_type': 'cold_migrate',
'disk_over_commit': False,
'block_migration': False}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('migrate_host',)})
def test_maintenance_host_live_migration_succeed(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
api.nova.migrate_host(
IsA(http.HttpRequest),
disabled_service.host,
live_migrate=True,
disk_over_commit=False,
block_migration=True
).AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
form_data = {'current_host': disabled_service.host,
'migrate_type': 'live_migrate',
'disk_over_commit': False,
'block_migration': True}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('migrate_host',)})
def test_maintenance_host_migration_fails(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
api.nova.migrate_host(
IsA(http.HttpRequest),
disabled_service.host,
live_migrate=True,
disk_over_commit=False,
block_migration=True
).AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
form_data = {'current_host': disabled_service.host,
'migrate_type': 'live_migrate',
'disk_over_commit': False,
'block_migration': True}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertMessageCount(error=1)
self.assertRedirectsNoFollow(res, dest_url)
class DisableServiceViewTest(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats')})
def test_index(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:disable_service',
args=[hypervisor])
res = self.client.get(url)
template = 'physical/hosts/compute/disable_service.html'
self.assertTemplateUsed(res, template)
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_disable')})
def test_successful_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_disable(IsA(http.HttpRequest),
services[0].host,
'nova-compute',
reason='test disable').AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:disable_service',
args=[hypervisor])
form_data = {'host': services[0].host,
'reason': 'test disable'}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_disable')})
def test_failing_nova_call_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_disable(
IsA(http.HttpRequest), services[0].host, 'nova-compute',
reason='test disable').AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:disable_service',
args=[hypervisor])
form_data = {'host': services[0].host,
'reason': 'test disable'}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertMessageCount(error=1)
self.assertRedirectsNoFollow(res, dest_url)
| [
"[email protected]"
] | |
a4bcbc3ea13c6d7161096668057371a82bc97ec8 | e7ea544475ebfa70ebdf5d5949bde9e23edc60ba | /gbp/scripts/common/buildpackage.py | e1edfb29587dfad1895660c095e2fe13141cba7b | [] | no_license | dcoshea/git-buildpackage | 80cb7d890222488663a09e3d790fc5e985f791b9 | f4aa76bfcda1ded4649cd071b123ef8d7bf2344d | refs/heads/master | 2020-05-26T21:05:37.574986 | 2017-02-19T13:17:11 | 2017-02-19T13:17:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,061 | py | # vim: set fileencoding=utf-8 :
#
# (C) 2006-2011, 2016 Guido Guenther <[email protected]>
# (C) 2012 Intel Corporation <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
#
"""Common functionality for Debian and RPM buildpackage scripts"""
import os
import os.path
import pipes
import tempfile
import shutil
from gbp.command_wrappers import (CatenateTarArchive, CatenateZipArchive)
from gbp.git import GitRepositoryError
from gbp.errors import GbpError
import gbp.log
# when we want to reference the index in a treeish context we call it:
index_name = "INDEX"
# when we want to reference the working copy in treeish context we call it:
wc_name = "WC"
def sanitize_prefix(prefix):
"""
Sanitize the prefix used for generating source archives
>>> sanitize_prefix('')
'/'
>>> sanitize_prefix('foo/')
'foo/'
>>> sanitize_prefix('/foo/bar')
'foo/bar/'
"""
if prefix:
return prefix.strip('/') + '/'
return '/'
def git_archive_submodules(repo, treeish, output, prefix, comp_type, comp_level,
comp_opts, format='tar'):
"""
Create a source tree archive with submodules.
Concatenates the archives generated by git-archive into one and compresses
the end result.
Exception handling is left to the caller.
"""
prefix = sanitize_prefix(prefix)
tempdir = tempfile.mkdtemp()
main_archive = os.path.join(tempdir, "main.%s" % format)
submodule_archive = os.path.join(tempdir, "submodule.%s" % format)
try:
# generate main (tmp) archive
repo.archive(format=format, prefix=prefix,
output=main_archive, treeish=treeish)
# generate each submodule's archive and append it to the main archive
for (subdir, commit) in repo.get_submodules(treeish):
tarpath = [subdir, subdir[2:]][subdir.startswith("./")]
gbp.log.debug("Processing submodule %s (%s)" % (subdir, commit[0:8]))
repo.archive(format=format, prefix='%s%s/' % (prefix, tarpath),
output=submodule_archive, treeish=commit, cwd=subdir)
if format == 'tar':
CatenateTarArchive(main_archive)(submodule_archive)
elif format == 'zip':
CatenateZipArchive(main_archive)(submodule_archive)
# compress the output
if comp_type:
# Redirect through stdout directly to the correct output file in
# order to avoid determining the output filename of the compressor
try:
comp_level_opt = '-%d' % comp_level if comp_level is not None else ''
except TypeError:
raise GbpError("Invalid compression level '%s'" % comp_level)
ret = os.system("%s --stdout %s %s %s > %s" %
(comp_type, comp_level_opt, comp_opts, main_archive,
output))
if ret:
raise GbpError("Error creating %s: %d" % (output, ret))
else:
shutil.move(main_archive, output)
finally:
shutil.rmtree(tempdir)
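
# Minimal usage sketch (assumptions: `repo` is an initialized GitRepository with
# its submodules checked out, the tree-ish, output path and prefix below are
# made up, "gzip" is available on $PATH, and comp_opts may be an empty string):
#
#   git_archive_submodules(repo, "HEAD", "../pkg_1.0.orig.tar.gz", "pkg-1.0/",
#                          "gzip", 9, "")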
def git_archive_single(treeish, output, prefix, comp_type, comp_level, comp_opts, format='tar'):
"""
Create an archive without submodules
Exception handling is left to the caller.
"""
prefix = sanitize_prefix(prefix)
pipe = pipes.Template()
pipe.prepend("git archive --format=%s --prefix=%s %s" % (format, prefix, treeish), '.-')
try:
comp_level_opt = '-%d' % comp_level if comp_level is not None else ''
except TypeError:
raise GbpError("Invalid compression level '%s'" % comp_level)
if comp_type:
pipe.append('%s -c %s %s' % (comp_type, comp_level_opt, comp_opts), '--')
ret = pipe.copy('', output)
if ret:
raise GbpError("Error creating %s: %d" % (output, ret))
# Functions to handle export-dir
def dump_tree(repo, export_dir, treeish, with_submodules, recursive=True):
"dump a tree to output_dir"
output_dir = os.path.dirname(export_dir)
prefix = sanitize_prefix(os.path.basename(export_dir))
if recursive:
paths = []
else:
paths = ["'%s'" % nam for _mod, typ, _sha, nam in
repo.list_tree(treeish) if typ == 'blob']
pipe = pipes.Template()
pipe.prepend('git archive --format=tar --prefix=%s %s -- %s' %
(prefix, treeish, ' '.join(paths)), '.-')
pipe.append('tar -C %s -xf -' % output_dir, '-.')
top = os.path.abspath(os.path.curdir)
try:
ret = pipe.copy('', '')
if ret:
raise GbpError("Error in dump_tree archive pipe")
if recursive and with_submodules:
if repo.has_submodules():
repo.update_submodules()
for (subdir, commit) in repo.get_submodules(treeish):
gbp.log.info("Processing submodule %s (%s)" % (subdir, commit[0:8]))
tarpath = [subdir, subdir[2:]][subdir.startswith("./")]
os.chdir(subdir)
pipe = pipes.Template()
pipe.prepend('git archive --format=tar --prefix=%s%s/ %s' %
(prefix, tarpath, commit), '.-')
pipe.append('tar -C %s -xf -' % output_dir, '-.')
ret = pipe.copy('', '')
os.chdir(top)
if ret:
raise GbpError("Error in dump_tree archive pipe in submodule %s" % subdir)
except OSError as err:
gbp.log.err("Error dumping tree to %s: %s" % (output_dir, err[0]))
return False
except (GitRepositoryError, GbpError) as err:
gbp.log.err(err)
return False
except Exception as e:
gbp.log.err("Error dumping tree to %s: %s" % (output_dir, e))
return False
finally:
os.chdir(top)
return True
def wc_index(repo):
"""Get path of the temporary index file used for exporting working copy"""
return os.path.join(repo.git_dir, "gbp_index")
def write_wc(repo, force=True):
"""write out the current working copy as a treeish object"""
index_file = wc_index(repo)
repo.add_files(repo.path, force=force, index_file=index_file)
tree = repo.write_tree(index_file=index_file)
return tree
def drop_index(repo):
"""drop our custom index"""
index_file = wc_index(repo)
if os.path.exists(index_file):
os.unlink(index_file)
| [
"[email protected]"
] | |
a46afda8041485109144a60243600a990bd2b7d1 | c0d5b7f8e48a26c6ddc63c76c43ab5b397c00028 | /tests/columns/test_array.py | 731e15ff8b962d66534e989094fe5f8cbef23a93 | [
"MIT"
] | permissive | aminalaee/piccolo | f6c5e5e1c128568f7ccb9ad1dfb4746acedae262 | af8d2d45294dcd84f4f9b6028752aa45b699ec15 | refs/heads/master | 2023-07-14T09:44:04.160116 | 2021-07-11T22:56:27 | 2021-07-11T22:56:27 | 386,398,401 | 0 | 0 | MIT | 2021-07-15T19:32:50 | 2021-07-15T19:08:17 | null | UTF-8 | Python | false | false | 2,199 | py | from unittest import TestCase
from piccolo.table import Table
from piccolo.columns.column_types import Array, Integer
from tests.base import postgres_only
class MyTable(Table):
value = Array(base_column=Integer())
class TestArrayPostgres(TestCase):
"""
Make sure an Array column can be created.
"""
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def test_storage(self):
"""
Make sure data can be stored and retrieved.
"""
MyTable(value=[1, 2, 3]).save().run_sync()
row = MyTable.objects().first().run_sync()
self.assertEqual(row.value, [1, 2, 3])
@postgres_only
def test_index(self):
"""
Indexes should allow individual array elements to be queried.
"""
MyTable(value=[1, 2, 3]).save().run_sync()
self.assertEqual(
MyTable.select(MyTable.value[0]).first().run_sync(), {"value": 1}
)
@postgres_only
def test_all(self):
"""
Make sure rows can be retrieved where all items in an array match a
given value.
"""
MyTable(value=[1, 1, 1]).save().run_sync()
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.all(1))
.first()
.run_sync(),
{"value": [1, 1, 1]},
)
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.all(0))
.first()
.run_sync(),
None,
)
def test_any(self):
"""
Make sure rows can be retrieved where any items in an array match a
given value.
"""
MyTable(value=[1, 2, 3]).save().run_sync()
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.any(1))
.first()
.run_sync(),
{"value": [1, 2, 3]},
)
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.any(0))
.first()
.run_sync(),
None,
)
| [
"[email protected]"
] | |
3f259779a113f38727e5e331c041593a3830edfe | caaf56727714f8c03be38710bc7d0434c3ec5b11 | /tests/components/telegram/test_notify.py | 7488db49d9ea58db8f78e93cab0842fa686ee119 | [
"Apache-2.0"
] | permissive | tchellomello/home-assistant | c8db86880619d7467901fd145f27e0f2f1a79acc | ed4ab403deaed9e8c95e0db728477fcb012bf4fa | refs/heads/dev | 2023-01-27T23:48:17.550374 | 2020-09-18T01:18:55 | 2020-09-18T01:18:55 | 62,690,461 | 8 | 1 | Apache-2.0 | 2023-01-13T06:02:03 | 2016-07-06T04:13:49 | Python | UTF-8 | Python | false | false | 1,598 | py | """The tests for the telegram.notify platform."""
from os import path
from homeassistant import config as hass_config
import homeassistant.components.notify as notify
from homeassistant.components.telegram import DOMAIN
from homeassistant.const import SERVICE_RELOAD
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_reload_notify(hass):
"""Verify we can reload the notify service."""
with patch("homeassistant.components.telegram_bot.async_setup", return_value=True):
assert await async_setup_component(
hass,
notify.DOMAIN,
{
notify.DOMAIN: [
{
"name": DOMAIN,
"platform": DOMAIN,
"chat_id": 1,
},
]
},
)
await hass.async_block_till_done()
assert hass.services.has_service(notify.DOMAIN, DOMAIN)
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"telegram/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert not hass.services.has_service(notify.DOMAIN, DOMAIN)
assert hass.services.has_service(notify.DOMAIN, "telegram_reloaded")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
| [
"[email protected]"
] | |
3169f03ad1a82380f124de333e6a15857ecf1ae8 | 4fc21c3f8dca563ce8fe0975b5d60f68d882768d | /GodwillOnyewuchi/Phase 1/Python Basic 2/day 12 task/task10.py | a4924e40fbc8159a266fbfd0579729acab934db6 | [
"MIT"
] | permissive | Uche-Clare/python-challenge-solutions | 17e53dbedbff2f33e242cf8011696b3059cd96e9 | 49ede6204ee0a82d5507a19fbc7590a1ae10f058 | refs/heads/master | 2022-11-13T15:06:52.846937 | 2020-07-10T20:59:37 | 2020-07-10T20:59:37 | 266,404,840 | 1 | 0 | MIT | 2020-05-23T19:24:56 | 2020-05-23T19:24:55 | null | UTF-8 | Python | false | false | 290 | py | # Python program to get numbers divisible by fifteen from a list using an anonymous function
def divisibleby15(lists):
newList = []
for i in lists:
if i % 15 == 0:
newList.append(i)
return newList
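
# The header above mentions an anonymous function; a lambda-based sketch with
# the same behaviour (assumption: a plain filter is all that is intended):
divisible_by_15 = lambda nums: list(filter(lambda x: x % 15 == 0, nums))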
print(divisibleby15([23, 56, 12, 15, 45, 23, 70, 678, 90])) | [
"[email protected]"
] | |
ce6667dc95fdefc8be193b41ae44902d4600a89a | 7a9c01f7029e74c697100e244d26c72d0e283d47 | /models/amenity.py | 9adbf8d9f5418e8b43eeb584cccd1acbde12617c | [] | no_license | toyugo/holbertonschool-AirBnB_clone | 63321296ecee98b1a0cda39c7b155cc2ea5ececb | 5edaeafb6516130f2027b505fe8b168f6f9de174 | refs/heads/main | 2023-03-21T06:32:18.728878 | 2021-03-04T13:08:56 | 2021-03-04T13:08:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | #!/usr/bin/python3
""" Module Amenity """
from models.base_model import BaseModel
class Amenity(BaseModel):
""" Class Amenity base en BaseModel """
name = ""
| [
"[email protected]"
] | |
c4b2fcaa8f6499cdca69575ead3662b305b1ccd5 | 4ed33dba672aa6aaef42698ef8437c872b078d37 | /backend/home/migrations/0001_load_initial_data.py | e78b5b69ad3761f691200103468335142fc62434 | [] | no_license | crowdbotics-apps/flat-heart-27928 | aecb93c66e39e94e01cef7fe9506effe994cde18 | ce209de8910b1e9f006814b58a05aed1eeada32d | refs/heads/master | 2023-05-26T14:51:41.045373 | 2021-06-11T20:01:34 | 2021-06-11T20:01:34 | 376,130,678 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | from django.db import migrations
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "flat-heart-27928.botics.co"
site_params = {
"name": "Flat Heart",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_site),
]
| [
"[email protected]"
] | |
6bc05f1c24acd83be18b9337a531c43c42f39d63 | 6e928e1651713f945c980bca6d6c02ac5dce249a | /task1/5.py | 64b92c59d071daed1a062f5bbc9c61742d9564d9 | [] | no_license | Akzhan12/pp2 | 97334158b442383df32583ee6c0b9cab92a3ef45 | 56e33fd9119955ea8349172bf3f2cc5fbd814142 | refs/heads/main | 2023-06-28T08:30:11.068397 | 2021-07-29T08:34:43 | 2021-07-29T08:34:43 | 337,359,826 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | a = list(map(int,input().split()))
n = int(input()) % len(a)
if n < 0:
n = abs(n)
print(*a[n:],end = " ")
print(*a[0:n])
else:
n = abs(n)
print(*a[-n:],end = " ")
print(*a[0:-n]) | [
"[email protected]"
] | |
b1571f62c847a20ecf7624a5be9945287afced54 | 704976ea552111c6a5af9cd7cb62b9d9abaf3996 | /pypy/module/zlib/test/test_zlib.py | 35fd7147de34051908c2d1acb58fc941e3703da9 | [
"BSD-3-Clause"
] | permissive | mesalock-linux/mesapy | 4f02c5819ce7f2f6e249d34840f1aa097577645d | ed546d59a21b36feb93e2309d5c6b75aa0ad95c9 | refs/heads/mesapy2.7 | 2023-08-16T21:33:02.239581 | 2019-08-13T10:29:43 | 2019-08-13T18:06:45 | 136,080,721 | 396 | 33 | NOASSERTION | 2020-04-01T03:05:18 | 2018-06-04T20:45:17 | Python | UTF-8 | Python | false | false | 10,362 | py | """
Tests for the zlib module.
"""
import sys
try:
import zlib
except ImportError:
import py; py.test.skip("no zlib module on this host Python")
try:
from pypy.module.zlib import interp_zlib
except ImportError:
import py; py.test.skip("no zlib C library on this machine")
def test_unsigned_to_signed_32bit():
assert interp_zlib.unsigned_to_signed_32bit(123) == 123
assert interp_zlib.unsigned_to_signed_32bit(2**31) == -2**31
assert interp_zlib.unsigned_to_signed_32bit(2**32-1) == -1
if sys.maxint > 2**32:
from rpython.rlib.rarithmetic import r_uint
assert interp_zlib.unsigned_to_signed_32bit(r_uint(sys.maxint)) == -1
assert interp_zlib.unsigned_to_signed_32bit(r_uint(sys.maxint+1)) == 0
class AppTestZlib(object):
spaceconfig = dict(usemodules=['zlib'])
def setup_class(cls):
"""
Create a space with the zlib module and import it for use by the tests.
Also create some compressed data with the bootstrap zlib module so that
compression and decompression tests have a little real data to assert
against.
"""
cls.w_zlib = cls.space.getbuiltinmodule('zlib')
expanded = 'some bytes which will be compressed'
cls.w_expanded = cls.space.wrap(expanded)
cls.w_compressed = cls.space.wrap(zlib.compress(expanded))
def test_error(self):
"""
zlib.error should be an exception class.
"""
assert issubclass(self.zlib.error, Exception)
def test_crc32(self):
"""
When called with a string, zlib.crc32 should compute its CRC32 and
return it as a signed 32 bit integer. On 64-bit machines too
(it is a bug in CPython < 2.6 to return unsigned values in this case).
"""
assert self.zlib.crc32('') == 0
assert self.zlib.crc32('\0') == -771559539
assert self.zlib.crc32('hello, world.') == -936931198
def test_crc32_start_value(self):
"""
When called with a string and an integer, zlib.crc32 should compute the
CRC32 of the string using the integer as the starting value.
"""
assert self.zlib.crc32('', 42) == 42
assert self.zlib.crc32('\0', 42) == 163128923
assert self.zlib.crc32('hello, world.', 42) == 1090960721
hello = 'hello, '
hellocrc = self.zlib.crc32(hello)
world = 'world.'
helloworldcrc = self.zlib.crc32(world, hellocrc)
assert helloworldcrc == self.zlib.crc32(hello + world)
def test_crc32_negative_start(self):
v = self.zlib.crc32('', -1)
assert v == -1
def test_crc32_negative_long_start(self):
v = self.zlib.crc32('', -1L)
assert v == -1
assert self.zlib.crc32('foo', -99999999999999999999999) == 1611238463
def test_crc32_long_start(self):
import sys
v = self.zlib.crc32('', sys.maxint*2)
assert v == -2
assert self.zlib.crc32('foo', 99999999999999999999999) == 1635107045
def test_adler32(self):
"""
When called with a string, zlib.adler32() should compute its adler 32
checksum and return it as a signed 32 bit integer.
On 64-bit machines too
(it is a bug in CPython < 2.6 to return unsigned values in this case).
"""
assert self.zlib.adler32('') == 1
assert self.zlib.adler32('\0') == 65537
assert self.zlib.adler32('hello, world.') == 571147447
assert self.zlib.adler32('x' * 23) == -2122904887
def test_adler32_start_value(self):
"""
When called with a string and an integer, zlib.adler32 should compute
the adler 32 checksum of the string using the integer as the starting
value.
"""
assert self.zlib.adler32('', 42) == 42
assert self.zlib.adler32('\0', 42) == 2752554
assert self.zlib.adler32('hello, world.', 42) == 606078176
assert self.zlib.adler32('x' * 23, 42) == -2061104398
hello = 'hello, '
hellosum = self.zlib.adler32(hello)
world = 'world.'
helloworldsum = self.zlib.adler32(world, hellosum)
assert helloworldsum == self.zlib.adler32(hello + world)
assert self.zlib.adler32('foo', -1) == 45547858
assert self.zlib.adler32('foo', 99999999999999999999999) == -114818734
def test_invalidLevel(self):
"""
zlib.compressobj should raise ValueError when an out of bounds level is
passed to it.
"""
raises(ValueError, self.zlib.compressobj, -2)
raises(ValueError, self.zlib.compressobj, 10)
def test_compression(self):
"""
zlib.compressobj should return an object which can be used to compress
bytes.
"""
compressor = self.zlib.compressobj()
bytes = compressor.compress(self.expanded)
raises(OverflowError, compressor.flush, 2**31)
bytes += compressor.flush()
assert bytes == self.compressed
def test_decompression(self):
"""
zlib.decompressobj should return an object which can be used to
decompress bytes.
"""
decompressor = self.zlib.decompressobj()
bytes = decompressor.decompress(self.compressed)
bytes += decompressor.flush()
assert bytes == self.expanded
def test_compress(self):
"""
Test the zlib.compress() function.
"""
bytes = self.zlib.compress(self.expanded)
assert bytes == self.compressed
def test_decompress(self):
"""
Test the zlib.decompress() function.
"""
bytes = self.zlib.decompress(self.compressed)
assert bytes == self.expanded
def test_decompress_invalid_input(self):
"""
Try to feed garbage to zlib.decompress().
"""
raises(self.zlib.error, self.zlib.decompress, self.compressed[:-2])
raises(self.zlib.error, self.zlib.decompress, 'foobar')
def test_bad_arguments(self):
import zlib
raises(ValueError, zlib.decompressobj().flush, 0)
raises(ValueError, zlib.decompressobj().flush, -1)
raises(TypeError, zlib.decompressobj().flush, None)
raises(ValueError, zlib.decompressobj().decompress, b'abc', -1)
raises(TypeError, zlib.decompressobj().decompress, b'abc', None)
raises(TypeError, self.zlib.decompress, self.compressed, None)
raises(OverflowError, self.zlib.decompress, self.compressed, 2**31)
def test_empty_flush(self):
import zlib
co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
assert co.flush() # Returns a zlib header
dco = zlib.decompressobj()
assert dco.flush() == b""
def test_decompress_incomplete_stream(self):
import zlib
# This is 'foo', deflated
x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
# For the record
assert zlib.decompress(x) == b'foo'
raises(zlib.error, zlib.decompress, x[:-5])
# Omitting the stream end works with decompressor objects
# (see issue #8672).
dco = zlib.decompressobj()
y = dco.decompress(x[:-5])
y += dco.flush()
assert y == b'foo'
def test_unused_data(self):
"""
Try to feed too much data to zlib.decompress().
It should show up in the unused_data attribute.
"""
d = self.zlib.decompressobj()
s = d.decompress(self.compressed + 'extrastuff', 0)
assert s == self.expanded
assert d.unused_data == 'extrastuff'
assert d.flush() == ''
assert d.unused_data == 'extrastuff'
# try again with several decompression steps
d = self.zlib.decompressobj()
s1 = d.decompress(self.compressed[:10])
assert d.unused_data == ''
s2 = d.decompress(self.compressed[10:-3])
assert d.unused_data == ''
s3 = d.decompress(self.compressed[-3:] + 'spam' * 100)
assert d.unused_data == 'spam' * 100
assert s1 + s2 + s3 == self.expanded
s4 = d.decompress('egg' * 50)
assert d.unused_data == ('spam' * 100) + ('egg' * 50)
assert s4 == ''
def test_max_length(self):
"""
Test the max_length argument of the decompress() method
and the corresponding unconsumed_tail attribute.
"""
d = self.zlib.decompressobj()
data = self.compressed
for i in range(0, 100, 10):
s1 = d.decompress(data, 10)
assert s1 == self.expanded[i:i+10]
data = d.unconsumed_tail
assert not data
def test_max_length_large(self):
import sys
if sys.version_info < (2, 7, 13):
skip("passing a potentially 64-bit int as max_length is not "
"supported before 2.7.13")
d = self.zlib.decompressobj()
assert d.decompress(self.compressed, sys.maxsize) == self.expanded
def test_buffer(self):
"""
We should be able to pass buffer objects instead of strings.
"""
assert self.zlib.crc32(buffer('hello, world.')) == -936931198
assert self.zlib.adler32(buffer('hello, world.')) == 571147447
compressor = self.zlib.compressobj()
bytes = compressor.compress(buffer(self.expanded))
bytes += compressor.flush()
assert bytes == self.compressed
decompressor = self.zlib.decompressobj()
bytes = decompressor.decompress(buffer(self.compressed))
bytes += decompressor.flush()
assert bytes == self.expanded
bytes = self.zlib.compress(buffer(self.expanded))
assert bytes == self.compressed
bytes = self.zlib.decompress(buffer(self.compressed))
assert bytes == self.expanded
def test_flush_with_freed_input(self):
# Issue #16411: decompressor accesses input to last decompress() call
# in flush(), even if this object has been freed in the meanwhile.
input1 = b'abcdefghijklmnopqrstuvwxyz'
input2 = b'QWERTYUIOPASDFGHJKLZXCVBNM'
data = self.zlib.compress(input1)
dco = self.zlib.decompressobj()
dco.decompress(data, 1)
del data
data = self.zlib.compress(input2)
assert dco.flush(1) == input1[1:]
assert dco.unused_data == b''
assert dco.unconsumed_tail == b''
| [
"[email protected]"
] | |
30d22e648e82216e843989a09b25df3c9431291e | 747f759311d404af31c0f80029e88098193f6269 | /addons/library/library_editor_supplier.py | d50451b7ea1eab1cf3f62f26950986f7861e6e54 | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | /home/openerp/production/extra-addons/library/library_editor_supplier.py | [
"[email protected]"
] | |
e5ab44dc776222c231274dd703bcd5aebdb8b110 | f207586e34b37b13ee6012ea08f174e302fa0078 | /mimo/util/decorate.py | cf41979d6dfcac6b024ecd468df4e0901d8627e7 | [
"MIT"
] | permissive | pnickl/mimo | 92b7858108e077ff43082f15f635d1205120b143 | 81c4bbd2594e2136445009eae752ab8a1602a1cf | refs/heads/master | 2022-12-24T02:10:34.838878 | 2020-08-04T19:24:21 | 2020-08-04T19:24:21 | 302,394,694 | 2 | 0 | MIT | 2020-10-08T16:07:26 | 2020-10-08T16:07:25 | null | UTF-8 | Python | false | false | 1,796 | py | def pass_obs_arg(f):
def wrapper(self, obs=None, **kwargs):
if obs is None:
assert self.has_data()
obs = [_obs for _obs in self.obs]
else:
obs = obs if isinstance(obs, list) else [obs]
return f(self, obs, **kwargs)
return wrapper
def pass_obs_and_labels_arg(f):
def wrapper(self, obs=None, labels=None, **kwargs):
if obs is None or labels is None:
assert self.has_data()
obs = [_obs for _obs in self.obs]
labels = self.labels
else:
obs = obs if isinstance(obs, list) else [obs]
labels = [self.gating.likelihood.rvs(len(_obs)) for _obs in obs]\
if labels is None else labels
return f(self, obs, labels, **kwargs)
return wrapper
def pass_target_and_input_arg(f):
def wrapper(self, y=None, x=None, **kwargs):
if y is None or x is None:
assert self.has_data()
y = [_y for _y in self.target]
x = [_x for _x in self.input]
else:
y = y if isinstance(y, list) else [y]
x = x if isinstance(x, list) else [x]
return f(self, y, x, **kwargs)
return wrapper
def pass_target_input_and_labels_arg(f):
def wrapper(self, y=None, x=None, z=None, **kwargs):
if y is None or x is None and z is None:
assert self.has_data()
y = [_y for _y in self.target]
x = [_x for _x in self.input]
z = self.labels
else:
y = y if isinstance(y, list) else [y]
x = x if isinstance(x, list) else [x]
z = [self.gating.likelihood.rvs(len(_y)) for _y in y]\
if z is None else z
return f(self, y, x, z, **kwargs)
return wrapper
| [
"[email protected]"
] | |
b9c5ca1798fcaffb1707909fd79abe2418769bda | 04ac33f68827aeef7d5bc441d10979143828ef1a | /contactSpider.py | 037682c5a672fc9a935a9454eaef442e24e5a338 | [] | no_license | samshultz/realtor_agent_spider | a06e99af15fc78902c5f44fcb91dd6d55490b14f | 4550301a9e4733ad19bd6fd904e079037847bbf7 | refs/heads/master | 2021-07-05T04:28:17.703484 | 2017-09-30T02:22:34 | 2017-09-30T02:22:34 | 105,333,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,933 | py | import scrapy
class ContactSpider(scrapy.Spider):
# name of the spider
name = "contacts"
# the url to start scraping from
start_urls = [
"https://www.realtor.com/realestateagents/Los-Angeles_CA"
]
def parse(self, response):
# check the page for the name of the agent...
for href in response.css("div[itemprop=name] a::attr(href)"):
# ...click on it and call the parse_agent method on each one
yield response.follow(href, self.parse_agent)
# follow pagination links...
# for href in response.css("a.next::attr(href)"):
# #...repeat this method (parse method) on each page
# yield response.follow(href, self.parse)
def parse_agent(self, response):
# get the element containing the address info and extract the text
address = response.css("#modalcontactInfo span[itemprop=streetAddress]::text").extract_first()
# check if the address is available...
if address is not None:
# ... if it is, get the city, state and zipcode from it (this info
# is contained in the last three info in the address)
city, state, zipcode = address.split(",")[-3:]
# separate the address
addr = ''.join(address.split(",")[:-3])
else:
# if the address is not available
# set the city, state, addr and zipcode to empty string
city, state, zipcode = "", "", ""
addr = ""
# return a dictionary of the extracted info
yield {
"name": response.css("#modalcontactInfo p.modal-agent-name::text").extract_first().split(",")[0],
"location": response.css("#modalcontactInfo p.modal-agent-location::text").extract_first().strip(),
"address": addr,
"city": city,
"state": state,
"zipcode": zipcode,
}
| [
"[email protected]"
] | |
43078cfccfee9f2bbde2f0af3de46006b564a128 | 0725ed7ab6be91dfc0b16fef12a8871c08917465 | /tree/is_bst.py | 26ed670c86a2703f7550da0fa62852b62ed81d7b | [] | no_license | siddhism/leetcode | 8cb194156893fd6e9681ef50c84f0355d09e9026 | 877933424e6d2c590d6ac53db18bee951a3d9de4 | refs/heads/master | 2023-03-28T08:14:12.927995 | 2021-03-24T10:46:20 | 2021-03-24T10:46:20 | 212,151,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | # A binary tree node
import sys
class Node:
# Constructor to create a new node
def __init__(self, data):
self.data = data
self.left = None
self.right = None
def is_bst(node, min_limit, max_limit):
if not node:
return True
if not (min_limit < node.data < max_limit):
return False
l_path = is_bst(node.left, min_limit, node.data)
r_path = is_bst(node.right, node.data, max_limit)
return l_path and r_path
# Driver program to test above function
root = Node(4)
root.left = Node(2)
root.right = Node(5)
root.left.left = Node(1)
root.left.right = Node(3)
if (is_bst(root, -sys.maxint, sys.maxint)):
print "Is BST"
else:
print "Not a BST"
| [
"[email protected]"
] | |
3987405f70f48d91c8ac18c9912585cb8b9c44d3 | 5ba345bc16519d892fb533451eeface7c76a7d48 | /Classification/Logistic-Regression/LogisticRegression.py | 33ac92c5f131dde88d715d277e16cca84ae2164e | [] | no_license | sayands/machine-learning-projects | 337fd2aeb63814b6c47c9b2597bfe1ce4399a1f1 | 8e516c0ac3a96a4058d063b86559ded9be654c35 | refs/heads/master | 2021-05-06T16:26:37.008873 | 2018-08-02T20:27:20 | 2018-08-02T20:27:20 | 113,749,745 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,653 | py | #Logistic Regression
#Importing libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
#Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, 2:4].values
Y = dataset.iloc[:, 4].values
#Splitting the dataset into the Training Set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.25, random_state = 0)
#Feature Scaling
from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
#Fitting Logistic Regression To The Training Set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train,Y_train)
#Predicting The Test Set Results
y_pred = classifier.predict(X_test)
#Making The Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(Y_test, y_pred)
#Visualising The Training Set Results
from matplotlib.colors import ListedColormap
X_set, y_set = X_train, Y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Logistic Regression (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
#Visualising The Test Set Results
from matplotlib.colors import ListedColormap
X_set, y_set = X_test, Y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Logistic Regression (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
| [
"[email protected]"
] | |
fc5d1edb3647e18a663c8c43b897809c51abbf89 | 4c2a391f2f4d7361f2c7111b6d63edf67056f327 | /model/oauth.py | 4c650a7683108b8d5c4e420c7b90b52c00c2172a | [] | no_license | niyoufa/tnd_server | 6d69db32ceb5a6a14417b3e8b0f021fdc0e7e79c | 59c9ac6769773573685be215b4674d77545fe127 | refs/heads/master | 2020-06-23T15:43:28.891619 | 2016-08-26T03:44:01 | 2016-08-26T03:44:01 | 66,613,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | # -*- coding: utf-8 -*-
"""
author : youfaNi
date : 2016-07-13
"""
from bson.son import SON
import renren.model.model as model
import renren.libs.mongolib as mongo
import renren.consts as consts
import renren.libs.utils as utils
class OauthModel(model.BaseModel,model.Singleton):
__name = "renren.oauth_clients"
def __init__(self):
model.BaseModel.__init__(self,OauthModel.__name) | [
"[email protected]"
] | |
ea71dcf4271de4375a1cd100421e6cb04179b2a8 | ae1d96991a256b905ab8793ebc6063a9628cef02 | /muddery/combat/normal_combat_handler.py | f572690ce4f9a5ce3b3ed3411737fa890fdf193b | [
"BSD-3-Clause"
] | permissive | FWiner/muddery | bd2028e431dbeae16d6db9806cd2e9a7f4c5f22d | f6daa5fab6007e7c830e301718154fbc7b78b2bb | refs/heads/master | 2020-07-31T23:02:54.165362 | 2019-09-04T13:29:59 | 2019-09-04T13:29:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,083 | py | """
Combat handler.
"""
from django.conf import settings
from muddery.utils import defines
from muddery.utils.builder import delete_object
from muddery.combat.base_combat_handler import BaseCombatHandler
class NormalCombatHandler(BaseCombatHandler):
"""
This implements the normal combat handler.
"""
def start_combat(self):
"""
Start a combat, make all NPCs to cast skills automatically.
"""
super(NormalCombatHandler, self).start_combat()
for character in self.characters.values():
if not character.account:
# Monsters auto cast skills
character.start_auto_combat_skill()
def at_server_shutdown(self):
"""
This hook is called whenever the server is shutting down fully
(i.e. not for a restart).
"""
for character in self.characters.values():
# Stop auto cast skills
character.stop_auto_combat_skill()
super(NormalCombatHandler, self).at_server_shutdown()
def show_combat(self, character):
"""
Show combat information to a character.
Args:
character: (object) character
Returns:
None
"""
super(NormalCombatHandler, self).show_combat(character)
# send messages in order
character.msg({"combat_commands": character.get_combat_commands()})
def finish(self):
"""
Finish a combat. Send results to players, and kill all failed characters.
"""
for character in self.characters.values():
# Stop auto cast skills
character.stop_auto_combat_skill()
super(NormalCombatHandler, self).finish()
def set_combat_results(self, winners, losers):
"""
Called when the character wins the combat.
Args:
winners: (List) all combat winners.
losers: (List) all combat losers.
Returns:
None
"""
super(NormalCombatHandler, self).set_combat_results(winners, losers)
# add exp to winners
# get total exp
exp = 0
for loser in losers:
exp += loser.provide_exp(loser)
if exp:
# give experience to the winner
for character in winners:
character.add_exp(exp, combat=True)
for character in winners:
if character.is_typeclass(settings.BASE_PLAYER_CHARACTER_TYPECLASS):
# get object list
loots = None
for loser in losers:
obj_list = loser.loot_handler.get_obj_list(character)
if obj_list:
if not loots:
loots = obj_list
else:
loots.extend(obj_list)
# give objects to winner
if loots:
character.receive_objects(loots, combat=True)
# call quest handler
for loser in losers:
character.quest_handler.at_objective(defines.OBJECTIVE_KILL, loser.get_data_key())
# losers are killed.
for character in losers:
character.die(winners)
def _cleanup_character(self, character):
"""
Remove character from handler and clean
it of the back-reference and cmdset
"""
super(NormalCombatHandler, self)._cleanup_character(character)
if not character.is_typeclass(settings.BASE_PLAYER_CHARACTER_TYPECLASS):
if character.is_temp:
# notify its location
location = character.location
delete_object(character.dbref)
if location:
for content in location.contents:
if content.has_account:
content.show_location()
else:
if character.is_alive():
# Recover all hp.
character.db.hp = character.max_hp
| [
"[email protected]"
] | |
f3a56eab63df2e25ca7185b2b359bdc948581b9a | f20f3ab827eab5ad6a3f97b35d10d7afe2f118d5 | /__init__.py | e910486ed0e4b8b6f2fb6655c4441fbbf9959a91 | [
"MIT"
] | permissive | bradparks/Sprytile__blender_add_on_sprite_sheets_tile_maps | 9adb618bbd0e1f4e9334b8f4e534cff6fa9cc9d7 | 421c7efe3ea9ebd7e0f8dca7fb797eca597964d2 | refs/heads/master | 2021-05-08T04:37:22.745456 | 2017-10-12T15:59:17 | 2017-10-12T15:59:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,891 | py | bl_info = {
"name": "Sprytile Painter",
"author": "Jeiel Aranal",
"version": (0, 4, 24),
"blender": (2, 7, 7),
"description": "A utility for creating tile based low spec scenes with paint/map editor tools",
"location": "View3D > UI panel > Sprytile",
"wiki_url": "https://chemikhazi.github.io/Sprytile/",
"tracker_url": "https://github.com/ChemiKhazi/Sprytile/issues",
"category": "Paint"
}
# Put Sprytile directory is sys.path so modules can be loaded
import os
import sys
import inspect
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
locals_list = locals()
if "bpy" in locals_list:
from importlib import reload
reload(addon_updater_ops)
reload(sprytile_gui)
reload(sprytile_modal)
reload(sprytile_panel)
reload(sprytile_utils)
reload(sprytile_uv)
reload(tool_build)
reload(tool_paint)
reload(tool_fill)
reload(tool_set_normal)
else:
from . import sprytile_gui, sprytile_modal, sprytile_panel, sprytile_utils, sprytile_uv
from sprytile_tools import *
import bpy
import bpy.utils.previews
from . import addon_updater_ops
from bpy.props import *
import rna_keymap_ui
class SprytileSceneSettings(bpy.types.PropertyGroup):
def set_normal(self, value):
if "lock_normal" not in self.keys():
self["lock_normal"] = False
if self["lock_normal"] is True:
return
if self["normal_mode"] == value:
self["lock_normal"] = not self["lock_normal"]
return
self["normal_mode"] = value
self["lock_normal"] = True
bpy.ops.sprytile.axis_update('INVOKE_REGION_WIN')
def get_normal(self):
if "normal_mode" not in self.keys():
self["normal_mode"] = 3
return self["normal_mode"]
normal_mode = EnumProperty(
items=[
("X", "X", "World X-Axis", 1),
("Y", "Y", "World Y-Axis", 2),
("Z", "Z", "World X-Axis", 3)
],
name="Normal Mode",
description="Normal to create the mesh on",
default='Z',
set=set_normal,
get=get_normal
)
lock_normal = BoolProperty(
name="Lock",
description="Lock normal used to create meshes",
default=False
)
snap_translate = BoolProperty(
name="Snap Translate",
description="Snap pixel translations to pixel grid",
default=True
)
paint_mode = EnumProperty(
items=[
("PAINT", "Paint", "Advanced UV paint tools", 1),
("MAKE_FACE", "Build", "Only create new faces", 3),
("SET_NORMAL", "Set Normal", "Select a normal to use for face creation", 2),
("FILL", "Fill", "Fill the work plane cursor", 4)
],
name="Sprytile Paint Mode",
description="Paint mode",
default='MAKE_FACE'
)
def set_show_tools(self, value):
keys = self.keys()
if "show_tools" not in keys:
self["show_tools"] = False
self["show_tools"] = value
if value is False:
if "paint_mode" not in keys:
self["paint_mode"] = 3
if self["paint_mode"] in {2, 4}:
self["paint_mode"] = 3
def get_show_tools(self):
if "show_tools" not in self.keys():
self["show_tools"] = False
return self["show_tools"]
show_tools = BoolProperty(
default=False,
set=set_show_tools,
get=get_show_tools
)
def set_dummy(self, value):
current_value = self.get_dummy_actual(True)
value = list(value)
for idx in range(len(value)):
if current_value[idx] and current_value[idx] & value[idx]:
value[idx] = False
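        # The four toggle slots map to paint_mode enum values 1, 3, 2, 4
        # (Paint, Build, Set Normal, Fill), mirroring index_value_lookup in
        # get_dummy_actual below.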
mode_value_idx = [1, 3, 2, 4]
def get_mode_value(arr_value):
for i in range(len(arr_value)):
if arr_value[i]:
return mode_value_idx[i]
return -1
run_modal = True
paint_mode = get_mode_value(value)
if paint_mode > 0:
self["paint_mode"] = paint_mode
else:
run_modal = False
if "is_running" in self.keys():
if self["is_running"]:
self["is_running"] = False
else:
run_modal = True
if run_modal:
bpy.ops.sprytile.modal_tool('INVOKE_REGION_WIN')
def get_dummy_actual(self, force_real):
if "paint_mode" not in self.keys():
self["paint_mode"] = 3
out_value = [False, False, False, False]
if self["is_running"] or force_real:
index_value_lookup = 1, 3, 2, 4
set_idx = index_value_lookup.index(self["paint_mode"])
out_value[set_idx] = True
return out_value
def get_dummy(self):
if "is_running" not in self.keys():
self["is_running"] = False
is_running = self["is_running"]
return self.get_dummy_actual(is_running)
set_paint_mode = BoolVectorProperty(
name="Set Paint Mode",
description="Set Sprytile Tool Mode",
size=4,
set=set_dummy,
get=get_dummy
)
world_pixels = IntProperty(
name="World Pixel Density",
description="How many pixels are displayed in one world unit",
subtype='PIXEL',
default=32,
min=8,
max=2048
)
paint_normal_vector = FloatVectorProperty(
name="Srpytile Last Paint Normal",
description="Last saved painting normal used by Sprytile",
subtype='DIRECTION',
default=(0.0, 0.0, 1.0)
)
paint_up_vector = FloatVectorProperty(
name="Sprytile Last Paint Up Vector",
description="Last saved painting up vector used by Sprytile",
subtype='DIRECTION',
default=(0.0, 1.0, 0.0)
)
uv_flip_x = BoolProperty(
name="Flip X",
default=False
)
uv_flip_y = BoolProperty(
name="Flip Y",
default=False
)
mesh_rotate = FloatProperty(
name="Grid Rotation",
description="Rotation of mesh creation",
subtype='ANGLE',
unit='ROTATION',
step=9000,
precision=0,
min=-6.28319,
max=6.28319,
default=0.0
)
cursor_snap = EnumProperty(
items=[
('VERTEX', "Vertex", "Snap cursor to nearest vertex", "SNAP_GRID", 1),
('GRID', "Grid", "Snap cursor to grid", "SNAP_VERTEX", 2)
],
name="Cursor snap mode",
description="Sprytile cursor snap mode"
)
cursor_flow = BoolProperty(
name="Cursor Flow",
description="Cursor automatically follows mesh building",
default=False
)
paint_align = EnumProperty(
items=[
('TOP_LEFT', "Top Left", "", 1),
('TOP', "Top", "", 2),
('TOP_RIGHT', "Top Right", "", 3),
('LEFT', "Left", "", 4),
('CENTER', "Center", "", 5),
('RIGHT', "Right", "", 6),
('BOTTOM_LEFT', "Bottom Left", "", 7),
('BOTTOM', "Bottom", "", 8),
('BOTTOM_RIGHT', "Bottom Right", "", 9),
],
name="Paint Align",
description="Paint alignment mode",
default='CENTER'
)
def set_align_toggle(self, value, row):
prev_value = self.get_align_toggle(row)
row_val = 0
if row == 'top':
row_val = 0
elif row == 'middle':
row_val = 3
elif row == 'bottom':
row_val = 6
else:
return
col_val = 0
if value[0] and prev_value[0] != value[0]:
col_val = 1
elif value[1] and prev_value[1] != value[1]:
col_val = 2
elif value[2] and prev_value[2] != value[2]:
col_val = 3
else:
return
self["paint_align"] = row_val + col_val
def set_align_top(self, value):
self.set_align_toggle(value, "top")
def set_align_middle(self, value):
self.set_align_toggle(value, "middle")
def set_align_bottom(self, value):
self.set_align_toggle(value, "bottom")
def get_align_toggle(self, row):
if "paint_align" not in self.keys():
self["paint_align"] = 5
align = self["paint_align"]
if row == 'top':
return align == 1, align == 2, align == 3
if row == 'middle':
return align == 4, align == 5, align == 6
if row == 'bottom':
return align == 7, align == 8, align == 9
return False, False, False
def get_align_top(self):
return self.get_align_toggle("top")
def get_align_middle(self):
return self.get_align_toggle("middle")
def get_align_bottom(self):
return self.get_align_toggle("bottom")
paint_align_top = BoolVectorProperty(
name="Align",
size=3,
set=set_align_top,
get=get_align_top
)
paint_align_middle = BoolVectorProperty(
name="Align",
size=3,
set=set_align_middle,
get=get_align_middle
)
paint_align_bottom = BoolVectorProperty(
name="Align",
size=3,
set=set_align_bottom,
get=get_align_bottom
)
paint_hinting = BoolProperty(
name="Hinting",
description="Selected edge is used as X axis for UV mapping."
)
paint_stretch_x = BoolProperty(
name="Stretch X",
description="Stretch face over X axis of tile"
)
paint_stretch_y = BoolProperty(
name="Stretch Y",
description="Stretch face over Y axis of tile"
)
paint_edge_snap = BoolProperty(
name="Stretch Edge Snap",
description="Snap UV vertices to edges of tile when stretching.",
default=True
)
edge_threshold = FloatProperty(
name="Threshold",
description="Ratio of UV tile near to edge to apply snap",
min=0.01,
max=0.5,
soft_min=0.01,
soft_max=0.5,
default=0.35
)
paint_uv_snap = BoolProperty(
name="UV Snap",
default=True,
description="Snap UV vertices to texture pixels"
)
is_running = BoolProperty(
name="Sprytile Running",
description="Exit Sprytile tool"
)
is_snapping = BoolProperty(
name="Is Cursor Snap",
description="Is cursor snapping currently activated"
)
has_selection = BoolProperty(
name="Has selection",
description="Is there a mesh element selected"
)
is_grid_translate = BoolProperty(
name="Is Grid Translate",
description="Grid translate operator is running"
)
show_extra = BoolProperty(
name="Extra UV Grid Settings",
default=False
)
show_overlay = BoolProperty(
name="Show Grid Overlay",
default=True
)
auto_merge = BoolProperty(
name="Auto Merge",
description="Automatically merge vertices when creating faces",
default=True
)
auto_join = BoolProperty(
name="Join Multi",
description="Join multi tile faces when possible",
default=False
)
def set_reload(self, value):
self["auto_reload"] = value
if value is True:
bpy.ops.sprytile.reload_auto('INVOKE_REGION_WIN')
def get_reload(self):
if "auto_reload" not in self.keys():
self["auto_reload"] = False
return self["auto_reload"]
auto_reload = BoolProperty(
name="Auto",
description="Automatically reload images every few seconds",
default=False,
set=set_reload,
get=get_reload
)
fill_lock_transform = BoolProperty(
name="Lock Transforms",
description="Filled faces keep current rotations",
default=False,
)
axis_plane_display = EnumProperty(
items=[
('OFF', "Off", "Always Off", "RADIOBUT_OFF", 1),
('ON', "On", "Always On", "RADIOBUT_ON", 2),
('MIDDLE_MOUSE', "View", "Only when changing view", "CAMERA_DATA", 3)
],
name="Work Plane Cursor",
description="Display mode of Work Plane Cursor",
default='MIDDLE_MOUSE'
)
axis_plane_settings = BoolProperty(
name="Axis Plane Settings",
description="Show Work Plane Cursor settings",
default=False
)
axis_plane_size = IntVectorProperty(
name="Plane Size",
description="Size of the Work Plane Cursor",
size=2,
default=(2, 2),
min=1,
soft_min=1
)
axis_plane_color = FloatVectorProperty(
name="Plane Color",
description="Color Work Plane Cursor is drawn with",
size=3,
default=(0.7, 0.7, 0.7),
subtype='COLOR'
)
class SprytileMaterialGridSettings(bpy.types.PropertyGroup):
mat_id = StringProperty(
name="Material Id",
description="Name of the material this grid references",
default=""
)
id = IntProperty(
name="Grid ID",
default=-1
)
name = StringProperty(
name="Grid Name"
)
grid = IntVectorProperty(
name="Size",
description="Grid size, in pixels",
min=1,
size=2,
subtype='XYZ',
default=(32, 32)
)
def set_padding(self, value):
current_padding = self.get_padding()
if "grid" not in self.keys():
self["grid"] = (32, 32)
padding_delta = [ (value[0] - current_padding[0]) * 2, (value[1] - current_padding[1]) * 2]
new_grid = [self["grid"][0] - padding_delta[0], self["grid"][1] - padding_delta[1]]
if new_grid[0] < 1 or new_grid[1] < 1:
return
self["grid"] = (new_grid[0], new_grid[1])
self["padding"] = value
def get_padding(self):
if "padding" not in self.keys():
self["padding"] = (0, 0)
return self["padding"]
padding = IntVectorProperty(
name="Padding",
description="Cell padding, in pixels",
min=0,
size=2,
subtype='XYZ',
default=(0, 0),
set=set_padding,
get=get_padding
)
margin = IntVectorProperty(
name="Margin",
description="Spacing between tiles (top, right, bottom, left)",
min=0,
size=4,
subtype='XYZ',
default=(0, 0, 0, 0)
)
offset = IntVectorProperty(
name="Offset",
description="Offset of the grid",
subtype='TRANSLATION',
size=2,
default=(0, 0)
)
rotate = FloatProperty(
name="UV Rotation",
description="Rotation of UV grid",
subtype='ANGLE',
unit='ROTATION',
default=0.0
)
tile_selection = IntVectorProperty(
name="Tile Selection",
size=4,
default=(0, 0, 1, 1)
)
class SprytileMaterialData(bpy.types.PropertyGroup):
def expanded_default(self):
if 'is_expanded' not in self.keys():
self['is_expanded'] = True
def get_expanded(self):
self.expanded_default()
return self['is_expanded']
def set_expanded(self, value):
self.expanded_default()
do_rebuild = self['is_expanded'] is not value
self['is_expanded'] = value
if do_rebuild:
bpy.ops.sprytile.build_grid_list()
mat_id = StringProperty(
name="Material Id",
description="Name of the material this grid references",
default=""
)
is_expanded = BoolProperty(
default=True,
get=get_expanded,
set=set_expanded
)
grids = CollectionProperty(type=SprytileMaterialGridSettings)
class SprytileGridDisplay(bpy.types.PropertyGroup):
mat_id = StringProperty(default="")
grid_id = IntProperty(default=-1)
def get_mat_name(self):
if self.mat_id == "":
return ""
data_idx = bpy.data.materials.find(self.mat_id)
if data_idx < 0:
return ""
return bpy.data.materials[self.mat_id].name
def set_mat_name(self, value):
if self.mat_id == "":
return
data_idx = bpy.data.materials.find(self.mat_id)
if data_idx < 0:
return
bpy.data.materials[self.mat_id].name = value
bpy.ops.sprytile.validate_grids()
mat_name = StringProperty(
get=get_mat_name,
set=set_mat_name
)
class SprytileGridList(bpy.types.PropertyGroup):
def get_idx(self):
if "idx" not in self.keys():
self["idx"] = 0
return self["idx"]
def set_idx(self, value):
# If the selected index is a material entry
# Move to next entry
list_size = len(self.display)
while value < (list_size - 1) and self.display[value].mat_id != "":
value += 1
value = max(0, min(len(self.display)-1, value))
self["idx"] = value
if value < 0 or value >= len(self.display):
return
# Set the object grid id to target grid
target_entry = self.display[value]
if target_entry.grid_id != -1:
bpy.context.object.sprytile_gridid = target_entry.grid_id
display = bpy.props.CollectionProperty(type=SprytileGridDisplay)
idx = IntProperty(
default=0,
get=get_idx,
set=set_idx
)
def setup_props():
bpy.types.Scene.sprytile_data = bpy.props.PointerProperty(type=SprytileSceneSettings)
bpy.types.Scene.sprytile_mats = bpy.props.CollectionProperty(type=SprytileMaterialData)
bpy.types.Scene.sprytile_list = bpy.props.PointerProperty(type=SprytileGridList)
bpy.types.Scene.sprytile_ui = bpy.props.PointerProperty(type=sprytile_gui.SprytileGuiData)
bpy.types.Object.sprytile_gridid = IntProperty(
name="Grid ID",
description="Grid index used for object",
default=-1
)
def teardown_props():
del bpy.types.Scene.sprytile_data
del bpy.types.Scene.sprytile_mats
del bpy.types.Scene.sprytile_list
del bpy.types.Scene.sprytile_ui
del bpy.types.Object.sprytile_gridid
class SprytileAddonPreferences(bpy.types.AddonPreferences):
bl_idname = __package__
preview_transparency = bpy.props.FloatProperty(
name="Preview Alpha",
description="Transparency level of build preview cursor",
default=0.8,
min=0,
max=1
)
# addon updater preferences
auto_check_update = bpy.props.BoolProperty(
name="Auto-check for Update",
description="If enabled, auto-check for updates using an interval",
default=False,
)
updater_intrval_months = bpy.props.IntProperty(
name='Months',
description="Number of months between checking for updates",
default=0,
min=0
)
updater_intrval_days = bpy.props.IntProperty(
name='Days',
description="Number of days between checking for updates",
default=7,
min=0,
)
updater_intrval_hours = bpy.props.IntProperty(
name='Hours',
description="Number of hours between checking for updates",
default=0,
min=0,
max=23
)
updater_intrval_minutes = bpy.props.IntProperty(
name='Minutes',
description="Number of minutes between checking for updates",
default=0,
min=0,
max=59
)
def draw(self, context):
layout = self.layout
layout.prop(self, "preview_transparency")
kc = bpy.context.window_manager.keyconfigs.user
km = kc.keymaps['Mesh']
kmi_idx = km.keymap_items.find('sprytile.modal_tool')
if kmi_idx >= 0:
layout.label(text="Tile Mode Shortcut")
col = layout.column()
kmi = km.keymap_items[kmi_idx]
km = km.active()
col.context_pointer_set("keymap", km)
rna_keymap_ui.draw_kmi([], kc, km, kmi, col, 0)
addon_updater_ops.update_settings_ui(self, context)
def setup_keymap():
km_array = sprytile_modal.SprytileModalTool.keymaps
win_mgr = bpy.context.window_manager
key_config = win_mgr.keyconfigs.addon
keymap = key_config.keymaps.new(name='Mesh', space_type='EMPTY')
km_array[keymap] = [
keymap.keymap_items.new("sprytile.modal_tool", 'SPACE', 'PRESS', ctrl=True, shift=True)
]
keymap = key_config.keymaps.new(name="Sprytile Paint Modal Map", space_type='EMPTY', region_type='WINDOW', modal=True)
km_items = keymap.keymap_items
km_array[keymap] = [
km_items.new_modal('CANCEL', 'ESC', 'PRESS'),
km_items.new_modal('SNAP', 'S', 'ANY'),
km_items.new_modal('FOCUS', 'W', 'PRESS'),
km_items.new_modal('ROTATE_LEFT', 'ONE', 'PRESS'),
km_items.new_modal('ROTATE_RIGHT', 'TWO', 'PRESS'),
km_items.new_modal('FLIP_X', 'THREE', 'PRESS'),
km_items.new_modal('FLIP_Y', 'FOUR', 'PRESS')
]
sprytile_modal.SprytileModalTool.modal_values = [
'Cancel',
'Cursor Snap',
'Cursor Focus',
'Rotate Left',
'Rotate Right',
'Flip X',
'Flip Y'
]
def teardown_keymap():
for keymap in sprytile_modal.SprytileModalTool.keymaps:
kmi_list = keymap.keymap_items
for keymap_item in kmi_list:
keymap.keymap_items.remove(keymap_item)
sprytile_modal.SprytileModalTool.keymaps.clear()
def register():
addon_updater_ops.register(bl_info)
sprytile_panel.icons = bpy.utils.previews.new()
dirname = os.path.dirname(__file__)
icon_names = ('SPRYTILE_ICON_BUILD',
'SPRYTILE_ICON_PAINT',
'SPRYTILE_ICON_FILL',
'SPRYTILE_ICON_NORMAL')
icon_paths = ('icon-build.png',
'icon-paint.png',
'icon-fill.png',
'icon-setnormal.png')
for i in range(0, len(icon_names)):
icon_path = os.path.join(dirname, "icons")
icon_path = os.path.join(icon_path, icon_paths[i])
sprytile_panel.icons.load(icon_names[i], icon_path, 'IMAGE')
bpy.utils.register_class(sprytile_panel.SprytilePanel)
bpy.utils.register_module(__name__)
setup_props()
setup_keymap()
def unregister():
teardown_keymap()
teardown_props()
bpy.utils.unregister_class(sprytile_panel.SprytilePanel)
bpy.utils.unregister_module(__name__)
bpy.utils.previews.remove(sprytile_panel.icons)
# Unregister self from sys.path as well
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
sys.path.remove(cmd_subfolder)
if __name__ == "__main__":
register()
| [
"[email protected]"
] | |
9aefb0ae5bd605c4dae7ca200d14f1508eb9fb11 | f0755c0ca52a0a278d75b76ee5d9b547d9668c0e | /atcoder.jp/abc084/abc084_d/Main.py | 672f72253da43a227e962b8055a0caa9001017ec | [] | no_license | nasama/procon | 7b70c9a67732d7d92775c40535fd54c0a5e91e25 | cd012065162650b8a5250a30a7acb1c853955b90 | refs/heads/master | 2022-07-28T12:37:21.113636 | 2020-05-19T14:11:30 | 2020-05-19T14:11:30 | 263,695,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | def primes(n):
is_prime = [1]*(n+1)
is_prime[0] = 0
is_prime[1] = 0
for i in range(2, int(n**0.5) + 1):
if not is_prime[i]:
continue
for j in range(i*2,n+1,i):
is_prime[j] = 0
return is_prime
max = 100001
prime = primes(max)
a = [0]*max
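# a[i] = 1 when i is odd and both i and (i + 1) // 2 are prime;
# s below is the prefix sum of a, so each query [l, r] is answered as s[r + 1] - s[l]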
for i in range(max):
if i % 2 == 0:
continue
if prime[i] and prime[(i+1)//2]:
a[i] = 1
s = [0]*(max+1)
for i in range(max):
s[i+1] = s[i] + a[i]
Q = int(input())
for i in range(Q):
l,r = map(int, input().split())
print(s[r+1]-s[l]) | [
"[email protected]"
] | |
fb95a962370d7b4bb6c6d781611394a5ad69f45a | e3fe234510d19c120d56f9a2876b7d508d306212 | /17tensorflow/5_lm/ngram/ngram.py | 6146628f947c8ebec2603563c38c067b7d61b32d | [
"Apache-2.0"
] | permissive | KEVINYZY/python-tutorial | 78b348fb2fa2eb1c8c55d016affb6a9534332997 | ae43536908eb8af56c34865f52a6e8644edc4fa3 | refs/heads/master | 2020-03-30T02:11:03.394073 | 2019-12-03T00:52:10 | 2019-12-03T00:52:10 | 150,617,875 | 0 | 0 | Apache-2.0 | 2018-09-27T16:39:29 | 2018-09-27T16:39:28 | null | UTF-8 | Python | false | false | 3,057 | py | # -*- coding: utf-8 -*-
# Author: XuMing <[email protected]>
# Date: 17/11/29
# Brief:
"""读取语料 生成 n-gram 模型"""
from collections import Counter, defaultdict
from pprint import pprint
from random import random
import jieba
N = 2  # order of the n-gram model
START = '$$'  # token marking the start of a sentence
BREAK = '。!?'  # characters that end a sentence
IGNORE = '\n “”"《》〈〉()*'  # characters to skip entirely
def process_segs(segments):
"""对 segments (iterator) 进行处理,返回一个 list. 处理规则:
- 忽略 \n、空格、引号、书名号等
- 在断句符号后添加 START token
"""
results = [START for i in range(N - 1)]
for seg in segments:
if seg in IGNORE:
continue
else:
results.append(seg)
if seg in BREAK:
results.extend([START for i in range(N - 1)])
return results
def count_ngram(segments):
"""统计 N-gram 出现次数"""
dct = defaultdict(Counter)
for i in range(N - 1, len(segments)):
context = tuple(segments[i - N + 1:i])
word = segments[i]
dct[context][word] += 1
return dct
def to_prob(dct):
"""将次数字典转换为概率字典"""
prob_dct = dct.copy()
for context, count in prob_dct.items():
total = sum(count.values())
for word in count:
count[word] /= total # works in Python 3
return prob_dct
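# Illustrative example (made-up tokens, not from the corpus): with N = 2,
# count_ngram(['$$', 'a', 'b', '。']) gives {('$$',): Counter({'a': 1}),
# ('a',): Counter({'b': 1}), ('b',): Counter({'。': 1})}, and to_prob then
# rescales every Counter so the probabilities for each context sum to 1.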
def generate_word(prob_dct, context):
"""根据 context 及条件概率,随机生成 word"""
r = random()
psum = 0
for word, prob in prob_dct[context].items():
psum += prob
if psum > r:
return word
# return START
def generate_sentences(m, prob_dct):
"""生成 m 个句子"""
sentences = []
text = ''
context = tuple(START for i in range(N - 1))
i = 0
while (i < m):
word = generate_word(prob_dct, context)
text = text + word
context = tuple((list(context) + [word])[1:])
if word in BREAK:
sentences.append(text)
text = ''
context = tuple(START for i in range(N - 1))
i += 1
return sentences
def main():
    global N  # rebind the module-level N that process_segs / count_ngram read
    for N in range(2, 6):
print('\n*** reading corpus ***')
with open('../../../data/tianlongbabu.txt', encoding="utf8") as f:
corpus = f.read()
print('*** cutting corpus ***')
raw_segments = jieba.cut(corpus)
print('*** processing segments ***')
segments = process_segs(raw_segments)
print('*** generating {}-gram count dict ***'.format(N))
dct = count_ngram(segments)
print('*** generating {}-gram probability dict ***'.format(N))
prob_dct = to_prob(dct)
# pprint(prob_dct)
        import pickle
        # pickle.dump needs a file object; the output filename below is illustrative
        with open('prob_dct_{}gram.pkl'.format(N), 'wb') as pkl_file:
            pickle.dump(prob_dct, pkl_file)
print('*** generating sentences ***')
with open('generated_{}gram.txt'.format(N), 'w', encoding="utf8") as f:
f.write('\n'.join(generate_sentences(20, prob_dct)))
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
5630da04cc30441eabf72f420f1a24217fbaba01 | e2423781704811bf0a0ecc07f9cb29d0a044ac48 | /tensorflow_datasets/image/bccd/dummy_data_generation.py | bddde3b24d939e2a794def3d52ba9eee64bd8de6 | [
"Apache-2.0"
] | permissive | mbbessa/datasets | af2506a8cf5c46c33143d6e0266ba50d8b4c3fcc | 2a7e8e793197637948ea0e0be4aa02a6aa2f7f55 | refs/heads/master | 2021-11-30T22:28:55.825453 | 2021-11-19T20:49:49 | 2021-11-19T20:52:42 | 171,528,015 | 0 | 0 | Apache-2.0 | 2019-02-19T18:34:26 | 2019-02-19T18:34:26 | null | UTF-8 | Python | false | false | 5,387 | py | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Generate bccd data.
"""
import os
import random
from absl import app
from absl import flags
import tensorflow as tf
from tensorflow_datasets.core.utils import py_utils
from tensorflow_datasets.image.bccd import bccd
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.testing import fake_data_utils
# In TF 2.0, eager execution is enabled by default
tf.compat.v1.disable_eager_execution()
flags.DEFINE_string("tfds_dir", py_utils.tfds_dir(),
"Path to tensorflow_datasets directory")
FLAGS = flags.FLAGS
MIN_OBJECT_HEIGHT_WIDTH = 100
MAX_OBJECT_HEIGHT_WIDTH = 400
MIN_NUM_OBJECTS = 1
MAX_NUM_OBJECTS = 3
def _output_dir():
return os.path.join(FLAGS.tfds_dir, "image", "bccd", "dummy_data")
def _write_text_file(filepath, content):
"""Write a text file given its content."""
dirname = os.path.dirname(filepath)
if not tf.io.gfile.exists(dirname):
tf.io.gfile.makedirs(dirname)
with tf.io.gfile.GFile(filepath, "w") as f:
f.write(content)
def _generate_jpeg(example_id, height, width):
"""Generate a fake jpeg image for the given example id."""
jpeg = fake_data_utils.get_random_jpeg(height=height, width=width)
filepath = os.path.join(
_output_dir(),
"BCCD_Dataset-1.0/BCCD/JPEGImages/BloodImage_{:05d}.jpg".format(
example_id))
dirname = os.path.dirname(filepath)
if not tf.io.gfile.exists(dirname):
tf.io.gfile.makedirs(dirname)
tf.io.gfile.copy(jpeg, filepath, overwrite=True)
def _generate_annotation(example_id, height, width):
"""Generate a fake annotation XML for the given example id."""
# pylint: disable=protected-access
label_names = tfds.features.ClassLabel(names=bccd._CLASS_LABELS).names # pytype: disable=module-attr
# pylint: enable=protected-access
annotation = "<annotation>\n"
annotation += "<folder>JPEGImages</folder>\n"
annotation += "<filename>%d.jpg</filename>\n" % example_id
annotation += "<path>/home/pi/detection_dataset/JPEGImages/%d.jpg</path>" % example_id
annotation += "<source>\n"
annotation += "<database>Unknown</database>\n"
annotation += "</source>"
annotation += "<size>\n"
annotation += "<width>%d</width>\n" % width
annotation += "<height>%d</height>\n" % height
annotation += "</size>\n"
for i in range(random.randint(MIN_NUM_OBJECTS, MAX_NUM_OBJECTS)):
annotation += "<object>\n"
annotation += " <name>%s</name>\n" % random.choice(label_names)
annotation += " <pose>Unspecified</pose>\n"
annotation += " <truncated>0</truncated>\n"
if i > 0:
annotation += " <difficult>%s</difficult>\n" % random.randint(0, 1)
else:
annotation += " <difficult>0</difficult>\n"
obj_w = random.randint(MIN_OBJECT_HEIGHT_WIDTH, MAX_OBJECT_HEIGHT_WIDTH)
obj_h = random.randint(MIN_OBJECT_HEIGHT_WIDTH, MAX_OBJECT_HEIGHT_WIDTH)
obj_x = random.randint(0, width - obj_w)
obj_y = random.randint(0, height - obj_h)
annotation += " <bndbox>\n"
annotation += " <xmin>%d</xmin>\n" % obj_x
annotation += " <ymin>%d</ymin>\n" % obj_y
annotation += " <xmax>%d</xmax>\n" % (obj_x + obj_w - 1)
annotation += " <ymax>%d</ymax>\n" % (obj_y + obj_h - 1)
annotation += " </bndbox>\n"
annotation += "</object>\n"
annotation += "</annotation>\n"
# Add annotation XML to the tar file.
filepath = os.path.join(
_output_dir(),
"BCCD_Dataset-1.0/BCCD/Annotations/BloodImage_{:05d}.xml".format(
example_id))
_write_text_file(filepath, annotation)
def _generate_data_for_set(set_name, example_start, num_examples):
"""Generate different data examples for the train, validation or test sets."""
# Generate JPEG and XML files of each example.
for example_id in range(example_start, example_start + num_examples):
_generate_jpeg(example_id, 480, 640)
_generate_annotation(example_id, 480, 640)
# Add all example ids to the TXT file with all examples in the set.
filepath = os.path.join(
_output_dir(), "BCCD_Dataset-1.0/BCCD/ImageSets/Main/%s.txt" % set_name)
_write_text_file(
filepath, "".join([
"BloodImage_{:05d}\n".format(example_id)
for example_id in range(example_start, example_start + num_examples)
]))
def _generate_trainval_archive():
"""Generate train/val archive."""
_generate_data_for_set("train", example_start=0, num_examples=2)
_generate_data_for_set("val", example_start=2, num_examples=1)
def _generate_test_archive():
"""Generate test archive."""
_generate_data_for_set("test", example_start=3, num_examples=2)
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
_generate_trainval_archive()
_generate_test_archive()
if __name__ == "__main__":
app.run(main)
| [
"[email protected]"
] | |
a1cc5cf11e5624b2b3f89755554f97571fd1a25b | f759188e90610e08b4d85358abeaf27f2796964e | /tinyos-main/apps/PIR_Sensor/util/Listener.py | 464d97ddd4475819140e31d39a6f13222a0dc46e | [] | no_license | SoftwareDefinedBuildings/KetiMotes | 5555626231edb1cb76cb96bb4134a52d1d88bbb1 | b6dfea4b7d3dd384dd78a91ce62e7990cd337009 | refs/heads/master | 2020-04-06T23:55:42.151717 | 2014-09-11T18:25:17 | 2014-09-11T18:25:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,595 | py |
import socket
import UdpReport
import re
import sys
import time
import threading
port = 7000
stats = {}
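# stats maps a source address to a tuple of
# (packets received, time of last packet, first seqno seen, last seqno seen)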
class PrintStats(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
def run(self):
while True:
self.print_stats()
time.sleep(3)
def print_stats(self):
global stats
print "-" * 40
for k, v in stats.iteritems():
print "%s: %i/%i (%0.2f ago) (%0.2f%%)" % (k,
v[0],
v[3] - v[2] + 1,
time.time() - v[1],
100 * float(v[0]) /
(v[3] - v[2] + 1))
print "%i total" % len(stats)
print "-" * 40
if __name__ == '__main__':
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
s.bind(('', port))
ps = PrintStats()
ps.start()
while True:
data, addr = s.recvfrom(1024)
if (len(data) > 0):
rpt = UdpReport.UdpReport(data=data, data_length=len(data))
print rpt.get_seqno()
print rpt.get_interval()
print rpt.get_readings()
print addr[0]
if not addr[0] in stats:
stats[addr[0]] = (0, time.time(), rpt.get_seqno(), rpt.get_seqno())
cur = stats[addr[0]]
stats[addr[0]] = (cur[0] + 1, time.time(), cur[2], rpt.get_seqno())
| [
"[email protected]"
] | |
5f68224654afb98c99125e28a341ed8dd9de664a | 316c473d020f514ae81b7485b10f6556cf914fc0 | /scrapycrawlspidertest/scrapycrawlspidertest/spiders/universal.py | 38f44ee1edb89a378c243113f5a699a7ccc43884 | [
"Apache-2.0"
] | permissive | silianpan/seal-spider-demo | ca96b12d4b6fff8fe57f8e7822b7c0eb616fc7f3 | 7bdb77465a10a146c4cea8ad5d9ac589c16edd53 | refs/heads/master | 2023-06-20T03:47:04.572721 | 2023-05-24T06:27:13 | 2023-05-24T06:27:13 | 189,963,452 | 1 | 1 | Apache-2.0 | 2022-12-08T03:24:54 | 2019-06-03T08:15:56 | Python | UTF-8 | Python | false | false | 1,855 | py | # -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from scrapycrawlspidertest.utils import get_config
from scrapycrawlspidertest.items import *
from scrapycrawlspidertest.wraps import *
from scrapycrawlspidertest import urls
class UniversalSpider(CrawlSpider):
name = 'universal'
def __init__(self, name, *args, **kwargs):
config = get_config(name)
self.config = config
self.rules = eval(config.get('rules'))
start_urls = config.get('start_urls')
if start_urls:
if start_urls.get('type') == 'static':
self.start_urls = start_urls.get('value')
elif start_urls.get('type') == 'dynamic':
self.start_urls = list(eval('urls.' + start_urls.get('method'))(*start_urls.get('args', [])))
self.allowed_domains = config.get('allowed_domains')
super(UniversalSpider, self).__init__(*args, **kwargs)
def parse_item(self, response):
# 获取item配置
item = self.config.get('item')
if item:
data = eval(item.get('class') + '()')
# 动态获取属性配置
for key, value in item.get('attrs').items():
data[key] = response
for process in value:
type = process.get('type', 'chain')
if type == 'chain':
# 动态调用函数和属性
if process.get('method'):
data[key] = getattr(data[key], process.get('method'))(*process.get('args', []))
elif type == 'wrap':
args = [data[key]] + process.get('args', [])
data[key] = eval(process.get('method'))(*args)
yield data | [
"[email protected]"
] | |
a5d5b5d8d00dd3c8d9faee9c11aeea428df67616 | fd94ec2d4cfcdb8aa41c2ecf92504a6502987b54 | /scripts/EmuMarker.py | 27dad104e3e1fda0c3243804da0f8a2a8f3c2f84 | [
"LicenseRef-scancode-glut",
"BSD-3-Clause",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"Unlicense",
"MIT",
"SGI-B-2.0"
] | permissive | greggman/regal | 70bccfd935c42f2a532471f84f164b9992886bce | 60d5f5f060dcbfa6ff2cdd5bf8823fd5a9cf11db | refs/heads/master | 2020-12-30T19:11:25.692166 | 2012-09-12T14:39:39 | 2012-09-12T14:39:39 | 5,432,185 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 854 | py | #!/usr/bin/python -B
formulae = {
'Insert' : {
'entries' : [ 'glInsertEventMarkerEXT' ],
'impl' : [ '_context->marker->InsertEventMarker( _context, ${arg0plus} );',
'RegalAssert(_context->info);',
'if (!_context->info->gl_ext_debug_marker) return;' ]
},
'Push' : {
'entries' : [ 'glPushGroupMarkerEXT' ],
'impl' : [ '_context->marker->PushGroupMarker( _context, ${arg0plus} );',
'RegalAssert(_context->info);',
'if (!_context->info->gl_ext_debug_marker) return;' ]
},
'Pop' : {
'entries' : [ 'glPopGroupMarkerEXT' ],
'impl' : [ '_context->marker->PopGroupMarker( _context );',
'RegalAssert(_context->info);',
'if (!_context->info->gl_ext_debug_marker) return;' ]
}
}
| [
"[email protected]"
] | |
f4acaf7682a9a1e14d09298963943cca14536cb0 | 7b38197bb4772724f5e875f9d3b79d61050a072b | /BioSTEAM 1.x.x/biorefineries/cornstover/_plot_spearman.py | 0c107226c916f5a7750fac6a89132a83827bf351 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | yalinli2/Bioindustrial-Park | fac6d58d82af56f5081f529c3ee0c65a70fe7bd3 | 196e2d60ec9bf0466ef804d036c995b89bc72f72 | refs/heads/master | 2021-09-24T11:24:26.586458 | 2021-09-09T14:05:33 | 2021-09-09T14:05:33 | 232,337,200 | 2 | 0 | MIT | 2021-09-09T14:05:34 | 2020-01-07T14:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 2,749 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 16 00:02:57 2019
@author: yoelr
"""
import pandas as pd
from biosteam import colors
from biosteam.evaluation.evaluation_tools import plot_spearman
# %% Plot Spearman correlations
# Replacement parameter labels
replacement_labels = {
'Stream-Ethanol price': 'Ethanol price',
'TEA operating days': 'Operating days',
'Stream-cornstover price': 'Cornstover price',
'Fermentation-R301 efficiency': 'Fermentation efficiency',
'Stream-cellulase price': 'Cellulase price',
'Stream-cornstover flow rate': 'Cornstover flow rate',
'TEA income tax': 'Income tax',
'Saccharification and co fermentation-R301 saccharification conversion': 'Saccharification conversion',
'Saccharification and co fermentation-R301 ethanol conversion': 'Fermentation ethanol conversion',
'Boiler turbogenerator-BT boiler efficiency': 'Boiler efficiency',
'Boiler turbogenerator boiler base cost': 'Boiler base cost',
'Boiler turbogenerator turbogenerator base cost': 'Turbogenerator base cost',
'Pretreatment reactor system base cost': 'Pretreatment reactor base cost',
'Power utility price': 'Electricity price',
'Cooling tower base cost': 'Cooling tower base cost',
'Waste water system cost waste water system base cost': 'Wastewater treatment base cost',
'Waste water system cost waste water system exponent': 'Wastewater treatment exponent'}
def replace_label_text(label_text):
"""Replace label text for graph."""
name, distribution = label_text.split(' [')
lb, mid, ub = eval('[' + distribution)
if 'efficiency' in name:
distribution = f" ({lb:.2f}, {mid:.2f}, {ub:.2f})"
else:
distribution = f" ({lb:.3g}, {mid:.3g}, {ub:.3g})"
pos = name.find(' (')
if pos != -1:
units = str(name[pos:]).replace('(', '[').replace(')', ']')
if units == ' [USD/kg]':
units = ' [$\mathrm{USD} \cdot \mathrm{kg}^{-1}$]'
elif units == ' [USD/kWhr]':
units = ' [$\mathrm{USD} \cdot \mathrm{kWhr}^{-1}$]'
elif units == ' [kg/hr]':
units = ' [$\mathrm{kg} \cdot \mathrm{hr}^{-1}$]'
name = name[:pos]
else:
units = ''
if name in replacement_labels:
name = replacement_labels[name]
return name + units + distribution
# Get data
rhos = pd.read_excel('Spearman correlation cornstover.xlsx',
header=[0], index_col=0).iloc[:, 0]
# Get only important parameters
rhos = rhos[rhos.abs()>0.055]
# Plot and fix axis labels
fig, ax = plot_spearman(rhos, top=10, name='MESP')
labels = [item.get_text() for item in ax.get_yticklabels()]
new_labels = [replace_label_text(i) for i in labels]
ax.set_yticklabels(new_labels) | [
"[email protected]"
] | |
507716ccdcdc0b231befe78143fdbf537dbd0212 | 64cee8c8f33ae6be8edf0daa7a3a83efee86c82c | /cemba_data/tools/hdf5/netndf.py | 81594d52043eb0370f2dfeaaea81a3b7c7138127 | [
"MIT"
] | permissive | shengyongniu/cemba_data | 52881061dac63c5dca4bbedf9bc7f1f345b13575 | 6d076ed7f19ac76650d91fe9172393cc6c10e686 | refs/heads/master | 2021-10-09T14:31:43.849987 | 2018-12-29T23:19:53 | 2018-12-29T23:19:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,259 | py | import numpy as np
import pandas as pd
import xarray as xr
from anndata import AnnData
def _calculate_posterior_mc_rate(mc_array, cov_array, var_dim='chrom100k',
normalize_per_cell=True, clip_norm_value=10):
raw_rate = mc_array / cov_array
cell_rate_mean = raw_rate.mean(dim=var_dim)
cell_rate_var = raw_rate.var(dim=var_dim)
# based on beta distribution mean, var
# a / (a + b) = cell_rate_mean
# a * b / ((a + b) ^ 2 * (a + b + 1)) = cell_rate_var
# calculate alpha beta value for each cell
cell_a = (1 - cell_rate_mean) * (cell_rate_mean ** 2) / cell_rate_var - cell_rate_mean
cell_b = cell_a * (1 / cell_rate_mean - 1)
# cell specific posterior rate
post_rate = (mc_array + cell_a) / (cov_array + cell_a + cell_b)
if normalize_per_cell:
        # normalize per cell by dividing by the cell's own mean rate (mean centering)
        post_rate = post_rate / post_rate.mean(dim=var_dim)
if clip_norm_value is not None:
post_rate.values[np.where(post_rate.values > clip_norm_value)] = clip_norm_value
return post_rate
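# Illustrative check of the method-of-moments step above (numbers assumed):
# with a cell mean m = 0.8 and variance v = 0.01, m*(1-m)/v = 16, so
# cell_a = (1-m)*m**2/v - m = 12 and cell_b = cell_a*(1/m - 1) = 3; a Beta(12, 3)
# indeed has mean 12/15 = 0.8 and variance 12*3/(15**2 * 16) = 0.01.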
class MCDS(xr.Dataset):
def __init__(self, dataset):
super().__init__(data_vars=dataset.data_vars, coords=dataset.coords,
attrs=dataset.attrs, compat='broadcast_equals')
return
@classmethod
def open_dataset(cls, file_path):
return cls(xr.open_dataset(file_path))
def filter_cell_cov(self, dim, da, mc_type,
min_cov=10000, max_cov=None):
"""
Filter cell by total cov for certain mc_type along certain dimension in certain dataarray.
Parameters
----------
dim
region dimension to sum
da
dataarray to do calculation
mc_type
mc_type to sum
min_cov
minimum cov sum, suggest to plot distribution first.
max_cov
maximum cov sum, suggest ot plot distribution first.
Returns
-------
"""
if dim not in self[da].dims:
raise ValueError(f'{dim} is not a dimension of {da}')
cell_sum = self[da] \
.sel(count_type='cov', mc_type=mc_type) \
.squeeze() \
.sum(dim)
if max_cov is None:
max_cov = np.inf
cell_max = cell_sum < max_cov
cell_min = cell_sum > min_cov
cell_mask = np.all(np.vstack([cell_max.values,
cell_min.values]),
axis=0)
select_index = self.get_index('cell')[cell_mask]
return self.loc[dict(cell=select_index)]
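    # Illustrative usage (dataset and dimension names assumed, not from the
    # original project):
    #   mcds = MCDS.open_dataset('cells.mcds')
    #   mcds = mcds.filter_cell_cov(dim='chrom100k', da='chrom100k_da',
    #                               mc_type='CGN', min_cov=500000)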
def filter_region_cov(self, dim, da, mc_type,
min_cov=None, max_cov=None):
"""
Filter cell by total cov for certain mc_type along certain dimension in certain dataarray.
Parameters
----------
dim
region dimension to filter,
Note when filtering region, sum is always performed on cell.
da
dataarray to do calculation
mc_type
mc_type to sum
min_cov
minimum cov sum, suggest to plot distribution first.
max_cov
maximum cov sum, suggest ot plot distribution first.
Returns
-------
"""
if dim not in self[da].dims:
raise ValueError(f'{dim} is not a dimension of {da}')
region_sum = self[da] \
.sel(count_type='cov', mc_type=mc_type) \
.squeeze() \
.sum('cell')
if max_cov is None:
max_cov = np.inf
region_max = region_sum < max_cov
region_min = region_sum > min_cov
region_mask = np.all(np.vstack([region_max.values,
region_min.values]),
axis=0)
select_index = self.get_index(dim)[region_mask]
filtered_ds = self.loc[{dim: select_index}]
return MCDS(filtered_ds)
def add_mc_rate(self, dim, da,
normalize_per_cell=True,
clip_norm_value=10,
rate_da_suffix='rate'):
if da not in self.data_vars:
raise KeyError(f'{da} is not in this dataset')
if dim not in self[da].dims:
raise KeyError(f'{dim} is not a dimension of {da}')
da_mc = self[da].sel(count_type='mc')
da_cov = self[da].sel(count_type='cov')
        rate = _calculate_posterior_mc_rate(mc_array=da_mc,
                                            cov_array=da_cov,
                                            var_dim=dim,
                                            normalize_per_cell=normalize_per_cell,
                                            clip_norm_value=clip_norm_value)
        da_rate = xr.DataArray(data=rate.values,
                               coords=da_mc.coords,
                               dims=da_mc.dims)
self[da + "_" + rate_da_suffix] = da_rate
return
def add_gene_rate(self, dim='gene', da='gene_da',
normalize_per_cell=True, clip_norm_value=10,
rate_da_suffix='rate'):
if da not in self.data_vars:
raise KeyError(f'{da} is not in this dataset')
if dim not in self[da].dims:
raise KeyError(f'{dim} is not a dimension of {da}')
da_mc = self[da].sel(count_type='mc')
da_cov = self[da].sel(count_type='cov')
# for gene, we just use normal rate
rate = da_mc / da_cov
if normalize_per_cell:
            cell_overall = da_mc.sum(dim=dim) / da_cov.sum(dim=dim)
rate = rate / cell_overall
if clip_norm_value is not None:
rate.values[np.where(rate.values > clip_norm_value)] = clip_norm_value
self[da + "_" + rate_da_suffix] = rate
return
def to_ann(self, da, var_dim, mc_type, obs_dim='cell'):
index_dict = self[da].indexes
return AnnData(X=self[da].sel(mc_type=mc_type).values.copy(),
obs=pd.DataFrame(index=index_dict[obs_dim]),
var=pd.DataFrame(index=index_dict[var_dim]))
def add_ann_results(self, adata, var_dim, obs_dim='cell'):
# columns from AnnData.obs and AnnData.var go to da.coords
# obsm goes to new da with corresponding new dimension
obs_df = adata.obs
obs_df.index.name = obs_dim # make sure coords added with "cell" index
for col, data in obs_df.iteritems():
self.coords[col] = data
var_df = adata.var
var_df.index.name = var_dim # make sure coords added with "cell" index
for col, data in var_df.iteritems():
self.coords[col] = data
for obsm_key in adata.obsm_keys():
coord_name = obsm_key[2:] # remove 'X_'
obsm_data = adata.obsm[obsm_key]
obsm_df = pd.DataFrame(obsm_data,
index=adata.obs_names,
columns=[f'{coord_name}_{i}' for i in range(obsm_data.shape[1])])
obsm_df.index.name = obs_dim
obsm_df.columns.name = coord_name
self[coord_name + '_coord'] = obsm_df
for varm_key in adata.varm_keys():
coord_name = varm_key
varm_data = adata.varm[varm_key]
varm_df = pd.DataFrame(varm_data,
index=adata.var_names,
columns=[f'{coord_name}_{i}' for i in range(varm_data.shape[1])])
varm_df.index.name = var_dim
varm_df.columns.name = coord_name
self[coord_name + '_coord'] = varm_df
return
def add_dataframe_to_coords(self, df, index_dim):
# add columns to da.coords based on index and index_name
df.index.name = index_dim
for col, data in df.iteritems():
self.coords[col] = data
return
def add_dataframe_to_da(self, df, index_dim, col_dim, da_name):
# add columns to da.coords based on index and index_name
df.index.name = index_dim
df.columns.name = col_dim
self[da_name] = df
return
def get_plot_data(self, genes=None, coord='tsne'):
return
| [
"[email protected]"
] | |
9a1a1bdc04ee35d0744eed400f9333d11541d4ad | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /030_control_flow/003_for/_exercises/_templates/Python 3 Most Nessesary/4.3.Listing 4.7. Enumeration of elements of the list of tuples.py | 8299ab5805fc0af5d024b0635257ef3563f010f0 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 132 | py | # # -*- coding: utf-8 -*-
#
# arr = [(1, 2), (3, 4)]  # List of tuples
# for a, b in arr:
#     print(a, b)
| [
"[email protected]"
] | |
5cc4b052f8af56030d1a18a236cfee198c0e14a0 | c7a6f8ed434c86b4cdae9c6144b9dd557e594f78 | /ECE364/.PyCharm40/system/python_stubs/348993582/gst/_gst/Date.py | b079f8261188a978cb48855b5781e2227e2dea1e | [] | no_license | ArbalestV/Purdue-Coursework | 75d979bbe72106975812b1d46b7d854e16e8e15e | ee7f86145edb41c17aefcd442fa42353a9e1b5d1 | refs/heads/master | 2020-08-29T05:27:52.342264 | 2018-04-03T17:59:01 | 2018-04-03T17:59:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 822 | py | # encoding: utf-8
# module gst._gst
# from /usr/lib64/python2.6/site-packages/gst-0.10/gst/_gst.so
# by generator 1.136
# no doc
# imports
import gobject as __gobject
import gobject._gobject as __gobject__gobject
import gst as __gst
class Date(__gobject.GBoxed):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
day = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
month = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
year = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__gtype__ = None # (!) real value is ''
| [
"[email protected]"
] | |
71f09344d33d23e6d19b7a1d9894d79eb5f34f8d | 986236feac0d098977dc3f98b705d68155048233 | /0x06-python-classes/100-singly_linked_list.py | 1e413ac97d382295ceaf0a64d2ca75f43de9041b | [] | no_license | Noeuclides/holbertonschool-higher_level_programming | 1f1ec5731840f39ab988593ace190403f701ee67 | fcf0d733b73904a848b5718266a644c4f6452166 | refs/heads/master | 2020-05-18T03:28:56.901071 | 2019-10-03T17:30:20 | 2019-10-03T17:30:20 | 184,145,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | #!/usr/bin/python3
class Node:
    """Node of a singly linked list holding an integer value."""

    def __init__(self, data=0):
        if type(data) is not int:
            raise TypeError("data must be an integer")
        self.__data = data

    @property
    def data(self):
        return self.__data

    @data.setter
    def data(self, value):
        if type(value) is not int:
            raise TypeError("data must be an integer")
        self.__data = value
| [
"[email protected]"
] | |
823b24b4795ed11e530fa7bbbbd864226b91e019 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/coverage-big-4168.py | aaa834c9f7fe2dfdd97888d1a63ea8e948edcaf2 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,348 | py | count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], $ID: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
| [
"[email protected]"
] | |
4b3efe39bb0661581d9f7709df149ac517e2a194 | 292cec77b5003a2f80360d0aee77556d12d990f7 | /src/bentoml/_internal/resource.py | b1e467fceda9f9dc881734dfe05cc85bb1bd5c39 | [
"Apache-2.0"
] | permissive | yubozhao/BentoML | 194a6ec804cc1c6dbe7930c49948b6707cbc3c5f | d4bb5cbb90f9a8ad162a417103433b9c33b39c84 | refs/heads/master | 2022-12-17T00:18:55.555897 | 2022-12-06T00:11:39 | 2022-12-06T00:11:39 | 178,978,385 | 3 | 0 | Apache-2.0 | 2020-12-01T18:17:15 | 2019-04-02T01:53:53 | Python | UTF-8 | Python | false | false | 9,725 | py | from __future__ import annotations
import os
import re
import math
import typing as t
import logging
import functools
from abc import ABC
from abc import abstractmethod
import psutil
from ..exceptions import BentoMLConfigException
logger = logging.getLogger(__name__)
_RESOURCE_REGISTRY: dict[str, t.Type[Resource[t.Any]]] = {}
T = t.TypeVar("T")
def get_resource(
resources: dict[str, t.Any], resource_kind: str, validate: bool = True
) -> t.Any:
if resource_kind not in _RESOURCE_REGISTRY:
raise BentoMLConfigException(f"Unknown resource kind '{resource_kind}'.")
resource: t.Type[Resource[t.Any]] = _RESOURCE_REGISTRY[resource_kind]
if resource_kind in resources:
if resources[resource_kind] == "system":
return resource.from_system()
else:
res = resource.from_spec(resources[resource_kind])
if validate:
resource.validate(res)
return res
else:
return None
def system_resources() -> dict[str, t.Any]:
res: dict[str, t.Any] = {}
for resource_kind, resource in _RESOURCE_REGISTRY.items():
res[resource_kind] = resource.from_system()
return res
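# Illustrative sketch (added for exposition; not part of the upstream BentoML
# module): how the registry helpers above are typically driven.
def _example_resource_lookup():
    # "cpu" resolves through CpuResource.from_spec below, so "200m" becomes 0.2.
    assert get_resource({"cpu": "200m"}, "cpu", validate=False) == 0.2
    # A registered kind that is absent from the mapping simply yields None.
    assert get_resource({"cpu": "200m"}, "nvidia.com/gpu") is None
    # system_resources() builds {kind: Resource.from_system()} for every
    # registered kind (the GPU entry assumes pynvml is importable).
    return system_resources()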
class Resource(t.Generic[T], ABC):
def __init_subclass__(cls, *, resource_id: str): # pylint: disable=arguments-differ
_RESOURCE_REGISTRY[resource_id] = cls
@classmethod
@abstractmethod
def from_spec(cls, spec: t.Any) -> T:
"""
Get an instance of this resource from user input. For example, a CPU resource
might parse "10m" and return a CPU resource with 0.01 CPUs.
"""
@classmethod
@abstractmethod
def from_system(cls) -> T:
"""
Infer resource value from the system.
"""
@classmethod
@abstractmethod
def validate(cls, val: T):
"""
Validate that the resources are available on the current system.
"""
class CpuResource(Resource[float], resource_id="cpu"):
@classmethod
def from_spec(cls, spec: t.Any) -> float:
"""
Convert spec to CpuResource.
spec can be a float, int or string.
- 1.0 -> 1.0
- 1 -> 1.0
- "1" -> 1.0
- "10m" -> 0.01
"""
if not isinstance(spec, (int, float, str)):
raise TypeError("cpu must be int, float or str")
if isinstance(spec, (int, float)):
return float(spec)
milli_match = re.match("([0-9]+)m", spec)
if milli_match:
return float(milli_match[1]) / 1000.0
try:
return float(spec)
except ValueError:
raise BentoMLConfigException(f"Invalid CPU resource limit '{spec}'. ")
@classmethod
def from_system(cls) -> float:
if psutil.POSIX:
return query_cgroup_cpu_count()
else:
return float(query_os_cpu_count())
@classmethod
def validate(cls, val: float):
if val < 0:
raise BentoMLConfigException(
f"Invalid negative CPU resource limit '{val}'."
)
if not math.isclose(val, cls.from_system()) and val > cls.from_system():
raise BentoMLConfigException(
f"CPU resource limit {val} is greater than the system available: {cls.from_system()}"
)
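# Illustrative sketches (added for exposition; the helper names and the toy
# resource below are assumptions, not part of the upstream BentoML module).
def _example_cpu_spec_parsing():
    # The spec formats accepted by CpuResource.from_spec, per its docstring.
    assert CpuResource.from_spec(1) == 1.0        # plain int
    assert CpuResource.from_spec("1.5") == 1.5    # numeric string
    assert CpuResource.from_spec("250m") == 0.25  # Kubernetes-style milli-CPU
def _example_register_custom_resource():
    # Subclassing Resource with a resource_id keyword is all that is needed to
    # appear in _RESOURCE_REGISTRY (registration happens via __init_subclass__
    # above, and only when this function is called).
    class ExampleThreadResource(Resource[int], resource_id="example.threads"):
        @classmethod
        def from_spec(cls, spec: t.Any) -> int:
            return int(spec)
        @classmethod
        def from_system(cls) -> int:
            return os.cpu_count() or 1
        @classmethod
        def validate(cls, val: int):
            if val < 0:
                raise BentoMLConfigException("thread count cannot be negative")
    return ExampleThreadResource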
@functools.lru_cache(maxsize=1)
def query_cgroup_cpu_count() -> float:
# Query active cpu processor count using cgroup v1 API, based on OpenJDK
# implementation for `active_processor_count` using cgroup v1:
# https://github.com/openjdk/jdk/blob/master/src/hotspot/os/linux/cgroupSubsystem_linux.cpp
# For cgroup v2, see:
# https://github.com/openjdk/jdk/blob/master/src/hotspot/os/linux/cgroupV2Subsystem_linux.cpp
# Possible supports: cpuset.cpus on kubernetes
def _read_cgroup_file(filename: str) -> float:
with open(filename, "r", encoding="utf-8") as f:
return int(f.read().strip())
cgroup_root = "/sys/fs/cgroup/"
cfs_quota_us_file = os.path.join(cgroup_root, "cpu", "cpu.cfs_quota_us")
cfs_period_us_file = os.path.join(cgroup_root, "cpu", "cpu.cfs_period_us")
cpu_max_file = os.path.join(cgroup_root, "cpu.max")
quota = None
if os.path.exists(cfs_quota_us_file) and os.path.exists(cfs_period_us_file):
try:
quota = _read_cgroup_file(cfs_quota_us_file) / _read_cgroup_file(
cfs_period_us_file
)
except FileNotFoundError as err:
logger.warning("Caught exception while calculating CPU quota: %s", err)
# reading from cpu.max for cgroup v2
elif os.path.exists(cpu_max_file):
try:
with open(cpu_max_file, "r", encoding="utf-8") as max_file:
cfs_string = max_file.read()
quota_str, period_str = cfs_string.split()
if quota_str.isnumeric() and period_str.isnumeric():
quota = float(quota_str) / float(period_str)
else:
# quota_str is "max" meaning the cpu quota is unset
quota = None
except FileNotFoundError as err:
logger.warning("Caught exception while calculating CPU quota: %s", err)
if quota is not None and quota < 0:
quota = None
elif quota == 0:
quota = 1
os_cpu_count = float(os.cpu_count() or 1.0)
limit_count = math.inf
if quota:
limit_count = quota
return float(min(limit_count, os_cpu_count))
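# Worked example (hypothetical cgroup v1 numbers, added for exposition): with
# cpu.cfs_quota_us=150000 and cpu.cfs_period_us=100000 the quota is 1.5 CPUs,
# and query_cgroup_cpu_count() reports min(1.5, os.cpu_count()).
def _example_cgroup_quota_math(quota_us=150000, period_us=100000):
    quota = quota_us / period_us  # 150000 / 100000 -> 1.5
    return min(quota, float(os.cpu_count() or 1))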
@functools.lru_cache(maxsize=1)
def query_os_cpu_count() -> int:
cpu_count = os.cpu_count()
if cpu_count is not None:
return cpu_count
logger.warning("Failed to determine CPU count, using 1 as default.")
return 1
# class MemResource(Resource, resource_id="mem"):
# @classmethod
# def from_spec(cls, spec: t.Any):
# assert isinstance(mem, (int, str)), "mem must be int or str"
#
# if isinstance(mem, int):
# return mem
#
# unit_match = re.match("([0-9]+)([A-Za-z]{1,2})", mem)
# mem_multipliers = {
# "k": 1000,
# "M": 1000**2,
# "G": 1000**3,
# "T": 1000**4,
# "P": 1000**5,
# "E": 1000**6,
# "Ki": 1024,
# "Mi": 1024**2,
# "Gi": 1024**3,
# "Ti": 1024**4,
# "Pi": 1024**5,
# "Ei": 1024**6,
# }
# if unit_match:
# base = int(unit_match[1])
# unit = unit_match[2]
# if unit in mem_multipliers:
# return base * mem_multipliers[unit]
# raise ValueError(f"Invalid MEM resource limit '{mem}'")
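# Hedged sketch (not part of the original module) of the memory-spec parsing
# described by the commented-out MemResource above, with a trimmed unit table.
def _example_parse_mem_spec(mem):
    if isinstance(mem, int):
        return mem
    unit_match = re.match("([0-9]+)([A-Za-z]{1,2})", mem)
    mem_multipliers = {
        "k": 1000, "M": 1000**2, "G": 1000**3,
        "Ki": 1024, "Mi": 1024**2, "Gi": 1024**3,
    }
    if unit_match and unit_match[2] in mem_multipliers:
        # e.g. "512Mi" -> 512 * 1024**2 bytes
        return int(unit_match[1]) * mem_multipliers[unit_match[2]]
    raise ValueError(f"Invalid MEM resource limit '{mem}'")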
class NvidiaGpuResource(Resource[t.List[int]], resource_id="nvidia.com/gpu"):
@classmethod
def from_spec(cls, spec: int | str | list[int | str]) -> list[int]:
if not isinstance(spec, (int, str, list)):
            raise TypeError(
                "NVidia GPU device IDs must be int, str or a list specifying the exact GPUs to use."
            )
try:
if isinstance(spec, int):
if spec < -1:
raise ValueError
return list(range(spec))
elif isinstance(spec, str):
return cls.from_spec(int(spec))
else:
return [int(x) for x in spec]
except ValueError:
raise BentoMLConfigException(
f"Invalid NVidia GPU resource limit '{spec}'. "
)
@classmethod
@functools.lru_cache(maxsize=1)
def from_system(cls) -> list[int]:
"""
query nvidia gpu count, available on Windows and Linux
"""
import pynvml # type: ignore
try:
pynvml.nvmlInit()
device_count = pynvml.nvmlDeviceGetCount()
return list(range(device_count))
except (pynvml.nvml.NVMLError, OSError):
logger.debug("GPU not detected. Unable to initialize pynvml lib.")
return []
finally:
try:
pynvml.nvmlShutdown()
except Exception: # pylint: disable=broad-except
pass
@classmethod
def validate(cls, val: t.List[int]):
if any([gpu_index < 0 for gpu_index in val]):
raise BentoMLConfigException(f"Negative GPU device in {val}.")
if any([gpu_index >= len(cls.from_system()) for gpu_index in val]):
raise BentoMLConfigException(
f"GPU device index in {val} is greater than the system available: {cls.from_system()}"
)
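# Illustrative sketch (added for exposition): the GPU spec formats accepted by
# NvidiaGpuResource.from_spec.
def _example_gpu_spec_parsing():
    assert NvidiaGpuResource.from_spec(2) == [0, 1]         # first two devices
    assert NvidiaGpuResource.from_spec("3") == [0, 1, 2]    # numeric string
    assert NvidiaGpuResource.from_spec([0, "2"]) == [0, 2]  # explicit device IDs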
def get_gpu_memory(dev: int) -> t.Tuple[float, float]:
"""
Return Total Memory and Free Memory in given GPU device. in MiB
"""
import pynvml.nvml # type: ignore
from pynvml.smi import nvidia_smi # type: ignore
unit_multiplier = {
"PiB": 1024.0 * 1024 * 1024,
"TiB": 1024.0 * 1024,
"GiB": 1024.0,
"MiB": 1.0,
"KiB": 1.0 / 1024,
"B": 1.0 / 1024 / 1024,
}
try:
inst = nvidia_smi.getInstance()
query: t.Dict[str, int] = inst.DeviceQuery(dev) # type: ignore
except (pynvml.nvml.NVMLError, OSError):
return 0.0, 0.0
try:
gpus: t.List[t.Dict[str, t.Any]] = query.get("gpu", []) # type: ignore
gpu = gpus[dev]
unit = gpu["fb_memory_usage"]["unit"]
total = gpu["fb_memory_usage"]["total"] * unit_multiplier[unit]
free = gpu["fb_memory_usage"]["free"] * unit_multiplier[unit]
return total, free
except IndexError:
raise ValueError(f"Invalid GPU device index {dev}")
except KeyError:
raise RuntimeError(f"unexpected nvml query result: {query}")
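# Illustrative usage (device index 0 is an assumption; without an NVIDIA GPU or
# pynvml the function above returns (0.0, 0.0)).
def _example_gpu_memory_fraction_free(dev=0):
    total_mib, free_mib = get_gpu_memory(dev)
    return free_mib / total_mib if total_mib else 0.0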
| [
"[email protected]"
] | |
c2ef80a416cc1c202f00d685ef27f6d11b3faf08 | 4fed7ad67d3bb7da502acaf347dff542971c1c4c | /app.py | 24f400a3d432d02740af9391a5b196df5498a484 | [
"MIT"
] | permissive | coolsnake/WebFtp | b62437b895261f3083d3f7d3550b541116b30cef | d76bce2391d393d2eeb92be7700dd49a1663e696 | refs/heads/master | 2021-04-15T14:05:50.752335 | 2017-09-25T10:59:50 | 2017-09-25T10:59:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | #!/usr/bin/env python3
import tornado.ioloop
import tornado.web
from controllers import index
from controllers import account
settings = {
'template_path': 'template',
'static_path': 'static',
'static_url_prefix': '/static/',
'cookie_secret': '43809138f51b96f8ac24e79b3a2cb482',
'login_url': '/login',
#'xsrf_cookies': True,
'debug': True,
'autoreload': True,
}
application = tornado.web.Application([
    # Home page
(r"/index", index.IndexHandler),
# Admin
(r"/admin", index.AdminHandle),
    # Login
(r"/login", account.LoginHandler),
    # Logout
(r"/logout", account.LogoutHandler),
    # Upload
(r"/upload", index.UploadFileNginxHandle),
], **settings)
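# Illustrative note (not part of the original controllers): additional routes
# follow the same (pattern, Handler) shape as the table above, e.g. a
# hypothetical health-check endpoint:
#
#     class PingHandler(tornado.web.RequestHandler):
#         def get(self):
#             self.write("pong")
#
# registered as (r"/ping", PingHandler) alongside the existing handlers.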
if __name__ == '__main__':
application.listen(8000)
tornado.ioloop.IOLoop.instance().start()
| [
"[email protected]"
] | |
9c26acdd9f243cc659a6ae97ad61d70e3a774709 | af3ec207381de315f4cb6dddba727d16d42d6c57 | /dialogue-engine/src/programy/spelling/textblob_spelling.py | 17dce9132a295389213305638b9ac113ad1c6fc2 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mcf-yuichi/cotoba-agent-oss | 02a5554fe81ce21517f33229101013b6487f5404 | ce60833915f484c4cbdc54b4b8222d64be4b6c0d | refs/heads/master | 2023-01-12T20:07:34.364188 | 2020-11-11T00:55:16 | 2020-11-11T00:55:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,540 | py | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
Copyright (c) 2016-2019 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from textblob import TextBlob
from programy.spelling.base import SpellingChecker
class TextBlobSpellingChecker(SpellingChecker):
def __init__(self, spelling_config=None):
SpellingChecker.__init__(self, spelling_config)
def correct(self, phrase):
blob = TextBlob(phrase)
correct_blob = blob.correct()
return str(correct_blob)
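# Illustrative usage (hedged; the sample sentence is made up and this assumes
# the SpellingChecker base class accepts a None config):
def _example_correct():
    checker = TextBlobSpellingChecker()
    # TextBlob's correct() replaces likely misspellings, e.g. "speling" -> "spelling".
    return checker.correct("I havve goood speling")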
| [
"[email protected]"
] | |
2146739f9834f0af7d112fc44b3b75d696d80c39 | f1a51bb6cb5810a2dfac27cbbe32f5c5761bd8ec | /angrmanagement/data/object_container.py | 63b931645b8e1c14d8a5902e8eb52b570ff38979 | [
"BSD-2-Clause"
] | permissive | sraboy/angr-management | 904848408e9eec6662e16d9b69a0991b0374d3c6 | 4c4c1df7bce7083547ae38a19709f33dd10b7e22 | refs/heads/master | 2020-04-30T17:23:24.427321 | 2019-09-21T09:34:21 | 2019-09-21T09:34:21 | 176,977,927 | 0 | 1 | BSD-2-Clause | 2019-03-21T15:52:06 | 2019-03-21T15:52:06 | null | UTF-8 | Python | false | false | 2,374 | py |
from ..utils.namegen import NameGenerator
class EventSentinel:
def __init__(self):
self.am_subscribers = []
def am_subscribe(self, listener):
if listener is not None:
self.am_subscribers.append(listener)
def am_unsubscribe(self, listener):
if listener is not None:
self.am_subscribers.remove(listener)
def am_event(self, **kwargs):
for listener in self.am_subscribers:
listener(**kwargs)
class ObjectContainer(EventSentinel):
def __init__(self, obj, name=None, notes=''):
super(ObjectContainer, self).__init__()
self._am_obj = None
self.am_obj = obj
self.am_name = name if name is not None else NameGenerator.random_name()
self.am_notes = notes
# cause events to propagate upward through nested objectcontainers
@property
def am_obj(self):
return self._am_obj
@am_obj.setter
def am_obj(self, v):
if type(self._am_obj) is ObjectContainer:
self._am_obj.am_unsubscribe(self.__forwarder)
if type(v) is ObjectContainer:
v.am_subscribe(self.__forwarder)
self._am_obj = v
def am_none(self):
return self._am_obj is None
def __forwarder(self, **kwargs):
kwargs['forwarded'] = True
self.am_event(**kwargs)
def __getattr__(self, item):
if item.startswith('am_') or item.startswith('_am_'):
return object.__getattribute__(self, item)
return getattr(self._am_obj, item)
def __setattr__(self, key, value):
if key.startswith('am_') or key.startswith('_am_'):
return object.__setattr__(self, key, value)
setattr(self._am_obj, key, value)
def __getitem__(self, item):
return self._am_obj[item]
def __setitem__(self, key, value):
self._am_obj[key] = value
def __dir__(self):
return dir(self._am_obj) + list(self.__dict__) + list(EventSentinel.__dict__) + ['am_obj', 'am_full']
def __iter__(self):
return iter(self._am_obj)
def __len__(self):
return len(self._am_obj)
def __eq__(self, other):
return self is other or self._am_obj == other
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return '(container %s)%s' % (self.am_name, repr(self._am_obj))
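# Illustrative sketch (added for exposition): wrapping a plain object,
# subscribing to events, and the transparent proxying shown above.
def _example_container_usage():
    seen = []
    container = ObjectContainer([1, 2, 3], name='demo')
    container.am_subscribe(lambda **kwargs: seen.append(kwargs))
    container.am_event(reason='updated')  # every subscriber receives the kwargs
    assert seen == [{'reason': 'updated'}]
    assert len(container) == 3            # __len__ proxies to the wrapped list
    assert container == [1, 2, 3]         # __eq__ compares against the wrapped object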
| [
"[email protected]"
] | |
0d62359f4aa4b91e0b20b2e5dc8b3ae4daab4878 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03292/s123105438.py | 9c05d46bc0b47e9e16017b20ebf76cfb8fcba1cc | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | a,b,c = sorted(map(int,input().split()))
print(max(a,b,c)-min(a,b,c)) | [
"[email protected]"
] | |
a4f282d077acf231c813e0781067964299e282f7 | 6f50d88145923deba55f5df5f88e872a46504f71 | /siteconfig/utils.py | e51be1affa4f0bc68f3bca34d399fc656a2d03cf | [] | no_license | vfxetc/siteconfig | ce85cff95a865a8ab6271f305b70643c364c1952 | 7124e941cf5068a70f07d0011902af797b74657e | refs/heads/master | 2021-09-12T13:00:40.933138 | 2017-08-04T15:08:42 | 2017-08-04T15:08:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 314 | py | import re
def normalize_key(input_):
input_ = re.sub(r'[^\w\[\]]+', '_', input_)
input_ = re.sub(r'^(\w+)', lambda m: m.group(1).upper(), input_)
return input_
def shell_escape(input_):
return str(input_).replace('"', '\\"')
def shell_quote(input_):
return '"%s"' % shell_escape(input_)
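# Illustrative examples (inputs are made up):
def _example_usage():
    assert normalize_key('foo.bar[0]') == 'FOO_BAR[0]'
    assert shell_escape('say "hi"') == 'say \\"hi\\"'
    assert shell_quote('say "hi"') == '"say \\"hi\\""'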
| [
"[email protected]"
] | |
30054b750cf65f48a5410dc67d6c9fd17cee69f1 | 53947441840357e3966eda580c6a5de3e0b92613 | /blaze/module/qualname.py | 65b9d324944ebfcc5fd2d0ce2f7ac4a97f5915c3 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | jedbrown/blaze-core | 8b9eda6267253e1609a7382197ffdf67a41407f6 | b2f0d350f5cb7b802819ca46738bacdbe70db13a | refs/heads/master | 2021-01-17T06:24:38.307059 | 2013-04-04T18:24:36 | 2013-04-04T18:24:36 | 9,283,188 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,381 | py | #------------------------------------------------------------------------
# Names
#------------------------------------------------------------------------
class Namespace(object):
def __init__(self, names):
self.names = names
def show(self):
return '.'.join(self.names)
class QualName(object):
def __init__(self, namespace, name):
assert isinstance(namespace, list)
        self.namespace = namespace
self.name = name
def isprim(self):
return self.namespace == ['Prims']
def isqual(self):
return len(self.namespace) > 1
def show(self):
return '.'.join(self.namespace + [self.name])
def __str__(self):
return self.show()
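# Illustrative usage (the names are made up) of the classes above:
def _example_names():
    assert Namespace(['Blaze', 'Math']).show() == 'Blaze.Math'
    qn = QualName(['Prims'], 'add')
    assert qn.show() == 'Prims.add'
    assert qn.isprim() and not qn.isqual()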
#------------------------------------------------------------------------
# Module
#------------------------------------------------------------------------
class Module(object):
def __init__(self, name):
self.name = name
def alias(self):
pass
def expose(self, sym, sig):
pass
#------------------------------------------------------------------------
# Function References
#------------------------------------------------------------------------
# string -> name
# Reference to a function name
def name(s):
pass
# name -> term
# Reference to a function name
def ref(n):
pass
# string -> term
def fn(s):
pass
| [
"[email protected]"
] | |
bcb660a70c23a57e06a129b7d5e0ac8a48ccc062 | 440736bf8cdfff0b5569105519204414414b455a | /reviewboard/hostingsvcs/tests/test_bitbucket.py | 17ebf6a31a0ba0b0ec1a00ab52f35359a3acad75 | [
"MIT"
] | permissive | bbbbbgit/reviewboard | ef456bbd848a26a0babe9bbf9d1ccfdf6cafcffc | f9cff6699ec387600d1d0540db32d1a0517c203d | refs/heads/master | 2022-07-17T01:38:12.774767 | 2020-05-12T02:43:03 | 2020-05-12T02:43:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60,819 | py | """Unit tests for the Bitbucket hosting service."""
from __future__ import unicode_literals
import logging
from django.contrib.auth.models import User
from django.test.client import RequestFactory
from django.utils.safestring import SafeText
from djblets.testing.decorators import add_fixtures
from reviewboard.hostingsvcs.bitbucket import BitbucketAuthForm
from reviewboard.hostingsvcs.errors import (AuthorizationError,
RepositoryError)
from reviewboard.hostingsvcs.testing import HostingServiceTestCase
from reviewboard.reviews.models import ReviewRequest
from reviewboard.scmtools.core import Branch, Commit
from reviewboard.scmtools.crypto_utils import (decrypt_password,
encrypt_password)
from reviewboard.scmtools.errors import FileNotFoundError
from reviewboard.site.models import LocalSite
from reviewboard.site.urlresolvers import local_site_reverse
class BitbucketTestCase(HostingServiceTestCase):
"""Base class for Bitbucket test suites."""
service_name = 'bitbucket'
fixtures = ['test_scmtools']
default_account_data = {
'password': encrypt_password(HostingServiceTestCase.default_password),
}
default_repository_extra_data = {
'bitbucket_repo_name': 'myrepo',
}
class BitbucketTests(BitbucketTestCase):
"""Unit tests for the Bitbucket hosting service."""
def test_service_support(self):
"""Testing Bitbucket service support capabilities"""
self.assertTrue(self.service_class.supports_bug_trackers)
self.assertTrue(self.service_class.supports_repositories)
def test_get_repository_fields_with_git_and_personal_plan(self):
"""Testing Bitbucket.get_repository_fields for Git and plan=personal"""
self.assertEqual(
self.get_repository_fields(
'Git',
fields={
'bitbucket_repo_name': 'myrepo',
},
plan='personal'
),
{
'path': '[email protected]:myuser/myrepo.git',
'mirror_path': ('https://[email protected]/myuser/'
'myrepo.git'),
})
def test_get_repository_fields_with_mercurial_and_personal_plan(self):
"""Testing Bitbucket.get_repository_fields for Mercurial and
plan=personal
"""
self.assertEqual(
self.get_repository_fields(
'Mercurial',
fields={
'bitbucket_repo_name': 'myrepo',
},
plan='personal'
),
{
'path': 'https://[email protected]/myuser/myrepo',
'mirror_path': 'ssh://[email protected]/myuser/myrepo',
})
def test_get_repository_fields_with_git_and_team_plan(self):
"""Testing Bitbucket.get_repository_fields for Git and plan=team"""
self.assertEqual(
self.get_repository_fields(
'Git',
fields={
'bitbucket_team_name': 'myteam',
'bitbucket_team_repo_name': 'myrepo',
},
plan='team'
),
{
'path': '[email protected]:myteam/myrepo.git',
'mirror_path': ('https://[email protected]/myteam/'
'myrepo.git'),
})
def test_get_repository_fields_with_mercurial_and_team_plan(self):
"""Testing Bitbucket.get_repository_fields for Mercurial and plan=team
"""
self.assertEqual(
self.get_repository_fields(
'Mercurial',
fields={
'bitbucket_team_name': 'myteam',
'bitbucket_team_repo_name': 'myrepo',
},
plan='team'
),
{
'path': 'https://[email protected]/myteam/myrepo',
'mirror_path': 'ssh://[email protected]/myteam/myrepo',
})
def test_get_repository_fields_with_git_and_other_user_plan(self):
"""Testing Bitbucket.get_repository_fields for Git and plan=other-user
"""
self.assertEqual(
self.get_repository_fields(
'Git',
fields={
'bitbucket_other_user_username': 'someuser',
'bitbucket_other_user_repo_name': 'myrepo',
},
plan='other-user'
),
{
'path': '[email protected]:someuser/myrepo.git',
'mirror_path': ('https://[email protected]/someuser/'
'myrepo.git'),
})
def test_get_repository_fields_with_mercurial_and_other_user_plan(self):
"""Testing Bitbucket.get_repository_fields for Mercurial and
plan=other-user
"""
self.assertEqual(
self.get_repository_fields(
'Mercurial',
fields={
'bitbucket_other_user_username': 'someuser',
'bitbucket_other_user_repo_name': 'myrepo',
},
plan='other-user'
),
{
'path': 'https://[email protected]/someuser/myrepo',
'mirror_path': 'ssh://[email protected]/someuser/myrepo',
})
def test_get_bug_tracker_field_with_personal_plan(self):
"""Testing Bitbucket.get_bug_tracker_field with plan=personal"""
self.assertTrue(self.service_class.get_bug_tracker_requires_username(
plan='personal'))
self.assertEqual(
self.service_class.get_bug_tracker_field(
'personal',
{
'bitbucket_repo_name': 'myrepo',
'hosting_account_username': 'myuser',
}),
'https://bitbucket.org/myuser/myrepo/issue/%s/')
def test_get_bug_tracker_field_with_team_plan(self):
"""Testing Bitbucket.get_bug_tracker_field with plan=team"""
self.assertFalse(self.service_class.get_bug_tracker_requires_username(
plan='team'))
self.assertEqual(
self.service_class.get_bug_tracker_field(
'team',
{
'bitbucket_team_name': 'myteam',
'bitbucket_team_repo_name': 'myrepo',
}),
'https://bitbucket.org/myteam/myrepo/issue/%s/')
def test_get_bug_tracker_field_with_other_user_plan(self):
"""Testing Bitbucket.get_bug_tracker_field with plan=other-user"""
self.assertFalse(self.service_class.get_bug_tracker_requires_username(
plan='other-user'))
self.assertEqual(
self.service_class.get_bug_tracker_field(
'other-user',
{
'bitbucket_other_user_username': 'someuser',
'bitbucket_other_user_repo_name': 'myrepo',
}),
'https://bitbucket.org/someuser/myrepo/issue/%s/')
def test_get_repository_hook_instructions(self):
"""Testing BitBucket.get_repository_hook_instructions"""
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
hooks_uuid = repository.get_or_create_hooks_uuid()
request = RequestFactory().get(path='/')
request.user = User.objects.create(username='test-user')
content = repository.hosting_service.get_repository_hook_instructions(
request=request,
repository=repository)
self.assertIsInstance(content, SafeText)
self.assertIn(
('https://bitbucket.org/myuser/myrepo/admin/addon/admin/'
'bitbucket-webhooks/bb-webhooks-repo-admin'),
content)
self.assertIn(
('http://example.com/repos/1/bitbucket/hooks/%s/close-submitted/'
% hooks_uuid),
content)
self.assertIn('Review Board supports closing', content)
self.assertIn('<code>Review Board</code>', content)
def test_check_repository_with_personal_plan(self):
"""Testing Bitbucket.check_repository with plan=personal"""
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
ctx.service.check_repository(bitbucket_repo_name='myrepo',
plan='personal',
tool_name='Git')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo'
'?fields=scm'))
def test_check_repository_with_team_plan(self):
"""Testing Bitbucket.check_repository with plan=team"""
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
ctx.service.check_repository(bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo',
tool_name='Git',
plan='team')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myteam/myrepo'
'?fields=scm'))
def test_check_repository_with_other_user_plan(self):
"""Testing Bitbucket.check_repository with plan=other-user"""
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
ctx.service.check_repository(
bitbucket_other_user_username='someuser',
bitbucket_other_user_repo_name='myrepo',
plan='other-user',
tool_name='Git')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/someuser/myrepo'
'?fields=scm'))
def test_check_repository_with_slash(self):
"""Testing Bitbucket.check_repository with /"""
expected_message = \
'Please specify just the name of the repository, not a path.'
with self.setup_http_test(expected_http_calls=0) as ctx:
with self.assertRaisesMessage(RepositoryError, expected_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myteam/myrepo',
plan='team')
def test_check_repository_with_dot_git(self):
"""Testing Bitbucket.check_repository with .git"""
expected_message = \
'Please specify just the name of the repository without ".git".'
with self.setup_http_test(expected_http_calls=0) as ctx:
with self.assertRaisesMessage(RepositoryError, expected_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo.git',
plan='team')
def test_check_repository_with_type_mismatch(self):
"""Testing Bitbucket.check_repository with type mismatch"""
error_message = (
'The Bitbucket repository being configured does not match the '
'type of repository you have selected.'
)
with self.setup_http_test(payload=b'{"scm": "git"}',
expected_http_calls=1) as ctx:
# Check Git repositories.
with self.assertRaisesMessage(RepositoryError, error_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo',
plan='team',
tool_name='Mercurial')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myteam/myrepo'
'?fields=scm'))
# Now check Mercurial repositories.
with self.setup_http_test(payload=b'{"scm": "hg"}',
expected_http_calls=1) as ctx:
with self.assertRaisesMessage(RepositoryError, error_message):
ctx.service.check_repository(
bitbucket_team_name='myteam',
bitbucket_team_repo_name='myrepo',
plan='team',
tool_name='Git')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myteam/myrepo'
'?fields=scm'))
def test_authorize(self):
"""Testing Bitbucket.authorize"""
hosting_account = self.create_hosting_account(data={})
with self.setup_http_test(payload=b'{}',
hosting_account=hosting_account,
expected_http_calls=1) as ctx:
self.assertFalse(ctx.service.is_authorized())
ctx.service.authorize(username='myuser',
password='abc123')
self.assertIn('password', hosting_account.data)
self.assertNotEqual(hosting_account.data['password'], 'abc123')
self.assertEqual(decrypt_password(hosting_account.data['password']),
'abc123')
self.assertTrue(ctx.service.is_authorized())
ctx.assertHTTPCall(
0,
url='https://bitbucket.org/api/2.0/user',
username='myuser',
password='abc123')
def test_authorize_with_bad_credentials(self):
"""Testing Bitbucket.authorize with bad credentials"""
hosting_account = self.create_hosting_account(data={})
expected_message = (
'Invalid Bitbucket username or password. Make sure you are using '
'your Bitbucket username and not e-mail address, and are using an '
'app password if two-factor authentication is enabled.'
)
with self.setup_http_test(status_code=401,
hosting_account=hosting_account,
expected_http_calls=1) as ctx:
self.assertFalse(ctx.service.is_authorized())
with self.assertRaisesMessage(AuthorizationError,
expected_message):
ctx.service.authorize(username='myuser',
password='abc123')
self.assertNotIn('password', hosting_account.data)
self.assertFalse(ctx.service.is_authorized())
ctx.assertHTTPCall(
0,
url='https://bitbucket.org/api/2.0/user',
username='myuser',
password='abc123')
def test_get_file_with_mercurial_and_base_commit_id(self):
"""Testing Bitbucket.get_file with Mercurial and base commit ID"""
self._test_get_file(
tool_name='Mercurial',
revision='123',
base_commit_id='456',
expected_revision='456')
def test_get_file_with_mercurial_and_revision(self):
"""Testing Bitbucket.get_file with Mercurial and revision"""
self._test_get_file(
tool_name='Mercurial',
revision='123',
base_commit_id=None,
expected_revision='123')
def test_get_file_with_git_and_base_commit_id(self):
"""Testing Bitbucket.get_file with Git and base commit ID"""
self._test_get_file(
tool_name='Git',
revision='123',
base_commit_id='456',
expected_revision='456')
def test_get_file_with_git_and_revision(self):
"""Testing Bitbucket.get_file with Git and revision"""
with self.assertRaises(FileNotFoundError):
self._test_get_file(tool_name='Git',
revision='123',
base_commit_id=None,
expected_revision='123')
def test_get_file_exists_with_mercurial_and_base_commit_id(self):
"""Testing Bitbucket.get_file_exists with Mercurial and base commit ID
"""
self._test_get_file_exists(
tool_name='Mercurial',
revision='123',
base_commit_id='456',
expected_revision='456',
expected_found=True)
def test_get_file_exists_with_mercurial_and_revision(self):
"""Testing Bitbucket.get_file_exists with Mercurial and revision"""
self._test_get_file_exists(
tool_name='Mercurial',
revision='123',
base_commit_id=None,
expected_revision='123',
expected_found=True)
def test_get_file_exists_with_git_and_base_commit_id(self):
"""Testing Bitbucket.get_file_exists with Git and base commit ID"""
self._test_get_file_exists(
tool_name='Git',
revision='123',
base_commit_id='456',
expected_revision='456',
expected_found=True)
def test_get_file_exists_with_git_and_revision(self):
"""Testing Bitbucket.get_file_exists with Git and revision"""
self._test_get_file_exists(
tool_name='Git',
revision='123',
base_commit_id=None,
expected_revision='123',
expected_found=False,
expected_http_called=False)
def test_get_file_exists_with_git_and_404(self):
"""Testing BitBucket.get_file_exists with Git and a 404 error"""
self._test_get_file_exists(
tool_name='Git',
revision='123',
base_commit_id='456',
expected_revision='456',
expected_found=False)
def test_get_branches(self):
"""Testing Bitbucket.get_branches"""
branches_api_response_1 = self.dump_json({
'next': ('https://bitbucket.org/api/2.0/repositories/myuser/'
'myrepo/refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext'
'&pagelen=100&page=2'),
'values': [
{
'name': 'branch1',
'target': {
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
},
},
{
'name': 'branch2',
'target': {
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
},
],
})
branches_api_response_2 = self.dump_json({
'values': [
{
'name': 'branch3',
'target': {
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
},
{
'name': 'branch4',
'target': {
'hash': 'd286691517e6325fea5c7a21d5e44568f7d33647',
},
},
],
})
get_repository_api_response = self.dump_json({
'mainbranch': {
'name': 'branch3',
},
})
paths = {
'/api/2.0/repositories/myuser/myrepo': {
'payload': get_repository_api_response,
},
('/api/2.0/repositories/myuser/myrepo/refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext&pagelen=100'): {
'payload': branches_api_response_1,
},
('/api/2.0/repositories/myuser/myrepo/refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext&page=2'
'&pagelen=100'): {
'payload': branches_api_response_2,
},
}
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=3) as ctx:
repository = self.create_repository(tool_name='Git')
branches = ctx.service.get_branches(repository)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo'
'?fields=mainbranch.name'))
ctx.assertHTTPCall(
1,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext'
'&pagelen=100'))
ctx.assertHTTPCall(
2,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'refs/branches'
'?fields=values.name%2Cvalues.target.hash%2Cnext'
'&page=2&pagelen=100'))
self.assertEqual(
branches,
[
Branch(id='branch1',
commit='1c44b461cebe5874a857c51a4a13a849a4d1e52d'),
Branch(id='branch2',
commit='44568f7d33647d286691517e6325fea5c7a21d5e'),
Branch(id='branch3',
commit='e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
default=True),
Branch(id='branch4',
commit='d286691517e6325fea5c7a21d5e44568f7d33647'),
])
def test_get_commits(self):
"""Testing Bitbucket.get_commits"""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <[email protected]>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <[email protected]>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(repository)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <[email protected]>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <[email protected]>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_commits_with_start(self):
"""Testing Bitbucket.get_commits with start="""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <[email protected]>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <[email protected]>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(
repository,
start='1c44b461cebe5874a857c51a4a13a849a4d1e5')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits/1c44b461cebe5874a857c51a4a13a849a4d1e5'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <[email protected]>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <[email protected]>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_commits_with_branch(self):
"""Testing Bitbucket.get_commits with branch="""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <[email protected]>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <[email protected]>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(repository,
branch='master')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits/master'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <[email protected]>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <[email protected]>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_commits_with_start_and_branch(self):
"""Testing Bitbucket.get_commits with start= and branch="""
payload = self.dump_json({
'size': 2,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a849a4d1e52d',
'author': {
'raw': 'Some User 1 <[email protected]>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is commit 1.',
'parents': [
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
},
],
},
{
'hash': '44568f7d33647d286691517e6325fea5c7a21d5e',
'author': {
'raw': 'Some User 2 <[email protected]>',
},
'date': '2017-01-23T08:09:10+00:00',
'message': 'This is commit 2.',
'parents': [
{
'hash': 'e5874a857c51a4a13a849a4d1e52d1c44b461ceb',
},
],
},
],
})
with self.setup_http_test(payload=payload,
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name='Git')
commits = ctx.service.get_commits(
repository,
start='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
branch='master')
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commits/1c44b461cebe5874a857c51a4a13a849a4d1e52d'
'?fields=values.author.raw%2Cvalues.hash%2Cvalues.date'
'%2Cvalues.message%2Cvalues.parents.hash'
'&pagelen=20'))
self.assertEqual(
commits,
[
Commit(author_name='Some User 1 <[email protected]>',
date='2017-01-24T13:11:22+00:00',
id='1c44b461cebe5874a857c51a4a13a849a4d1e52d',
message='This is commit 1.',
parent='44568f7d33647d286691517e6325fea5c7a21d5e'),
Commit(author_name='Some User 2 <[email protected]>',
date='2017-01-23T08:09:10+00:00',
id='44568f7d33647d286691517e6325fea5c7a21d5e',
message='This is commit 2.',
parent='e5874a857c51a4a13a849a4d1e52d1c44b461ceb'),
])
for commit in commits:
self.assertIsNone(commit.diff)
def test_get_change(self):
"""Testing BitBucket.get_change"""
commit_sha = '1c44b461cebe5874a857c51a4a13a849a4d1e52d'
parent_sha = '44568f7d33647d286691517e6325fea5c7a21d5e'
paths = {
'/api/2.0/repositories/myuser/myrepo/commit/%s' % commit_sha: {
'payload': self.dump_json({
'hash': commit_sha,
'author': {
'raw': 'Some User <[email protected]>',
},
'date': '2017-01-24T13:11:22+00:00',
'message': 'This is a message.',
'parents': [{'hash': parent_sha}],
}),
},
'/api/2.0/repositories/myuser/myrepo/diff/%s' % commit_sha: {
'payload': b'This is a test \xc7.',
},
}
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=2) as ctx:
repository = ctx.create_repository(tool_name='Git')
commit = ctx.service.get_change(repository, commit_sha)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'commit/1c44b461cebe5874a857c51a4a13a849a4d1e52d'
'?fields=author.raw%2Chash%2Cdate%2Cmessage%2Cparents.hash'))
ctx.assertHTTPCall(
1,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'diff/1c44b461cebe5874a857c51a4a13a849a4d1e52d'))
self.assertEqual(
commit,
Commit(author_name='Some User <[email protected]>',
date='2017-01-24T13:11:22+00:00',
id=commit_sha,
message='This is a message.',
parent=parent_sha))
self.assertEqual(commit.diff, b'This is a test \xc7.\n')
def _test_get_file(self, tool_name, revision, base_commit_id,
expected_revision):
"""Test file fetching.
Args:
tool_name (unicode):
The name of the SCM Tool to test with.
revision (unicode, optional):
The revision to check.
base_commit_id (unicode, optional):
The base commit to fetch against.
expected_revision (unicode, optional):
The revision expected in the payload.
"""
with self.setup_http_test(payload=b'My data',
expected_http_calls=1) as ctx:
repository = ctx.create_repository(tool_name=tool_name)
result = ctx.service.get_file(repository=repository,
path='path',
revision=revision,
base_commit_id=base_commit_id)
ctx.assertHTTPCall(
0,
url=('https://bitbucket.org/api/2.0/repositories/myuser/myrepo/'
'src/%s/path'
% expected_revision))
self.assertIsInstance(result, bytes)
self.assertEqual(result, b'My data')
def _test_get_file_exists(self, tool_name, revision, base_commit_id,
expected_revision, expected_found,
expected_http_called=True):
"""Test file existence checks.
Args:
tool_name (unicode):
The name of the SCM Tool to test with.
revision (unicode, optional):
The revision to check.
base_commit_id (unicode, optional):
The base commit to fetch against.
expected_revision (unicode, optional):
The revision expected in the payload.
expected_found (bool, optional):
Whether a truthy response should be expected.
expected_http_called (bool, optional):
Whether an HTTP request is expected to have been made.
"""
if expected_found:
payload = b'file...'
status_code = None
else:
payload = None
status_code = 404
if expected_http_called:
expected_calls = 1
else:
expected_calls = 0
with self.setup_http_test(payload=payload,
status_code=status_code,
expected_http_calls=expected_calls) as ctx:
repository = ctx.create_repository(tool_name=tool_name)
result = ctx.service.get_file_exists(repository=repository,
path='path',
revision=revision,
base_commit_id=base_commit_id)
if expected_http_called:
ctx.assertHTTPCall(
0,
method='HEAD',
url=('https://bitbucket.org/api/2.0/repositories/myuser/'
'myrepo/src/%s/path'
% expected_revision))
self.assertEqual(result, expected_found)
class BitbucketAuthFormTests(BitbucketTestCase):
"""Unit tests for BitbucketAuthForm."""
def test_clean_hosting_account_username_with_username(self):
"""Testing BitbucketAuthForm.clean_hosting_account_username with
username
"""
form = BitbucketAuthForm(
hosting_service_cls=self.service_class,
data={
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
})
self.assertTrue(form.is_valid())
def test_clean_hosting_account_username_with_email(self):
"""Testing BitbucketAuthForm.clean_hosting_account_username with
e-mail address
"""
form = BitbucketAuthForm(
hosting_service_cls=self.service_class,
data={
'hosting_account_username': '[email protected]',
'hosting_account_password': 'mypass',
})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['hosting_account_username'],
['This must be your Bitbucket username (the same one '
'you would see in URLs for your own repositories), '
'not your Atlassian e-mail address.'])
class CloseSubmittedHookTests(BitbucketTestCase):
"""Unit tests for the Bitbucket close-submitted webhook."""
fixtures = ['test_users', 'test_scmtools']
COMMITS_URL = ('/api/2.0/repositories/test/test/commits'
'?exclude=abc123&include=def123')
def test_close_submitted_hook(self):
"""Testing BitBucket close_submitted hook"""
self._test_post_commit_hook()
@add_fixtures(['test_site'])
def test_close_submitted_hook_with_local_site(self):
"""Testing BitBucket close_submitted hook with a Local Site"""
self._test_post_commit_hook(
LocalSite.objects.get(name=self.local_site_name))
def test_close_submitted_hook_with_truncated_commits(self):
"""Testing BitBucket close_submitted hook with truncated list of
commits
"""
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
# Create two review requests: One per referenced commit.
review_request1 = self.create_review_request(id=99,
repository=repository,
publish=True)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
review_request2 = self.create_review_request(id=100,
repository=repository,
publish=True)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status,
review_request2.PENDING_REVIEW)
page2_url = '%s&page=2&pagelen=100' % self.COMMITS_URL
paths = {
'%s&pagelen=100' % self.COMMITS_URL: {
'payload': self.dump_json({
'next': page2_url,
'values': [
{
'hash': '1c44b461cebe5874a857c51a4a13a84'
'9a4d1e52d',
'message': 'This is my fancy commit.\n'
'\n'
'Reviewed at http://example.com%s'
% review_request1.get_absolute_url(),
},
],
}),
},
page2_url: {
'payload': self.dump_json({
'values': [
{
'hash': '9fad89712ebe5874a857c5112a3c9d1'
'87ada0dbc',
'message': 'This is another commit\n'
'\n'
'Reviewed at http://example.com%s'
% review_request2.get_absolute_url(),
},
],
}),
}
}
# Simulate the webhook.
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=2):
self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request1.get_absolute_url(),
truncated=True)
# Check the first review request.
#
# The first review request has an entry in the truncated list and the
# fetched list. We'll make sure we've only processed it once.
review_request1 = ReviewRequest.objects.get(pk=review_request1.pk)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status, review_request1.SUBMITTED)
self.assertEqual(review_request1.changedescs.count(), 1)
changedesc = review_request1.changedescs.get()
self.assertEqual(changedesc.text, 'Pushed to master (1c44b46)')
# Check the first review request.
review_request2 = ReviewRequest.objects.get(pk=review_request2.pk)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status, review_request2.SUBMITTED)
self.assertEqual(review_request2.changedescs.count(), 1)
changedesc = review_request2.changedescs.get()
self.assertEqual(changedesc.text, 'Pushed to master (9fad897)')
def test_close_submitted_hook_with_truncated_commits_limits(self):
"""Testing BitBucket close_submitted hook with truncated list of
commits obeys limits
"""
paths = {
'%s&pagelen=100' % self.COMMITS_URL: {
'payload': self.dump_json({
'next': '%s&page=2' % self.COMMITS_URL,
'values': [],
}),
},
}
paths.update({
'%s&page=%s&pagelen=100' % (self.COMMITS_URL, i): {
'payload': self.dump_json({
'next': '%s&page=%s' % (self.COMMITS_URL, i + 1),
'values': [],
}),
}
for i in range(1, 10)
})
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
# Create two review requests: One per referenced commit.
review_request1 = self.create_review_request(id=99,
repository=repository,
publish=True)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
review_request2 = self.create_review_request(id=100,
repository=repository,
publish=True)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status,
review_request2.PENDING_REVIEW)
# Simulate the webhook.
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
# There should have been 5 API requests. We'll never hit the final
# page.
with self.setup_http_test(self.make_handler_for_paths(paths),
expected_http_calls=5):
self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request1.get_absolute_url(),
truncated=True)
# The review requests should not have been updated.
review_request1 = ReviewRequest.objects.get(pk=review_request1.pk)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
self.assertEqual(review_request1.changedescs.count(), 0)
# Check the first review request.
review_request2 = ReviewRequest.objects.get(pk=review_request2.pk)
self.assertTrue(review_request2.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
self.assertEqual(review_request2.changedescs.count(), 0)
def test_close_submitted_hook_with_truncated_and_auth_error(self):
"""Testing BitBucket close_submitted hook with truncated list of
commits and authentication error talking to Bitbucket
"""
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
# Create two review requests: One per referenced commit.
review_request1 = self.create_review_request(id=99,
repository=repository,
publish=True)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
review_request2 = self.create_review_request(id=100,
repository=repository,
publish=True)
self.assertTrue(review_request2.public)
self.assertEqual(review_request2.status,
review_request2.PENDING_REVIEW)
# Simulate the webhook.
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
with self.setup_http_test(status_code=401,
hosting_account=account,
expected_http_calls=1):
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request1.get_absolute_url(),
truncated=True)
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content,
b'Incorrect username or password configured for '
b'this repository on Review Board.')
# The review requests should not have been updated.
review_request1 = ReviewRequest.objects.get(pk=review_request1.pk)
self.assertTrue(review_request1.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
self.assertEqual(review_request1.changedescs.count(), 0)
# Check the first review request.
review_request2 = ReviewRequest.objects.get(pk=review_request2.pk)
self.assertTrue(review_request2.public)
self.assertEqual(review_request1.status,
review_request1.PENDING_REVIEW)
self.assertEqual(review_request2.changedescs.count(), 0)
def test_close_submitted_hook_with_invalid_repo(self):
"""Testing BitBucket close_submitted hook with invalid repository"""
repository = self.create_repository()
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
self.assertEqual(response.status_code, 404)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
@add_fixtures(['test_site'])
def test_close_submitted_hook_with_invalid_site(self):
"""Testing BitBucket close_submitted hook with invalid Local Site"""
local_site = LocalSite.objects.get(name=self.local_site_name)
account = self.create_hosting_account(local_site=local_site)
account.save()
repository = self.create_repository(hosting_account=account,
local_site=local_site)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
local_site_name='badsite',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
self.assertEqual(response.status_code, 404)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
def test_close_submitted_hook_with_invalid_service_id(self):
"""Testing BitBucket close_submitted hook with invalid hosting
service ID
"""
# We'll test against GitHub for this test.
account = self.create_hosting_account()
account.service_name = 'github'
account.save()
repository = self.create_repository(hosting_account=account)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
self.assertEqual(response.status_code, 404)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
def test_close_submitted_hook_with_invalid_review_request(self):
"""Testing BitBucket close_submitted hook with invalid review request
"""
self.spy_on(logging.error)
account = self.create_hosting_account()
repository = self.create_repository(hosting_account=account)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
response = self._post_commit_hook_payload(
post_url=url,
review_request_url='/r/9999/')
self.assertEqual(response.status_code, 200)
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
self.assertEqual(review_request.changedescs.count(), 0)
self.assertTrue(logging.error.called_with(
'close_all_review_requests: Review request #%s does not exist.',
9999))
def _test_post_commit_hook(self, local_site=None):
"""Testing posting to a commit hook.
This will simulate pushing a commit and posting the resulting webhook
payload from Bitbucket to the handler for the hook.
Args:
local_site (reviewboard.site.models.LocalSite, optional):
The Local Site owning the review request.
"""
account = self.create_hosting_account(local_site=local_site)
repository = self.create_repository(hosting_account=account,
local_site=local_site)
review_request = self.create_review_request(repository=repository,
local_site=local_site,
publish=True)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.PENDING_REVIEW)
url = local_site_reverse(
'bitbucket-hooks-close-submitted',
local_site=local_site,
kwargs={
'repository_id': repository.pk,
'hosting_service_id': 'bitbucket',
'hooks_uuid': repository.get_or_create_hooks_uuid(),
})
self._post_commit_hook_payload(
post_url=url,
review_request_url=review_request.get_absolute_url())
review_request = ReviewRequest.objects.get(pk=review_request.pk)
self.assertTrue(review_request.public)
self.assertEqual(review_request.status, review_request.SUBMITTED)
self.assertEqual(review_request.changedescs.count(), 1)
changedesc = review_request.changedescs.get()
self.assertEqual(changedesc.text, 'Pushed to master (1c44b46)')
def _post_commit_hook_payload(self, post_url, review_request_url,
truncated=False):
"""Post a payload for a hook for testing.
Args:
post_url (unicode):
The URL to post to.
review_request_url (unicode):
The URL of the review request being represented in the
payload.
truncated (bool, optional):
Whether the commit list should be marked truncated.
        Returns:
            django.http.HttpResponse:
            The response returned from posting the payload.
"""
return self.client.post(
post_url,
content_type='application/json',
data=self.dump_json({
# NOTE: This payload only contains the content we make
# use of in the hook.
'push': {
'changes': [
{
'new': {
'type': 'branch',
'name': 'master',
},
'truncated': truncated,
'commits': [
{
'hash': '1c44b461cebe5874a857c51a4a13a84'
'9a4d1e52d',
'message': 'This is my fancy commit\n'
'\n'
'Reviewed at http://example.com'
'%s'
% review_request_url,
},
],
'links': {
'commits': {
'href': self.COMMITS_URL,
},
},
},
# Some entries containing missing keys.
{
'new': {
'type': 'frobblegobble',
'name': 'master',
},
'truncated': truncated,
'commits': [
{
'hash': '1c44b461cebe5874a857c51a4a13a84'
'9a4d1e52d',
'message': 'This is my fancy commit\n'
'\n'
'Reviewed at http://example.com'
'%s'
% review_request_url,
},
],
'links': {
'commits': {
'href': self.COMMITS_URL,
},
},
},
{
'new': {
'type': 'branch',
'name': 'other',
},
'truncated': truncated,
'commits': [
{
'hash': 'f46a13a1cc43bebea857c558741a484'
'1e52d9a4d',
'message': 'Ignored commit.'
},
],
'links': {},
},
{
'new': {},
'commits': [],
},
{
'new': None,
'commits': None,
},
{
}
],
}
}, for_response=False))
| [
"[email protected]"
] | |
b1b10f74c7b2b141fab2f67520ef2bafb047a1f3 | 051d25888b6a36e50714fa5940f6a31ee951ce77 | /gentb_website/tb_website/apps/dropbox_helper/dropbox_util.py | f43f6ff658915949644e878347d6b70ddd524912 | [
"MIT"
] | permissive | cchoirat/gentb-site | d0d627ffc160c53b61d92dc8f02a11f930a2b09a | 24ebce58cd5f5e0a2f1449e2f14b1f75b592f28f | refs/heads/master | 2021-01-21T02:20:55.909012 | 2015-11-25T18:27:23 | 2015-11-25T18:27:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,476 | py | from os.path import join, isdir
import os
from apps.dropbox_helper.dropbox_retriever import DropboxRetriever
from apps.dropbox_helper.models import DropboxRetrievalLog
from django.conf import settings
def get_dropbox_metadata(predict_dataset):
"""
    Wrap the DropboxRetriever workflow and return a tuple:
    - (True, DropboxRetriever object) on success
    - (False, error message string) on failure
"""
if predict_dataset is None:
return (False, "The dataset was not found.")
# Initialize
#
dr = DropboxRetriever(predict_dataset.dropbox_url,
destination_dir=predict_dataset.file_directory,
                        file_patterns=predict_dataset.get_file_patterns())
db_log = DropboxRetrievalLog(dataset=predict_dataset)
if dr.err_found:
db_log.file_metadata_err_msg = dr.err_msg
db_log.save()
return (False, dr.err_msg)
# Get the metadata
#
if not dr.step1_retrieve_metadata():
db_log.file_metadata_err_msg = dr.err_msg
db_log.save()
return (False, dr.err_msg)
# Does it have what we want?
#
if not dr.step2_check_file_matches():
db_log.file_metadata_err_msg = dr.err_msg
db_log.save()
return (False, dr.err_msg)
# Yes!
db_log.file_metadata = dr.dropbox_link_metadata
db_log.selected_files = dr.matching_files_metadata
db_log.save()
return (True, dr)
def get_dropbox_metadata_from_link(dropbox_link, file_patterns=None):
"""
    Wrap the DropboxRetriever workflow and return a tuple:
    - (True, DropboxRetriever object) on success
    - (False, error message string) on failure
"""
if dropbox_link is None:
return (False, "The dataset was not found.")
# This directory doesn't actually get used
#
tmp_dir = join(settings.TB_SHARED_DATAFILE_DIRECTORY, 'tmp')
if not isdir(tmp_dir):
os.makedirs(tmp_dir)
# Initialize
#
if file_patterns:
dr = DropboxRetriever(dropbox_link,\
destination_dir=tmp_dir,\
file_patterns=file_patterns)
else:
dr = DropboxRetriever(dropbox_link,\
destination_dir=tmp_dir)
if dr.err_found:
return (False, dr.err_msg)
# Get the metadata
#
if not dr.step1_retrieve_metadata():
return (False, dr.err_msg)
# Does it have what we want?
#
if not dr.step2_check_file_matches():
return (False, dr.err_msg)
# Yes!
return (True, dr)
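if __name__ == '__main__':  # pragma: no cover - illustrative usage only
    # Hedged usage sketch of get_dropbox_metadata_from_link(). The share link
    # and file pattern below are made-up placeholders, not values from this
    # project, and running this for real needs configured Django settings.
    ok, result = get_dropbox_metadata_from_link(
        'https://www.dropbox.com/sh/EXAMPLE/EXAMPLE?dl=0',
        file_patterns=['*.fastq'])
    if ok:
        # On success, `result` is the DropboxRetriever with matching files.
        print(result.matching_files_metadata)
    else:
        # On failure, `result` is the error message string.
        print('Error: %s' % result)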
| [
"[email protected]"
] | |
c7733e0c97c99db3361255fd039f0965f67fe4ec | a70697ef62978117467695fd3507e4d08e186ab4 | /source/res/scripts/client/gui/Scaleform/locale/BOOTCAMP.py | 4e66b561503dd6a2e8f4fe0c3d1b2e0274984d50 | [] | no_license | chipsi007/WorldOfTanks-Decompiled | d208678a6f2f094b02281d09ecc30f3e32725ce9 | 3b9dc21321429e4dee146c23c7250f2c62757937 | refs/heads/master | 2020-03-19T01:21:09.883951 | 2018-05-04T13:19:56 | 2018-05-04T13:19:56 | 135,538,885 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,664 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/locale/BOOTCAMP.py
from debug_utils import LOG_WARNING
class BOOTCAMP(object):
WELLCOME_BOOTCAMP = '#bootcamp:wellcome/bootcamp'
WELLCOME_BOOTCAMP_DESCRIPTION = '#bootcamp:wellcome/bootcamp/description'
REQUEST_BOOTCAMP_RETURN = '#bootcamp:request/bootcamp/return'
REQUEST_BOOTCAMP_START = '#bootcamp:request/bootcamp/start'
REQUEST_BOOTCAMP_FINISH = '#bootcamp:request/bootcamp/finish'
BTN_SELECT = '#bootcamp:btn/select'
BTN_CONTINUE = '#bootcamp:btn/continue'
BTN_CONTINUE_PREBATTLE = '#bootcamp:btn/continue/prebattle'
GAME_MODE = '#bootcamp:game/mode'
BTN_TUTORIAL_START = '#bootcamp:btn/tutorial/start'
BTN_TUTORIAL_SKIP = '#bootcamp:btn/tutorial/skip'
PROMO_VEHICLEBUYVIEW = '#bootcamp:promo/vehicleBuyView'
AWARD_OPTIONS = '#bootcamp:award/options'
AWARD_OPTIONS_TITLE = '#bootcamp:award/options/title'
AWARD_OPTIONS_NATION_US = '#bootcamp:award/options/nation/us'
AWARD_OPTIONS_NATION_GE = '#bootcamp:award/options/nation/ge'
AWARD_OPTIONS_NATION_USSR = '#bootcamp:award/options/nation/ussr'
AWARD_OPTIONS_NAME_US = '#bootcamp:award/options/name/us'
AWARD_OPTIONS_DESCRIPTION_US = '#bootcamp:award/options/description/us'
AWARD_OPTIONS_NAME_GE = '#bootcamp:award/options/name/ge'
AWARD_OPTIONS_DESCRIPTION_GE = '#bootcamp:award/options/description/ge'
AWARD_OPTIONS_NAME_USSR = '#bootcamp:award/options/name/ussr'
AWARD_OPTIONS_DESCRIPTION_USSR = '#bootcamp:award/options/description/ussr'
HINT_CAMERA_CONTROLS = '#bootcamp:hint/camera/controls'
HINT_MOVE = '#bootcamp:hint/move'
HINT_MOVE_TURRET = '#bootcamp:hint/move/turret'
HINT_SHOOT = '#bootcamp:hint/shoot'
HINT_SNIPER = '#bootcamp:hint/sniper'
HINT_ADVANCED_SNIPER_MAIN = '#bootcamp:hint/advanced/sniper/main'
HINT_ADVANCED_SNIPER_BEFORE = '#bootcamp:hint/advanced/sniper/before'
HINT_MESSAGE_AVOID = '#bootcamp:hint/message/avoid'
HINT_REPAIR_TRACK = '#bootcamp:hint/repair/track'
HINT_USE_EXTINGUISHER = '#bootcamp:hint/use/extinguisher'
HINT_HEAL_CREW = '#bootcamp:hint/heal/crew'
HINT_SNIPER_ON_DISTANCE_MAIN = '#bootcamp:hint/sniper/on/distance/main'
HINT_SNIPER_ON_DISTANCE_EXIT = '#bootcamp:hint/sniper/on/distance/exit'
HINT_NO_MOVE = '#bootcamp:hint/no/move'
HINT_ALLY_SHOOT = '#bootcamp:hint/ally/shoot'
HINT_TARGET_UNLOCK = '#bootcamp:hint/target/unlock'
HINT_USELESS_CONSUMABLES = '#bootcamp:hint/useless/consumables'
HINT_WAIT_RELOAD = '#bootcamp:hint/wait/reload'
HINT_EXIT_GAME_AREA = '#bootcamp:hint/exit/game/area'
HINT_SHOOT_WHILE_MOVING = '#bootcamp:hint/shoot/while/moving'
HINT_SECONDARY_SNIPER = '#bootcamp:hint/secondary/sniper'
HINT_LOW_HP = '#bootcamp:hint/low/hp'
HINT_MISSION3_PLAYERDETECTED = '#bootcamp:hint/mission3/playerdetected'
HINT_MISSION3_FALLBACK = '#bootcamp:hint/mission3/fallback'
HINT_MISSION3_FLANKENEMIES = '#bootcamp:hint/mission3/flankenemies'
HINT_MISSION3_FOLIAGEINTROA = '#bootcamp:hint/mission3/foliageintroa'
HINT_MISSION3_FOLIAGEINTROB = '#bootcamp:hint/mission3/foliageintrob'
HINT_MISSION3_FLANKINGFAILS = '#bootcamp:hint/mission3/flankingfails'
HINT_MISSION3_FLANKINGFAILS2 = '#bootcamp:hint/mission3/flankingfails2'
HINT_MISSION3_FLANKINGWAIT = '#bootcamp:hint/mission3/flankingwait'
HINT_MISSION3_CAPTUREBASE = '#bootcamp:hint/mission3/capturebase'
HINT_MISSION3_CAPTURELOST = '#bootcamp:hint/mission3/capturelost'
HINT_MISSION3_CAPTURETOGETHER = '#bootcamp:hint/mission3/capturetogether'
HINT_MISSION3_CAPTUREHELP = '#bootcamp:hint/mission3/capturehelp'
HINT_MISSION3_CAPTUREINPROGRESS = '#bootcamp:hint/mission3/captureinprogress'
QUEST_TITLE = '#bootcamp:quest/title'
QUEST_NAME = '#bootcamp:quest/name'
QUEST_CONDITION = '#bootcamp:quest/condition'
QUEST_GAMEMODE = '#bootcamp:quest/gamemode'
LOADING_TIP_WASD_HEADER_1 = '#bootcamp:loading/tip/wasd/header/1'
LOADING_TIP_WASD_HEADER_2 = '#bootcamp:loading/tip/wasd/header/2'
LOADING_TIP_WASD_HEADER_3 = '#bootcamp:loading/tip/wasd/header/3'
LOADING_TIP_SNIPER_HEADER_1 = '#bootcamp:loading/tip/sniper/header/1'
LOADING_TIP_SNIPER_HEADER_2 = '#bootcamp:loading/tip/sniper/header/2'
LOADING_TIP_SNIPER_DESCRIPTION_1 = '#bootcamp:loading/tip/sniper/description/1'
LOADING_TIP_SNIPER_DESCRIPTION_2 = '#bootcamp:loading/tip/sniper/description/2'
LOADING_TIP_PENETRATION_HEADER_1 = '#bootcamp:loading/tip/penetration/header/1'
LOADING_TIP_PENETRATION_HEADER_2 = '#bootcamp:loading/tip/penetration/header/2'
LOADING_TIP_PENETRATION_DESCRIPTION_1 = '#bootcamp:loading/tip/penetration/description/1'
LOADING_TIP_PENETRATION_DESCRIPTION_2 = '#bootcamp:loading/tip/penetration/description/2'
LOADING_TIP_VISIBILITY_HEADER_1 = '#bootcamp:loading/tip/visibility/header/1'
LOADING_TIP_VISIBILITY_HEADER_2 = '#bootcamp:loading/tip/visibility/header/2'
LOADING_TIP_VISIBILITY_DESCRIPTION_1 = '#bootcamp:loading/tip/visibility/description/1'
LOADING_TIP_VISIBILITY_DESCRIPTION_2 = '#bootcamp:loading/tip/visibility/description/2'
LOADING_TIP_VISIBILITY_DESCRIPTION_3 = '#bootcamp:loading/tip/visibility/description/3'
LOADING_TIP_VISIBILITY_DESCRIPTION_4 = '#bootcamp:loading/tip/visibility/description/4'
LOADING_TIP_EQUIPMENT_HEADER_1 = '#bootcamp:loading/tip/equipment/header/1'
LOADING_TIP_EQUIPMENT_HEADER_2 = '#bootcamp:loading/tip/equipment/header/2'
LOADING_TIP_EQUIPMENT_HEADER_3 = '#bootcamp:loading/tip/equipment/header/3'
LOADING_TIP_EQUIPMENT_DESCRIPTION_1 = '#bootcamp:loading/tip/equipment/description/1'
LOADING_TIP_EQUIPMENT_DESCRIPTION_2 = '#bootcamp:loading/tip/equipment/description/2'
LOADING_TIP_EQUIPMENT_DESCRIPTION_3 = '#bootcamp:loading/tip/equipment/description/3'
LOADING_TIP_VICTORY_HEADER_1 = '#bootcamp:loading/tip/victory/header/1'
LOADING_TIP_VICTORY_HEADER_2 = '#bootcamp:loading/tip/victory/header/2'
LOADING_TIP_VICTORY_DESCRIPTION_1 = '#bootcamp:loading/tip/victory/description/1'
LOADING_TIP_VICTORY_DESCRIPTION_2 = '#bootcamp:loading/tip/victory/description/2'
LOADING_TIP_CROSSHAIR_HEADER_1 = '#bootcamp:loading/tip/crosshair/header/1'
LOADING_TIP_CROSSHAIR_HEADER_2 = '#bootcamp:loading/tip/crosshair/header/2'
LOADING_TIP_MODULES_HEADER_1 = '#bootcamp:loading/tip/modules/header/1'
LOADING_TIP_MODULES_HEADER_2 = '#bootcamp:loading/tip/modules/header/2'
LOADING_TIP_MODULES_HEADER_3 = '#bootcamp:loading/tip/modules/header/3'
LOADING_TIP_MODULES_DESCRIPTION_2 = '#bootcamp:loading/tip/modules/description/2'
LOADING_TIP_MODULES_DESCRIPTION_3 = '#bootcamp:loading/tip/modules/description/3'
PREBATTLE_HINT_SCORE = '#bootcamp:prebattle/hint/score'
PREBATTLE_HINT_HP = '#bootcamp:prebattle/hint/hp'
PREBATTLE_HINT_MODULES = '#bootcamp:prebattle/hint/modules'
PREBATTLE_HINT_CREW = '#bootcamp:prebattle/hint/crew'
PREBATTLE_HINT_MINIMAP = '#bootcamp:prebattle/hint/minimap'
PREBATTLE_HINT_CONSUMABLES = '#bootcamp:prebattle/hint/consumables'
PREBATTLE_HINT_PENETRATION_CHANCE = '#bootcamp:prebattle/hint/penetration/chance'
PREBATTLE_HINT_PENETRATION_HIGH = '#bootcamp:prebattle/hint/penetration/high'
PREBATTLE_HINT_PENETRATION_LOW = '#bootcamp:prebattle/hint/penetration/low'
MESSAGE_VEHICLE_AWARDED_LABEL = '#bootcamp:message/vehicle/awarded/label'
MESSAGE_VEHICLE_AWARDED_TEXT = '#bootcamp:message/vehicle/awarded/text'
MESSAGE_EXTRA_AWARD_OPTIONS = '#bootcamp:message/extra/award/options'
MESSAGE_CREDITS_LABEL = '#bootcamp:message/credits/label'
MESSAGE_CREDITS_TEXT = '#bootcamp:message/credits/text'
MESSAGE_EXPERIENCE_LABEL = '#bootcamp:message/experience/label'
MESSAGE_EXPERIENCE_TEXT = '#bootcamp:message/experience/text'
MESSAGE_UNLOCK_MODULE_LABEL = '#bootcamp:message/unlock/module/label'
MESSAGE_UNLOCK_MODULE_TEXT = '#bootcamp:message/unlock/module/text'
MESSAGE_NEW_MODULE_LABEL = '#bootcamp:message/new/module/label'
MESSAGE_NEW_MODULE_TEXT = '#bootcamp:message/new/module/text'
MESSAGE_UNLOCK_VEHICLE_LABEL = '#bootcamp:message/unlock/vehicle/label'
MESSAGE_UNLOCK_VEHICLE_TEXT = '#bootcamp:message/unlock/vehicle/text'
MESSAGE_SECOND_VEHICLE_TEXT_NATION_0 = '#bootcamp:message/second/vehicle/text/nation/0'
MESSAGE_SECOND_VEHICLE_TEXT_NATION_1 = '#bootcamp:message/second/vehicle/text/nation/1'
MESSAGE_SECOND_VEHICLE_TEXT_NATION_2 = '#bootcamp:message/second/vehicle/text/nation/2'
MESSAGE_SKILLS_AND_PERKS_LABEL = '#bootcamp:message/skills/and/perks/label'
MESSAGE_SKILLS_AND_PERKS_TEXT = '#bootcamp:message/skills/and/perks/text'
MESSAGE_SIX_SENSE_PERK_LABEL = '#bootcamp:message/six/sense/perk/label'
MESSAGE_SIX_SENSE_PERK_TEXT = '#bootcamp:message/six/sense/perk/text'
MESSAGE_CONSUMABLES_LABEL = '#bootcamp:message/consumables/label'
MESSAGE_CONSUMABLES_TEXT = '#bootcamp:message/consumables/text'
MESSAGE_REPAIR_KIT_LABEL = '#bootcamp:message/repair/kit/label'
MESSAGE_FIRST_AID_KIT_LABEL = '#bootcamp:message/first/aid/kit/label'
MESSAGE_FIRE_EXTINGUISHER_LABEL = '#bootcamp:message/fire/extinguisher/label'
MESSAGE_EQUIPMENT_LABEL = '#bootcamp:message/equipment/label'
MESSAGE_EQUIPMENT_TEXT = '#bootcamp:message/equipment/text'
MESSAGE_BONUS_EQUIPMENT_LABEL = '#bootcamp:message/bonus/equipment/label'
MESSAGE_BONUS_EQUIPMENT_TEXT = '#bootcamp:message/bonus/equipment/text'
MESSAGE_GOLD_LABEL = '#bootcamp:message/gold/label'
MESSAGE_GOLD_TEXT = '#bootcamp:message/gold/text'
MESSAGE_PREMIUM_LABEL = '#bootcamp:message/premium/label'
MESSAGE_PREMIUM_TEXT = '#bootcamp:message/premium/text'
MESSAGE_BONUS_PREMIUM_DAYS = '#bootcamp:message/bonus/premium/days'
MESSAGE_BONUS_PREMIUM_HOURS = '#bootcamp:message/bonus/premium/hours'
MESSAGE_MISSION_ACCOMPLISHED_LABEL = '#bootcamp:message/mission/accomplished/label'
MESSAGE_MISSION_ACCOMPLISHED_TEXT = '#bootcamp:message/mission/accomplished/text'
MESSAGE_BOOTCAMP_GRADUATE_LABEL = '#bootcamp:message/bootcamp/graduate/label'
MESSAGE_BOOTCAMP_GRADUATE_TEXT = '#bootcamp:message/bootcamp/graduate/text'
RESULTLABEL_WIN = '#bootcamp:resultlabel/win'
RESULTLABEL_LOSE = '#bootcamp:resultlabel/lose'
RESULTLABEL_TIE = '#bootcamp:resultlabel/tie'
RESULTLABEL_TECHWIN = '#bootcamp:resultlabel/techwin'
RESULTLABEL_ENDED = '#bootcamp:resultlabel/ended'
WITH_PREMIUM = '#bootcamp:with/premium'
BATTLE_RESULT_DESTROYED = '#bootcamp:battle/result/destroyed'
BATTLE_RESULT_DAMAGE = '#bootcamp:battle/result/damage'
BATTLE_RESULT_BLOCKED = '#bootcamp:battle/result/blocked'
BATTLE_RESULT_DETECTED = '#bootcamp:battle/result/detected'
BATTLE_RESULT_ASSISTED = '#bootcamp:battle/result/assisted'
BATTLE_RESULT_DESCRIPTION_DESTROYED = '#bootcamp:battle/result/description/destroyed'
BATTLE_RESULT_DESCRIPTION_DAMAGE = '#bootcamp:battle/result/description/damage'
BATTLE_RESULT_DESCRIPTION_BLOCKED = '#bootcamp:battle/result/description/blocked'
BATTLE_RESULT_DESCRIPTION_DETECTED = '#bootcamp:battle/result/description/detected'
BATTLE_RESULT_DESCRIPTION_ASSISTED = '#bootcamp:battle/result/description/assisted'
RESULT_AWARD_CADET_LABEL = '#bootcamp:result/award/cadet/label'
RESULT_AWARD_CADET_TEXT = '#bootcamp:result/award/cadet/text'
RESULT_AWARD_TANK_LABEL = '#bootcamp:result/award/tank/label'
RESULT_AWARD_TANK_TEXT = '#bootcamp:result/award/tank/text'
RESULT_AWARD_SNIPER_LABEL = '#bootcamp:result/award/sniper/label'
RESULT_AWARD_SNIPER_TEXT = '#bootcamp:result/award/sniper/text'
RESULT_AWARD_INVADER_LABEL = '#bootcamp:result/award/invader/label'
RESULT_AWARD_INVADER_TEXT = '#bootcamp:result/award/invader/text'
RESULT_AWARD_CREW_LABEL = '#bootcamp:result/award/crew/label'
RESULT_AWARD_CREW_TEXT = '#bootcamp:result/award/crew/text'
RESULT_AWARD_DUEL_LABEL = '#bootcamp:result/award/duel/label'
RESULT_AWARD_DUEL_TEXT = '#bootcamp:result/award/duel/text'
RESULT_AWARD_SHOOT_LABEL = '#bootcamp:result/award/shoot/label'
RESULT_AWARD_SHOOT_TEXT = '#bootcamp:result/award/shoot/text'
RESULT_AWARD_PREMIUM_LABEL = '#bootcamp:result/award/premium/label'
RESULT_AWARD_PREMIUM_TEXT = '#bootcamp:result/award/premium/text'
RESULT_AWARD_GOLD_LABEL = '#bootcamp:result/award/gold/label'
RESULT_AWARD_GOLD_TEXT = '#bootcamp:result/award/gold/text'
RESULT_AWARD_MISSION_LABEL = '#bootcamp:result/award/mission/label'
RESULT_AWARD_MISSION_TEXT = '#bootcamp:result/award/mission/text'
RESULT_AWARD_REPAIRKIT_LABEL = '#bootcamp:result/award/repairkit/label'
RESULT_AWARD_REPAIRKIT_TEXT = '#bootcamp:result/award/repairkit/text'
RESULT_AWARD_MEDICALKIT_LABEL = '#bootcamp:result/award/medicalkit/label'
RESULT_AWARD_MEDICALKIT_TEXT = '#bootcamp:result/award/medicalkit/text'
RESULT_AWARD_EXTINGUISHER_LABEL = '#bootcamp:result/award/extinguisher/label'
RESULT_AWARD_EXTINGUISHER_TEXT = '#bootcamp:result/award/extinguisher/text'
RESULT_AWARD_TOOLBOX_LABEL = '#bootcamp:result/award/toolbox/label'
RESULT_AWARD_TOOLBOX_TEXT = '#bootcamp:result/award/toolbox/text'
MESSAGE_INTRO_LESSON_II_LABEL = '#bootcamp:message/intro/lesson/ii/label'
MESSAGE_INTRO_LESSON_III_LABEL = '#bootcamp:message/intro/lesson/iii/label'
MESSAGE_INTRO_LESSON_III_CREW_LABEL = '#bootcamp:message/intro/lesson/iii/crew/label'
MESSAGE_INTRO_LESSON_IV_LABEL = '#bootcamp:message/intro/lesson/iv/label'
MESSAGE_INTRO_LESSON_V_LABEL = '#bootcamp:message/intro/lesson/v/label'
INVITATION_NOTE_SQUAD = '#bootcamp:invitation/note/squad'
INVITATION_NOTE_EVENT = '#bootcamp:invitation/note/event'
INVITATION_NOTE_FALLOUT = '#bootcamp:invitation/note/fallout'
INVITATION_NOTE_TRAINING = '#bootcamp:invitation/note/training'
INVITATION_NOTE_COMPANY = '#bootcamp:invitation/note/company'
INVITATION_NOTE_TOURNAMENT = '#bootcamp:invitation/note/tournament'
INVITATION_NOTE_CLAN = '#bootcamp:invitation/note/clan'
INVITATION_NOTE_UNIT = '#bootcamp:invitation/note/unit'
INVITATION_NOTE_SORTIE = '#bootcamp:invitation/note/sortie'
INVITATION_NOTE_FORT_BATTLE = '#bootcamp:invitation/note/fort/battle'
INVITATION_NOTE_CLUBS = '#bootcamp:invitation/note/clubs'
INVITATION_NOTE_EXTERNAL = '#bootcamp:invitation/note/external'
QUEUE_TITLE = '#bootcamp:queue/title'
QUEUE_QUEUE_TOO_LONG = '#bootcamp:queue/queue/too/long'
QUEUE_UNITS = '#bootcamp:queue/units'
QUEUE_MESSAGE = '#bootcamp:queue/message'
QUEUE_MORE_N_MINUTES = '#bootcamp:queue/more/n/minutes'
QUEUE_PLAYER_WAITING_TIME = '#bootcamp:queue/player-waiting-time'
QUEUE_SKIP_TUTORIAL = '#bootcamp:queue/skip/tutorial'
QUEUE_CANCEL_QUEUE = '#bootcamp:queue/cancel/queue'
TRANSITION_TITLE = '#bootcamp:transition/title'
BILL_TENSON = '#bootcamp:Bill Tenson'
BENEDIKT_DRESDNER = '#bootcamp:Benedikt Dresdner'
HEIKO_RIHTER = '#bootcamp:Heiko Rihter'
JOHN_ALBERT = '#bootcamp:John Albert'
DENIS_GORDIENKO = '#bootcamp:Denis Gordienko'
HASSO_MIRATO = '#bootcamp:Hasso Mirato'
RALF_HOFER = '#bootcamp:Ralf Hofer'
GERHARD_BRAUN = '#bootcamp:Gerhard Braun'
SAMUEL_BRONN = '#bootcamp:Samuel Bronn'
VALERIY_GAYDUCHENKO = '#bootcamp:Valeriy Gayduchenko'
MARK_LITENGEN = '#bootcamp:Mark Litengen'
ETIEN_ASIEOS = '#bootcamp:Etien Asieos'
ALEKSANDR_ANTONUK = '#bootcamp:Aleksandr Antonuk'
PETR_SERGEEV = '#bootcamp:Petr Sergeev'
PASCAL_RAYMOND = '#bootcamp:Pascal Raymond'
ALEKSEY_EGOROV = '#bootcamp:Aleksey Egorov'
OLIVER_GREENE = '#bootcamp:Oliver Greene'
JOHN_KING = '#bootcamp:John King'
MIRON_NEBALUIEV = '#bootcamp:Miron Nebaluiev'
FRIDRIH_SIMANN = '#bootcamp:Fridrih Simann'
MATT_UNDERLAY = '#bootcamp:Matt Underlay'
JAMES_BROUNGE = '#bootcamp:James Brounge'
ODA_NISURA = '#bootcamp:Oda Nisura'
GAVRIL_STOLBOV = '#bootcamp:Gavril Stolbov'
FABIAN_HAUPT = '#bootcamp:Fabian Haupt'
FRANK_DIMMELTON = '#bootcamp:Frank Dimmelton'
JOHN_DICKER = '#bootcamp:John Dicker'
KONRAD_CERSTVY = '#bootcamp:Konrad Cerstvy'
RICHARD_BOGELBER = '#bootcamp:Richard Bogelber'
KEIKO_SIMURA = '#bootcamp:Keiko Simura'
SHENG_EN = '#bootcamp:Sheng En'
SIEGWARD_EBER = '#bootcamp:Siegward Eber'
KARL_HIMMELSBERG = '#bootcamp:Karl Himmelsberg'
LEV_SHAPIRO = '#bootcamp:Lev Shapiro'
PAUL_BOUTIN = '#bootcamp:Paul Boutin'
TEODOR_SIMMERSBEE = '#bootcamp:Teodor Simmersbee'
CLAUD_GAULT = '#bootcamp:Claud Gault'
YU_DAN = '#bootcamp:Yu Dan'
ISIDZUKURI_SOMA = '#bootcamp:Isidzukuri Soma'
MITROFAN_MORDA = '#bootcamp:Mitrofan Morda'
YAKO_SIMAMURA = '#bootcamp:Yako Simamura'
LIN_SHIN = '#bootcamp:Lin Shin'
RADOSH_ZRVECKA = '#bootcamp:Radosh Zrvecka'
OTTO_VON_VALEN = '#bootcamp:Otto Von Valen'
VITALII_ROMANOV = '#bootcamp:Vitalii Romanov'
GUNTHER_FRANKE = '#bootcamp:Gunther Franke'
ALEKSANDR_FESICH = '#bootcamp:Aleksandr Fesich'
VENIAMIN_RAGOZIN = '#bootcamp:Veniamin Ragozin'
PAUL_KELLER = '#bootcamp:Paul Keller'
JING_JIE = '#bootcamp:Jing Jie'
JOHN_LAMB = '#bootcamp:John Lamb'
CORY_PRESTON = '#bootcamp:Cory Preston'
KARL_ERIK_OLOFSSON = '#bootcamp:Karl-Erik Olofsson'
ROBERT_BEASLEY = '#bootcamp:Robert Beasley'
JOHN_PAYNE = '#bootcamp:John Payne'
ELIAS_FREDRIKSSON = '#bootcamp:Elias Fredriksson'
JEAN_CHRISTOPHE_MOREL = '#bootcamp:Jean-Christophe Morel'
THOMAS_MERRITT = '#bootcamp:Thomas Merritt'
FEDOR_BELKIN = '#bootcamp:Fedor Belkin'
VLADIMIR_KAIDUN = '#bootcamp:Vladimir Kaidun'
PAUL_DAVIS = '#bootcamp:Paul Davis'
CORNELIUS_HOLST = '#bootcamp:Cornelius Holst'
AKENO_KIDO = '#bootcamp:Akeno Kido'
ANDRII_KOZYRA = '#bootcamp:Andrii Kozyra'
LEE_LIANG = '#bootcamp:Lee Liang'
NICHOLAS_WILKINSON = '#bootcamp:Nicholas Wilkinson'
IGOR_GONCHARENKO = '#bootcamp:Igor Goncharenko'
ALEKSANDR_USTINOV = '#bootcamp:Aleksandr Ustinov'
YURIY_KRILO = '#bootcamp:Yuriy Krilo'
FUDO_SUGIMOTO = '#bootcamp:Fudo Sugimoto'
ALEKSEY_KLUCHIKOV = '#bootcamp:Aleksey Kluchikov'
CHARLES_BAKER = '#bootcamp:Charles Baker'
LUDVIK_BENES = '#bootcamp:Ludvik Benes'
JURGEN_WOLF = '#bootcamp:Jurgen Wolf'
JOSEPH_ONEAL = '#bootcamp:Joseph ONeal'
BATTLE_RESULT_ENUM = (BATTLE_RESULT_DESTROYED,
BATTLE_RESULT_DAMAGE,
BATTLE_RESULT_BLOCKED,
BATTLE_RESULT_DETECTED,
BATTLE_RESULT_ASSISTED,
BATTLE_RESULT_DESCRIPTION_DESTROYED,
BATTLE_RESULT_DESCRIPTION_DAMAGE,
BATTLE_RESULT_DESCRIPTION_BLOCKED,
BATTLE_RESULT_DESCRIPTION_DETECTED,
BATTLE_RESULT_DESCRIPTION_ASSISTED)
BATTLE_RESULT_DESCRIPTION_ENUM = (BATTLE_RESULT_DESCRIPTION_DESTROYED,
BATTLE_RESULT_DESCRIPTION_DAMAGE,
BATTLE_RESULT_DESCRIPTION_BLOCKED,
BATTLE_RESULT_DESCRIPTION_DETECTED,
BATTLE_RESULT_DESCRIPTION_ASSISTED)
@classmethod
def battle_result(cls, key0):
outcome = '#bootcamp:battle/result/{}'.format(key0)
if outcome not in cls.BATTLE_RESULT_ENUM:
LOG_WARNING('Localization key "{}" not found'.format(outcome))
return None
else:
return outcome
@classmethod
def battle_result_description(cls, key0):
outcome = '#bootcamp:battle/result/description/{}'.format(key0)
if outcome not in cls.BATTLE_RESULT_DESCRIPTION_ENUM:
LOG_WARNING('Localization key "{}" not found'.format(outcome))
return None
else:
return outcome
| [
"[email protected]"
] | |
32f23cb372dfdf98567ae16228bdbb95e6934524 | 74549d7c57b4746ac2a9c275aa12bfc577b0e8af | /hogwartsEmailAdderss.py | 245b738aa52c6cc12e18274915c1042e79fc0fa9 | [] | no_license | abidkhan484/hackerrank_solution | af9dbf6ec1ead920dc18df233f40db0c867720b4 | b0a98e4bdfa71a4671999f16ab313cc5c76a1b7a | refs/heads/master | 2022-05-02T11:13:29.447127 | 2022-04-13T03:02:59 | 2022-04-13T03:02:59 | 99,207,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py | #!/bin/python3
import string
def isValid(email):
    # The first five characters must all be lowercase letters; slicing with
    # email[:5] also avoids an IndexError on inputs shorter than five chars.
    for ch in email[:5]:
        if ch not in string.ascii_lowercase:
            return 'No'
    # The remainder must be exactly the Hogwarts domain.
    if email[5:] != '@hogwarts.com':
        return 'No'
    return 'Yes'
if __name__ == "__main__":
s = input().strip()
result = isValid(s)
print(result)
| [
"[email protected]"
] | |
f401333e5549b41f09b8c1318936448c3a83d737 | 98e1716c1c3d071b2fedef0ac029eb410f55762c | /part9-Manipulating-DataFrames-with-pandas/No08-Changing-index-of-a-DataFrame.py | c7f11201fbcc7e64a48481ca0e8a27f8c2375844 | [] | no_license | iamashu/Data-Camp-exercise-PythonTrack | 564531bcf1dff119949cbb75e1fd63d89cb2779f | c72a4e806494f0e263ced9594597dc8882c2131c | refs/heads/master | 2020-07-22T00:23:12.024386 | 2019-04-12T09:24:42 | 2019-04-12T09:24:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,477 | py | #Changing index of a DataFrame
'''
As you saw in the previous exercise, indexes are immutable objects. This means that if you want to change or modify the index in a DataFrame, then you need to change the whole index. You will do this now, using a list comprehension to create the new index.
A list comprehension is a succinct way to generate a list in one line. For example, the following list comprehension generates a list that contains the cubes of all numbers from 0 to 9: cubes = [i**3 for i in range(10)]. This is equivalent to the following code:
cubes = []
for i in range(10):
cubes.append(i**3)
Before getting started, print the sales DataFrame in the IPython Shell and verify that the index is given by month abbreviations containing lowercase characters.
Instructions
100 XP
Create a list new_idx with the same elements as in sales.index, but with all characters capitalized.
Assign new_idx to sales.index.
Print the sales DataFrame. This has been done for you, so hit 'Submit Answer' to see how the index changed.
'''
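# A quick standalone check of the list-comprehension equivalence described in
# the instructions above (illustrative only; it does not touch the sales data):
cubes = [i**3 for i in range(10)]
cubes_loop = []
for i in range(10):
    cubes_loop.append(i**3)
assert cubes == cubes_loop  # both are [0, 1, 8, 27, ..., 729]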
# Code
# Create the list of new indexes: new_idx
new_idx = [i.upper() for i in sales.index]
# my earlier, incorrect attempt: new_idx = [sales.index.upper() for sales.index in sales.index]
# Assign new_idx to sales.index
sales.index = new_idx
# Print the sales DataFrame
print(sales)
'''result
eggs salt spam
JAN 47 12.0 17
FEB 110 50.0 31
MAR 221 89.0 72
APR 77 87.0 20
MAY 132 NaN 52
JUN 205 60.0 55
''' | [
"[email protected]"
] | |
0d788a1849f7407ed0c963a2f1ff2282a44211eb | a8599b7cb0f1deac1b8a62a35f3f1c95c6d0e7ba | /lookerapi/models/prefetch_access_filter_value.py | 57b4a9ca5a801c241765ae52b290b512cbe56f01 | [
"MIT"
] | permissive | llooker/python_sdk | b82b1dbe30a734b1cc1e1bcafd3d2ac7ce9fa705 | 8364839b1de0519771f2f749e45b4e6cb1c75577 | refs/heads/master | 2020-03-30T08:40:42.562469 | 2020-01-16T00:08:31 | 2020-01-16T00:08:31 | 151,030,473 | 13 | 10 | MIT | 2020-01-16T00:08:32 | 2018-10-01T03:07:09 | Python | UTF-8 | Python | false | false | 6,563 | py | # coding: utf-8
"""
Looker API 3.0 Reference
### Authorization The Looker API uses Looker **API3** credentials for authorization and access control. Looker admins can create API3 credentials on Looker's **Admin/Users** page. Pass API3 credentials to the **/login** endpoint to obtain a temporary access_token. Include that access_token in the Authorization header of Looker API requests. For details, see [Looker API Authorization](https://looker.com/docs/r/api/authorization) ### Client SDKs The Looker API is a RESTful system that should be usable by any programming language capable of making HTTPS requests. Client SDKs for a variety of programming languages can be generated from the Looker API's Swagger JSON metadata to streamline use of the Looker API in your applications. A client SDK for Ruby is available as an example. For more information, see [Looker API Client SDKs](https://looker.com/docs/r/api/client_sdks) ### Try It Out! The 'api-docs' page served by the Looker instance includes 'Try It Out!' buttons for each API method. After logging in with API3 credentials, you can use the \"Try It Out!\" buttons to call the API directly from the documentation page to interactively explore API features and responses. ### Versioning Future releases of Looker will expand this API release-by-release to securely expose more and more of the core power of Looker to API client applications. API endpoints marked as \"beta\" may receive breaking changes without warning. Stable (non-beta) API endpoints should not receive breaking changes in future releases. For more information, see [Looker API Versioning](https://looker.com/docs/r/api/versioning)
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class PrefetchAccessFilterValue(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, model=None, field=None, value=None, can=None):
"""
PrefetchAccessFilterValue - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'model': 'str',
'field': 'str',
'value': 'str',
'can': 'dict(str, bool)'
}
self.attribute_map = {
'model': 'model',
'field': 'field',
'value': 'value',
'can': 'can'
}
self._model = model
self._field = field
self._value = value
self._can = can
@property
def model(self):
"""
Gets the model of this PrefetchAccessFilterValue.
Access filter model name.
:return: The model of this PrefetchAccessFilterValue.
:rtype: str
"""
return self._model
@model.setter
def model(self, model):
"""
Sets the model of this PrefetchAccessFilterValue.
Access filter model name.
:param model: The model of this PrefetchAccessFilterValue.
:type: str
"""
self._model = model
@property
def field(self):
"""
Gets the field of this PrefetchAccessFilterValue.
Access filter field name.
:return: The field of this PrefetchAccessFilterValue.
:rtype: str
"""
return self._field
@field.setter
def field(self, field):
"""
Sets the field of this PrefetchAccessFilterValue.
Access filter field name.
:param field: The field of this PrefetchAccessFilterValue.
:type: str
"""
self._field = field
@property
def value(self):
"""
Gets the value of this PrefetchAccessFilterValue.
Access filter value
:return: The value of this PrefetchAccessFilterValue.
:rtype: str
"""
return self._value
@value.setter
def value(self, value):
"""
Sets the value of this PrefetchAccessFilterValue.
Access filter value
:param value: The value of this PrefetchAccessFilterValue.
:type: str
"""
self._value = value
@property
def can(self):
"""
Gets the can of this PrefetchAccessFilterValue.
Operations the current user is able to perform on this object
:return: The can of this PrefetchAccessFilterValue.
:rtype: dict(str, bool)
"""
return self._can
@can.setter
def can(self, can):
"""
Sets the can of this PrefetchAccessFilterValue.
Operations the current user is able to perform on this object
:param can: The can of this PrefetchAccessFilterValue.
:type: dict(str, bool)
"""
self._can = can
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, PrefetchAccessFilterValue):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
] | |
3d456ff2fdf7f69d9519317f0a9a47b44322d273 | f4b75e06e456dbd065dc57f07d55a2f5ec4ad688 | /openstates/data/migrations/0012_person_current_role.py | d9e866c1e3a313f007b32336097bd875c571590a | [
"MIT"
] | permissive | openstates/openstates-core | 19bf927a2e72c8808a5601f4454846acaf32218a | 3055632ea7ddab6432cc009989ffb437aed6e530 | refs/heads/main | 2023-09-05T10:30:58.866474 | 2023-09-01T15:43:59 | 2023-09-01T15:43:59 | 251,511,904 | 19 | 27 | MIT | 2023-09-06T19:30:03 | 2020-03-31T05:47:28 | Python | UTF-8 | Python | false | false | 477 | py | # Generated by Django 3.0.5 on 2020-08-04 15:24
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("data", "0011_auto_20200804_1108")]
operations = [
migrations.AddField(
model_name="person",
name="current_role",
field=django.contrib.postgres.fields.jsonb.JSONField(
default=None, null=True
),
)
]
| [
"[email protected]"
] | |
2c876b1f1e1c38c15823f76a07a89de077f6621b | b6ba70e8535ccd4df1d14dc7c07a093642e281a0 | /examples/function_and_class/knn.py | f7a495bf0ea5c9b8a18579b30927ac40d3e3b7a3 | [
"MIT"
] | permissive | Obarads/torchpcp | 99b233c086617697dfc7e5f0c04ae16aff247daf | 86e19cc5c1196f22f609f2d98504b913272cbba8 | refs/heads/master | 2023-07-05T20:44:20.485218 | 2021-08-16T01:53:47 | 2021-08-16T01:53:47 | 293,244,333 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51,451 | py | import numpy as np
import torch
from torch.utils.data import DataLoader
# local package
from libs import tpcpath
from libs.dataset import SimpleSceneDataset
from libs.three_nn import three_nn # PointRCNN
# torch-points-kernels
import torch_points_kernels as tpk
# torchpcp pacakage
from torchpcp.modules.functional.other import index2points
from torchpcp.modules.functional.sampling import furthest_point_sampling
from torchpcp.modules.functional import nns
from torchpcp.utils.monitor import timecheck
from torchpcp.utils import pytorch_tools
# pytorch_tools.set_seed(0)
device = pytorch_tools.select_device("cuda")
def speed_test(method, loader):
    for i, data in enumerate(loader): pass  # warm-up pass so the timings below are stable
# print name
if method == 0:
t_name = "original c++ impl. time"
elif method == 1:
t_name = "original py impl. time"
elif method == 2:
t_name = "other c++ impl. time"
elif method == 3:
t_name = "tpk impl. time"
else:
raise NotImplementedError()
# timer start
t = timecheck()
for _ in range(100):
for i, data in enumerate(loader):
point_clouds, sem_labels, ins_labels = data
point_clouds = torch.transpose(point_clouds[:, :, :3].to(device), 1, 2)
center_idxs = furthest_point_sampling(point_clouds, 1024)
center_pc = index2points(point_clouds, center_idxs)
if method == 0:
pred, _ = nns.k_nearest_neighbors(center_pc, point_clouds, k=3)
elif method == 1:
pred, _ = nns.py_k_nearest_neighbors(center_pc, point_clouds, k=3, memory_saving=False)
elif method == 2:
pred = three_nn(center_pc.transpose(1,2).contiguous(), point_clouds.transpose(1,2).contiguous())
elif method == 3:
pred, _ = tpk.knn(point_clouds.transpose(1,2).contiguous(), center_pc.transpose(1,2).contiguous(), 3)
else:
raise NotImplementedError()
# timer end
timecheck(t, t_name)
gt, _ = nns.py_k_nearest_neighbors(center_pc, point_clouds, k=3, memory_saving=False)
print(False in (pred == gt))
np.set_printoptions(threshold=np.inf)
torch.set_printoptions(threshold=np.inf)
# get dataset
dataset = SimpleSceneDataset()
points, sem_label, ins_label = dataset[0]
pc = torch.tensor([points[:, :3]], device="cuda").transpose(1,2)
# compare knn
k = 20
idx, dist = nns.k_nearest_neighbors(pc, pc, k)
idx2, dist2 = nns.py_k_nearest_neighbors(pc, pc, k)
check_idx = idx == idx2
if True:
for ib in range(len(check_idx)):
b_idxs = check_idx[ib]
for i_n in range(len(b_idxs)):
n_idxs = b_idxs[i_n]
if False in n_idxs:
for i_p in range(len(n_idxs)):
k_idxs = n_idxs[i_p]
if False == k_idxs:
print("pyknn ib {}, in {}, ip {} dist {} idx {}".format(ib, i_n, i_p, dist[ib, i_n, i_p], idx[ib, i_n, i_p]))
# print("pybq and dist2 {}".format(dist[ib, i_n, i_p]))
print("dist {} idx {}".format(dist2[ib, i_n, i_p], idx2[ib, i_n, i_p]))
# else:
# print("pyknn ib {}, in {}, ip {} dist {} idx {}".format(ib, i_n, i_p, dist[ib, i_n, i_p], idx[ib, i_n, i_p]))
print("CHECK1:", False in (check_idx))
# ↑ result (an excerpt of the printed output is recorded in the string below)
"""
pyknn ib 0, in 3397, ip 5 dist 0.0005849996232427657 idx 2927
dist 0.0005850791931152344 idx 542
pyknn ib 0, in 3398, ip 10 dist 0.009334004484117031 idx 3892
dist 0.00933384895324707 idx 1082
pyknn ib 0, in 3398, ip 11 dist 0.009334005415439606 idx 1082
dist 0.00933384895324707 idx 3892
pyknn ib 0, in 3402, ip 6 dist 0.01157099287956953 idx 1313
dist 0.011570453643798828 idx 2092
pyknn ib 0, in 3402, ip 7 dist 0.01157099287956953 idx 2092
dist 0.011570453643798828 idx 1313
pyknn ib 0, in 3402, ip 9 dist 0.013051005080342293 idx 2169
dist 0.013050079345703125 idx 2962
pyknn ib 0, in 3402, ip 10 dist 0.013051005080342293 idx 2962
dist 0.013050079345703125 idx 2169
pyknn ib 0, in 3402, ip 14 dist 0.019140997901558876 idx 128
dist 0.019140243530273438 idx 1118
pyknn ib 0, in 3402, ip 15 dist 0.019140997901558876 idx 1118
dist 0.019140243530273438 idx 128
pyknn ib 0, in 3403, ip 2 dist 0.0013459994224831462 idx 3251
dist 0.001345992088317871 idx 3264
pyknn ib 0, in 3403, ip 3 dist 0.0013459994224831462 idx 3264
dist 0.001345992088317871 idx 3251
pyknn ib 0, in 3404, ip 13 dist 0.005812999792397022 idx 2701
dist 0.0058135986328125 idx 3754
pyknn ib 0, in 3404, ip 14 dist 0.005812999792397022 idx 3754
dist 0.0058135986328125 idx 2701
pyknn ib 0, in 3407, ip 4 dist 0.002997000701725483 idx 1947
dist 0.0029970109462738037 idx 3656
pyknn ib 0, in 3407, ip 5 dist 0.002997000701725483 idx 3656
dist 0.0029970109462738037 idx 1947
pyknn ib 0, in 3409, ip 0 dist 0.0 idx 1269
dist -2.384185791015625e-07 idx 3409
pyknn ib 0, in 3409, ip 1 dist 0.0 idx 3409
dist -2.384185791015625e-07 idx 1269
pyknn ib 0, in 3409, ip 17 dist 0.008043000474572182 idx 1042
dist 0.00804293155670166 idx 1482
pyknn ib 0, in 3409, ip 18 dist 0.008043000474572182 idx 1482
dist 0.00804293155670166 idx 1042
pyknn ib 0, in 3410, ip 11 dist 0.005334001034498215 idx 1024
dist 0.00533401221036911 idx 2834
pyknn ib 0, in 3410, ip 12 dist 0.005334001034498215 idx 2834
dist 0.00533401221036911 idx 1024
pyknn ib 0, in 3413, ip 0 dist 0.0 idx 152
dist 0.0 idx 3413
pyknn ib 0, in 3413, ip 1 dist 0.0 idx 3413
dist 0.0 idx 152
pyknn ib 0, in 3413, ip 14 dist 0.010057998821139336 idx 2104
dist 0.010058045387268066 idx 2583
pyknn ib 0, in 3413, ip 15 dist 0.010057998821139336 idx 2583
dist 0.010058045387268066 idx 2104
pyknn ib 0, in 3414, ip 9 dist 0.00410101655870676 idx 2055
dist 0.004100799560546875 idx 2936
pyknn ib 0, in 3414, ip 10 dist 0.00410101655870676 idx 2936
dist 0.004100799560546875 idx 2055
pyknn ib 0, in 3415, ip 1 dist 0.00021200145420152694 idx 1176
dist 0.0002117156982421875 idx 3866
pyknn ib 0, in 3415, ip 2 dist 0.00021200145420152694 idx 3866
dist 0.0002117156982421875 idx 1176
pyknn ib 0, in 3415, ip 11 dist 0.007836993783712387 idx 3519
dist 0.007837295532226562 idx 4066
pyknn ib 0, in 3415, ip 12 dist 0.007836993783712387 idx 4066
dist 0.007837295532226562 idx 3519
pyknn ib 0, in 3416, ip 4 dist 0.001897998503409326 idx 3325
dist 0.001898050308227539 idx 3729
pyknn ib 0, in 3416, ip 5 dist 0.001897998503409326 idx 3729
dist 0.001898050308227539 idx 3325
pyknn ib 0, in 3421, ip 7 dist 0.004901004955172539 idx 1018
dist 0.00490117073059082 idx 3982
pyknn ib 0, in 3421, ip 8 dist 0.004901004955172539 idx 3982
dist 0.00490117073059082 idx 1018
pyknn ib 0, in 3429, ip 3 dist 0.00023299954773392528 idx 1024
dist 0.00023300200700759888 idx 2834
pyknn ib 0, in 3429, ip 4 dist 0.00023299954773392528 idx 2834
dist 0.00023300200700759888 idx 1024
pyknn ib 0, in 3430, ip 13 dist 0.00894500408321619 idx 810
dist 0.008943557739257812 idx 3766
pyknn ib 0, in 3430, ip 14 dist 0.00894500408321619 idx 3766
dist 0.008943557739257812 idx 810
pyknn ib 0, in 3440, ip 4 dist 0.003592999652028084 idx 2143
dist 0.0035930201411247253 idx 4072
pyknn ib 0, in 3440, ip 5 dist 0.003592999652028084 idx 4072
dist 0.0035930201411247253 idx 2143
pyknn ib 0, in 3440, ip 11 dist 0.006083999294787645 idx 2987
dist 0.006083987653255463 idx 868
pyknn ib 0, in 3440, ip 12 dist 0.006084000691771507 idx 868
dist 0.00608399510383606 idx 2987
pyknn ib 0, in 3440, ip 18 dist 0.010029001161456108 idx 0
dist 0.010029010474681854 idx 1032
pyknn ib 0, in 3440, ip 19 dist 0.010029001161456108 idx 1032
dist 0.010029010474681854 idx 0
pyknn ib 0, in 3441, ip 16 dist 0.007686000782996416 idx 0
dist 0.007686004042625427 idx 1032
pyknn ib 0, in 3441, ip 17 dist 0.007686000782996416 idx 1032
dist 0.007686004042625427 idx 1850
pyknn ib 0, in 3441, ip 18 dist 0.007686000782996416 idx 1850
dist 0.007686004042625427 idx 0
pyknn ib 0, in 3442, ip 11 dist 0.004209999926388264 idx 1947
dist 0.004209995269775391 idx 3656
pyknn ib 0, in 3442, ip 12 dist 0.004209999926388264 idx 3656
dist 0.004209995269775391 idx 1947
pyknn ib 0, in 3445, ip 10 dist 0.004147999454289675 idx 488
dist 0.004148006439208984 idx 2434
pyknn ib 0, in 3445, ip 11 dist 0.004147999454289675 idx 2434
dist 0.004148006439208984 idx 488
pyknn ib 0, in 3456, ip 7 dist 0.0031460002064704895 idx 1024
dist 0.0031460076570510864 idx 2834
pyknn ib 0, in 3456, ip 8 dist 0.0031460002064704895 idx 2834
dist 0.0031460076570510864 idx 1024
pyknn ib 0, in 3456, ip 9 dist 0.004821000155061483 idx 363
dist 0.004821024835109711 idx 694
pyknn ib 0, in 3456, ip 10 dist 0.004821000155061483 idx 694
dist 0.004821024835109711 idx 363
pyknn ib 0, in 3456, ip 11 dist 0.004900998901575804 idx 2953
dist 0.004901014268398285 idx 3429
pyknn ib 0, in 3456, ip 12 dist 0.004900998901575804 idx 3429
dist 0.004901014268398285 idx 2953
pyknn ib 0, in 3461, ip 13 dist 0.013138998299837112 idx 1212
dist 0.013138949871063232 idx 3522
pyknn ib 0, in 3461, ip 14 dist 0.013138998299837112 idx 3522
dist 0.013138949871063232 idx 1212
pyknn ib 0, in 3462, ip 0 dist 0.0 idx 3462
dist 0.0 idx 3465
pyknn ib 0, in 3462, ip 1 dist 0.0 idx 3465
dist 0.0 idx 3462
pyknn ib 0, in 3465, ip 0 dist 0.0 idx 3462
dist 0.0 idx 3465
pyknn ib 0, in 3465, ip 1 dist 0.0 idx 3465
dist 0.0 idx 3462
pyknn ib 0, in 3470, ip 9 dist 0.0076049999333918095 idx 281
dist 0.007604971528053284 idx 2437
pyknn ib 0, in 3470, ip 10 dist 0.0076049999333918095 idx 2437
dist 0.007604971528053284 idx 281
pyknn ib 0, in 3472, ip 18 dist 0.014760998077690601 idx 158
dist 0.014760971069335938 idx 1404
pyknn ib 0, in 3472, ip 19 dist 0.014760998077690601 idx 1404
dist 0.014760971069335938 idx 158
pyknn ib 0, in 3474, ip 12 dist 0.010493999347090721 idx 666
dist 0.010493874549865723 idx 2543
pyknn ib 0, in 3474, ip 13 dist 0.010493999347090721 idx 2543
dist 0.010493874549865723 idx 666
pyknn ib 0, in 3474, ip 15 dist 0.011125998571515083 idx 1590
dist 0.011126160621643066 idx 2927
pyknn ib 0, in 3474, ip 16 dist 0.011125998571515083 idx 2927
dist 0.011126160621643066 idx 1590
pyknn ib 0, in 3480, ip 4 dist 0.012818001210689545 idx 1951
dist 0.012817949056625366 idx 2710
pyknn ib 0, in 3480, ip 5 dist 0.012818001210689545 idx 2710
dist 0.012817949056625366 idx 1951
pyknn ib 0, in 3481, ip 2 dist 0.000298000784823671 idx 128
dist 0.0002970695495605469 idx 1118
pyknn ib 0, in 3481, ip 3 dist 0.000298000784823671 idx 1118
dist 0.0002970695495605469 idx 128
pyknn ib 0, in 3481, ip 9 dist 0.004514000378549099 idx 1313
dist 0.004513740539550781 idx 2092
pyknn ib 0, in 3481, ip 10 dist 0.004514000378549099 idx 2092
dist 0.004513740539550781 idx 1313
pyknn ib 0, in 3488, ip 6 dist 0.005204994697123766 idx 1304
dist 0.005205094814300537 idx 2654
pyknn ib 0, in 3488, ip 7 dist 0.005204994697123766 idx 2654
dist 0.005205094814300537 idx 1304
pyknn ib 0, in 3488, ip 11 dist 0.00811500009149313 idx 2397
dist 0.008115053176879883 idx 4050
pyknn ib 0, in 3488, ip 12 dist 0.00811500009149313 idx 4050
dist 0.008115053176879883 idx 2397
pyknn ib 0, in 3491, ip 18 dist 0.008860995061695576 idx 1304
dist 0.008860945701599121 idx 2654
pyknn ib 0, in 3491, ip 19 dist 0.008860995061695576 idx 2654
dist 0.008860945701599121 idx 1304
pyknn ib 0, in 3495, ip 0 dist 0.0 idx 1425
dist 0.0 idx 3495
pyknn ib 0, in 3495, ip 1 dist 0.0 idx 3495
dist 0.0 idx 1425
pyknn ib 0, in 3500, ip 18 dist 0.013730003498494625 idx 1487
dist 0.01372992992401123 idx 3004
pyknn ib 0, in 3500, ip 19 dist 0.013730003498494625 idx 3004
dist 0.01372992992401123 idx 1487
pyknn ib 0, in 3501, ip 2 dist 0.0018849981715902686 idx 1225
dist 0.0018854141235351562 idx 1637
pyknn ib 0, in 3501, ip 3 dist 0.0018849981715902686 idx 1637
dist 0.0018854141235351562 idx 1225
pyknn ib 0, in 3504, ip 3 dist 0.0021460067946463823 idx 869
dist 0.0021452903747558594 idx 2454
pyknn ib 0, in 3504, ip 4 dist 0.0021460067946463823 idx 2454
dist 0.0021452903747558594 idx 869
pyknn ib 0, in 3507, ip 10 dist 0.006412987597286701 idx 1487
dist 0.0064127445220947266 idx 3004
pyknn ib 0, in 3507, ip 11 dist 0.006412987597286701 idx 3004
dist 0.0064127445220947266 idx 1487
pyknn ib 0, in 3508, ip 1 dist 0.001189997885376215 idx 1052
dist 0.0011899471282958984 idx 2093
pyknn ib 0, in 3508, ip 2 dist 0.001189997885376215 idx 2093
dist 0.0011899471282958984 idx 1052
pyknn ib 0, in 3509, ip 1 dist 0.0012090002419427037 idx 1304
dist 0.001208961009979248 idx 2654
pyknn ib 0, in 3509, ip 2 dist 0.0012090002419427037 idx 2654
dist 0.001208961009979248 idx 1304
pyknn ib 0, in 3511, ip 8 dist 0.002437000395730138 idx 2701
dist 0.002437591552734375 idx 3754
pyknn ib 0, in 3511, ip 9 dist 0.002437000395730138 idx 3754
dist 0.002437591552734375 idx 2701
pyknn ib 0, in 3512, ip 10 dist 0.006725003011524677 idx 937
dist 0.006725311279296875 idx 2228
pyknn ib 0, in 3512, ip 11 dist 0.006725003011524677 idx 2228
dist 0.006725311279296875 idx 937
pyknn ib 0, in 3512, ip 18 dist 0.014550035819411278 idx 1400
dist 0.014549732208251953 idx 3069
pyknn ib 0, in 3512, ip 19 dist 0.014550035819411278 idx 3069
dist 0.014549732208251953 idx 1400
pyknn ib 0, in 3519, ip 0 dist 0.0 idx 3519
dist 0.0 idx 4066
pyknn ib 0, in 3519, ip 1 dist 0.0 idx 4066
dist 0.0 idx 3519
pyknn ib 0, in 3519, ip 2 dist 0.0004419961478561163 idx 829
dist 0.00044155120849609375 idx 3173
pyknn ib 0, in 3519, ip 3 dist 0.0004419961478561163 idx 3173
dist 0.00044155120849609375 idx 829
pyknn ib 0, in 3520, ip 1 dist 0.00022599961084779352 idx 818
dist 0.00022605061531066895 idx 3318
pyknn ib 0, in 3520, ip 2 dist 0.00022599961084779352 idx 3318
dist 0.00022605061531066895 idx 818
pyknn ib 0, in 3524, ip 10 dist 0.006727982312440872 idx 810
dist 0.006728172302246094 idx 3766
pyknn ib 0, in 3524, ip 11 dist 0.006727982312440872 idx 3766
dist 0.006728172302246094 idx 810
pyknn ib 0, in 3525, ip 4 dist 0.001409997814334929 idx 1649
dist 0.0014100074768066406 idx 3471
pyknn ib 0, in 3525, ip 5 dist 0.001409997814334929 idx 3471
dist 0.0014100074768066406 idx 1649
pyknn ib 0, in 3526, ip 6 dist 0.00230899965390563 idx 1123
dist 0.0023088455200195312 idx 2937
pyknn ib 0, in 3526, ip 7 dist 0.00230899965390563 idx 2937
dist 0.0023088455200195312 idx 1123
pyknn ib 0, in 3527, ip 10 dist 0.009864001534879208 idx 1212
dist 0.009863987565040588 idx 3522
pyknn ib 0, in 3527, ip 11 dist 0.009864001534879208 idx 3522
dist 0.009863987565040588 idx 1212
pyknn ib 0, in 3528, ip 3 dist 0.0019439997849985957 idx 1947
dist 0.0019440054893493652 idx 3656
pyknn ib 0, in 3528, ip 4 dist 0.0019439997849985957 idx 3656
dist 0.0019440054893493652 idx 1947
pyknn ib 0, in 3530, ip 11 dist 0.004930997267365456 idx 1871
dist 0.0049304962158203125 idx 3541
pyknn ib 0, in 3530, ip 12 dist 0.004930997267365456 idx 3541
dist 0.0049304962158203125 idx 1871
pyknn ib 0, in 3535, ip 2 dist 0.00040199910290539265 idx 2176
dist 0.00040340423583984375 idx 2815
pyknn ib 0, in 3535, ip 3 dist 0.00040199910290539265 idx 2815
dist 0.00040340423583984375 idx 2176
pyknn ib 0, in 3541, ip 0 dist 0.0 idx 1871
dist -1.9073486328125e-06 idx 3541
pyknn ib 0, in 3541, ip 1 dist 0.0 idx 3541
dist -1.9073486328125e-06 idx 1871
pyknn ib 0, in 3546, ip 11 dist 0.005651996936649084 idx 2316
dist 0.005651950836181641 idx 2761
pyknn ib 0, in 3546, ip 12 dist 0.005651996936649084 idx 2761
dist 0.005651950836181641 idx 2316
pyknn ib 0, in 3553, ip 16 dist 0.003945999778807163 idx 2502
dist 0.003945887088775635 idx 3168
pyknn ib 0, in 3553, ip 17 dist 0.003945999778807163 idx 3168
dist 0.003945887088775635 idx 2502
pyknn ib 0, in 3560, ip 11 dist 0.00826500728726387 idx 2745
dist 0.008264780044555664 idx 3933
pyknn ib 0, in 3560, ip 12 dist 0.00826500728726387 idx 3933
dist 0.008264780044555664 idx 2745
pyknn ib 0, in 3567, ip 7 dist 0.004524988122284412 idx 1297
dist 0.00452423095703125 idx 2984
pyknn ib 0, in 3567, ip 8 dist 0.004524995107203722 idx 2984
dist 0.00452423095703125 idx 1297
pyknn ib 0, in 3575, ip 2 dist 0.0017249988159164786 idx 2854
dist 0.0017247200012207031 idx 3996
pyknn ib 0, in 3575, ip 3 dist 0.0017249988159164786 idx 3996
dist 0.0017247200012207031 idx 2854
pyknn ib 0, in 3579, ip 1 dist 0.00023399594647344202 idx 810
dist 0.000232696533203125 idx 3766
pyknn ib 0, in 3579, ip 2 dist 0.00023399594647344202 idx 3766
dist 0.000232696533203125 idx 810
pyknn ib 0, in 3579, ip 10 dist 0.008099023252725601 idx 1093
dist 0.008098602294921875 idx 2320
pyknn ib 0, in 3579, ip 11 dist 0.008099023252725601 idx 2320
dist 0.008098602294921875 idx 1093
pyknn ib 0, in 3581, ip 9 dist 0.006384999491274357 idx 2176
dist 0.00638580322265625 idx 2815
pyknn ib 0, in 3581, ip 10 dist 0.006384999491274357 idx 2815
dist 0.00638580322265625 idx 2176
pyknn ib 0, in 3582, ip 11 dist 0.006272999569773674 idx 1952
dist 0.00627291202545166 idx 2823
pyknn ib 0, in 3582, ip 12 dist 0.006272999569773674 idx 2823
dist 0.00627291202545166 idx 1952
pyknn ib 0, in 3583, ip 1 dist 0.0015939960721880198 idx 1313
dist 0.0015935897827148438 idx 2092
pyknn ib 0, in 3583, ip 2 dist 0.0015939960721880198 idx 2092
dist 0.0015935897827148438 idx 1313
pyknn ib 0, in 3594, ip 12 dist 0.006574000231921673 idx 849
dist 0.006572723388671875 idx 1128
pyknn ib 0, in 3594, ip 13 dist 0.006574000231921673 idx 1128
dist 0.006572723388671875 idx 849
pyknn ib 0, in 3598, ip 18 dist 0.008245003409683704 idx 193
dist 0.008244991302490234 idx 231
pyknn ib 0, in 3598, ip 19 dist 0.008245003409683704 idx 231
dist 0.008244991302490234 idx 193
pyknn ib 0, in 3600, ip 11 dist 0.0064890035428106785 idx 1059
dist 0.006489008665084839 idx 3232
pyknn ib 0, in 3600, ip 12 dist 0.0064890035428106785 idx 3232
dist 0.006489008665084839 idx 1059
pyknn ib 0, in 3610, ip 14 dist 0.007984996773302555 idx 1925
dist 0.00798499584197998 idx 2416
pyknn ib 0, in 3610, ip 15 dist 0.007984996773302555 idx 2416
dist 0.00798499584197998 idx 1925
pyknn ib 0, in 3612, ip 5 dist 0.0035329984966665506 idx 2316
dist 0.003532886505126953 idx 2761
pyknn ib 0, in 3612, ip 6 dist 0.0035329984966665506 idx 2761
dist 0.003532886505126953 idx 2316
pyknn ib 0, in 3621, ip 10 dist 0.006896963343024254 idx 937
dist 0.006896495819091797 idx 2228
pyknn ib 0, in 3621, ip 11 dist 0.006896963343024254 idx 2228
dist 0.006896495819091797 idx 937
pyknn ib 0, in 3627, ip 11 dist 0.008366001769900322 idx 767
dist 0.008365988731384277 idx 1375
pyknn ib 0, in 3627, ip 13 dist 0.008366001769900322 idx 1375
dist 0.008365988731384277 idx 767
pyknn ib 0, in 3629, ip 12 dist 0.0073130009695887566 idx 403
dist 0.007312774658203125 idx 2045
pyknn ib 0, in 3629, ip 13 dist 0.0073130009695887566 idx 2045
dist 0.007312774658203125 idx 403
pyknn ib 0, in 3631, ip 0 dist 0.0 idx 3631
dist 0.0 idx 4089
pyknn ib 0, in 3631, ip 1 dist 0.0 idx 4089
dist 0.0 idx 3631
pyknn ib 0, in 3634, ip 18 dist 0.009790007025003433 idx 2397
dist 0.00978994369506836 idx 4050
pyknn ib 0, in 3634, ip 19 dist 0.009790007025003433 idx 4050
dist 0.00978994369506836 idx 2397
pyknn ib 0, in 3635, ip 0 dist 0.0 idx 1906
dist 9.5367431640625e-07 idx 3635
pyknn ib 0, in 3635, ip 1 dist 0.0 idx 3635
dist 9.5367431640625e-07 idx 1906
pyknn ib 0, in 3635, ip 7 dist 0.006624998524785042 idx 2790
dist 0.006625652313232422 idx 3722
pyknn ib 0, in 3635, ip 8 dist 0.006624998524785042 idx 3722
dist 0.006625652313232422 idx 2790
pyknn ib 0, in 3637, ip 18 dist 0.010817998088896275 idx 1123
dist 0.010816574096679688 idx 2937
pyknn ib 0, in 3637, ip 19 dist 0.010817998088896275 idx 2937
dist 0.010816574096679688 idx 1123
pyknn ib 0, in 3638, ip 0 dist 0.0 idx 2780
dist -4.76837158203125e-07 idx 3638
pyknn ib 0, in 3638, ip 1 dist 0.0 idx 3638
dist -4.76837158203125e-07 idx 2780
pyknn ib 0, in 3642, ip 4 dist 0.0014510012697428465 idx 363
dist 0.0014509856700897217 idx 694
pyknn ib 0, in 3642, ip 5 dist 0.0014510012697428465 idx 694
dist 0.0014509856700897217 idx 363
pyknn ib 0, in 3645, ip 8 dist 0.0035089822486042976 idx 615
dist 0.0035085678100585938 idx 2297
pyknn ib 0, in 3645, ip 9 dist 0.0035089822486042976 idx 2297
dist 0.0035085678100585938 idx 615
pyknn ib 0, in 3653, ip 8 dist 0.005330007988959551 idx 236
dist 0.0053310394287109375 idx 1959
pyknn ib 0, in 3653, ip 9 dist 0.005330007988959551 idx 1959
dist 0.0053310394287109375 idx 236
pyknn ib 0, in 3653, ip 12 dist 0.007883005775511265 idx 483
dist 0.007884025573730469 idx 2764
pyknn ib 0, in 3653, ip 13 dist 0.007883005775511265 idx 2764
dist 0.007884025573730469 idx 483
pyknn ib 0, in 3655, ip 10 dist 0.005937003064900637 idx 962
dist 0.005936622619628906 idx 3213
pyknn ib 0, in 3655, ip 11 dist 0.005937003064900637 idx 3213
dist 0.005936622619628906 idx 962
pyknn ib 0, in 3656, ip 0 dist 0.0 idx 1947
dist 5.960464477539063e-08 idx 3656
pyknn ib 0, in 3656, ip 1 dist 0.0 idx 3656
dist 5.960464477539063e-08 idx 1947
pyknn ib 0, in 3668, ip 4 dist 0.002050999319180846 idx 829
dist 0.0020503997802734375 idx 3173
pyknn ib 0, in 3668, ip 5 dist 0.002050999319180846 idx 3173
dist 0.0020503997802734375 idx 829
pyknn ib 0, in 3668, ip 9 dist 0.0033809910528361797 idx 3519
dist 0.0033788681030273438 idx 4066
pyknn ib 0, in 3668, ip 10 dist 0.0033809910528361797 idx 4066
dist 0.0033788681030273438 idx 3519
pyknn ib 0, in 3673, ip 11 dist 0.005558010656386614 idx 1052
dist 0.0055577754974365234 idx 2093
pyknn ib 0, in 3673, ip 12 dist 0.005558010656386614 idx 2093
dist 0.0055577754974365234 idx 1052
pyknn ib 0, in 3674, ip 2 dist 0.0007700005662627518 idx 344
dist 0.0007699877023696899 idx 1751
pyknn ib 0, in 3674, ip 3 dist 0.0007700005662627518 idx 1751
dist 0.0007699877023696899 idx 344
pyknn ib 0, in 3675, ip 10 dist 0.006197004113346338 idx 3140
dist 0.006197214126586914 idx 2736
pyknn ib 0, in 3675, ip 11 dist 0.006197008304297924 idx 2736
dist 0.006197214126586914 idx 3140
pyknn ib 0, in 3682, ip 2 dist 0.000953999871853739 idx 1031
dist 0.0009539127349853516 idx 1100
pyknn ib 0, in 3682, ip 3 dist 0.000953999871853739 idx 1100
dist 0.0009539127349853516 idx 2711
pyknn ib 0, in 3682, ip 4 dist 0.000953999871853739 idx 2711
dist 0.0009539127349853516 idx 1031
pyknn ib 0, in 3682, ip 5 dist 0.002277006395161152 idx 744
dist 0.002276897430419922 idx 3909
pyknn ib 0, in 3682, ip 6 dist 0.002277006395161152 idx 3909
dist 0.002276897430419922 idx 744
pyknn ib 0, in 3685, ip 6 dist 0.0022769994102418423 idx 1032
dist 0.002277001738548279 idx 1850
pyknn ib 0, in 3685, ip 7 dist 0.0022769994102418423 idx 1850
dist 0.002277001738548279 idx 1032
pyknn ib 0, in 3686, ip 18 dist 0.012762016616761684 idx 962
dist 0.012761116027832031 idx 3213
pyknn ib 0, in 3686, ip 19 dist 0.012762016616761684 idx 3213
dist 0.012761116027832031 idx 962
pyknn ib 0, in 3699, ip 9 dist 0.006161997560411692 idx 158
dist 0.006161689758300781 idx 1404
pyknn ib 0, in 3699, ip 10 dist 0.006161997560411692 idx 1404
dist 0.006161689758300781 idx 158
pyknn ib 0, in 3703, ip 0 dist 0.0 idx 963
dist 0.0 idx 3703
pyknn ib 0, in 3703, ip 1 dist 0.0 idx 3703
dist 0.0 idx 963
pyknn ib 0, in 3705, ip 7 dist 0.005197002552449703 idx 869
dist 0.005197048187255859 idx 2454
pyknn ib 0, in 3705, ip 8 dist 0.005197002552449703 idx 2454
dist 0.005197048187255859 idx 869
pyknn ib 0, in 3706, ip 1 dist 0.0005649998784065247 idx 497
dist 0.0005649328231811523 idx 2785
pyknn ib 0, in 3706, ip 2 dist 0.0005649998784065247 idx 2785
dist 0.0005649328231811523 idx 497
pyknn ib 0, in 3707, ip 0 dist 0.0 idx 719
dist -4.76837158203125e-07 idx 3707
pyknn ib 0, in 3707, ip 1 dist 0.0 idx 3707
dist -4.76837158203125e-07 idx 719
pyknn ib 0, in 3714, ip 5 dist 0.0016370017547160387 idx 2328
dist 0.0016369819641113281 idx 3774
pyknn ib 0, in 3714, ip 6 dist 0.0016370017547160387 idx 3774
dist 0.0016369819641113281 idx 2328
pyknn ib 0, in 3715, ip 3 dist 0.000400999968405813 idx 636
dist 0.0004010051488876343 idx 3362
pyknn ib 0, in 3715, ip 4 dist 0.000400999968405813 idx 3362
dist 0.0004010051488876343 idx 636
pyknn ib 0, in 3717, ip 10 dist 0.0069059995003044605 idx 767
dist 0.0069060176610946655 idx 1375
pyknn ib 0, in 3717, ip 12 dist 0.0069059995003044605 idx 1375
dist 0.0069060176610946655 idx 767
pyknn ib 0, in 3720, ip 5 dist 0.0029769993852823973 idx 1024
dist 0.0029769912362098694 idx 2834
pyknn ib 0, in 3720, ip 6 dist 0.0029769993852823973 idx 2834
dist 0.0029769912362098694 idx 1024
pyknn ib 0, in 3720, ip 8 dist 0.0040999967604875565 idx 2953
dist 0.004099994897842407 idx 3429
pyknn ib 0, in 3720, ip 9 dist 0.0040999967604875565 idx 3429
dist 0.004099994897842407 idx 2953
pyknn ib 0, in 3721, ip 1 dist 0.0002810000441968441 idx 1647
dist 0.0002810955047607422 idx 2457
pyknn ib 0, in 3721, ip 2 dist 0.0002810000441968441 idx 2457
dist 0.0002810955047607422 idx 1647
pyknn ib 0, in 3721, ip 12 dist 0.006165006663650274 idx 282
dist 0.0061647891998291016 idx 2323
pyknn ib 0, in 3721, ip 13 dist 0.006165006663650274 idx 2323
dist 0.0061647891998291016 idx 282
pyknn ib 0, in 3722, ip 0 dist 0.0 idx 2790
dist 0.0 idx 3722
pyknn ib 0, in 3722, ip 1 dist 0.0 idx 3722
dist 0.0 idx 2790
pyknn ib 0, in 3722, ip 14 dist 0.006624998524785042 idx 1906
dist 0.006625652313232422 idx 3635
pyknn ib 0, in 3722, ip 15 dist 0.006624998524785042 idx 3635
dist 0.006625652313232422 idx 1906
pyknn ib 0, in 3723, ip 11 dist 0.006090002600103617 idx 497
dist 0.006090044975280762 idx 2785
pyknn ib 0, in 3723, ip 12 dist 0.006090002600103617 idx 2785
dist 0.006090044975280762 idx 497
pyknn ib 0, in 3725, ip 9 dist 0.004778000060468912 idx 683
dist 0.004777997732162476 idx 3377
pyknn ib 0, in 3725, ip 10 dist 0.004778000060468912 idx 2705
dist 0.004777997732162476 idx 683
pyknn ib 0, in 3725, ip 11 dist 0.004778000060468912 idx 3377
dist 0.004777997732162476 idx 2705
pyknn ib 0, in 3728, ip 5 dist 0.003074999898672104 idx 722
dist 0.0030749994330108166 idx 3776
pyknn ib 0, in 3728, ip 6 dist 0.003074999898672104 idx 3776
dist 0.0030749994330108166 idx 722
pyknn ib 0, in 3731, ip 16 dist 0.01155802421271801 idx 2854
dist 0.011558055877685547 idx 3996
pyknn ib 0, in 3731, ip 17 dist 0.01155802421271801 idx 3996
dist 0.011558055877685547 idx 2854
pyknn ib 0, in 3734, ip 2 dist 0.0009229975985363126 idx 1649
dist 0.0009236335754394531 idx 3471
pyknn ib 0, in 3734, ip 3 dist 0.0009229975985363126 idx 3471
dist 0.0009236335754394531 idx 1649
pyknn ib 0, in 3737, ip 0 dist 0.0 idx 3261
dist 0.0 idx 3737
pyknn ib 0, in 3737, ip 1 dist 0.0 idx 3737
dist 0.0 idx 3261
pyknn ib 0, in 3738, ip 3 dist 0.0022369935177266598 idx 959
dist 0.0022363662719726562 idx 2733
pyknn ib 0, in 3738, ip 4 dist 0.0022370037622749805 idx 2733
dist 0.0022363662719726562 idx 959
pyknn ib 0, in 3740, ip 1 dist 0.0019520005444064736 idx 1647
dist 0.0019516944885253906 idx 2457
pyknn ib 0, in 3740, ip 2 dist 0.0019520005444064736 idx 2457
dist 0.0019516944885253906 idx 1647
pyknn ib 0, in 3740, ip 4 dist 0.0031559993512928486 idx 282
dist 0.0031557083129882812 idx 2323
pyknn ib 0, in 3740, ip 5 dist 0.0031559993512928486 idx 2323
dist 0.0031557083129882812 idx 282
pyknn ib 0, in 3742, ip 11 dist 0.007034010253846645 idx 937
dist 0.007033348083496094 idx 2228
pyknn ib 0, in 3742, ip 12 dist 0.007034010253846645 idx 2228
dist 0.007033348083496094 idx 937
pyknn ib 0, in 3746, ip 4 dist 0.0036910013295710087 idx 1342
dist 0.003690958023071289 idx 2235
pyknn ib 0, in 3746, ip 5 dist 0.0036910013295710087 idx 2235
dist 0.003690958023071289 idx 1342
pyknn ib 0, in 3746, ip 6 dist 0.003737997729331255 idx 847
dist 0.003737926483154297 idx 2932
pyknn ib 0, in 3746, ip 7 dist 0.003737997729331255 idx 2932
dist 0.003737926483154297 idx 847
pyknn ib 0, in 3749, ip 11 dist 0.004900988657027483 idx 2055
dist 0.004900932312011719 idx 2936
pyknn ib 0, in 3749, ip 12 dist 0.004900988657027483 idx 2936
dist 0.004900932312011719 idx 2055
pyknn ib 0, in 3752, ip 6 dist 0.0036260022316128016 idx 1248
dist 0.0036258697509765625 idx 3939
pyknn ib 0, in 3752, ip 7 dist 0.0036260022316128016 idx 3939
dist 0.0036258697509765625 idx 1248
pyknn ib 0, in 3753, ip 10 dist 0.005917004309594631 idx 1881
dist 0.0059168338775634766 idx 2188
pyknn ib 0, in 3753, ip 11 dist 0.005917004309594631 idx 2188
dist 0.0059168338775634766 idx 1881
pyknn ib 0, in 3754, ip 0 dist 0.0 idx 2701
dist 0.0 idx 3754
pyknn ib 0, in 3754, ip 1 dist 0.0 idx 3754
dist 0.0 idx 2701
pyknn ib 0, in 3758, ip 18 dist 0.01014699973165989 idx 127
dist 0.010146856307983398 idx 2848
pyknn ib 0, in 3758, ip 19 dist 0.01014699973165989 idx 2848
dist 0.010146856307983398 idx 127
pyknn ib 0, in 3765, ip 3 dist 0.001645983662456274 idx 2854
dist 0.0016455650329589844 idx 3996
pyknn ib 0, in 3765, ip 4 dist 0.001645983662456274 idx 3996
dist 0.0016455650329589844 idx 2854
pyknn ib 0, in 3766, ip 0 dist 0.0 idx 810
dist -1.9073486328125e-06 idx 3766
pyknn ib 0, in 3766, ip 1 dist 0.0 idx 3766
dist -1.9073486328125e-06 idx 810
pyknn ib 0, in 3767, ip 5 dist 0.004002002067863941 idx 3251
dist 0.0040018558502197266 idx 3264
pyknn ib 0, in 3767, ip 6 dist 0.004002002067863941 idx 3264
dist 0.0040018558502197266 idx 3251
pyknn ib 0, in 3768, ip 9 dist 0.005507001653313637 idx 1167
dist 0.005507469177246094 idx 2174
pyknn ib 0, in 3768, ip 10 dist 0.005507001653313637 idx 2174
dist 0.005507469177246094 idx 1167
pyknn ib 0, in 3774, ip 0 dist 0.0 idx 2328
dist 0.0 idx 3774
pyknn ib 0, in 3774, ip 1 dist 0.0 idx 3774
dist 0.0 idx 2328
pyknn ib 0, in 3776, ip 0 dist 0.0 idx 722
dist 0.0 idx 3776
pyknn ib 0, in 3776, ip 1 dist 0.0 idx 3776
dist 0.0 idx 722
pyknn ib 0, in 3777, ip 0 dist 0.0 idx 1062
dist 5.960464477539063e-08 idx 3777
pyknn ib 0, in 3777, ip 2 dist 0.0 idx 3777
dist 5.960464477539063e-08 idx 1062
pyknn ib 0, in 3778, ip 14 dist 0.006602999288588762 idx 722
dist 0.006602998822927475 idx 3776
pyknn ib 0, in 3778, ip 15 dist 0.006602999288588762 idx 3776
dist 0.006602998822927475 idx 722
pyknn ib 0, in 3785, ip 17 dist 0.010439997538924217 idx 363
dist 0.01043999195098877 idx 694
pyknn ib 0, in 3785, ip 18 dist 0.010439997538924217 idx 694
dist 0.01043999195098877 idx 363
pyknn ib 0, in 3791, ip 16 dist 0.010437000542879105 idx 722
dist 0.01043699961155653 idx 3776
pyknn ib 0, in 3791, ip 17 dist 0.010437000542879105 idx 3776
dist 0.01043699961155653 idx 722
pyknn ib 0, in 3792, ip 3 dist 0.001552001223899424 idx 1871
dist 0.0015506744384765625 idx 3541
pyknn ib 0, in 3792, ip 4 dist 0.001552001223899424 idx 3541
dist 0.0015506744384765625 idx 1871
pyknn ib 0, in 3792, ip 18 dist 0.013074001297354698 idx 1167
dist 0.013072013854980469 idx 2174
pyknn ib 0, in 3792, ip 19 dist 0.013074001297354698 idx 2174
dist 0.013072013854980469 idx 1167
pyknn ib 0, in 3796, ip 18 dist 0.011904004961252213 idx 963
dist 0.011904239654541016 idx 3703
pyknn ib 0, in 3796, ip 19 dist 0.011904004961252213 idx 3703
dist 0.011904239654541016 idx 963
pyknn ib 0, in 3797, ip 1 dist 0.00010100007784785703 idx 3325
dist 0.00010102987289428711 idx 3729
pyknn ib 0, in 3797, ip 2 dist 0.00010100007784785703 idx 3729
dist 0.00010102987289428711 idx 3325
pyknn ib 0, in 3799, ip 3 dist 0.00041699971188791096 idx 2266
dist 0.0004177093505859375 idx 3388
pyknn ib 0, in 3799, ip 4 dist 0.00041699971188791096 idx 3388
dist 0.0004177093505859375 idx 2266
pyknn ib 0, in 3802, ip 14 dist 0.011641998775303364 idx 3383
dist 0.011641979217529297 idx 3439
pyknn ib 0, in 3802, ip 15 dist 0.011641998775303364 idx 3439
dist 0.011641979217529297 idx 3383
pyknn ib 0, in 3805, ip 7 dist 0.0040910011157393456 idx 792
dist 0.004091024398803711 idx 974
pyknn ib 0, in 3805, ip 8 dist 0.0040910011157393456 idx 974
dist 0.004091024398803711 idx 792
pyknn ib 0, in 3807, ip 14 dist 0.008724001236259937 idx 744
dist 0.008723974227905273 idx 3909
pyknn ib 0, in 3807, ip 15 dist 0.008724001236259937 idx 3909
dist 0.008723974227905273 idx 744
pyknn ib 0, in 3818, ip 2 dist 0.000551996985450387 idx 2458
dist 0.0005519390106201172 idx 2645
pyknn ib 0, in 3818, ip 3 dist 0.000551996985450387 idx 2645
dist 0.0005519390106201172 idx 2458
pyknn ib 0, in 3821, ip 14 dist 0.004901000298559666 idx 2701
dist 0.004901885986328125 idx 3754
pyknn ib 0, in 3821, ip 15 dist 0.004901000298559666 idx 3754
dist 0.004901885986328125 idx 2701
pyknn ib 0, in 3826, ip 12 dist 0.010700996033847332 idx 1031
dist 0.01070094108581543 idx 1100
pyknn ib 0, in 3826, ip 13 dist 0.010700996033847332 idx 1100
dist 0.01070094108581543 idx 1031
pyknn ib 0, in 3826, ip 16 dist 0.012152014300227165 idx 744
dist 0.01215219497680664 idx 3909
pyknn ib 0, in 3826, ip 17 dist 0.012152014300227165 idx 3909
dist 0.01215219497680664 idx 744
pyknn ib 0, in 3833, ip 9 dist 0.003604000201448798 idx 281
dist 0.003603994846343994 idx 2437
pyknn ib 0, in 3833, ip 10 dist 0.003604000201448798 idx 2437
dist 0.003603994846343994 idx 281
pyknn ib 0, in 3835, ip 7 dist 0.004132998175919056 idx 1093
dist 0.004132270812988281 idx 2320
pyknn ib 0, in 3835, ip 8 dist 0.004132998175919056 idx 2320
dist 0.004132270812988281 idx 1093
pyknn ib 0, in 3835, ip 18 dist 0.010659990832209587 idx 810
dist 0.010659217834472656 idx 3766
pyknn ib 0, in 3835, ip 19 dist 0.010659990832209587 idx 3766
dist 0.010659217834472656 idx 810
pyknn ib 0, in 3842, ip 8 dist 0.005413996987044811 idx 2767
dist 0.005414038896560669 idx 3503
pyknn ib 0, in 3842, ip 9 dist 0.005413996987044811 idx 3503
dist 0.005414038896560669 idx 2767
pyknn ib 0, in 3843, ip 12 dist 0.005192999728024006 idx 847
dist 0.005192995071411133 idx 2932
pyknn ib 0, in 3843, ip 13 dist 0.005192999728024006 idx 2932
dist 0.005192995071411133 idx 847
pyknn ib 0, in 3851, ip 7 dist 0.0037380007561296225 idx 1333
dist 0.003738000988960266 idx 1388
pyknn ib 0, in 3851, ip 8 dist 0.0037380007561296225 idx 1388
dist 0.003738000988960266 idx 1333
pyknn ib 0, in 3853, ip 2 dist 0.0003970006946474314 idx 818
dist 0.00039702653884887695 idx 3318
pyknn ib 0, in 3853, ip 3 dist 0.0003970006946474314 idx 3318
dist 0.00039702653884887695 idx 818
pyknn ib 0, in 3853, ip 18 dist 0.025717997923493385 idx 3462
dist 0.025718003511428833 idx 3465
pyknn ib 0, in 3853, ip 19 dist 0.025717997923493385 idx 3465
dist 0.025718003511428833 idx 3462
pyknn ib 0, in 3858, ip 3 dist 0.004686000291258097 idx 636
dist 0.004686005413532257 idx 3362
pyknn ib 0, in 3858, ip 4 dist 0.004686000291258097 idx 3362
dist 0.004686005413532257 idx 636
pyknn ib 0, in 3859, ip 8 dist 0.0045179990120232105 idx 2502
dist 0.004517912864685059 idx 3168
pyknn ib 0, in 3859, ip 9 dist 0.0045179990120232105 idx 3168
dist 0.004517912864685059 idx 2502
pyknn ib 0, in 3859, ip 17 dist 0.0062250057235360146 idx 2316
dist 0.006224989891052246 idx 2761
pyknn ib 0, in 3859, ip 18 dist 0.0062250057235360146 idx 2761
dist 0.006224989891052246 idx 2316
pyknn ib 0, in 3860, ip 1 dist 0.001565000507980585 idx 768
dist 0.00156499445438385 idx 2765
pyknn ib 0, in 3860, ip 2 dist 0.001565000507980585 idx 2765
dist 0.00156499445438385 idx 768
pyknn ib 0, in 3864, ip 8 dist 0.0025200005620718002 idx 1219
dist 0.0025200843811035156 idx 3352
pyknn ib 0, in 3864, ip 9 dist 0.0025200005620718002 idx 3352
dist 0.0025200843811035156 idx 1219
pyknn ib 0, in 3866, ip 9 dist 0.008149027824401855 idx 3519
dist 0.008148193359375 idx 4066
pyknn ib 0, in 3866, ip 10 dist 0.008149027824401855 idx 4066
dist 0.008148193359375 idx 3519
pyknn ib 0, in 3866, ip 18 dist 0.011526023969054222 idx 2377
dist 0.011525154113769531 idx 3132
pyknn ib 0, in 3866, ip 19 dist 0.011526023969054222 idx 3132
dist 0.011525154113769531 idx 2377
pyknn ib 0, in 3867, ip 9 dist 0.004133999813348055 idx 857
dist 0.004133939743041992 idx 2424
pyknn ib 0, in 3867, ip 10 dist 0.004133999813348055 idx 2424
dist 0.004133939743041992 idx 857
pyknn ib 0, in 3868, ip 18 dist 0.012170001864433289 idx 1333
dist 0.012169986963272095 idx 1388
pyknn ib 0, in 3868, ip 19 dist 0.012170001864433289 idx 1388
dist 0.012169986963272095 idx 1333
pyknn ib 0, in 3869, ip 3 dist 0.002033001510426402 idx 938
dist 0.0020329952239990234 idx 1350
pyknn ib 0, in 3869, ip 4 dist 0.002033001510426402 idx 1350
dist 0.0020329952239990234 idx 938
pyknn ib 0, in 3872, ip 4 dist 0.0024029994383454323 idx 683
dist 0.0024029985070228577 idx 2705
pyknn ib 0, in 3872, ip 5 dist 0.0024029994383454323 idx 2705
dist 0.0024029985070228577 idx 3377
pyknn ib 0, in 3872, ip 6 dist 0.0024029994383454323 idx 3377
dist 0.0024029985070228577 idx 683
pyknn ib 0, in 3873, ip 4 dist 0.0017179968999698758 idx 1304
dist 0.001717984676361084 idx 2654
pyknn ib 0, in 3873, ip 5 dist 0.0017179968999698758 idx 2654
dist 0.001717984676361084 idx 1304
pyknn ib 0, in 3874, ip 10 dist 0.007155999541282654 idx 366
dist 0.0071561336517333984 idx 3555
pyknn ib 0, in 3874, ip 11 dist 0.007155999541282654 idx 3555
dist 0.0071561336517333984 idx 366
pyknn ib 0, in 3883, ip 2 dist 0.0005449995514936745 idx 158
dist 0.0005464553833007812 idx 1404
pyknn ib 0, in 3883, ip 3 dist 0.0005449995514936745 idx 1404
dist 0.0005464553833007812 idx 158
pyknn ib 0, in 3884, ip 7 dist 0.0038480001967400312 idx 1018
dist 0.003847837448120117 idx 3982
pyknn ib 0, in 3884, ip 8 dist 0.0038480001967400312 idx 3982
dist 0.003847837448120117 idx 1018
pyknn ib 0, in 3887, ip 10 dist 0.006790000945329666 idx 3107
dist 0.006789207458496094 idx 1966
pyknn ib 0, in 3887, ip 11 dist 0.006790002807974815 idx 1966
dist 0.006789207458496094 idx 3107
pyknn ib 0, in 3890, ip 0 dist 0.0 idx 1426
dist 0.0 idx 3890
pyknn ib 0, in 3890, ip 1 dist 0.0 idx 3890
dist 0.0 idx 1426
pyknn ib 0, in 3890, ip 6 dist 0.0032750004902482033 idx 2328
dist 0.0032750368118286133 idx 3774
pyknn ib 0, in 3890, ip 7 dist 0.0032750004902482033 idx 3774
dist 0.0032750368118286133 idx 2328
pyknn ib 0, in 3894, ip 4 dist 0.002228998579084873 idx 1254
dist 0.0022287368774414062 idx 2552
pyknn ib 0, in 3894, ip 5 dist 0.002228998579084873 idx 2552
dist 0.0022287368774414062 idx 1254
pyknn ib 0, in 3899, ip 11 dist 0.006374003365635872 idx 497
dist 0.006373763084411621 idx 2785
pyknn ib 0, in 3899, ip 12 dist 0.006374003365635872 idx 2785
dist 0.006373763084411621 idx 497
pyknn ib 0, in 3902, ip 2 dist 0.000996999442577362 idx 1333
dist 0.0009969770908355713 idx 1388
pyknn ib 0, in 3902, ip 3 dist 0.000996999442577362 idx 1388
dist 0.0009969770908355713 idx 1333
pyknn ib 0, in 3906, ip 11 dist 0.008240998722612858 idx 768
dist 0.00824098289012909 idx 2765
pyknn ib 0, in 3906, ip 12 dist 0.008240998722612858 idx 2765
dist 0.00824098289012909 idx 768
pyknn ib 0, in 3909, ip 0 dist 0.0 idx 744
dist 0.0 idx 3909
pyknn ib 0, in 3909, ip 1 dist 0.0 idx 3909
dist 0.0 idx 744
pyknn ib 0, in 3909, ip 3 dist 0.0003130019176751375 idx 1031
dist 0.00031256675720214844 idx 2711
pyknn ib 0, in 3909, ip 4 dist 0.0003130019176751375 idx 1100
dist 0.00031256675720214844 idx 1031
pyknn ib 0, in 3909, ip 5 dist 0.0003130019176751375 idx 2711
dist 0.00031256675720214844 idx 1100
pyknn ib 0, in 3911, ip 3 dist 0.0003949997771997005 idx 2143
dist 0.00039499253034591675 idx 4072
pyknn ib 0, in 3911, ip 4 dist 0.0003949997771997005 idx 4072
dist 0.00039499253034591675 idx 2143
pyknn ib 0, in 3912, ip 18 dist 0.007466008421033621 idx 476
dist 0.007465362548828125 idx 784
pyknn ib 0, in 3912, ip 19 dist 0.007466008421033621 idx 784
dist 0.007465362548828125 idx 476
pyknn ib 0, in 3916, ip 12 dist 0.00708600040525198 idx 488
dist 0.007086008787155151 idx 2434
pyknn ib 0, in 3916, ip 13 dist 0.00708600040525198 idx 2434
dist 0.007086008787155151 idx 488
pyknn ib 0, in 3918, ip 13 dist 0.009893002919852734 idx 3325
dist 0.009893059730529785 idx 3729
pyknn ib 0, in 3918, ip 14 dist 0.009893002919852734 idx 3729
dist 0.009893059730529785 idx 3325
pyknn ib 0, in 3918, ip 18 dist 0.012113000266253948 idx 1426
dist 0.012112975120544434 idx 3890
pyknn ib 0, in 3918, ip 19 dist 0.012113000266253948 idx 3890
dist 0.012112975120544434 idx 1426
pyknn ib 0, in 3921, ip 3 dist 0.003403998911380768 idx 281
dist 0.003403991460800171 idx 2437
pyknn ib 0, in 3921, ip 4 dist 0.003403998911380768 idx 2437
dist 0.003403991460800171 idx 281
pyknn ib 0, in 3931, ip 5 dist 0.0017810005228966475 idx 2701
dist 0.001781463623046875 idx 3754
pyknn ib 0, in 3931, ip 6 dist 0.0017810005228966475 idx 3754
dist 0.001781463623046875 idx 2701
pyknn ib 0, in 3933, ip 0 dist 0.0 idx 2745
dist -4.76837158203125e-07 idx 3933
pyknn ib 0, in 3933, ip 1 dist 0.0 idx 3933
dist -4.76837158203125e-07 idx 2745
pyknn ib 0, in 3936, ip 13 dist 0.013471986167132854 idx 1031
dist 0.013471603393554688 idx 1100
pyknn ib 0, in 3936, ip 14 dist 0.013471986167132854 idx 1100
dist 0.013471603393554688 idx 2711
pyknn ib 0, in 3936, ip 15 dist 0.013471986167132854 idx 2711
dist 0.013471603393554688 idx 1031
pyknn ib 0, in 3936, ip 16 dist 0.013521000742912292 idx 744
dist 0.013520956039428711 idx 3909
pyknn ib 0, in 3936, ip 17 dist 0.013521000742912292 idx 3909
dist 0.013520956039428711 idx 744
pyknn ib 0, in 3938, ip 4 dist 0.004649000242352486 idx 1555
dist 0.004649162292480469 idx 2866
pyknn ib 0, in 3938, ip 5 dist 0.004649000242352486 idx 2866
dist 0.004649162292480469 idx 1555
pyknn ib 0, in 3939, ip 0 dist 0.0 idx 1248
dist 0.0 idx 3939
pyknn ib 0, in 3939, ip 1 dist 0.0 idx 3939
dist 0.0 idx 1248
pyknn ib 0, in 3940, ip 2 dist 0.002899995306506753 idx 1231
dist 0.002899169921875 idx 533
pyknn ib 0, in 3940, ip 3 dist 0.002900000661611557 idx 533
dist 0.002899646759033203 idx 1231
pyknn ib 0, in 3940, ip 14 dist 0.00899400096386671 idx 1021
dist 0.008993148803710938 idx 1857
pyknn ib 0, in 3940, ip 15 dist 0.00899400096386671 idx 1857
dist 0.008993148803710938 idx 1021
pyknn ib 0, in 3942, ip 1 dist 0.0005389998550526798 idx 2767
dist 0.0005390048027038574 idx 3503
pyknn ib 0, in 3942, ip 2 dist 0.0005389998550526798 idx 3503
dist 0.0005390048027038574 idx 2767
pyknn ib 0, in 3943, ip 5 dist 0.002406000392511487 idx 1426
dist 0.002406001091003418 idx 3890
pyknn ib 0, in 3943, ip 6 dist 0.002406000392511487 idx 3890
dist 0.002406001091003418 idx 1426
pyknn ib 0, in 3943, ip 9 dist 0.004201000090688467 idx 2328
dist 0.004200935363769531 idx 3774
pyknn ib 0, in 3943, ip 10 dist 0.004201000090688467 idx 3774
dist 0.004200935363769531 idx 2328
pyknn ib 0, in 3950, ip 8 dist 0.003989000804722309 idx 1718
dist 0.003989040851593018 idx 2775
pyknn ib 0, in 3950, ip 9 dist 0.003989000804722309 idx 2775
dist 0.003989040851593018 idx 1718
pyknn ib 0, in 3951, ip 8 dist 0.004125999752432108 idx 2266
dist 0.004126548767089844 idx 3388
pyknn ib 0, in 3951, ip 9 dist 0.004125999752432108 idx 3388
dist 0.004126548767089844 idx 2266
pyknn ib 0, in 3952, ip 5 dist 0.003297999268397689 idx 744
dist 0.0032978057861328125 idx 3909
pyknn ib 0, in 3952, ip 6 dist 0.003297999268397689 idx 3909
dist 0.0032978057861328125 idx 744
pyknn ib 0, in 3952, ip 9 dist 0.005625006277114153 idx 1031
dist 0.005624532699584961 idx 2711
pyknn ib 0, in 3952, ip 11 dist 0.005625006277114153 idx 2711
dist 0.005624532699584961 idx 1031
pyknn ib 0, in 3953, ip 9 dist 0.009367999620735645 idx 542
dist 0.009368062019348145 idx 1590
pyknn ib 0, in 3953, ip 10 dist 0.009367999620735645 idx 1590
dist 0.009368062019348145 idx 542
pyknn ib 0, in 3953, ip 16 dist 0.014188993722200394 idx 798
dist 0.014188885688781738 idx 3397
pyknn ib 0, in 3953, ip 17 dist 0.014188996516168118 idx 3397
dist 0.014189004898071289 idx 798
pyknn ib 0, in 3955, ip 6 dist 0.0023769985418766737 idx 3325
dist 0.002377033233642578 idx 3729
pyknn ib 0, in 3955, ip 7 dist 0.0023769985418766737 idx 3729
dist 0.002377033233642578 idx 3325
pyknn ib 0, in 3959, ip 6 dist 0.0041410005651414394 idx 2780
dist 0.004140615463256836 idx 3638
pyknn ib 0, in 3959, ip 7 dist 0.0041410005651414394 idx 3638
dist 0.004140615463256836 idx 2780
pyknn ib 0, in 3962, ip 4 dist 0.001824999344535172 idx 768
dist 0.0018250048160552979 idx 2765
pyknn ib 0, in 3962, ip 5 dist 0.001824999344535172 idx 2765
dist 0.0018250048160552979 idx 768
pyknn ib 0, in 3962, ip 11 dist 0.0032780019100755453 idx 281
dist 0.003278002142906189 idx 2437
pyknn ib 0, in 3962, ip 12 dist 0.0032780019100755453 idx 2437
dist 0.003278002142906189 idx 281
pyknn ib 0, in 3971, ip 12 dist 0.007450995501130819 idx 1157
dist 0.007451057434082031 idx 2023
pyknn ib 0, in 3971, ip 13 dist 0.007450995501130819 idx 2023
dist 0.007451057434082031 idx 1157
pyknn ib 0, in 3976, ip 1 dist 0.0003900023002643138 idx 963
dist 0.00039005279541015625 idx 3703
pyknn ib 0, in 3976, ip 2 dist 0.0003900023002643138 idx 3703
dist 0.00039005279541015625 idx 963
pyknn ib 0, in 3977, ip 5 dist 0.003753000171855092 idx 767
dist 0.0037530213594436646 idx 1375
pyknn ib 0, in 3977, ip 6 dist 0.003753000171855092 idx 954
dist 0.0037530213594436646 idx 767
pyknn ib 0, in 3977, ip 7 dist 0.003753000171855092 idx 1375
dist 0.0037530213594436646 idx 954
pyknn ib 0, in 3982, ip 0 dist 0.0 idx 1018
dist 0.0 idx 3982
pyknn ib 0, in 3982, ip 1 dist 0.0 idx 3982
dist 0.0 idx 1018
pyknn ib 0, in 3987, ip 3 dist 0.0009689986472949386 idx 282
dist 0.0009691715240478516 idx 2323
pyknn ib 0, in 3987, ip 4 dist 0.0009689986472949386 idx 2323
dist 0.0009691715240478516 idx 282
pyknn ib 0, in 3988, ip 8 dist 0.004057002253830433 idx 2745
dist 0.004056692123413086 idx 3933
pyknn ib 0, in 3988, ip 9 dist 0.004057002253830433 idx 3933
dist 0.004056692123413086 idx 2745
pyknn ib 0, in 3989, ip 7 dist 0.007250993978232145 idx 1031
dist 0.007250308990478516 idx 1100
pyknn ib 0, in 3989, ip 8 dist 0.007250993978232145 idx 1100
dist 0.007250308990478516 idx 1031
pyknn ib 0, in 3989, ip 14 dist 0.010538001544773579 idx 744
dist 0.010537862777709961 idx 3909
pyknn ib 0, in 3989, ip 15 dist 0.010538001544773579 idx 3909
dist 0.010537862777709961 idx 744
pyknn ib 0, in 3996, ip 0 dist 0.0 idx 2854
dist 0.0 idx 3996
pyknn ib 0, in 3996, ip 1 dist 0.0 idx 3996
dist 0.0 idx 2854
pyknn ib 0, in 3999, ip 12 dist 0.010126007720828056 idx 1018
dist 0.010125875473022461 idx 3982
pyknn ib 0, in 3999, ip 13 dist 0.010126007720828056 idx 3982
dist 0.010125875473022461 idx 1018
pyknn ib 0, in 4005, ip 11 dist 0.004081999883055687 idx 193
dist 0.004082083702087402 idx 231
pyknn ib 0, in 4005, ip 12 dist 0.004081999883055687 idx 231
dist 0.004082083702087402 idx 193
pyknn ib 0, in 4007, ip 7 dist 0.0034770015627145767 idx 1800
dist 0.003475189208984375 idx 2901
pyknn ib 0, in 4007, ip 8 dist 0.0034770015627145767 idx 2901
dist 0.003475189208984375 idx 1800
pyknn ib 0, in 4011, ip 4 dist 0.0019210001919418573 idx 281
dist 0.0019209980964660645 idx 2437
pyknn ib 0, in 4011, ip 5 dist 0.0019210001919418573 idx 2437
dist 0.0019209980964660645 idx 281
pyknn ib 0, in 4011, ip 8 dist 0.0032220007851719856 idx 768
dist 0.0032220035791397095 idx 2765
pyknn ib 0, in 4011, ip 9 dist 0.0032220007851719856 idx 2765
dist 0.0032220035791397095 idx 768
pyknn ib 0, in 4015, ip 6 dist 0.0012199964839965105 idx 1925
dist 0.0012197494506835938 idx 2416
pyknn ib 0, in 4015, ip 7 dist 0.0012199964839965105 idx 2416
dist 0.0012197494506835938 idx 1925
pyknn ib 0, in 4017, ip 9 dist 0.0036890103947371244 idx 366
dist 0.003688812255859375 idx 3555
pyknn ib 0, in 4017, ip 10 dist 0.0036890103947371244 idx 3555
dist 0.003688812255859375 idx 366
pyknn ib 0, in 4019, ip 8 dist 0.003205000888556242 idx 1225
dist 0.0032033920288085938 idx 1637
pyknn ib 0, in 4019, ip 9 dist 0.003205000888556242 idx 1637
dist 0.0032033920288085938 idx 1225
pyknn ib 0, in 4026, ip 7 dist 0.002222006907686591 idx 595
dist 0.0022215843200683594 idx 2077
pyknn ib 0, in 4026, ip 8 dist 0.002222006907686591 idx 2077
dist 0.0022215843200683594 idx 595
pyknn ib 0, in 4032, ip 12 dist 0.007041999604552984 idx 792
dist 0.0070416927337646484 idx 974
pyknn ib 0, in 4032, ip 13 dist 0.007041999604552984 idx 974
dist 0.0070416927337646484 idx 792
pyknn ib 0, in 4038, ip 2 dist 0.0010400002356618643 idx 636
dist 0.0010400041937828064 idx 3362
pyknn ib 0, in 4038, ip 3 dist 0.0010400002356618643 idx 3362
dist 0.0010400041937828064 idx 636
pyknn ib 0, in 4047, ip 5 dist 0.0014030011370778084 idx 719
dist 0.0014026165008544922 idx 3707
pyknn ib 0, in 4047, ip 6 dist 0.0014030011370778084 idx 3707
dist 0.0014026165008544922 idx 719
pyknn ib 0, in 4050, ip 0 dist 0.0 idx 2397
dist 0.0 idx 4050
pyknn ib 0, in 4050, ip 1 dist 0.0 idx 4050
dist 0.0 idx 2397
pyknn ib 0, in 4058, ip 6 dist 0.0026439952198415995 idx 1225
dist 0.0026445388793945312 idx 1637
pyknn ib 0, in 4058, ip 7 dist 0.0026439952198415995 idx 1637
dist 0.0026445388793945312 idx 1225
pyknn ib 0, in 4058, ip 8 dist 0.0026499878149479628 idx 29
dist 0.0026502609252929688 idx 1466
pyknn ib 0, in 4058, ip 9 dist 0.0026499878149479628 idx 1466
dist 0.0026502609252929688 idx 29
pyknn ib 0, in 4058, ip 19 dist 0.009620998986065388 idx 1176
dist 0.00962066650390625 idx 2555
pyknn ib 0, in 4061, ip 10 dist 0.006673999130725861 idx 3462
dist 0.006673991680145264 idx 3465
pyknn ib 0, in 4061, ip 11 dist 0.006673999130725861 idx 3465
dist 0.006673991680145264 idx 3462
pyknn ib 0, in 4062, ip 3 dist 0.001948998891748488 idx 315
dist 0.0019488334655761719 idx 3219
pyknn ib 0, in 4062, ip 4 dist 0.001948998891748488 idx 3219
dist 0.0019488334655761719 idx 315
pyknn ib 0, in 4063, ip 1 dist 6.1000100686214864e-05 idx 1951
dist 6.097555160522461e-05 idx 2710
pyknn ib 0, in 4063, ip 2 dist 6.1000100686214864e-05 idx 2710
dist 6.097555160522461e-05 idx 1951
pyknn ib 0, in 4063, ip 9 dist 0.006617994979023933 idx 1161
dist 0.006617993116378784 idx 2641
pyknn ib 0, in 4063, ip 10 dist 0.006617994979023933 idx 2641
dist 0.006617993116378784 idx 1161
pyknn ib 0, in 4065, ip 5 dist 0.0018000002019107342 idx 2790
dist 0.001800537109375 idx 3722
pyknn ib 0, in 4065, ip 6 dist 0.0018000002019107342 idx 3722
dist 0.001800537109375 idx 2790
pyknn ib 0, in 4065, ip 8 dist 0.0027250039856880903 idx 1906
dist 0.0027251243591308594 idx 3635
pyknn ib 0, in 4065, ip 9 dist 0.0027250039856880903 idx 3635
dist 0.0027251243591308594 idx 1906
pyknn ib 0, in 4066, ip 0 dist 0.0 idx 3519
dist 0.0 idx 4066
pyknn ib 0, in 4066, ip 1 dist 0.0 idx 4066
dist 0.0 idx 3519
pyknn ib 0, in 4066, ip 2 dist 0.0004419961478561163 idx 829
dist 0.00044155120849609375 idx 3173
pyknn ib 0, in 4066, ip 3 dist 0.0004419961478561163 idx 3173
dist 0.00044155120849609375 idx 829
pyknn ib 0, in 4070, ip 9 dist 0.004648996517062187 idx 1248
dist 0.004649162292480469 idx 3939
pyknn ib 0, in 4070, ip 10 dist 0.004648996517062187 idx 3939
dist 0.004649162292480469 idx 1248
pyknn ib 0, in 4072, ip 0 dist 0.0 idx 2143
dist 0.0 idx 4072
pyknn ib 0, in 4072, ip 1 dist 0.0 idx 4072
dist 0.0 idx 2143
pyknn ib 0, in 4074, ip 1 dist 0.0005799981881864369 idx 1259
dist 0.0005799531936645508 idx 310
pyknn ib 0, in 4074, ip 2 dist 0.0005799999344162643 idx 310
dist 0.0005799531936645508 idx 1259
pyknn ib 0, in 4083, ip 3 dist 0.0021730000153183937 idx 965
dist 0.0021734237670898438 idx 1552
pyknn ib 0, in 4083, ip 4 dist 0.0021730000153183937 idx 1552
dist 0.0021734237670898438 idx 965
pyknn ib 0, in 4087, ip 2 dist 0.00047799956519156694 idx 991
dist 0.000476837158203125 idx 2895
pyknn ib 0, in 4087, ip 3 dist 0.00047799956519156694 idx 2895
dist 0.000476837158203125 idx 991
pyknn ib 0, in 4088, ip 15 dist 0.012462000362575054 idx 965
dist 0.012462615966796875 idx 1552
pyknn ib 0, in 4088, ip 16 dist 0.012462000362575054 idx 1552
dist 0.012462615966796875 idx 965
pyknn ib 0, in 4089, ip 0 dist 0.0 idx 3631
dist 0.0 idx 4089
pyknn ib 0, in 4089, ip 1 dist 0.0 idx 4089
dist 0.0 idx 3631
CHECK1: True
"""
# print(idx)
# speed test
# loader = DataLoader(
# dataset,
# batch_size=2,
# num_workers=8,
# pin_memory=True,
# shuffle=False
# )
# speed_test(0, loader)
# speed_test(1, loader)
# speed_test(2, loader)
# speed_test(3, loader)
| [
"[email protected]"
] | |
497e345288a9d28536fdbaf5f67a2102b003849e | 7652b3d21519771aa073c4f4a9d66f4f4d5db013 | /creating-project/project/project_app/urls.py | de7fffc7ea068fde214f0d92d79c134b3e945a32 | [] | no_license | pavkozlov/NETOLOGY-Django-homeworks | 9c64cde294590c8a85c5f89fd2190fe989720c84 | c331fa10906470c974802932e9d7d7526841f6f1 | refs/heads/master | 2022-11-27T22:36:12.537296 | 2019-07-17T16:19:11 | 2019-07-17T16:19:11 | 189,250,824 | 0 | 1 | null | 2022-11-22T03:14:37 | 2019-05-29T15:20:09 | Python | UTF-8 | Python | false | false | 282 | py | from django.urls import path
from .views import stations_view
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('stations/', stations_view, name='stations_view')
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
"[email protected]"
] | |
dcf60d425a75a5583dc890529bb1f1fffe42a262 | 428ee863e50fecfaedbbf64f3da95e9acb746ae4 | /src/tamsin/main.py | a9ea83fa097c8b5749742963afb74886d3b5d15a | [
"BSD-3-Clause",
"Unlicense",
"LicenseRef-scancode-public-domain"
] | permissive | catseye/Tamsin | ba53a0ee4ac882486a958e6ba7225f19eea763ef | 1c9e7ade052d734fa1753d612f2426ac067d5252 | refs/heads/master | 2021-01-17T09:21:25.202969 | 2016-03-31T15:00:14 | 2016-03-31T15:00:14 | 19,212,331 | 12 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,766 | py | # encoding: UTF-8
# Copyright (c)2014 Chris Pressey, Cat's Eye Technologies.
# Distributed under a BSD-style license; see LICENSE for more information.
import os
import subprocess
import sys
from tamsin.buffer import FileBuffer, StringBuffer
from tamsin.event import DebugEventListener
from tamsin.term import Atom
from tamsin.scanner import (
Scanner, EOF, UTF8ScannerEngine, TamsinScannerEngine
)
from tamsin.parser import Parser
from tamsin.interpreter import Interpreter
from tamsin.desugarer import Desugarer
from tamsin.analyzer import Analyzer
from tamsin.compiler import Compiler # to be replaced by...
from tamsin.codegen import CodeGen
from tamsin.backends.c import Emitter
def parse(filename):
with open(filename, 'r') as f:
scanner = Scanner(
FileBuffer(f, filename=filename),
#StringBuffer(f.read(), filename=filename),
engines=(TamsinScannerEngine(),)
)
parser = Parser(scanner)
ast = parser.grammar()
desugarer = Desugarer(ast)
ast = desugarer.desugar(ast)
return ast
def parse_and_check_args(args):
ast = None
for arg in args:
next_ast = parse(arg)
if ast is None:
ast = next_ast
else:
ast.incorporate(next_ast)
analyzer = Analyzer(ast)
ast = analyzer.analyze(ast)
return ast
def run(ast, listeners=None):
scanner = Scanner(
FileBuffer(sys.stdin, filename='<stdin>'),
#StringBuffer(sys.stdin.read(), filename='<stdin>'),
engines=(UTF8ScannerEngine(),),
listeners=listeners
)
interpreter = Interpreter(
ast, scanner, listeners=listeners
)
(succeeded, result) = interpreter.interpret_program(ast)
if not succeeded:
sys.stderr.write(str(result) + "\n")
sys.exit(1)
print str(result)
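# Command-line driver. The branches below (reading only the code that follows) accept a
# subcommand as the first argument: scan, parse, desugar, analyze, compile, codegen,
# doublecompile, loadngo; any other arguments are treated as Tamsin source files to
# parse, analyze and interpret against stdin. An optional leading '--debug' flag
# attaches a DebugEventListener.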
def main(args, tamsin_dir='.'):
listeners = []
if args[0] == '--debug':
listeners.append(DebugEventListener())
args = args[1:]
if args[0] == 'scan':
with open(args[1], 'r') as f:
scanner = Scanner(
FileBuffer(f, filename=args[1]),
engines=(TamsinScannerEngine(),),
listeners=listeners
)
tok = None
while tok is not EOF:
tok = scanner.scan()
if tok is not EOF:
print Atom(tok).repr()
print
elif args[0] == 'parse':
parser = Parser.for_file(args[1])
ast = parser.grammar()
print str(ast)
elif args[0] == 'desugar':
parser = Parser.for_file(args[1])
ast = parser.grammar()
desugarer = Desugarer(ast)
ast = desugarer.desugar(ast)
print str(ast)
elif args[0] == 'analyze':
ast = parse_and_check_args(args[1:])
print str(ast)
elif args[0] == 'compile':
ast = parse_and_check_args(args[1:])
compiler = Compiler(ast, sys.stdout)
compiler.compile()
elif args[0] == 'codegen':
ast = parse_and_check_args(args[1:])
generator = CodeGen(ast)
result = generator.generate()
emitter = Emitter(result, sys.stdout)
emitter.go()
elif args[0] == 'doublecompile':
# http://www.youtube.com/watch?v=6WxJECOFg8w
ast = parse_and_check_args(args[1:])
c_filename = 'foo.c'
exe_filename = './foo'
with open(c_filename, 'w') as f:
compiler = Compiler(ast, f)
compiler.compile()
c_src_dir = os.path.join(tamsin_dir, 'c_src')
command = ("gcc", "-g", "-I%s" % c_src_dir, "-L%s" % c_src_dir,
c_filename, "-o", exe_filename, "-ltamsin")
try:
subprocess.check_call(command)
exit_code = 0
except subprocess.CalledProcessError:
exit_code = 1
#subprocess.call(('rm', '-f', c_filename))
sys.exit(exit_code)
elif args[0] == 'loadngo':
ast = parse_and_check_args(args[1:])
c_filename = 'foo.c'
exe_filename = './foo'
with open(c_filename, 'w') as f:
compiler = Compiler(ast, f)
compiler.compile()
c_src_dir = os.path.join(tamsin_dir, 'c_src')
command = ("gcc", "-g", "-I%s" % c_src_dir, "-L%s" % c_src_dir,
c_filename, "-o", exe_filename, "-ltamsin")
try:
subprocess.check_call(command)
subprocess.check_call((exe_filename,))
exit_code = 0
except subprocess.CalledProcessError:
exit_code = 1
subprocess.call(('rm', '-f', c_filename, exe_filename))
sys.exit(exit_code)
else:
ast = parse_and_check_args(args)
run(ast, listeners=listeners)
| [
"[email protected]"
] | |
5b3e342ade56e396a3dfad0237f974e5082e1bc9 | 114b61513733083555924fc8ab347335e10471ae | /stackone/stackone/viewModel/MultipartPostHandler.py | df8c4aa526fbfe18ae6a303322624a6199dcffe3 | [] | no_license | smarkm/ovm | 6e3bea19816affdf919cbd0aa81688e6c56e7565 | cd30ad5926f933e6723805d380e57c638ee46bac | refs/heads/master | 2021-01-21T04:04:28.637901 | 2015-08-31T03:05:03 | 2015-08-31T03:05:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,033 | py | #!/usr/bin/python
####
# 02/2006 Will Holcomb <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# 7/26/07 Slightly modified by Brian Schneider
# in order to support unicode files ( multipart_encode function )
"""
Usage:
Enables the use of multipart/form-data for posting forms
Inspirations:
Upload files in python:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
urllib2_file:
Fabien Seisen: <[email protected]>
Example:
import MultipartPostHandler, urllib2, cookielib
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
MultipartPostHandler.MultipartPostHandler)
params = { "username" : "bob", "password" : "riviera",
"file" : open("filename", "rb") }
opener.open("http://wwww.bobsite.com/upload/", params)
Further Example:
The main function of this file is a sample which downloads a page and
then uploads it to the W3C validator.
"""
import urllib
import urllib2
import mimetools, mimetypes
import os, stat
from cStringIO import StringIO
import sys
class Callable:
def __init__(self, anycallable):
self.__call__ = anycallable
# Controls how sequences are encoded. If true, elements may be given multiple values by
# assigning a sequence.
doseq = 1
class MultipartPostHandler(urllib2.BaseHandler):
handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
def http_request(self, request):
data = request.get_data()
if data is not None and type(data) != str:
v_files = []
v_vars = []
try:
for(key, value) in data.items():
if type(value) == file:
v_files.append((key, value))
else:
v_vars.append((key, value))
except TypeError:
systype, value, traceback = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", traceback
if len(v_files) == 0:
data = urllib.urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
contenttype = 'multipart/form-data; boundary=%s' % boundary
if(request.has_header('Content-Type')
and request.get_header('Content-Type').find('multipart/form-data') != 0):
print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
return request
def multipart_encode(vars, files, boundary = None, buf = None):
if boundary is None:
boundary = mimetools.choose_boundary()
if buf is None:
buf = StringIO()
for(key, value) in vars:
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"' % key)
buf.write('\r\n\r\n' + value + '\r\n')
for(key, fd) in files:
file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
filename = fd.name.split('/')[-1]
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf.write('--%s\r\n' % boundary)
buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
buf.write('Content-Type: %s\r\n' % contenttype)
# buffer += 'Content-Length: %s\r\n' % file_size
fd.seek(0)
buf.write('\r\n' + fd.read() + '\r\n')
buf.write('--' + boundary + '--\r\n\r\n')
buf = buf.getvalue()
return boundary, buf
multipart_encode = Callable(multipart_encode)
https_request = http_request
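# Note: the Callable wrapper lets multipart_encode be called as a plain helper (it takes
# no self), and aliasing https_request to http_request applies the same multipart
# rewriting to HTTPS requests as well.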
def main():
import tempfile, sys
validatorURL = "http://localhost"
opener = urllib2.build_opener(MultipartPostHandler)
def validateFile(url):
temp = tempfile.mkstemp(suffix=".html")
os.write(temp[0], opener.open(url).read())
params = { "ss" : "0", # show source
"doctype" : "Inline",
"uploaded_file" : open(temp[1], "rb") }
print opener.open(validatorURL, params).read()
os.remove(temp[1])
if len(sys.argv[1:]) > 0:
for arg in sys.argv[1:]:
validateFile(arg)
else:
validateFile("http://www.google.com")
if __name__=="__main__":
main() | [
"[email protected]"
] | |
2332d5c21dfd47be0eab2e6439fbacef32c5aeb3 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startPyquil199.py | b8ba1c63c355402f38a256e26772b3f9cb67ca75 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,775 | py | # qubit number=2
# total number=33
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit() -> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += RX(-0.09738937226128368,2) # number=2
prog += H(1) # number=30
prog += CZ(2,1) # number=31
prog += H(1) # number=32
prog += H(1) # number=3
prog += CNOT(1,0) # number=4
prog += Y(1) # number=15
prog += CNOT(1,0) # number=10
prog += H(1) # number=19
prog += CZ(0,1) # number=20
prog += RX(-0.6000441968356504,1) # number=28
prog += H(1) # number=21
prog += CNOT(0,1) # number=22
prog += X(1) # number=23
prog += H(2) # number=29
prog += CNOT(0,1) # number=24
prog += CNOT(0,1) # number=18
prog += Z(1) # number=11
prog += CNOT(1,0) # number=12
prog += CNOT(2,1) # number=26
prog += Y(1) # number=14
prog += CNOT(1,0) # number=5
prog += X(1) # number=6
prog += Z(1) # number=8
prog += X(1) # number=7
prog += RX(-2.42845112122491,1) # number=25
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
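# When executed directly, the intent (inferred from the code below) is: build the program,
# run it on a QVM, and write the aggregated bitstring counts to ../data/startPyquil199.csv.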
if __name__ == '__main__':
prog = make_circuit()
    qvm = get_qc('3q-qvm')  # the circuit touches qubits 0-2, so a 1-qubit QVM is too small to run it
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil199.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| [
"[email protected]"
] | |
4fe20784e210003df990201f226915a4f8702cd0 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2021_10_01_dataplanepreview/operations/_code_containers_operations.py | 39304e6ffb704920562356f3609fa0b63f3eb4b9 | [
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"PSF-2.0",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"Python-2.0",
"MPL-2.0",
"LicenseRef-scancode-other-copyleft",
"HPND",
"ODbL-1.0",
"GPL-3.0-only",
"ZPL-2.1",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 19,903 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
# fmt: off
def build_list_request(
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
skiptoken = kwargs.pop('skiptoken', None) # type: Optional[str]
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if skiptoken is not None:
query_parameters['$skiptoken'] = _SERIALIZER.query("skiptoken", skiptoken, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_request(
name, # type: str
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}')
path_format_arguments = {
"name": _SERIALIZER.url("name", name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_request(
name, # type: str
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}')
path_format_arguments = {
"name": _SERIALIZER.url("name", name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_or_update_request(
name, # type: str
subscription_id, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2021-10-01-dataplanepreview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}')
path_format_arguments = {
"name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"registryName": _SERIALIZER.url("registry_name", registry_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
# fmt: on
class CodeContainersOperations(object):
"""CodeContainersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.machinelearningservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name, # type: str
registry_name, # type: str
skiptoken=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.CodeContainerResourceArmPaginatedResult"]
"""List containers.
List containers.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:param skiptoken: Continuation token for pagination.
:type skiptoken: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the
result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerResourceArmPaginatedResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
skiptoken=skiptoken,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
skiptoken=skiptoken,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes'} # type: ignore
@distributed_trace
def delete(
self,
name, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete container.
Delete container.
:param name: Container name.
:type name: str
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
name=name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}'} # type: ignore
@distributed_trace
def get(
self,
name, # type: str
resource_group_name, # type: str
registry_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.CodeContainerData"
"""Get container.
Get container.
:param name: Container name.
:type name: str
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CodeContainerData, or the result of cls(response)
:rtype: ~azure.mgmt.machinelearningservices.models.CodeContainerData
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerData"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
name=name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CodeContainerData', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}'} # type: ignore
@distributed_trace
def create_or_update(
self,
name, # type: str
resource_group_name, # type: str
registry_name, # type: str
body, # type: "_models.CodeContainerData"
**kwargs # type: Any
):
# type: (...) -> "_models.CodeContainerData"
"""Create or update container.
Create or update container.
:param name: Container name.
:type name: str
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param registry_name: Name of Azure Machine Learning registry.
:type registry_name: str
:param body: Container entity to create or update.
:type body: ~azure.mgmt.machinelearningservices.models.CodeContainerData
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CodeContainerData, or the result of cls(response)
:rtype: ~azure.mgmt.machinelearningservices.models.CodeContainerData
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerData"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(body, 'CodeContainerData')
request = build_create_or_update_request(
name=name,
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
registry_name=registry_name,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CodeContainerData', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{name}'} # type: ignore
| [
"[email protected]"
] | |
f427572dcc294f2f278b1dc156e4b0e0c130a115 | a4c5a56ed6d3c4299213ff8fd0e4f37719e063ff | /tests/test_override.py | ec0ad1f2bfab03914d5df5c21408b1e52fcbb993 | [
"BSD-3-Clause"
] | permissive | pyecore/motra | 76add183cf2777bef5916b88e30dd2b3eef8cb06 | c0b3e8e54b46572c3bc10bb2b719102e267c371b | refs/heads/main | 2023-09-02T12:44:37.688979 | 2021-10-27T05:53:01 | 2021-10-27T05:53:01 | 395,357,398 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | import pytest
import inspect
import pyecore.ecore as ecore
from motra import m2m
@pytest.fixture(scope='module')
def t1():
# Define a transformation meta-data
t = m2m.Transformation('t1', inputs=['in_model'], outputs=['in_model'])
@t.mapping(when=lambda self: self.name.startswith('Egg'))
def r1(self: ecore.EClass):
self.name = self.name + '_egg'
@t.mapping(when=lambda self: self.name.startswith('Spam'))
def r1(self: ecore.EClass):
self.name = self.name + '_spam'
return t, r1
def test__override_with_when(t1):
t, r1 = t1
# Fake main for the mapping execution
result1 = None
result2 = None
def fake_main(in_model):
nonlocal result1
nonlocal result2
result1 = r1(ecore.EClass('Spam'))
result2 = r1(ecore.EClass('Egg'))
t._main = fake_main
t.run(in_model=ecore.EPackage())
assert result1.name == "Spam_spam"
assert result2.name == "Egg_egg"
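    # Both calls go through the single name r1 bound in the fixture; the test expects motra
    # to dispatch between the two registered mappings via their 'when' guards, so the
    # 'Spam'-named class gets the '_spam' suffix and the 'Egg'-named class gets '_egg'.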
| [
"[email protected]"
] | |
5dcc386e96726fe2001888a8096c2940980aae92 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/aphidivor.py | 488d7159a51c32e35a6a3d61bdf217023f68d3e4 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 63 | py | ii = [('RennJIT.py', 2), ('WestJIT2.py', 3), ('WestJIT.py', 2)] | [
"[email protected]"
] | |
cacff14e0b1b2678353ba2d462a5de00a04555a7 | 55ab4d0aecc49078e7a0f47a05457c9602327ed7 | /egs/madcat_arabic/v1/local/create_mask_from_page_image.py | b4147dcd3851a52f5a1a9319a6986519f66ac00b | [
"Apache-2.0"
] | permissive | aarora8/waldo | 56a171f0b2048d980023173ab38f5248db936eeb | ad08a05fa9e9890ad986f11d4bca3c773b228d87 | refs/heads/master | 2020-03-14T04:43:47.513263 | 2018-06-07T05:09:47 | 2018-06-07T05:09:47 | 131,447,076 | 0 | 0 | Apache-2.0 | 2018-04-28T22:00:19 | 2018-04-28T22:00:19 | null | UTF-8 | Python | false | false | 5,535 | py | #!/usr/bin/env python3
# Copyright 2018 Johns Hopkins University (author: Ashish Arora)
# Apache 2.0
""" This module will be used for creating text localization mask on page image.
Given the word segmentation (bounding box around a word) for every word, it will
extract line segmentation. To extract line segmentation, it will take word bounding
boxes of a line as input, will create a minimum area bounding box that will contain
all corner points of word bounding boxes. The obtained bounding box will not
necessarily be vertically or horizontally aligned.
"""
import xml.dom.minidom as minidom
from waldo.data_manipulation import *
from waldo.core_config import CoreConfig
from waldo.mar_utils import compute_hull
from scipy.spatial import ConvexHull
from waldo.data_transformation import scale_down_image_with_objects, \
make_square_image_with_padding
def get_mask_from_page_image(madcat_file_path, image_file_name, max_size):
""" Given a page image, extracts the page image mask from it.
Input
-----
image_file_name (string): complete path and name of the page image.
madcat_file_path (string): complete path and name of the madcat xml file
corresponding to the page image.
"""
objects = _get_bounding_box(madcat_file_path)
img = Image.open(image_file_name).convert("RGB")
im_arr = np.array(img)
config = CoreConfig()
config.num_colors = 3
image_with_objects = {
'img': im_arr,
'objects': objects
}
im_height = im_arr.shape[0]
im_width = im_arr.shape[1]
validated_objects = []
for original_object in image_with_objects['objects']:
ordered_polygon_points = original_object['polygon']
object = {}
resized_pp = []
for point in ordered_polygon_points:
new_point = _validate_and_update_point(point, im_width, im_height)
resized_pp.append(new_point)
object['polygon'] = resized_pp
validated_objects.append(object)
validated_image_with_objects = {
'img': im_arr,
'objects': validated_objects
}
scaled_image_with_objects = scale_down_image_with_objects(validated_image_with_objects, config,
max_size)
img_padded = make_square_image_with_padding(scaled_image_with_objects['img'], 3, 255)
padded_image_with_objects = {
'img': img_padded,
'objects': scaled_image_with_objects['objects']
}
y = convert_to_mask(padded_image_with_objects, config)
return y
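# Summary of the steps above: parse per-zone word boxes from the MADCAT XML, fit a
# minimum-area box per line, clamp its corners to the image, scale down to max_size,
# pad to a square, then rasterize the polygons into a mask with convert_to_mask.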
def _get_bounding_box(madcat_file_path):
""" Given word boxes of each line, return bounding box for each
line in sorted order
Input
-----
image_file_name (string): complete path and name of the page image.
madcat_file_path (string): complete path and name of the madcat xml file
corresponding to the page image.
"""
objects = []
doc = minidom.parse(madcat_file_path)
zone = doc.getElementsByTagName('zone')
for node in zone:
object = {}
token_image = node.getElementsByTagName('token-image')
mbb_input = []
for token_node in token_image:
word_point = token_node.getElementsByTagName('point')
for word_node in word_point:
word_coordinate = (int(word_node.getAttribute('x')), int(word_node.getAttribute('y')))
mbb_input.append(word_coordinate)
points = get_minimum_bounding_box(mbb_input)
points = tuple(points)
points_ordered = [points[index] for index in ConvexHull(points).vertices]
object['polygon'] = points_ordered
objects.append(object)
return objects
def _validate_and_update_point(pt0, im_width, im_height, pt1=(0, 0)):
new_point = pt0
if pt0[0] < 0:
new_point = _get_pointx_inside_origin(pt0, pt1)
if pt0[0] > im_width:
new_point = _get_pointx_inside_width(pt0, pt1, im_width)
if pt0[1] < 0:
new_point = _get_pointy_inside_origin(pt0, pt1)
if pt0[1] > im_height:
new_point = _get_pointy_inside_height(pt0, pt1, im_height)
return new_point
def _get_pointx_inside_origin(pt0, pt1):
""" Given a point pt0, return an updated point that is
inside orgin. It finds line equation and uses it to
get updated point x value inside origin
Returns
-------
(float, float): updated point
"""
return (0, pt0[1])
# TODO
def _get_pointx_inside_width(pt0, pt1, im_width):
""" Given a point pt0, return an updated point that is
inside image width. It finds line equation and uses it to
get updated point x value inside image width
Returns
-------
(float, float): updated point
"""
return (im_width, pt0[1])
# TODO
def _get_pointy_inside_origin(pt0, pt1):
""" Given a point pt0, return an updated point that is
inside orgin. It finds line equation and uses it to
get updated point y value inside origin
Returns
-------
(float, float): updated point
"""
return (pt0[0], 0)
# TODO
def _get_pointy_inside_height(pt0, pt1, im_height):
""" Given a point pt0, return an updated point that is
inside image height. It finds line equation and uses it to
get updated point y value inside image height
Returns
-------
(float, float): updated point
"""
return (pt0[0], im_height)
# TODO
| [
"[email protected]"
] | |
358e0825a1854b062e87d35611e52cd3c239266d | 21540ab033e180a3d94b270b7faffac7fe4af68f | /wordshop2/Project_01_10_page62-63/Project_05.py | e45ba58fc5058ea1e533a49592edf98b0103a792 | [] | no_license | tuan102081/wordshop1.2.3.5 | eaa344bdb04f565d1354b9476b4d4ecafc5cc7f3 | 70e75b56f48a2e5b1622d956f33831f80e64d368 | refs/heads/master | 2023-07-14T23:26:31.089484 | 2021-08-30T18:53:24 | 2021-08-30T18:53:24 | 401,411,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | """
Author: Nguyen Duy Tuan
Date: 29/08/2021
Program: project_05_page_62.py
Problem:
An object’s momentum is its mass multiplied by its velocity. Write a program
that accepts an object’s mass (in kilograms) and velocity (in meters per second) as
inputs and then outputs its momentum.
Solution:
Display:
Enter of mass(kg): 51
Enter of velocity(m/s): 60
Object’s momentum = 3060.0 (kgm/s)
"""
mass = float(input("Enter of mass(kg): "))
V = float(input("Enter of velocity(m/s): "))
M = mass * V
print("\nObject’s momentum = " + str(round(M, 2)) + " (kgm/s)")
| [
"[email protected]"
] | |
1c0c27da1a5ffd2ada1e238f96d4179c01990331 | 2cb9d1bf6e674049dd03b04e5714d12a268425a4 | /sariq_dev/darslar/10_dars_uy_ishi_5_.py | eb5ae2c36841e0c7e64652eb8400aa8e43b095c9 | [] | no_license | RuzimovJavlonbek/anvar.nazrullayevning-mohirdev.uz-platformasidagi-dasturlash.asoslari.python-kursidagi-amaliyotlar | 02885608c40e9dd9ae0d13013619ef787240bcf6 | f999be39d0e3c7edb990f9c3c29edbeeb0e19c2d | refs/heads/main | 2023-07-02T20:16:56.146956 | 2021-08-06T03:12:49 | 2021-08-06T03:12:49 | 390,466,668 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | a = float(input("a="))
if a < 0:
    print(" Manfiy son")   # Uzbek: "Negative number"
else:
    print(" Musbat son")   # Uzbek: "Positive number" (zero also falls into this branch)
input()
| [
"[email protected]"
] | |
6fd7363389f6b9bfc67823959049f44b95b6f19e | 83aa59c568d0b84eee76226776da7f1d49384f10 | /tccli/services/ape/ape_client.py | a2fa953563d4f64679515e3129265264be734748 | [
"Apache-2.0"
] | permissive | jjandnn/tencentcloud-cli | c492f5f19cacaee85fc2e7b0de0d8aa8ef5dd009 | ac3c697fb06acdd2e3e711b11069d33754889340 | refs/heads/master | 2023-05-05T00:27:54.406654 | 2021-05-27T00:40:52 | 2021-05-27T00:40:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,552 | py | # -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.ape.v20200513 import ape_client as ape_client_v20200513
from tencentcloud.ape.v20200513 import models as models_v20200513
def doDescribeAuthUsers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAuthUsersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAuthUsers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDownloadInfos(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDownloadInfosRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDownloadInfos(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchDescribeOrderCertificate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BatchDescribeOrderCertificateRequest()
model.from_json_string(json.dumps(args))
rsp = client.BatchDescribeOrderCertificate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImages(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeImagesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeImages(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateOrderAndDownloads(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateOrderAndDownloadsRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateOrderAndDownloads(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImage(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeImageRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeImage(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateOrderAndPay(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateOrderAndPayRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateOrderAndPay(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchDescribeOrderImage(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ApeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BatchDescribeOrderImageRequest()
model.from_json_string(json.dumps(args))
rsp = client.BatchDescribeOrderImage(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20200513": ape_client_v20200513,
}
MODELS_MAP = {
"v20200513": models_v20200513,
}
ACTION_MAP = {
"DescribeAuthUsers": doDescribeAuthUsers,
"DescribeDownloadInfos": doDescribeDownloadInfos,
"BatchDescribeOrderCertificate": doBatchDescribeOrderCertificate,
"DescribeImages": doDescribeImages,
"CreateOrderAndDownloads": doCreateOrderAndDownloads,
"DescribeImage": doDescribeImage,
"CreateOrderAndPay": doCreateOrderAndPay,
"BatchDescribeOrderImage": doBatchDescribeOrderImage,
}
AVAILABLE_VERSION_LIST = [
"v20200513",
]
def action_caller():
return ACTION_MAP
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
raise ConfigurationError(
"file: %s or %s is not json format"
% (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["ape"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["ape"][OptionsDefine.Endpoint]
except Exception as err:
raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
return g_param
| [
"[email protected]"
] | |
ec662f925b59e24fde024e4243aba389f33e0432 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/pa3/benchmarks/sieve-14.py | 52bce3a36228b57f2739edf857ed492498c0ab0c | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,587 | py | # A resizable list of integers
class Vector(object):
    items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
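# Note: each pass keeps only values not divisible by the current survivor, so vrange(2, n)
# is reduced to the primes below n by repeated scans (trial division) rather than by a
# true Sieve of Eratosthenes -- hence the "(not really)" remark above.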
# Input parameter
n:int = 50
# Data
v:Vector = None
i:int = 0
# Crunch
v = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
| [
"[email protected]"
] | |
d003323768ea7f4519c90921a33b9eb198732852 | 69033ac834a34f10df535f102197d3af05e5ee69 | /cmstack/codegen/tvmgen/tvm_translation.py | 94d23726e2acc6b4c753f6d6f9920df4d7801b75 | [
"Apache-2.0"
] | permissive | he-actlab/cdstack | 126c3699074bf6ef30f9f9246704069d27e9e614 | 38f605cfa299bf97b5875a19f9fd811a2671d56f | refs/heads/master | 2023-04-10T10:42:10.199207 | 2019-10-03T02:12:49 | 2019-10-03T02:12:49 | 354,713,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,582 | py | import tvm
import numpy as np
from hdfg import hdfgutils
from hdfg import load_store
from tvm.contrib import graph_runtime
from tvm.relay import op as _op
from hdfg.passes.flatten import flatten_graph, is_literal, is_number
from hdfg.passes.node_mapping import map_nodes
from codegen.codegen_utils import CMLANG_CAST_MAP
from tvm import relay
from tvm.relay.testing.init import Xavier
import codegen as c
from tvm.relay.testing import layers, init
from hdfg.hdfg_pb2 import Component, Program
from hdfg.visualize import *
import inspect
import json
import importlib
def benchmark_execution(mod,
params,
measure=True,
data_shape=(1, 3, 224, 224),
out_shape=(1, 1000),
dtype='float32'):
def get_tvm_output(mod, data, params, target, ctx, dtype='float32'):
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod, target, params=params)
m = graph_runtime.create(graph, lib, ctx)
# set inputs
m.set_input("data", data)
m.set_input(**params)
m.run()
out = m.get_output(0, tvm.nd.empty(out_shape, dtype))
if measure:
print("Evaluate graph runtime inference time cost...")
ftimer = m.module.time_evaluator("run", ctx, number=1, repeat=20)
# Measure in millisecond.
prof_res = np.array(ftimer().results) *1000
print("Mean inference time (std dev): %.2f ms (%.2f ms)" %
(np.mean(prof_res), np.std(prof_res)))
return out.asnumpy()
# random input
data = np.random.uniform(size=data_shape).astype(dtype)
target = "llvm"
ctx = tvm.cpu(0)
tvm_out = get_tvm_output(mod, tvm.nd.array(data.astype(dtype)), params,
target, ctx, dtype)
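# (benchmark_execution above builds the relay graph with opt_level=3, runs it once on
# random input, and with measure=True reports mean/std runtime over 20 repeats in ms.)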
class TvmTranslation(object):
def __init__(self, onnx_proto, run_async=False):
self.input_proto = onnx_proto
self.output_dir, self.output_file = os.path.split(self.input_proto)
self.proto_name = self.output_file.split('.')[0]
self.program = load_store.load_program(self.input_proto)
self.graph = self.program.graph
self.templates = self.program.templates
self.components = {}
self.includes = []
self.functions = []
self.structs = []
self.signature_map = {}
self.initializer = None
self.header = []
self.exec = []
self.run_async = run_async
self.load_config()
self.tvm_passes()
self.create_tvm_graph()
self.execute_graph()
def load_config(self):
config_path = os.path.dirname(os.path.realpath(__file__)) + "/tvm_config.json"
with open(config_path, 'r') as config_file:
config_data = config_file.read()
self.translator_config = json.loads(config_data)
def execute_graph(self):
mod, params = init.create_workload(self.tvm_func)
benchmark_execution(mod, params, data_shape=(1, 3, 416, 416), out_shape=(1, 125, 14, 14))
# benchmark_execution(mod, params)
# print(f"Module: {mod}")
# #
# # for p in params.keys():
# # print(f"Key: {p}, shape: {params[p].shape}")
def tvm_passes(self):
self.tvm_graph = Component(name="tvm_graph_" + str(self.proto_name))
edge_node_ids = {'edges': {},
'nodes': {}}
self.tvm_graph.statement_graphs.extend([])
map_nodes(self.graph, self.templates, [], self.translator_config)
#
flatten_graph(self.tvm_graph, self.graph, self.templates, '', edge_node_ids, {})
flattened_graph_attr = hdfgutils.make_attribute('flattened', self.tvm_graph)
self.program.attributes['flattened_graph'].CopyFrom(flattened_graph_attr)
def create_tvm_graph(self):
self.graph_variables = {}
output_id = None
assert len(self.tvm_graph.input) == 1
input_name = self.tvm_graph.input[0]
input_shape = self.get_arg_attribute("shape", input_name)
self.graph_variables[input_name] = self.get_func('tvm.relay.var')(input_name, shape=input_shape)
for n in self.tvm_graph.sub_graph:
op_cat = hdfgutils.get_attribute_value(n.attributes['op_cat'])
if op_cat == 'mapped_node':
op_context = str(n.name).rsplit("/", 1)
if len(op_context) > 1 and op_context[0] != 'main':
scope = op_context[0] + '/'
else:
scope = ''
op_config = self.translator_config['ops'][n.op_type]
op_func = self.get_func(op_config['op_name'])
args, kwargs, output_id = self.create_op_args(n.op_type, n, self.templates[n.op_type], scope)
if len(output_id) == 1:
self.graph_variables[output_id[0]] = op_func(*args, **kwargs)
if output_id[0] in list(self.tvm_graph.edge_info):
iedge = self.tvm_graph.edge_info[output_id[0]]
if iedge.name != output_id[0]:
self.graph_variables[str(iedge.name)] = self.graph_variables[output_id[0]]
else:
temp = op_func(*args, **kwargs)
if not hasattr(temp, '__len__'):
logging.error(f"Size mismatch between output of {n.op_type} which has length 1 output"
f"Supplied config outputs: {output_id}")
exit(1)
elif len(temp) != len(output_id):
logging.error(f"Size mismatch between output of {n.op_type} which has length {len(temp)} output"
f"Supplied config outputs: {output_id}")
exit(1)
for i in range(len(temp)):
self.graph_variables[output_id[i]] = temp[i]
if output_id[i] in list(self.tvm_graph.edge_info):
iedge = self.tvm_graph.edge_info[output_id[i]]
if iedge.name != output_id[i]:
self.graph_variables[str(iedge.name)] = self.graph_variables[output_id[i]]
if not output_id:
logging.error(f"No nodes mapped for graph")
exit(1)
elif len(output_id) != 1:
logging.error(f"More than one output supplied for graph: {output_id}")
exit(1)
self.tvm_func = relay.Function(relay.analysis.free_vars(self.graph_variables[output_id[0]]), self.graph_variables[output_id[0]])
def create_op_args(self, op_name, node, node_signature, scope):
op_config = self.translator_config['ops'][op_name]
instance_args = hdfgutils.get_attribute_value(node.attributes['ordered_args'])
signature_args = hdfgutils.get_attribute_value(node_signature.attributes['ordered_args'])
default_map = self.create_default_map(self.templates[op_name])
for i in range(len(instance_args)):
instance_args[i] = scope + instance_args[i]
args = self.get_ordered_args(op_config, signature_args, instance_args, default_map, op_name, scope)
kwargs = self.get_kwargs(op_config, signature_args, instance_args,default_map, op_name, scope)
output_keys = self.get_output_keys(op_config, signature_args, instance_args, op_name, scope)
return args, kwargs, output_keys
def get_ordered_args(self, op_config, signature_args, instance_args,default_map, op, scope):
args = []
for a in op_config['positional_arguments']:
if a not in op_config['arg_map'].keys():
logging.error(f"{a} not found in argument map for op {op}. Please check config")
exit(1)
arg = op_config['arg_map'][a]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
if default_map[signature_args[idx]] is None:
logging.error(f"Error! No default argument for unspecified parameter {arg} in {op}, name: {signature_args[idx]}")
exit(1)
if op_config['arg_map'][a]['init_func']:
var = self.init_var(op_config['arg_map'][a], default_map[signature_args[idx]], literal=True)
elif op_config['arg_map'][a]['type'] in CMLANG_CAST_MAP.keys():
var = default_map[signature_args[idx]]
else:
logging.error(f"Unable to resolve argument {default_map[signature_args[idx]]} for keyword {a}={signature_args[arg]}")
var = None
exit(1)
else:
instance_arg = instance_args[idx]
if instance_arg in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_arg]
ename = edge.name
else:
ename = instance_arg
if ename in self.graph_variables.keys() and instance_arg not in self.graph_variables.keys():
var = self.graph_variables[ename]
elif instance_arg not in self.graph_variables.keys():
if op_config['arg_map'][a]['init_func']:
var = self.init_var(op_config['arg_map'][a], instance_arg)
if op_config['arg_map'][a]['arg_type'] != 'parameter':
self.graph_variables[instance_arg] = var
elif op_config['arg_map'][a]['type'] in CMLANG_CAST_MAP.keys():
var = CMLANG_CAST_MAP[op_config['arg_map'][a]['type']](instance_arg)
else:
logging.error(f"Unable to resolve argument {instance_arg} for keyword {a}={signature_args[arg]}")
var = None
exit(1)
else:
var = self.graph_variables[instance_arg]
args.append(var)
return args
def get_kwargs(self, op_config, signature_args, instance_args,default_map, op, scope):
kwargs = {}
for k in op_config['keyword_arguments'].keys():
if op_config['keyword_arguments'][k] not in op_config['arg_map'].keys():
logging.error(f"Key id {k} with value {op_config['keyword_arguments'][k]} not found in argument map for op {op}."
f" Please check config")
exit(1)
id = op_config['keyword_arguments'][k]
arg = op_config['arg_map'][id]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
if default_map[signature_args[idx]] is None:
logging.error(f"Error! No default argument for unspecified parameter {arg} in {op}, name: {signature_args[idx]}")
exit(1)
if op_config['arg_map'][id]['init_func']:
var = self.init_var(op_config['arg_map'][id], default_map[signature_args[idx]], literal=True)
elif op_config['arg_map'][id]['type'] in CMLANG_CAST_MAP.keys():
var = default_map[signature_args[idx]]
else:
logging.error(f"Unable to resolve argument {default_map[signature_args[idx]]} for keyword {id}={signature_args[arg]}")
var = None
exit(1)
else:
instance_arg = instance_args[idx]
if instance_arg in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_arg]
ename = edge.name
else:
ename = instance_arg
if ename in self.graph_variables.keys() and instance_arg not in self.graph_variables.keys():
var = self.graph_variables[ename]
elif instance_arg not in self.graph_variables.keys():
if op_config['arg_map'][id]['init_func']:
var = self.init_var(op_config['arg_map'][id], instance_arg)
if op_config['arg_map'][id]['arg_type'] != 'parameter':
self.graph_variables[instance_arg] = var
elif op_config['arg_map'][id]['type'] in CMLANG_CAST_MAP.keys():
var = CMLANG_CAST_MAP[op_config['arg_map'][id]['type']](instance_arg)
else:
logging.error(f"Unable to resolve argument {instance_arg} for keyword {id}={signature_args[arg]}")
exit(1)
else:
var = self.graph_variables[instance_arg]
kwargs[k] = var
return kwargs
def get_output_keys(self, op_config, signature_args, instance_args, op, scope):
output_keys = []
for o in op_config['op_output']:
if o not in op_config['arg_map'].keys():
logging.error(f"Key id {o} with value {op_config['keyword_arguments'][o]} not found in argument map for op {op}."
f" Please check config")
exit(1)
arg = op_config['arg_map'][o]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
logging.error(f"Error! Cannot assign output {o} to unspecified parameter {signature_args[idx]}")
exit(1)
output_keys.append(instance_args[idx])
return output_keys
def create_default_map(self, template):
default_map = {}
ordered_args = hdfgutils.get_attribute_value(template.attributes['ordered_args'])
for a in ordered_args:
if a not in list(template.edge_info):
logging.error(f"Argument {a} not found in edges for {template.op_type}")
edge = template.edge_info[a]
if 'default' in list(edge.attributes):
dtype = hdfgutils.get_attribute_value(edge.attributes['type'])
default_map[a] = CMLANG_CAST_MAP[dtype](hdfgutils.get_attribute_value(edge.attributes['default']))
else:
default_map[a] = None
return default_map
def init_var(self, var, instance_name, literal=False):
args = []
kwargs = {}
arg_type = var['arg_type']
if isinstance(instance_name, str):
id = instance_name.rsplit('/', 1)
if len(id) > 1:
id = id[-1]
else:
id = id[0]
else:
id = str(instance_name).rsplit('/', 1)
if len(id) > 1:
id = id[-1]
else:
id = id[0]
if arg_type == 'parameter' and not literal and not is_literal(id):
if instance_name not in list(self.tvm_graph.edge_info):
logging.error(f"Unable to get value for parameter {instance_name}")
exit(1)
edge = self.tvm_graph.edge_info[instance_name]
if 'value' not in list(edge.attributes):
logging.error(f"Could not find literal for parameter argument {instance_name}.\n"
f"Possible attributes: {list(edge.attributes)}")
exit(1)
value = hdfgutils.get_attribute_value(edge.attributes['value'])
elif is_literal(id) and isinstance(instance_name, str):
if id in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[id]
value = hdfgutils.get_attribute_value(edge.attributes['value'])
elif instance_name in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_name]
value = hdfgutils.get_attribute_value(edge.attributes['value'])
else:
logging.error(f"Could not find literal for parameter argument {instance_name} with id {id}.\n"
f"var: {var['key']}")
exit(1)
else:
value = instance_name
for a in var['init_func_args']:
arg_result = self.get_arg_attribute(a, value, literal=literal)
args.append(arg_result)
for k in var['init_func_kw'].keys():
kwargs[k] = self.get_arg_attribute(var['init_func_kw'][k], value, literal=literal)
if len(kwargs.keys()) == 0:
var = self.get_func(var['init_func'])(*args)
else:
var = self.get_func(var['init_func'])(*args, **kwargs)
return var
def get_arg_attribute(self, key, instance_name, literal=False):
if isinstance(key, list):
arg = []
for k in key:
arg.append(self.get_arg_attribute(k, instance_name, literal=literal))
return arg
elif key == 'name':
return instance_name
elif key == 'shape':
if literal:
logging.error(f"Cannot get shape for literal value {instance_name} as attribute")
exit(1)
edge = self.tvm_graph.edge_info[instance_name]
if 'dimensions' not in list(edge.attributes):
logging.error(f"No dimensions for edge {instance_name}")
tuple_dims = ()
else:
dimensions = hdfgutils.get_attribute_value(edge.attributes['dimensions'])
tuple_dims = tuple(int(d) if is_number(d) else d for d in dimensions)
return tuple_dims
elif key == 'type':
if literal:
return type(instance_name).__name__
edge = self.tvm_graph.edge_info[instance_name]
if 'type' not in list(edge.attributes):
logging.error(f"No type for edge {instance_name}")
dtype = 'float32'
else:
dtype = hdfgutils.get_attribute_value(edge.attributes['type'])
return dtype
elif instance_name in self.graph_variables.keys():
return self.graph_variables[instance_name]
else:
logging.error(f"Could not create attribute for {instance_name} with key {key}.")
exit(1)
def get_args(self, names, vars):
args = []
for n in names:
if n not in vars.keys():
logging.error(f"Operation argument {n} not in created variables: {vars.keys()}")
else:
args.append(vars[n])
return args
def arg_conversion(self, instance_arg, target_arg):
if isinstance(target_arg, tuple):
result = tuple(instance_arg for _ in range(len(target_arg)))
return result
else:
return instance_arg
def get_func(self, function_name):
mod_id, func_id = function_name.rsplit('.', 1)
mod = importlib.import_module(mod_id)
func = getattr(mod, func_id)
return func
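    # get_func resolves a dotted path at runtime: it splits on the last '.', imports the
    # module with importlib and returns the named attribute, e.g. get_func('tvm.relay.var')
    # yields the tvm.relay.var callable used when building the graph above.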
| [
"[email protected]"
] | |
ba1688de52c9aed52049beab16fcbf7d463add7d | a04296ba7b09f3a7b7540a14e8ef6fcf683ed392 | /common/mergelist.py | dac26a7dac26dd95d77cfed036796fb50f267e7f | [
"MIT"
] | permissive | Hasi-liu/FXTest | 0a3acf9d27d9f784f378fc9f9c13deb9e678adbe | 150012f87021b6b8204fd342c62538c10d8dfa85 | refs/heads/master | 2023-05-11T00:27:57.224448 | 2020-05-10T02:58:11 | 2020-05-10T02:58:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | """
@author: lileilei
@file: mergelist.py
@time: 2018/1/29 13:55
"""
"""
Merging of lists.
"""
def hebinglist(list1: list):
    new = []
for m in list1:
for h in m:
new.append(h)
return new
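# e.g. hebinglist([[1, 2], [3]]) -> [1, 2, 3]  (flattens one level of nesting)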
def listmax(list2: list):
    # Coerce each entry to float; non-numeric entries count as 0.
    list_int = []
    for i in list2:
        try:
            list_int.append(float(i))
        except (TypeError, ValueError):
            list_int.append(0)
    nsm = 0
    for j in range(len(list_int)):
        nsm += float(list_int[j])
    ma = max(list_int)
    minx = min(list_int)
    pingjun = nsm / (len(list_int))
return ma,minx,pingjun | [
"[email protected]"
] | |
f0457b814ef72bf357cd55551afddde24bb8f179 | 9cbc458ae2fa1f2be6eeb6fb4f4dfc49db464f1b | /financial/productgroup/migrations/0001_initial.py | 14fd26e06c60016d2d9401b4c4f5ffac79deec65 | [] | no_license | reykennethdmolina/projectfinsys | 45f8bd3248ad4b11c78cee6beefab040e6d58343 | a8604b9450b890e26b8f59f6acd76d64c415ccce | refs/heads/master | 2021-01-11T17:36:01.648840 | 2017-01-23T11:21:04 | 2017-01-23T11:21:04 | 79,797,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2017-01-17 06:07
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Productgroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(max_length=10, unique=True)),
('description', models.CharField(max_length=250)),
('status', models.CharField(choices=[('A', 'Active'), ('I', 'Inactive'), ('C', 'Cancelled'), ('O', 'Posted'), ('P', 'Printed')], default='A', max_length=1)),
('enterdate', models.DateTimeField(auto_now_add=True)),
('modifydate', models.DateTimeField(default=datetime.datetime(2017, 1, 17, 14, 7, 34, 668000))),
('isdeleted', models.IntegerField(default=0)),
('enterby', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='productgroup_enter', to=settings.AUTH_USER_MODEL)),
('modifyby', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='productgroup_modify', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-pk'],
'db_table': 'productgroup',
'permissions': (('view_productgroup', 'Can view productgroup'),),
},
),
]
| [
"[email protected]"
] | |
eec679bc8e8a903c116e1c4a9cc0fcfed3bde0af | f38c30595023f4f272121576b9e62ed2adbed7de | /contact_list.py | 8c047db9d0fd3e5d8d18d73a7614d2fe2b25233d | [] | no_license | devArist/contact_app_project | f1f19ed2cb4a9261575e5f182e4dcb28ba44e082 | 81d1d639d2e7a362490397d334345ce24a154789 | refs/heads/main | 2023-07-12T22:49:19.816760 | 2021-08-30T15:56:44 | 2021-08-30T15:56:44 | 401,285,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | from kivymd.uix.boxlayout import MDBoxLayout
from kivy.lang import Builder
Builder.load_file('contact_list.kv')
class ContactBoxLayout(MDBoxLayout):
pass | [
"[email protected]"
] | |
e2a354f7de78bb119094313ee9b25118e374ca6c | ba2d449486c58578581b8de7b2b6f21074be6274 | /02 Linked Lists/2-8-Loop-Detection.py | 6af5ea7f052ea96436e98812922ad1180e7fa7bb | [] | no_license | theoliao1998/Cracking-the-Coding-Interview | 4e0abef8659a0abf33e09ee78ce2f445f8b5d591 | 814b9163f68795238d17aad5b91327fbceadf49e | refs/heads/master | 2020-12-09T12:46:10.845579 | 2020-07-25T05:39:19 | 2020-07-25T05:39:19 | 233,306,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,334 | py | # Loop Detection: Given a circular linked list, implement an algorithm that returns the node at the
# beginning of the loop.
# DEFINITION
# Circular linked list: A (corrupt) linked list in which a node's next pointer points to an earlier node, so
# as to make a loop in the linked list.
# EXAMPLE
# Input: A -> B -> C - > D -> E -> C [the same C as earlier]
# Output: C
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
def append(self, x):
n = self
while n.next:
n = n.next
n.next = ListNode(x)
def loopDectection(n):
def getLoopLength(n):
slow, fast = n, n.next
length = 0
while fast:
length += 1
if slow == fast:
return length
slow = slow.next
if not fast.next:
return 0
fast = fast.next.next
l = getLoopLength(n)
if not l:
return None
slow = n
fast = n
for _ in range(l):
fast = fast.next
while slow != fast:
slow, fast = slow.next, fast.next
return slow
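# Note: getLoopLength returns the iteration count at the first meeting, which is a multiple
# of the cycle length rather than the cycle length itself. That is still sufficient here:
# advancing fast that many steps ahead of the head and then stepping both pointers in
# lockstep makes them first coincide exactly at the node where the loop begins.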
# A = ListNode(1)
# B = ListNode(2)
# C = ListNode(3)
# D = ListNode(4)
# E = ListNode(5)
# A.next = B
# B.next = C
# C.next = D
# D.next = E
# E.next = C
# print(loopDectection(A).val)
| [
"[email protected]"
] | |
b26c45b8a0ae7b082b96599a14f020f230ee3eca | 8b16bd61c79113ff575def261e12f0e2125e4d90 | /browser/migrations/0004_search_history.py | 44305eecb73f1f97b11f14b0373386db7ecccd80 | [] | no_license | alvarantson/veebikaabits2.0 | 88f99e2fff8d0ef76daec3d3d3f4d6e19ed6d274 | 412d7d2fdc35582ba7210ea6108087a0d5ac9d7e | refs/heads/master | 2020-12-02T10:59:05.173248 | 2020-01-03T10:00:36 | 2020-01-03T10:00:36 | 230,990,687 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 999 | py | # Generated by Django 2.1.3 on 2018-11-28 11:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('browser', '0003_blacklist'),
]
operations = [
migrations.CreateModel(
name='search_history',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('search_item', models.CharField(max_length=999)),
('time_elapsed', models.CharField(max_length=999)),
('search_datetime', models.CharField(max_length=999)),
('items_total', models.CharField(max_length=999)),
('items_okidoki', models.CharField(max_length=999)),
('items_osta', models.CharField(max_length=999)),
('items_soov', models.CharField(max_length=999)),
('items_kuldnebors', models.CharField(max_length=999)),
],
),
]
| [
"[email protected]"
] | |
9337e099bf9ff81091912bb90e98f59afe773fe5 | d7ca36f20465870e67e7d6832f8e1b8348af12fc | /test/test_linear.py | ca9b35b8ac8886a9b4d3721c6f3eb6f8eb94d575 | [] | no_license | hlcr/LanguageNetworkAnalysis | c109e670534367c782fb71697a92a3ca95aba098 | 65f6c8086f3e4282b15359cc99cf57a682e6b814 | refs/heads/master | 2020-04-24T07:40:04.100213 | 2020-04-17T09:02:05 | 2020-04-17T09:02:05 | 171,805,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | from sklearn import linear_model
import numpy as np
import matplotlib.pyplot as plt
# Linear regression
clf = linear_model.LinearRegression()
# Fit the model (note: the standalone np.array([1, 2, 3]) below is unused)
np.array([1, 2, 3])
clf.fit(np.array([2, 4, 6]).reshape(-1, 1), np.array([0, 2, 4]).reshape(-1, 1))
# Fitted parameters: slope (coef_) and intercept
a, b = clf.coef_, clf.intercept_
print(a)
print(b)
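# The training data satisfy y = x - 2 exactly, so expect a ~= [[1.]] and b ~= [-2.]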
# # Plot
# # 1. The actual data points
# plt.scatter(df['square_feet'], df['price'], color='blue')
#
# # 2. The fitted line
# plt.plot(df['square_feet'], regr.predict(df['square_feet'].reshape(-1,1)), color='red', linewidth=4)
#
# plt.show() | [
"[email protected]"
] | |
55be3d9790defafee827f8d11c0671d535c13396 | 302f142abcb9cd8cc9a6f87cda1a3580cce8ad21 | /.env/lib/python3.6/site-packages/pygments/lexers/unicon.py | c27b7fe0cf8f2b46b226365542c31220719cf12e | [
"Apache-2.0"
] | permissive | Najafova/hls4ml_test | 7229a85d5759eac4b177a47f9af7c6b4899dd006 | 21f9951b3f0ca88cc2312a8863977c1477b9233e | refs/heads/master | 2023-01-19T15:11:18.861261 | 2020-11-22T17:17:51 | 2020-11-22T17:17:51 | 289,086,269 | 0 | 0 | Apache-2.0 | 2020-11-22T17:17:52 | 2020-08-20T18:59:09 | Python | UTF-8 | Python | false | false | 17,817 | py | # -*- coding: utf-8 -*-
"""
pygments.lexers.unicon
~~~~~~~~~~~~~~~~~~~~~~
Lexers for the Icon and Unicon languages, including ucode VM.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, words, using, this
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['IconLexer', 'UcodeLexer', 'UniconLexer']
class UniconLexer(RegexLexer):
"""
For Unicon source code.
.. versionadded:: 2.4
"""
name = 'Unicon'
aliases = ['unicon']
filenames = ['*.icn']
mimetypes = ['text/unicon']
flags = re.MULTILINE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'#.*?\n', Comment.Single),
(r'[^\S\n]+', Text),
(r'class|method|procedure', Keyword.Declaration, 'subprogram'),
(r'(record)(\s+)(\w+)',
bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
(r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
(r'(&null|&fail)\b', Keyword.Constant),
(r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
r'&cset|¤t|&dateline|&date|&digits|&dump|'
r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
r'&eventcode|&eventvalue|&eventsource|&e|'
r'&features|&file|&host|&input|&interval|&lcase|&letters|'
r'&level|&line|&ldrag|&lpress|&lrelease|'
r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
r'&phi|&pick|&pi|&pos|&progname|'
r'&random|&rdrag|®ions|&resize|&row|&rpress|&rrelease|'
r'&shift|&source|&storage|&subject|'
r'&time|&trace|&ucase|&version|'
r'&window|&x|&y', Keyword.Reserved),
(r'(by|of|not|to)\b', Keyword.Reserved),
(r'(global|local|static|abstract)\b', Keyword.Reserved),
(r'package|link|import', Keyword.Declaration),
(words((
'break', 'case', 'create', 'critical', 'default', 'end', 'all',
'do', 'else', 'every', 'fail', 'if', 'import', 'initial',
'initially', 'invocable', 'next',
'repeat', 'return', 'suspend',
'then', 'thread', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
(words((
'Abort', 'abs', 'acos', 'Active', 'Alert', 'any', 'Any', 'Arb',
'Arbno', 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
'Bal', 'bal', 'Bg', 'Break', 'Breakx',
'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
'classname', 'Clip', 'Clone', 'close', 'cofail', 'collect',
'Color', 'ColorValue', 'condvar', 'constructor', 'copy',
'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
'dbcolumns', 'dbdriver', 'dbkeys', 'dblimits', 'dbproduct',
'dbtables', 'delay', 'delete', 'detab', 'display', 'DrawArc',
'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
'DrawTorus', 'dtor',
'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
'Fail', 'fcntl', 'fdup', 'Fence', 'fetch', 'Fg', 'fieldnames',
'filepair', 'FillArc', 'FillCircle', 'FillPolygon',
'FillRectangle', 'find', 'flock', 'flush', 'Font', 'fork',
'FreeColor', 'FreeSpace', 'function',
'get', 'getch', 'getche', 'getegid', 'getenv', 'geteuid',
'getgid', 'getgr', 'gethost', 'getpgrp', 'getpid', 'getppid',
'getpw', 'getrusage', 'getserv', 'GetSpace', 'gettimeofday',
'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
'kbhit', 'key', 'keyword', 'kill',
'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
'lock', 'log', 'Lower', 'lstat',
'many', 'map', 'match', 'MatrixMode', 'max', 'member',
'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
'MultMatrix', 'mutex',
'name', 'NewColor', 'Normals', 'NotAny', 'numeric',
'open', 'opencl', 'oprec', 'ord', 'OutPort',
'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
'PlayAudio', 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
'PushTranslate', 'put',
'QueryPointer',
'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
'Rtab', 'rtod', 'runerr',
'save', 'Scale', 'seek', 'select', 'send', 'seq',
'serial', 'set', 'setenv', 'setgid', 'setgrent',
'sethostent', 'setpgrp', 'setpwent', 'setservent',
'setuid', 'signal', 'sin', 'sort', 'sortf', 'Span',
'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
'StopAudio', 'string', 'structure', 'Succeed', 'Swi',
'symlink', 'sys_errstr', 'system', 'syswrite',
'Tab', 'tab', 'table', 'tan',
'Texcoord', 'Texture', 'TextWidth', 'Translate',
'trap', 'trim', 'truncate', 'trylock', 'type',
'umask', 'Uncouple', 'unlock', 'upto', 'utime',
'variable', 'VAttrib',
'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
'write', 'WriteImage', 'writes', 'WSection',
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
(r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|'
r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'\^', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
(r"[\[\]]", Punctuation),
(r"<>|=>|[()|:;,.'`{}%&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
(r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
(r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
(r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
],
'subprogram': [
(r'\(', Punctuation, ('#pop', 'formal_part')),
(r';', Punctuation, '#pop'),
(r'"[^"]+"|\w+', Name.Function),
include('root'),
],
'type_def': [
(r'\(', Punctuation, 'formal_part'),
],
'formal_part': [
(r'\)', Punctuation, '#pop'),
(r'\w+', Name.Variable),
(r',', Punctuation),
(r'(:string|:integer|:real)\b', Keyword.Reserved),
include('root'),
],
}
class IconLexer(RegexLexer):
"""
Lexer for Icon.
.. versionadded:: 1.6
"""
name = 'Icon'
aliases = ['icon']
filenames = ['*.icon', '*.ICON']
mimetypes = []
flags = re.MULTILINE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'#.*?\n', Comment.Single),
(r'[^\S\n]+', Text),
(r'class|method|procedure', Keyword.Declaration, 'subprogram'),
(r'(record)(\s+)(\w+)',
bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
(r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
(r'(&null|&fail)\b', Keyword.Constant),
(r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
r'&cset|¤t|&dateline|&date|&digits|&dump|'
r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
r'&eventcode|&eventvalue|&eventsource|&e|'
r'&features|&file|&host|&input|&interval|&lcase|&letters|'
r'&level|&line|&ldrag|&lpress|&lrelease|'
r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
r'&phi|&pick|&pi|&pos|&progname|'
r'&random|&rdrag|®ions|&resize|&row|&rpress|&rrelease|'
r'&shift|&source|&storage|&subject|'
r'&time|&trace|&ucase|&version|'
r'&window|&x|&y', Keyword.Reserved),
(r'(by|of|not|to)\b', Keyword.Reserved),
(r'(global|local|static)\b', Keyword.Reserved),
(r'link', Keyword.Declaration),
(words((
'break', 'case', 'create', 'default', 'end', 'all',
'do', 'else', 'every', 'fail', 'if', 'initial',
'invocable', 'next',
'repeat', 'return', 'suspend',
'then', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
(words((
'abs', 'acos', 'Active', 'Alert', 'any',
'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
'bal', 'Bg',
'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
'Clip', 'Clone', 'close', 'cofail', 'collect',
'Color', 'ColorValue', 'condvar', 'copy',
'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
'delay', 'delete', 'detab', 'display', 'DrawArc',
'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
'DrawTorus', 'dtor',
'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
'fcntl', 'fdup', 'fetch', 'Fg', 'fieldnames',
'FillArc', 'FillCircle', 'FillPolygon',
'FillRectangle', 'find', 'flock', 'flush', 'Font',
'FreeColor', 'FreeSpace', 'function',
'get', 'getch', 'getche', 'getenv',
'GetSpace', 'gettimeofday',
'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
'kbhit', 'key', 'keyword', 'kill',
'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
'lock', 'log', 'Lower', 'lstat',
'many', 'map', 'match', 'MatrixMode', 'max', 'member',
'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
'MultMatrix', 'mutex',
'name', 'NewColor', 'Normals', 'numeric',
'open', 'opencl', 'oprec', 'ord', 'OutPort',
'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
'PushTranslate', 'put',
'QueryPointer',
'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
'rtod', 'runerr',
'save', 'Scale', 'seek', 'select', 'send', 'seq',
'serial', 'set', 'setenv',
'setuid', 'signal', 'sin', 'sort', 'sortf',
'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
'string', 'structure', 'Swi',
'symlink', 'sys_errstr', 'system', 'syswrite',
'tab', 'table', 'tan',
'Texcoord', 'Texture', 'TextWidth', 'Translate',
'trap', 'trim', 'truncate', 'trylock', 'type',
'umask', 'Uncouple', 'unlock', 'upto', 'utime',
'variable',
'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
'write', 'WriteImage', 'writes', 'WSection',
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
(r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|'
r':=:|:=|<->|<-|\+:=|\|\||\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
(r"[\[\]]", Punctuation),
(r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
(r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
(r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
(r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
],
'subprogram': [
(r'\(', Punctuation, ('#pop', 'formal_part')),
(r';', Punctuation, '#pop'),
(r'"[^"]+"|\w+', Name.Function),
include('root'),
],
'type_def': [
(r'\(', Punctuation, 'formal_part'),
],
'formal_part': [
(r'\)', Punctuation, '#pop'),
(r'\w+', Name.Variable),
(r',', Punctuation),
(r'(:string|:integer|:real)\b', Keyword.Reserved),
include('root'),
],
}
class UcodeLexer(RegexLexer):
"""
Lexer for Icon ucode files.
.. versionadded:: 2.4
"""
name = 'ucode'
aliases = ['ucode']
filenames = ['*.u', '*.u1', '*.u2']
mimetypes = []
flags = re.MULTILINE
tokens = {
'root': [
(r'(#.*\n)', Comment),
(words((
'con', 'declend', 'end',
'global',
'impl', 'invocable',
'lab', 'link', 'local',
'record',
'uid', 'unions',
'version'),
prefix=r'\b', suffix=r'\b'),
Name.Function),
(words((
'colm', 'filen', 'line', 'synt'),
prefix=r'\b', suffix=r'\b'),
Comment),
(words((
'asgn',
'bang', 'bscan',
'cat', 'ccase', 'chfail',
'coact', 'cofail', 'compl',
'coret', 'create', 'cset',
'diff', 'div', 'dup',
'efail', 'einit', 'end', 'eqv', 'eret',
'error', 'escan', 'esusp',
'field',
'goto',
'init', 'int', 'inter',
'invoke',
'keywd',
'lconcat', 'lexeq', 'lexge',
'lexgt', 'lexle', 'lexlt', 'lexne',
'limit', 'llist', 'lsusp',
'mark', 'mark0', 'minus', 'mod', 'mult',
'neg', 'neqv', 'nonnull', 'noop', 'null',
'number', 'numeq', 'numge', 'numgt',
'numle', 'numlt', 'numne',
'pfail', 'plus', 'pnull', 'pop', 'power',
'pret', 'proc', 'psusp', 'push1', 'pushn1',
'random', 'rasgn', 'rcv', 'rcvbk', 'real',
'refresh', 'rswap',
'sdup', 'sect', 'size', 'snd', 'sndbk',
'str', 'subsc', 'swap',
'tabmat', 'tally', 'toby', 'trace',
'unmark',
'value', 'var'), prefix=r'\b', suffix=r'\b'),
Keyword.Declaration),
(words((
'any',
'case',
'endcase', 'endevery', 'endif',
'endifelse', 'endrepeat', 'endsuspend',
'enduntil', 'endwhile', 'every',
'if', 'ifelse',
'repeat',
'suspend',
'until',
'while'),
prefix=r'\b', suffix=r'\b'),
Name.Constant),
(r'\d+(\s*|\.$|$)', Number.Integer),
(r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
(r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
(r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation),
(r'\s+\b', Text),
(r'[\w-]+', Text),
],
}
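# A minimal usage sketch (not part of the original module; assumes a standard
# Pygments install, and the Icon snippet is illustrative only):
#
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#   src = 'procedure main()\n   write("hello")\nend\n'
#   print(highlight(src, IconLexer(), TerminalFormatter()))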
| [
"[email protected]"
] | |
8dc206ecc05711beff9e20fc9c645ee81ed652dd | 1abcd4686acf314a044a533d2a541e83da835af7 | /backjoon_level_python/1701.py | 4112638d3382c51244a90f87bed9c9d769c2e387 | [] | no_license | HoYoung1/backjoon-Level | 166061b2801514b697c9ec9013db883929bec77e | f8e49c8d2552f6d62be5fb904c3d6548065c7cb2 | refs/heads/master | 2022-05-01T05:17:11.305204 | 2022-04-30T06:01:45 | 2022-04-30T06:01:45 | 145,084,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | def get_failure_array(text):
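    # failure[i] = length of the longest proper prefix of text[:i + 1] that is
    # also a suffix of it (the classic KMP prefix function).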
failure = [0] * len(text)
j = 0
for i in range(1, len(text)):
while j > 0 and text[i] != text[j]:
j = failure[j - 1]
if text[i] == text[j]:
failure[i] = j + 1
j += 1
return failure
def solve(input_text):
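    # Baekjoon 1701 asks for the longest substring that occurs at least twice
    # (occurrences may overlap).  The maximum prefix-function value taken over
    # every suffix of the input is exactly that length, so trying all suffixes
    # gives an O(n^2) solution.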
max_value = 0
for i in range(len(input_text)-1):
failure = get_failure_array(input_text[i:])
max_value = max(max_value, max(failure))
return max_value
if __name__ == '__main__':
input_text = input()
print(solve(input_text))
| [
"[email protected]"
] | |
7f559d2862ef1e3f93bcde50464d07a9767ac80e | 3d88748960deb31c674525df2bd9d79ba1d2db1a | /pythonlib/bin/pyfftr | 4127562195c91f40b757688d14d9e521c09d2ba6 | [
"BSD-2-Clause"
] | permissive | johnkerl/scripts-math | 1a0eb6ce86fd09d593c82540638252af5036c535 | cb29e52fec10dd00b33c3a697dec0267a87ab8bb | refs/heads/main | 2022-01-31T17:46:05.002494 | 2022-01-17T20:40:31 | 2022-01-17T20:40:31 | 13,338,494 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,546 | #!/usr/bin/python -Wall
# ----------------------------------------------------------------
# John Kerl
# [email protected]
# 2006-03-20
# (Ported to Python 2006-03-06)
#
# This is a radix-2 fast Fourier transform. Example:
#
# xxx cmts re real-to-complex transform; numerical recipes
# ----------------------------------------------------------------
from __future__ import division # 1/2 = 0.5, not 0.
import sys
import pyfft_m
import pyrcio_m
from math import *
# ----------------------------------------------------------------
def usage():
print >> sys.stderr, "Usage:", sys.argv[0], "[options] [file name]"
print >> sys.stderr, "If the file name is omitted, input is taken from standard input."
print >> sys.stderr, "Format is in decimal real, one sample per line. E.g."
print >> sys.stderr, " 1.0"
print >> sys.stderr, " 2.0"
print >> sys.stderr, " 3.0"
print >> sys.stderr, " 4.0"
print >> sys.stderr, "Options:"
print >> sys.stderr, " -fi: input folding"
print >> sys.stderr, " -nfi: no input folding"
print >> sys.stderr, " -fo: output folding"
print >> sys.stderr, " -nfo: no output folding"
print >> sys.stderr, " -fwd: forward FFT (exp(-i 2 pi k/N) kernel)"
print >> sys.stderr, " -rev: reverse FFT (exp( i 2 pi k/N) kernel)"
print >> sys.stderr, " -s: scaling"
print >> sys.stderr, " -ns: no scaling"
print >> sys.stderr, " -dft: Use DFT. Allows N not to be a power of 2."
sys.exit(1)
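# Example invocation (a sketch; the input file name here is hypothetical):
#   ./pyfftr -fwd samples.txt
# where samples.txt contains one decimal real sample per line, as shown in
# usage() above.  Omitting the file name reads samples from standard input.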
# ================================================================
# Start of program
fold_in = 0
fold_out = 0
forward = 1
scale = 1
use_dft = 0
file_name = "-"
argc = len(sys.argv)
argi = 1
while (argi < argc):
arg = sys.argv[argi]
if (arg[0] != '-'):
break
if (arg == "-fi"):
fold_in = 1
elif (arg == "-nfi"):
fold_in = 0
elif (arg == "-fo"):
fold_out = 1
elif (arg == "-nfo"):
fold_out = 0
elif (arg == "-fwd"):
forward = 1
elif (arg == "-rev"):
forward = 0
elif (arg == "-s"):
scale = 1
elif (arg == "-ns"):
scale = 0
elif (arg == "-dft"):
use_dft = 1
elif (arg == "-ndft"):
use_dft = 0
else:
usage()
argi += 1
if ((argc - argi) == 1):
file_name = sys.argv[argi]
elif ((argc - argi) == 0):
file_name = "-"
else:
usage()
# real input f_j: j = 0 .. N-1
# split: fe_j, fo_j: j = 0 .. N/2-1
# h_j = fe_j + i fo_j: j = 0 .. N/2-1
# By linearity: H_k = Fe_k + i Fo_k: k = 0 .. N/2-1
# Fe_k = sum_{j=0}^{N/2-1} f_{2j} w_N^2 [note w_N^2 = w_{N/2}]
# Fo_k = sum_{j=0}^{N/2-1} f_{2j+1} w_N^2
# F_k = Fe_k + w_N^k Fo_k
# F_k = 1/2(H_k + H_{N/2-k}^*) - i/2(H_k - H_{N/2-k}^*) w_N^k
# Save only 1st half of F_k: k = 0 .. N/2-1
# Need H_{N/2}: but = H_0. (Why?)
# -- Inverse --
# Fe_k = 1/2(F_k + F_{N/2-k}^*)
# "peel" F_{N/2} "from" F_0
# Fo_k = 1/2 w_N^{-k}(F_k - F_{N/2-k}^*)
# H_k = Fe_k + i Fo_k
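# A concrete packing example (illustrative only): for N = 4 and
# f = [1, 2, 3, 4], the half-length complex input is h = [1+2i, 3+4i],
# i.e. h_j = f_{2j} + i*f_{2j+1} for j = 0 .. N/2-1.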
f = pyrcio_m.read_real_vector(file_name)
print "f:"
pyrcio_m.print_real_vector(f)
print
N = len(f)
N2 = int(N/2)
print "N =", N
print "N2 =", N2
h = []
for j in range(0, N2):
h.append(f[2*j] + 1j*f[2*j+1])
print "h:"
pyrcio_m.print_complex_vector(h)
print
if (use_dft):
H = pyfft_m.dft(h, fold_in, fold_out, forward, scale)
else:
H = pyfft_m.fft(h, fold_in, fold_out, forward, scale)
H.append(H[0]) # Append H[N/2]
print "H:"
pyrcio_m.print_complex_vector(H)
print
w_N = complex(cos(2*pi/N), sin(2*pi/N))
F = []
for k in range(0, N2+1):
Hnegkbar = H[N2-k].conjugate()
Fek = 0.5*(H[k] + Hnegkbar)
Fok = 0.5*(H[k] - Hnegkbar) * (-1j)
F.append(Fek + Fok * w_N**k)
print "F:"
pyrcio_m.print_complex_vector(F)
print
| [
"[email protected]"
] | ||
0266fc2e290229ee4fb5b79ceec76bc0a22a0e42 | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/01_netCDF_extraction/merra902TG/128-tideGauge.py | a22e221a52b78d90b03e7e76dfe5eb0acc6f5054 | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,075 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 01 10:00:00 2020
MERRAv2 netCDF extraction script - template
To create an extraction script for each tide gauge
@author: Michael Tadesse
"""
import os
import pandas as pd
from d_merra_define_grid import Coordinate, findPixels, findindx
from c_merra_read_netcdf import readnetcdf
from f_merra_subset import subsetter
def extract_data(delta=3):
"""
This is the master function that calls subsequent functions
to extract uwnd, vwnd, slp for the specified
tide gauges
delta: distance (in degrees) from the tide gauge
"""
print('Delta = {}'.format(delta), '\n')
#defining the folders for predictors
dir_in = "/lustre/fs0/home/mtadesse/MERRAv2/data"
surge_path = "/lustre/fs0/home/mtadesse/obs_surge"
csv_path = "/lustre/fs0/home/mtadesse/merraLocalized"
#cd to the obs_surge dir to get TG information
os.chdir(surge_path)
tg_list = os.listdir()
#cd to the obs_surge dir to get TG information
os.chdir(dir_in)
years = os.listdir()
#################################
#looping through the year folders
#################################
#to mark the first csv
firstCsv = True;
for yr in years:
os.chdir(dir_in)
#print(yr, '\n')
os.chdir(os.path.join(dir_in, yr))
####################################
#looping through the daily .nc files
####################################
for dd in os.listdir():
os.chdir(os.path.join(dir_in, yr)) #back to the predictor folder
print(dd, '\n')
#########################################
#get netcdf components - predictor file
#########################################
nc_file = readnetcdf(dd)
            lon, lat, time, predSLP, predU10, predV10 = \
                nc_file[0], nc_file[1], nc_file[2], nc_file[3], nc_file[4], nc_file[5]
x = 128
y = 129
#looping through individual tide gauges
for t in range(x, y):
#the name of the tide gauge - for saving purposes
# tg = tg_list[t].split('.mat.mat.csv')[0]
tg = tg_list[t]
#extract lon and lat data from surge csv file
#print(tg, '\n')
os.chdir(surge_path)
if os.stat(tg).st_size == 0:
print('\n', "This tide gauge has no surge data!", '\n')
continue
surge = pd.read_csv(tg, header = None)
#surge_with_date = add_date(surge)
#define tide gauge coordinate(lon, lat)
tg_cord = Coordinate(surge.iloc[0,0], surge.iloc[0,1])
#find closest grid points and their indices
close_grids = findPixels(tg_cord, delta, lon, lat)
ind_grids = findindx(close_grids, lon, lat)
#loop through preds#
#subset predictor on selected grid size
predictors = {'slp':predSLP, 'wnd_u':predU10, \
'wnd_v':predV10}
for xx in predictors.keys():
pred_new = subsetter(dd, predictors[xx], ind_grids, time)
if xx == 'slp':
if firstCsv:
finalSLP = pred_new
else:
finalSLP = pd.concat([finalSLP, pred_new], axis = 0)
print(finalSLP.shape)
elif xx == 'wnd_u':
if firstCsv:
finalUwnd = pred_new
else:
finalUwnd = pd.concat([finalUwnd, pred_new], axis = 0)
elif xx == 'wnd_v':
if firstCsv:
finalVwnd = pred_new
firstCsv = False;
else:
finalVwnd = pd.concat([finalVwnd, pred_new], axis = 0)
#create directories to save pred_new
os.chdir(csv_path)
#tide gauge directory
tg_name_old = tg.split('.mat.mat.csv')[0]
tg_name = '-'.join([str(t), tg_name_old])
try:
os.makedirs(tg_name)
os.chdir(tg_name) #cd to it after creating it
except FileExistsError:
#directory already exists
os.chdir(tg_name)
#save as csv
finalSLP.to_csv('slp.csv')
finalUwnd.to_csv('wnd_u.csv')
finalVwnd.to_csv('wnd_v.csv')
#run script
extract_data(delta=3)
| [
"[email protected]"
] | |
9886045608f2213f99a41a0af0b8b79aa8486538 | 69a4db25d9f7d4e67cf2bcfe005e5cba9915180a | /examprep.py | 4eae0ad01d13431e655ff277605755e813e07ef2 | [] | no_license | riley-csp-2019-20/final-exam-semester-1-taylor77205 | ca3211a606819eab48d118bb6e5dc08dcf190b9c | ee37ca47c1090b8a23a6d3ed01448ed1494d9183 | refs/heads/master | 2020-11-25T12:22:28.536638 | 2019-12-19T16:39:33 | 2019-12-19T16:39:33 | 228,657,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | import turtle as trtl
shape = "arrow"
player = trtl.Turtle(shape = shape)
player.speed(0)
player.penup()
shape = "circle"
circle = trtl.Turtle(shape = shape)
circle.penup()
def up():
player.setheading(90)
player.forward(10)
circle.setheading(90)
circle.forward(10)
def down():
player.setheading(270)
player.forward(10)
circle.setheading(270)
circle.forward(10)
def right():
player.setheading(00)
player.forward(10)
circle.setheading(00)
circle.forward(10)
def left():
player.setheading(180)
player.forward(10)
circle.setheading(180)
circle.forward(10)
wn=trtl.Screen()
wn.onkeypress(up,"Up")
wn.onkeypress(down,"Down")
wn.onkeypress(right,"Right")
wn.onkeypress(left,"Left")
wn.listen()
wn.mainloop() | [
"[email protected]"
] | |
b7dc54b2539acc9351aa7b5e664d819614a5d304 | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/debug/lib/profiling.py | 1d60d22ff7008d67f2fdc68c9e659f0c0474b8a5 | [
"Apache-2.0"
] | permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 181 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/debug/lib/profiling.py | [
"[email protected]"
] | |
85d0e15806dc792d31a9296899a3bdbc2c0db268 | 0f8909782b5150783b738df3875c91509a92a33b | /scena/c0450.bin.py | 94d0262c76e9b95ffec8999949ad301746850f0a | [] | no_license | uyjulian/ao_text | e40cd982bcdd5ea9ffd6f0f2e97ce9b92749b63a | 5cc5468aeb64fa97935f334a627357ec10e22307 | refs/heads/master | 2021-06-26T21:43:07.489898 | 2017-07-05T16:55:01 | 2017-07-05T16:55:01 | 96,562,461 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139,980 | py | from ScenarioHelper import *
def main():
CreateScenaFile(
"c0450.bin", # FileName
"c0450", # MapName
"c0450", # Location
0x0024, # MapIndex
"ed7113",
0x00002000, # Flags
("", "", "", "", "", ""), # include
0x00, # PlaceNameNumber
0x00, # PreInitFunctionIndex
b'\x00\xff\xff', # Unknown_51
# Information
[0, 0, -1000, 0, 0, 0, 24000, 500, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 36, 0, 6, 0, 7],
)
BuildStringList((
"c0450", # 0
"受付カイル", # 1
"ドリス", # 2
"アーロン", # 3
"レティシア支配人", # 4
"ミンネス", # 5
"観光客", # 6
"観光客", # 7
"市民", # 8
"女の子", # 9
"市民", # 10
"市民", # 11
"観光客", # 12
"市民", # 13
"市民", # 14
"市民", # 15
"デリック", # 16
))
AddCharChip((
"chr/ch45200.itc", # 00
"chr/ch22000.itc", # 01
"chr/ch25700.itc", # 02
"chr/ch27500.itc", # 03
"chr/ch27900.itc", # 04
"chr/ch33002.itc", # 05
"chr/ch32402.itc", # 06
"chr/ch22002.itc", # 07
"chr/ch22300.itc", # 08
"chr/ch24400.itc", # 09
"chr/ch21300.itc", # 0A
"chr/ch33000.itc", # 0B
"chr/ch21002.itc", # 0C
"chr/ch20302.itc", # 0D
"chr/ch23800.itc", # 0E
))
DeclNpc(65440, 0, 59970, 270, 261, 0x0, 0, 1, 0, 0, 0, 0, 13, 255, 0)
DeclNpc(4090, 9, 59900, 225, 261, 0x0, 0, 2, 0, 0, 2, 0, 15, 255, 0)
DeclNpc(50740, 0, 9750, 90, 261, 0x0, 0, 3, 0, 0, 1, 0, 14, 255, 0)
DeclNpc(4294963306, 0, 7000, 90, 261, 0x0, 0, 4, 0, 0, 0, 0, 11, 255, 0)
DeclNpc(168410, 0, 5519, 180, 389, 0x0, 0, 0, 0, 0, 0, 0, 9, 255, 0)
DeclNpc(60049, 150, 65010, 180, 389, 0x0, 0, 5, 0, 255, 255, 0, 16, 255, 0)
DeclNpc(61630, 150, 65010, 180, 389, 0x0, 0, 6, 0, 255, 255, 0, 17, 255, 0)
DeclNpc(189949, 500, 58349, 90, 389, 0x0, 0, 7, 0, 255, 255, 0, 18, 255, 0)
DeclNpc(190759, 0, 61840, 45, 389, 0x0, 0, 8, 0, 0, 4, 0, 19, 255, 0)
DeclNpc(153649, 0, 61220, 180, 389, 0x0, 0, 9, 0, 0, 0, 0, 20, 255, 0)
DeclNpc(153639, 0, 60250, 0, 389, 0x0, 0, 10, 0, 0, 0, 0, 21, 255, 0)
DeclNpc(115000, 0, 62779, 0, 389, 0x0, 0, 11, 0, 0, 0, 0, 22, 255, 0)
DeclNpc(112550, 500, 6699, 0, 389, 0x0, 0, 12, 0, 255, 255, 0, 23, 255, 0)
DeclNpc(112550, 500, 9300, 180, 389, 0x0, 0, 13, 0, 255, 255, 0, 24, 255, 0)
DeclNpc(115000, 0, 8409, 45, 389, 0x0, 0, 14, 0, 0, 5, 0, 25, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclActor(68130, 10, 11650, 1200, 68130, 1500, 11650, 0x007C, 0, 26, 0x0000)
DeclActor(4294963796, 0, 7000, 1500, 4294963306, 1500, 7000, 0x007E, 0, 10, 0x0000)
DeclActor(64300, 0, 59970, 1500, 65440, 1500, 59970, 0x007E, 0, 12, 0x0000)
DeclActor(117500, 0, 4000, 1200, 117500, 650, 4000, 0x007C, 0, 8, 0x0000)
DeclActor(68130, 10, 11650, 1200, 68130, 1500, 11650, 0x007C, 0, 27, 0x0000)
ChipFrameInfo(1008, 0) # 0
ScpFunction((
"Function_0_3F0", # 00, 0
"Function_1_4A0", # 01, 1
"Function_2_501", # 02, 2
"Function_3_52C", # 03, 3
"Function_4_557", # 04, 4
"Function_5_582", # 05, 5
"Function_6_5AD", # 06, 6
"Function_7_735", # 07, 7
"Function_8_8DA", # 08, 8
"Function_9_989", # 09, 9
"Function_10_B23", # 0A, 10
"Function_11_B27", # 0B, 11
"Function_12_1F34", # 0C, 12
"Function_13_1F38", # 0D, 13
"Function_14_3052", # 0E, 14
"Function_15_3E46", # 0F, 15
"Function_16_4873", # 10, 16
"Function_17_4985", # 11, 17
"Function_18_4A94", # 12, 18
"Function_19_4B20", # 13, 19
"Function_20_4B93", # 14, 20
"Function_21_4BC0", # 15, 21
"Function_22_4BE5", # 16, 22
"Function_23_4D32", # 17, 23
"Function_24_4DDB", # 18, 24
"Function_25_4E46", # 19, 25
"Function_26_4E71", # 1A, 26
"Function_27_4EA6", # 1B, 27
"Function_28_4ED8", # 1C, 28
"Function_29_5B19", # 1D, 29
"Function_30_7861", # 1E, 30
"Function_31_78AC", # 1F, 31
"Function_32_78F0", # 20, 32
"Function_33_793B", # 21, 33
"Function_34_7986", # 22, 34
"Function_35_79D1", # 23, 35
"Function_36_7A1C", # 24, 36
"Function_37_7A67", # 25, 37
"Function_38_7AB2", # 26, 38
"Function_39_7AFD", # 27, 39
"Function_40_7B48", # 28, 40
"Function_41_7B93", # 29, 41
"Function_42_7BDE", # 2A, 42
))
def Function_0_3F0(): pass
label("Function_0_3F0")
Switch(
(scpexpr(EXPR_RAND), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_IMOD), scpexpr(EXPR_END)),
(0, "loc_428"),
(1, "loc_434"),
(2, "loc_440"),
(3, "loc_44C"),
(4, "loc_458"),
(5, "loc_464"),
(6, "loc_470"),
(SWITCH_DEFAULT, "loc_47C"),
)
label("loc_428")
OP_A0(0xFE, 1450, 0x0, 0xFB)
Jump("loc_488")
label("loc_434")
OP_A0(0xFE, 1550, 0x0, 0xFB)
Jump("loc_488")
label("loc_440")
OP_A0(0xFE, 1600, 0x0, 0xFB)
Jump("loc_488")
label("loc_44C")
OP_A0(0xFE, 1400, 0x0, 0xFB)
Jump("loc_488")
label("loc_458")
OP_A0(0xFE, 1650, 0x0, 0xFB)
Jump("loc_488")
label("loc_464")
OP_A0(0xFE, 1350, 0x0, 0xFB)
Jump("loc_488")
label("loc_470")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_488")
label("loc_47C")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_488")
label("loc_488")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_49F")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_488")
label("loc_49F")
Return()
# Function_0_3F0 end
def Function_1_4A0(): pass
label("Function_1_4A0")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_500")
OP_95(0xFE, 72280, 0, 9750, 1000, 0x0)
OP_95(0xFE, 72280, 0, 5580, 1000, 0x0)
OP_95(0xFE, 50740, 0, 5580, 1000, 0x0)
OP_95(0xFE, 50740, 0, 9750, 1000, 0x0)
Jump("Function_1_4A0")
label("loc_500")
Return()
# Function_1_4A0 end
def Function_2_501(): pass
label("Function_2_501")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_52B")
OP_94(0xFE, 0x604, 0xD714, 0x17C0, 0xFB9A, 0x3E8)
Sleep(300)
Jump("Function_2_501")
label("loc_52B")
Return()
# Function_2_501 end
def Function_3_52C(): pass
label("Function_3_52C")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_556")
OP_94(0xFE, 0xFAD1, 0x141E, 0x11B66, 0x2652, 0x3E8)
Sleep(300)
Jump("Function_3_52C")
label("loc_556")
Return()
# Function_3_52C end
def Function_4_557(): pass
label("Function_4_557")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_581")
OP_94(0xFE, 0x2E158, 0xEA92, 0x2F5B2, 0xF604, 0x3E8)
Sleep(300)
Jump("Function_4_557")
label("loc_581")
Return()
# Function_4_557 end
def Function_5_582(): pass
label("Function_5_582")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5AC")
OP_94(0xFE, 0x1BEC2, 0x1E0A, 0x1C6E2, 0x2AD0, 0x3E8)
Sleep(300)
Jump("Function_5_582")
label("loc_5AC")
Return()
# Function_5_582 end
def Function_6_5AD(): pass
label("Function_6_5AD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_5BB")
Jump("loc_734")
label("loc_5BB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_624")
ClearChrFlags(0xF, 0x80)
ClearChrFlags(0x10, 0x80)
ClearChrFlags(0x11, 0x80)
ClearChrFlags(0x12, 0x80)
ClearChrFlags(0x13, 0x80)
ClearChrFlags(0x14, 0x80)
ClearChrFlags(0x15, 0x80)
ClearChrFlags(0x16, 0x80)
SetChrChipByIndex(0xF, 0x7)
SetChrSubChip(0xF, 0x0)
EndChrThread(0xF, 0x0)
SetChrBattleFlags(0xF, 0x4)
SetChrChipByIndex(0x14, 0xC)
SetChrSubChip(0x14, 0x0)
EndChrThread(0x14, 0x0)
SetChrBattleFlags(0x14, 0x4)
SetChrChipByIndex(0x15, 0xD)
SetChrSubChip(0x15, 0x0)
EndChrThread(0x15, 0x0)
SetChrBattleFlags(0x15, 0x4)
Jump("loc_734")
label("loc_624")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_632")
Jump("loc_734")
label("loc_632")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_640")
Jump("loc_734")
label("loc_640")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_64E")
Jump("loc_734")
label("loc_64E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_65C")
Jump("loc_734")
label("loc_65C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_66A")
Jump("loc_734")
label("loc_66A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_678")
Jump("loc_734")
label("loc_678")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_69B")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 0)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_696")
ClearChrFlags(0xC, 0x80)
label("loc_696")
Jump("loc_734")
label("loc_69B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_6A9")
Jump("loc_734")
label("loc_6A9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_6B7")
Jump("loc_734")
label("loc_6B7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_6C5")
Jump("loc_734")
label("loc_6C5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_6FF")
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
SetChrChipByIndex(0xD, 0x5)
SetChrSubChip(0xD, 0x0)
EndChrThread(0xD, 0x0)
SetChrBattleFlags(0xD, 0x4)
SetChrChipByIndex(0xE, 0x6)
SetChrSubChip(0xE, 0x0)
EndChrThread(0xE, 0x0)
SetChrBattleFlags(0xE, 0x4)
Jump("loc_734")
label("loc_6FF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_734")
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
SetChrChipByIndex(0xD, 0x5)
SetChrSubChip(0xD, 0x0)
EndChrThread(0xD, 0x0)
SetChrBattleFlags(0xD, 0x4)
SetChrChipByIndex(0xE, 0x6)
SetChrSubChip(0xE, 0x0)
EndChrThread(0xE, 0x0)
SetChrBattleFlags(0xE, 0x4)
label("loc_734")
Return()
# Function_6_5AD end
def Function_7_735(): pass
label("Function_7_735")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x12A, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_751")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x232), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_751")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x142, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_76D")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x7D), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_76D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_780")
Jump("loc_7CF")
label("loc_780")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_79C")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x97), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_79C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x181, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7B8")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x233), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_7B8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x181, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7CF")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x97), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_7CF")
OP_65(0x0, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 5)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7FD")
ClearMapObjFlags(0x1, 0x10)
OP_66(0x0, 0x1)
label("loc_7FD")
OP_65(0x4, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_82C")
ClearMapObjFlags(0x1, 0x10)
OP_66(0x4, 0x1)
label("loc_82C")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_GE), scpexpr(EXPR_END)), "loc_84B")
OP_10(0x0, 0x0)
OP_10(0x12, 0x1)
OP_10(0x11, 0x0)
OP_10(0x13, 0x1)
Jump("loc_857")
label("loc_84B")
OP_10(0x0, 0x1)
OP_10(0x12, 0x0)
OP_10(0x11, 0x1)
OP_10(0x13, 0x0)
label("loc_857")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_89D")
SetMapObjFrame(0xFF, "hikari00", 0x0, 0x1)
SetMapObjFrame(0xFF, "c0450:Layer15", 0x0, 0x1)
Sound(128, 1, 50, 0)
label("loc_89D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 2)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8D9")
OP_7D(0xD2, 0xD2, 0xE6, 0x0, 0x0)
SetMapObjFrame(0xFF, "hikari00", 0x0, 0x1)
SetMapObjFrame(0xFF, "c0450:Layer15", 0x0, 0x1)
label("loc_8D9")
Return()
# Function_7_735 end
def Function_8_8DA(): pass
label("Function_8_8DA")
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"『おいしい鍋料理 圧力鍋編』がある。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Jc((scpexpr(EXPR_EXEC_OP, "GetItemNumber('料理手册', 0x0)"), scpexpr(EXPR_END)), "loc_985")
Jc((scpexpr(EXPR_EXEC_OP, "OP_B2(0x6)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_985")
FadeToDark(300, 0, 100)
Sound(17, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"『満腹寄せ鍋』\x07\x00",
"のレシピを覚えた。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
OP_0D()
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
label("loc_985")
TalkEnd(0xFF)
Return()
# Function_8_8DA end
def Function_9_989(): pass
label("Function_9_989")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_B1F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_A7C")
ChrTalk(
0xC,
(
"おや、皆様……\x01",
"まだ私に何か御用ですかな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"我がクインシー社とアルモリカ村の\x01",
"『アルモリカ・ハニーカンパニー』計画は\x01",
"徐々に進行しております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"今後の展望を、皆様も\x01",
"楽しみにしてくださると幸いですよ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 6)
Jump("loc_B1F")
label("loc_A7C")
ChrTalk(
0xC,
(
"我がクインシー社とアルモリカ村の\x01",
"『アルモリカ・ハニーカンパニー』計画は\x01",
"徐々に進行しております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"今後の展望を、皆様も\x01",
"楽しみにしてくださると幸いですよ。\x02",
)
)
CloseMessageWindow()
label("loc_B1F")
TalkEnd(0xFE)
Return()
# Function_9_989 end
def Function_10_B23(): pass
label("Function_10_B23")
Call(0, 11)
Return()
# Function_10_B23 end
def Function_11_B27(): pass
label("Function_11_B27")
TalkBegin(0xB)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1BF, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_DAE")
OP_63(0xB, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0xB,
"あら、皆様は警察の……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fええ、こちらの状況を\x01",
"聞かせてもらっていいですか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"はい、当ホテルでは只今\x01",
"元からの宿泊客を含め、\x01",
"避難者を多数受け入れております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"備蓄食料もございますので、\x01",
"1ヶ月程度は凌げる見込みですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とにかく、詳しい情報が\x01",
"入らないことが皆様の不安に\x01",
"つながっているという状態ですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"現状、最小限の混乱で済んでいますが\x01",
"これが続くとなると……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00104Fそうですか……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00001F俺たちは、これからすぐに\x01",
"事態収束のため行動を開始します。\x02\x03",
"なので、しばらくの間\x01",
"このまま様子を見て頂けますか。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"ええ、かしこまりました。\x01",
"皆様もお気を付け下さいませ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x1BF, 6)
label("loc_DAE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x136, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_LSS), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_F3B")
ChrTalk(
0xB,
"《ホテル・ミレニアム》へようこそ。\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"うふふ、当ホテルでは\x01",
"お客様の様々なニーズに\x01",
"お応えしておりますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"何かご所望がございましたら、\x01",
"いつでも仰ってくださいませ。\x02",
)
)
CloseMessageWindow()
OP_5A()
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
Sound(814, 0, 100, 0)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"ホテルや宿酒場に宿泊すると\x01",
"CPを回復する事ができます。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"通常の宿酒場ではCP100、\x01",
"高級ホテルではCP200が回復します。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_F38")
SetScenarioFlags(0x0, 0)
label("loc_F38")
SetScenarioFlags(0x136, 5)
label("loc_F3B")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_F45")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_1F30")
FadeToDark(300, 0, 100)
Menu(
0,
-1,
-1,
1,
(
"話をする\x01", # 0
"休憩をする\x01", # 1
"やめる\x01", # 2
)
)
MenuEnd(0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_FA1")
OP_60(0x0)
FadeToBright(300, 0)
OP_0D()
label("loc_FA1")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_FC1")
OP_AF(0x45)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_1F2B")
label("loc_FC1")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_FD5")
Jump("loc_1F2B")
label("loc_FD5")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_1F2B")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_1100")
ChrTalk(
0xB,
(
"大統領拘束の一報を受け、\x01",
"避難者の方々もそれぞれ\x01",
"ご自宅に戻られましたわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"大樹については詳細不明ですが……\x01",
"とりあえずモヤが晴れたことで\x01",
"街もそれなりに落ち着いた印象です。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とにかく、私たちは\x01",
"今の内に各種出来る準備を\x01",
"進めておかなければいけません。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1100")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_12CB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_123C")
ChrTalk(
0xB,
(
"戒厳令と外出禁止令の通告を受け、\x01",
"当ホテルでは避難者の受け入れを\x01",
"すぐに検討したのですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"流石に、あのモヤと\x01",
"人形兵士の出現は想定外でした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"どうやら、皆さんの大統領への不満も\x01",
"極限まで高まっているようですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今はどちらかというと、\x01",
"不安の方が大きいという印象ですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_12C6")
label("loc_123C")
ChrTalk(
0xB,
(
"どうやら、皆さんの大統領への不満も\x01",
"極限まで高まっているようですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今はどちらかというと、\x01",
"不安の方が大きいという印象ですね。\x02",
)
)
CloseMessageWindow()
label("loc_12C6")
Jump("loc_1F2B")
label("loc_12CB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_145F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_13A7")
ChrTalk(
0xB,
(
"演説の様子は\x01",
"ホテルの導力ネットを通じて\x01",
"拝見しましたが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"流石に外国からいらした\x01",
"お客様の動揺には\x01",
"凄まじいものがありました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"何とか皆さん、本国まで\x01",
"辿り着けるとよいのですが……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_145A")
label("loc_13A7")
ChrTalk(
0xB,
(
"導力鉄道は本日をもって\x01",
"運行を停止するそうですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とにかく、お客様方が\x01",
"帰路につけないのでは話になりません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"飛行船の運航状況を含め、\x01",
"徹底的に情報を集めませんと。\x02",
)
)
CloseMessageWindow()
label("loc_145A")
Jump("loc_1F2B")
label("loc_145F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_1519")
ChrTalk(
0xB,
(
"夕暮れと炎の色に染まる歓楽街……\x01",
"あの日の光景は、まさに\x01",
"悪夢としか言い様がありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"……ともかく、一刻も早く\x01",
"日常を取り戻せるよう\x01",
"出来ることを尽くしませんと。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1519")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_15A2")
ChrTalk(
0xB,
(
"マインツ方面では\x01",
"今も警備隊の皆さんが\x01",
"奮戦されているそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"本当に警備隊の皆さんは\x01",
"私たち市民の誇りですわ。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_15A2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_1762")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_16D4")
ChrTalk(
0xB,
(
"昨日は列車事故の影響で、\x01",
"鉄道のダイヤが大きく乱れたため……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"結局、クロスベル滞在を\x01",
"一日延長した人もいらっしゃいました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とはいえ、何とか今朝までに\x01",
"完全に復旧できて幸いでしたわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"足止めされるになった\x01",
"お客様も、今朝には全員\x01",
"無事に送り出すことができましたので。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_175D")
label("loc_16D4")
ChrTalk(
0xB,
(
"足止めされることになった\x01",
"お客様も、今朝には全員\x01",
"無事に送り出すことができました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"これも全ては、\x01",
"警備隊の皆様のおかげですわね。\x02",
)
)
CloseMessageWindow()
label("loc_175D")
Jump("loc_1F2B")
label("loc_1762")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_1813")
ChrTalk(
0xB,
(
"何でも西クロスベル街道方面で\x01",
"列車事故が起きたそうですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今日これから帰路につく\x01",
"お客様がたを混乱させないためにも\x01",
"まずは情報を集めないといけませんね。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1813")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_19CF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1916")
ChrTalk(
0xB,
(
"導力ネットによる予約サービスも\x01",
"おかげ様で好評を頂いておりますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"現状まだまだ利用される方が\x01",
"少ないのも確かではありますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"更なるネットワーク拡充の折には、\x01",
"通信器によるご予約件数を\x01",
"必ず上回ると確信しておりますわ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_19CA")
label("loc_1916")
ChrTalk(
0xB,
(
"導力ネットの素晴らしい点は\x01",
"たとえ受付時間外であっても\x01",
"予約を頂ける所にありますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"通信器と違って、導力メールは\x01",
"24時間いつでも送受信することが\x01",
"可能でございますからね。\x02",
)
)
CloseMessageWindow()
label("loc_19CA")
Jump("loc_1F2B")
label("loc_19CF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_1A61")
ChrTalk(
0xB,
"国家独立の是非、でございますか……\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"大変難しい問題ではありますが、\x01",
"それを市民に問う事は\x01",
"非常に意義のある事だと思いますわ。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1A61")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_1ACB")
ChrTalk(
0xB,
(
"うふふ、ついに\x01",
"本会議が始まりますわね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"ディーター市長には\x01",
"頑張って頂きませんと。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1ACB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_1BA3")
ChrTalk(
0xB,
(
"私が支配人に就く以前から、\x01",
"当ホテルには過去に様々な要人の方を\x01",
"ご招待させて頂いた実績がございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今回は話がなく残念でしたが、\x01",
"各国首脳の皆様にもいつかの折には\x01",
"ご宿泊して頂きたいものですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1BA3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_1C5D")
ChrTalk(
0xB,
(
"ホテル業に携わるものとして\x01",
"明日からの通商会議は\x01",
"いやでも注目してしまいますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"結ばれる協定の内容によっては\x01",
"今後の観光客の数などに\x01",
"影響も出て来るでしょうからね。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1C5D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_1DBE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1D19")
ChrTalk(
0xB,
"今日は雨でございますわね。\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"当ホテルでは、雨の日でも\x01",
"お楽しみ頂ける観光スポットを\x01",
"ご紹介致しております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"ぜひ、お気軽に\x01",
"お問い合わせくださいませ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1DB9")
label("loc_1D19")
ChrTalk(
0xB,
(
"基本的に、この歓楽街は\x01",
"雨の日でもお楽しみ頂ける\x01",
"場所がほとんどですわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"それでも外出が億劫な方には、\x01",
"当ホテルの各種ルームサービスも\x01",
"オススメですわよ。\x02",
)
)
CloseMessageWindow()
label("loc_1DB9")
Jump("loc_1F2B")
label("loc_1DBE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_1F2B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1E88")
ChrTalk(
0xB,
"《ホテル・ミレニアム》へようこそ。\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"うふふ、当ホテルでは\x01",
"お客様の様々なニーズに\x01",
"お応えしておりますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"何かご要望がございましたら、\x01",
"いつでも仰ってくださいませ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1F2B")
label("loc_1E88")
ChrTalk(
0xB,
(
"エステや食事のルームサービス、\x01",
"各種ブッキングサービスに\x01",
"導力ネットを用いたご予約サービス……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"当ホテルでは、お客様の様々な\x01",
"ニーズにお応えしておりますわ。\x02",
)
)
CloseMessageWindow()
label("loc_1F2B")
Jump("loc_F45")
label("loc_1F30")
TalkEnd(0xB)
Return()
# Function_11_B27 end
def Function_12_1F34(): pass
label("Function_12_1F34")
Call(0, 13)
Return()
# Function_12_1F34 end
def Function_13_1F38(): pass
label("Function_13_1F38")
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x136, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_LSS), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_20A9")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"《ホテル・ミレニアム》へようこそ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"当フロントでは当日の宿泊予約も\x01",
"受け付けておりますので、\x01",
"どうぞお気軽にお申し付け下さいませ。\x02",
)
)
CloseMessageWindow()
OP_5A()
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
Sound(814, 0, 100, 0)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"ホテルや宿酒場に宿泊すると\x01",
"CPを回復する事ができます。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"通常の宿酒場ではCP100、\x01",
"高級ホテルではCP200が回復します。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
SetScenarioFlags(0x136, 5)
label("loc_20A9")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_20B3")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_304E")
FadeToDark(300, 0, 100)
Menu(
0,
-1,
-1,
1,
(
"話をする\x01", # 0
"休憩をする\x01", # 1
"やめる\x01", # 2
)
)
MenuEnd(0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_210F")
OP_60(0x0)
FadeToBright(300, 0)
OP_0D()
label("loc_210F")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_212F")
OP_AF(0x45)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_3049")
label("loc_212F")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_2143")
Jump("loc_3049")
label("loc_2143")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_3049")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_227C")
ChrTalk(
0x8,
(
"支配人の指示で、有事のための\x01",
"備えをこれまで以上に\x01",
"強化することになりました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"ただし、各種物資に関しては\x01",
"自治州内の限りある商品を無闇に\x01",
"買い占めるワケにもいきませんからね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"政府への相談も視野に入れ、\x01",
"外国方面から買い集める手段を\x01",
"さっそく探り始めている所です。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_227C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_2379")
ChrTalk(
0x8,
(
"モヤの出現と同時にここへ\x01",
"駆け込んだ方々の様子は、\x01",
"それはもうパニック状態でした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"人形の兵士が市民を襲う事は\x01",
"一応ないと分かってからは、\x01",
"少しは落ち着きましたが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"とにかく、一刻も早くこの状況を\x01",
"何とかして頂きたいものですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_2379")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_247B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_23F0")
ChrTalk(
0x8,
(
"……今朝の演説には\x01",
"本当に驚かされました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"大統領の主張、\x01",
"理解はできるのですが……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2476")
label("loc_23F0")
ChrTalk(
0x8,
(
"……ふむ、とりあえず\x01",
"余計な事は口に出すべきでは\x01",
"ありませんね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"何はともあれ……\x01",
"しばらくは成り行きを\x01",
"見守るしかありません。\x02",
)
)
CloseMessageWindow()
label("loc_2476")
Jump("loc_3049")
label("loc_247B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_254D")
ChrTalk(
0x8,
(
"襲撃の日、当ホテルには幸いにも\x01",
"大した被害はありませんでしたが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"アルカンシェルは……\x01",
"本当に酷いものです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"何が目的かは知りませんが……\x01",
"このような所業、\x01",
"許されるはずがありません。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_254D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_2600")
ChrTalk(
0x8,
(
"マインツで起こっている事件は\x01",
"帝国の陰謀ではないかと\x01",
"考えておられる方も多いみたいですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"それがもし本当だとしたら……\x01",
"不戦条約は一体何だったんでしょうか?\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_2600")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_26B2")
ChrTalk(
0x8,
(
"昨日は事故の影響で\x01",
"宿泊をキャンセルしたいという\x01",
"連絡を多く受けたのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"大陸横断鉄道はまさに\x01",
"我々にとっても生命線……\x01",
"被害が最小限で済んで一安心ですよ。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_26B2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_272A")
ChrTalk(
0x8,
"ふむ、列車事故ですか……\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"クロスベルでは\x01",
"比較的珍しいことですが……\x01",
"一体原因は何なんでしょうね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_272A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_287B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_27FC")
ChrTalk(
0x8,
(
"最近ドリスさんの仕事ぶりが\x01",
"めきめき良くなっているのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"掃除の手際はもちろん、\x01",
"お客様からの評判も上々でしてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"これも教育係を務める\x01",
"アーロンさんの指導の賜物ですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2876")
label("loc_27FC")
ChrTalk(
0x8,
(
"最近ドリスさんの仕事ぶりが\x01",
"めきめき良くなっているのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"これも教育係を務める\x01",
"アーロンさんの指導の賜物ですね。\x02",
)
)
CloseMessageWindow()
label("loc_2876")
Jump("loc_3049")
label("loc_287B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_2A1F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_297B")
ChrTalk(
0x8,
(
"私はクロスベル人ですが、\x01",
"以前は帝国のホテルに\x01",
"勤めていた事があるのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"こう言っては何ですが、\x01",
"帝国にいた頃は貴族の方々に\x01",
"神経をすり減らす日々でしてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"こちらに来てからは、伸び伸び\x01",
"仕事をさせて頂いておりますよ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2A1A")
label("loc_297B")
ChrTalk(
0x8,
(
"帝国のホテルはサービス技術を\x01",
"学ぶには良い環境でしたが、\x01",
"その分気疲れも相当なものでした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"こちらに来てからは、伸び伸び\x01",
"仕事をさせて頂いておりますよ。\x02",
)
)
CloseMessageWindow()
label("loc_2A1A")
Jump("loc_3049")
label("loc_2A1F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_2B8D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2B04")
ChrTalk(
0x8,
(
"昨日、仕事帰りに\x01",
"遠目ながらオルキスタワーを\x01",
"拝見させて頂いたのですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"いやはや、あの迫力たるや\x01",
"話に聞いていた以上でしたね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"今度はぜひとも近くに行って、\x01",
"ビルを見上げてみたいものです。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2B88")
label("loc_2B04")
ChrTalk(
0x8,
(
"遠目ながら、オルキルタワーの\x01",
"迫力には本当に圧倒されました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"今度はぜひとも近くに行って、\x01",
"ビルを見上げてみたいものですね。\x02",
)
)
CloseMessageWindow()
label("loc_2B88")
Jump("loc_3049")
label("loc_2B8D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_2CDE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2C57")
ChrTalk(
0x8,
(
"お客様は除幕式の様子を\x01",
"ご見学されましたか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"何でも花火も打ち上げられ、\x01",
"大変見応えのある式典だったとか。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"オルキスタワーの威容……\x01",
"私も早く拝見したいものですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2CD9")
label("loc_2C57")
ChrTalk(
0x8,
(
"除幕式では花火も打ち上げられ、\x01",
"大変見応えがあったと聞きました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"オルキスタワーの威容……\x01",
"私も早く拝見したいものですね。\x02",
)
)
CloseMessageWindow()
label("loc_2CD9")
Jump("loc_3049")
label("loc_2CDE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_2E60")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2DD2")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"本日も良い天気でございますね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"街は警察による警戒体制が\x01",
"敷かれておりますが、\x01",
"観光日和には違いありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"どこか良いスポットを\x01",
"お探しでしたら、目的に合わせて\x01",
"ご案内させて頂きますよ?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2E5B")
label("loc_2DD2")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"本日も良い天気でございますね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"どこか良いスポットを\x01",
"お探しでしたら、目的に合わせて\x01",
"ご案内させて頂きますよ?\x02",
)
)
CloseMessageWindow()
label("loc_2E5B")
Jump("loc_3049")
label("loc_2E60")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_2FA3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2F2A")
ChrTalk(
0x8,
(
"おはようございます。\x01",
"本日もようこそいらっしゃいませ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"雨天時のお出掛けの際は\x01",
"仰っていただければ、\x01",
"傘のご提供もさせて頂きます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"どうぞお気軽にお申し付け下さい。\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2F9E")
label("loc_2F2A")
ChrTalk(
0x8,
(
"雨天時のお出掛けの際は\x01",
"仰っていただければ、\x01",
"傘のご提供もさせて頂きます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"どうぞお気軽にお申し付け下さい。\x02",
)
CloseMessageWindow()
label("loc_2F9E")
Jump("loc_3049")
label("loc_2FA3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_3049")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"《ホテル・ミレニアム》へようこそ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"当フロントでは当日の宿泊予約も\x01",
"受け付けておりますので、\x01",
"どうぞお気軽にお申し付け下さいませ。\x02",
)
)
CloseMessageWindow()
label("loc_3049")
Jump("loc_20B3")
label("loc_304E")
TalkEnd(0x8)
Return()
# Function_13_1F38 end
def Function_14_3052(): pass
label("Function_14_3052")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_312D")
ChrTalk(
0xFE,
(
"先ほどレティシア支配人から\x01",
"当分は利益度外視で営業を行うとの\x01",
"意思表明がございました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"今はクロスベル全体が困難にある時……\x01",
"私もこれまでの経験を全て活かすつもりで\x01",
"全力で支配人を支える所存です。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_312D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_31E6")
ChrTalk(
0xFE,
(
"この度のホテルの無償提供で、\x01",
"支配人がミラではなく人を大切される\x01",
"方であることがよく分かりました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"そんな方の元で仕事が出来るのは\x01",
"本当に幸せなことだと思いますよ。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_31E6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_32AE")
ChrTalk(
0xFE,
(
"独立宣言以降、お客様の数は\x01",
"日に日に減ってはいたのですが……\x01",
"今朝の演説は決定的ですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"こういう時にする話でもありませんが……\x01",
"当ホテルも経営方針を見直さざるを\x01",
"得ないでしょう。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_32AE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_3415")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_338D")
ChrTalk(
0xFE,
(
"襲撃事件が街に残した爪痕は\x01",
"余りに大きいと言う他ありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"警備隊の被害は甚大、\x01",
"それにあのイリア嬢まで……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"このようなことは\x01",
"二度と起こってはならない……\x01",
"ただ、そう思うばかりです。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3410")
label("loc_338D")
ChrTalk(
0xFE,
(
"警備隊の被害は甚大、\x01",
"それにあのイリア嬢まで……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"このようなことは\x01",
"二度と起こってはならない……\x01",
"ただ、そう思うばかりです。\x02",
)
)
CloseMessageWindow()
label("loc_3410")
Jump("loc_3E42")
label("loc_3415")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_34A6")
ChrTalk(
0xFE,
(
"昨日起こった襲撃事件……\x01",
"まだまだ事態は収束に\x01",
"向かってくれないようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"マインツの住民の\x01",
"皆様のことが本当に心配です。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_34A6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_354D")
ChrTalk(
0xFE,
(
"脱線事故は不可思議な\x01",
"魔獣の仕業と聞きましたが……\x01",
"何とも不気味な話でございますね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"何か不吉なことの前触れ、\x01",
"などとは考えたくないものですが……\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_354D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_360A")
ChrTalk(
0xFE,
(
"そろそろチェックインのお客様が\x01",
"見え出す時間なのですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"どうやら列車事故の影響が\x01",
"さっそく出ているようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"皆さん、無事に\x01",
"到着して頂けるとよいのですが……\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_360A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_36B4")
ChrTalk(
0xFE,
(
"今の時期、デラックスルームが\x01",
"空室になることは\x01",
"そう珍しいことではありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"つまり、確実に\x01",
"ご宿泊になられたい方には\x01",
"今が狙い目ということですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_36B4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_389A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_37DB")
ChrTalk(
0xFE,
(
"国家独立の是非……\x01",
"基本的に賛成意見が多いものの\x01",
"様々な意見があるようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私のような年寄りは、\x01",
"どうしても2大国の脅威について\x01",
"ばかり考えてしまいますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"当ホテルがそうであったように、\x01",
"クロスベル自治州も今こそ変化が\x01",
"必要な時期なのかもしれませんね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3895")
label("loc_37DB")
ChrTalk(
0xFE,
(
"私のような年寄りは、\x01",
"どうしても2大国の脅威について\x01",
"ばかり考えてしまいますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"当ホテルがそうであったように、\x01",
"クロスベル自治州も今こそ変化が\x01",
"必要な時期なのかもしれませんね。\x02",
)
)
CloseMessageWindow()
label("loc_3895")
Jump("loc_3E42")
label("loc_389A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_3929")
ChrTalk(
0xFE,
(
"いよいよ通商会議の\x01",
"本会議が始まるわけですな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"いやはや、ディーター市長と\x01",
"マクダエル議長には\x01",
"期待せずにはおれませんね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_3929")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_39C7")
ChrTalk(
0xFE,
(
"オルキスタワーには全てのフロアに\x01",
"導力ネットが引かれているそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"ふふ、当ホテルの導力ネット予約も\x01",
"ますます盛況になりそうですな。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_39C7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_3B40")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3AA4")
ChrTalk(
0xFE,
(
"少し前まで頼りない所もあった\x01",
"ドリスさんですが、なかなかどうして\x01",
"最近は安心して見ていられますよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"後進の成長を肌で感じられる……\x01",
"教育係を任された者として\x01",
"これ以上の喜びはありませんな。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3B3B")
label("loc_3AA4")
ChrTalk(
0xFE,
(
"ドリスさんが成長してくれて\x01",
"本当に嬉しく思います。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"後進の成長を肌で感じられる……\x01",
"教育係を任された者として\x01",
"これ以上の喜びはありませんからね。\x02",
)
)
CloseMessageWindow()
label("loc_3B3B")
Jump("loc_3E42")
label("loc_3B40")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_3CEA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3C37")
ChrTalk(
0xFE,
(
"記念祭ほどの盛況さはないものの、\x01",
"当ホテルの客足は\x01",
"順調に推移してございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"支配人肝いりの導力ネットによる\x01",
"予約システムも反響は上々ですし……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"いやはや、《ホテル・ミレニアム》の\x01",
"未来は明るうございますな。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3CE5")
label("loc_3C37")
ChrTalk(
0xFE,
(
"伝統と革新の融合……\x01",
"それがレティシア支配人の目指す\x01",
"当ホテルのあり方でございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私も最初は戸惑いもしましたが、\x01",
"今では支配人のことを\x01",
"全面的に信頼しておりますよ。\x02",
)
)
CloseMessageWindow()
label("loc_3CE5")
Jump("loc_3E42")
label("loc_3CEA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_3E42")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3DBF")
ChrTalk(
0xFE,
(
"当ホテルは今年で開業60周年……\x01",
"ちなみに私がここで働き始めて\x01",
"早30年以上の歳月が経ちました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"気づけば一番の古株ですよ。\x01",
"いやはや、時代の流れというのは\x01",
"本当に早いものですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3E42")
label("loc_3DBF")
ChrTalk(
0xFE,
(
"このホテルで働き始めて\x01",
"早30年以上……\x01",
"気づけば一番の古株ですよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"いやはや、時代の流れというのは\x01",
"本当に早いものですね。\x02",
)
)
CloseMessageWindow()
label("loc_3E42")
TalkEnd(0xFE)
Return()
# Function_14_3052 end
def Function_15_3E46(): pass
label("Function_15_3E46")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_3EEE")
ChrTalk(
0xFE,
(
"ようやく、避難者の方々を一通り\x01",
"お見送りすることができました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"こんな状況ですが……\x01",
"皆さんから、お礼の言葉をいただけて\x01",
"本当に嬉しかったです。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_3EEE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_3F95")
ChrTalk(
0xFE,
(
"状況がどうあれ……\x01",
"ホテルがここまで忙しくなるのは\x01",
"ずいぶん久しぶりです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"ホテル・ミレニアムの一員として、\x01",
"全力でサービスに努めさせて頂きます。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_3F95")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_40A3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_403F")
ChrTalk(
0xFE,
(
"私にはうまく事態を\x01",
"飲み込めないのですが……\x01",
"今は複雑な気持ちで一杯です。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"確かに私たちは投票によって、\x01",
"独立に賛成したわけですけど……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_409E")
label("loc_403F")
ChrTalk(
0xFE,
(
"……手を止めると、何だか\x01",
"色々考えちゃってダメですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"さてと……仕事に励みませんと。\x02",
)
CloseMessageWindow()
label("loc_409E")
Jump("loc_486F")
label("loc_40A3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_4184")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x198, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_40D5")
Call(0, 42)
Return()
label("loc_40D5")
ChrTalk(
0xFE,
(
"魔獣の咆哮に銃撃の音、\x01",
"それに警官隊の方々の怒号に悲鳴……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"……襲撃の日のことを思い出すと、\x01",
"今でも震えが止まりません……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"どうして、この街でこんなことが……\x02",
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_4184")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_41ED")
ChrTalk(
0xFE,
(
"マインツの事件……\x01",
"本当にとんでもない話ですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"一刻も早く解決して欲しいです……\x02",
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_41ED")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_42A5")
ChrTalk(
0xFE,
(
"昨日の列車事故では\x01",
"多くの怪我人が出たそうですが、\x01",
"幸い死者は出なかったそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"でも中にはかなり\x01",
"重傷の方もいたとか……\x01",
"とにかく、早く良くなって欲しいです。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_42A5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_4306")
ChrTalk(
0xFE,
(
"列車の事故だなんて……\x01",
"本当に恐ろしいですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"……乗客の方々が心配です。\x02",
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_4306")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_4414")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_438C")
ChrTalk(
0xFE,
"さてと、今日も頑張ってお仕事です。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"あっと、お客様への笑顔も\x01",
"忘れないようにしませんと。(ニコリ)\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_440F")
label("loc_438C")
ChrTalk(
0xFE,
(
"誰かのために汗を掻くのって\x01",
"本当に気持ち良いですよね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"何ていうか、\x01",
"自分って必要とされてるんだな、\x01",
"って実感できるんです。\x02",
)
)
CloseMessageWindow()
label("loc_440F")
Jump("loc_486F")
label("loc_4414")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_44B8")
ChrTalk(
0xFE,
(
"アルカンシェルの\x01",
"リニューアル公演の日が\x01",
"いよいよ近づいて来ましたね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"流石にチケットは\x01",
"取れませんでしたけど、\x01",
"どんな舞台になるか楽しみです。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_44B8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_4604")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4580")
ChrTalk(
0xFE,
(
"お客様、タイムズ百貨店の\x01",
"屋上へはもう行かれましたか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"何でもオルキスタワーを観る\x01",
"絶景スポットだそうですよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"もしまだでしたら、\x01",
"行ってみてはいかがでしょうか?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_45FF")
label("loc_4580")
ChrTalk(
0xFE,
(
"タイムズ百貨店の屋上は\x01",
"オルキスタワーを観る\x01",
"絶景スポットだそうですよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"お客様もぜひ、\x01",
"行ってみてはいかがでしょうか?\x02",
)
)
CloseMessageWindow()
label("loc_45FF")
Jump("loc_486F")
label("loc_4604")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_46BE")
ChrTalk(
0xFE,
(
"VIPの方々は除幕式の後、\x01",
"それぞれ色々な場所を\x01",
"ご訪問されるご予定だそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"どこかで拝見できたりすると\x01",
"嬉しいんですけど……\x01",
"ガードが固いから難しいですよね。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_46BE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_474C")
ChrTalk(
0xFE,
(
"最近アーロンさんに叱られることが\x01",
"少なくなって来たんです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私、成長してるんでしょうか?\x01",
"ふふ、だとしたら嬉しいですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_474C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_47F8")
ChrTalk(
0xFE,
(
"雨の日はどうしても\x01",
"泥汚れが付いてしまうので、\x01",
"カーペット掃除が大変なんです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"でも綺麗になって行く所が\x01",
"目に見えるのって、\x01",
"けっこう快感なんですよね。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_47F8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_486F")
ChrTalk(
0xFE,
(
"おはようございます。\x01",
"ご宿泊のお客様でしょうか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"外出の際は、お部屋の鍵を\x01",
"忘れずにお掛けくださいね。\x02",
)
)
CloseMessageWindow()
label("loc_486F")
TalkEnd(0xFE)
Return()
# Function_15_3E46 end
def Function_16_4873(): pass
label("Function_16_4873")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_48FC")
ChrTalk(
0xFE,
"ふむ、今日は何をして過ごそうか。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"支配人の言う通り、ホテルの\x01",
"サービスを味わい尽くすというのも\x01",
"案外いいかもな。\x02",
)
)
CloseMessageWindow()
Jump("loc_4981")
label("loc_48FC")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_4981")
ChrTalk(
0xFE,
(
"このホテル、部屋はもちろん\x01",
"サービスも一流だな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"ふむ、今後もクロスベル旅行の際は\x01",
"ぜひここを利用するようにしよう。\x02",
)
)
CloseMessageWindow()
label("loc_4981")
TalkEnd(0xFE)
Return()
# Function_16_4873 end
def Function_17_4985(): pass
label("Function_17_4985")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_4A00")
ChrTalk(
0xFE,
(
"ふふ、確かにホテルで\x01",
"過ごすのも悪くなさそうね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私としては、カジノに\x01",
"入り浸りたいところだけど。\x02",
)
)
CloseMessageWindow()
Jump("loc_4A90")
label("loc_4A00")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_4A90")
ChrTalk(
0xFE,
(
"ふふ、今回は導力鉄道で\x01",
"来たのだけど、移動の疲れが\x01",
"すっかり取れたわよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"少し値段は張るけど、\x01",
"値段以上の価値があるのは確かね。\x02",
)
)
CloseMessageWindow()
label("loc_4A90")
TalkEnd(0xFE)
Return()
# Function_17_4985 end
def Function_18_4A94(): pass
label("Function_18_4A94")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"僕と妹は家に帰ってる途中、\x01",
"突然モヤに巻き込まれて\x01",
"このホテルに避難してきたんだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"……ホント、\x01",
"命からがらって気分だったよ。\x02",
)
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_18_4A94 end
def Function_19_4B20(): pass
label("Function_19_4B20")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"お兄ちゃん、モヤが出た時\x01",
"私のことをおんぶして\x01",
"走り回ってくれたんだよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"えへへ、かっこよかったな噴\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_19_4B20 end
def Function_20_4B93(): pass
label("Function_20_4B93")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"今頃みんな心配してるだろうな……\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_20_4B93 end
def Function_21_4BC0(): pass
label("Function_21_4BC0")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"早くお家に帰りたいわ……\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_21_4BC0 end
def Function_22_4BE5(): pass
label("Function_22_4BE5")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4CCA")
ChrTalk(
0xFE,
(
"街を守っていた結界は\x01",
"一体どこへ行ったんだ……!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"それに、あの気味の悪い\x01",
"化物は一体なんなんだ……!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"大統領演説の日に故郷#4Rく に#に\x01",
"帰り損ねただけでこの仕打ち……\x01",
"もういい加減にしてくれたまえ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 5)
Jump("loc_4D2E")
label("loc_4CCA")
ChrTalk(
0xFE,
(
"大統領演説の日に故郷#4Rく に#に\x01",
"帰り損ねただけでこの仕打ち……\x01",
"もういい加減にしてくれたまえ。\x02",
)
)
CloseMessageWindow()
label("loc_4D2E")
TalkEnd(0xFE)
Return()
# Function_22_4BE5 end
def Function_23_4D32(): pass
label("Function_23_4D32")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"こんな状況になると\x01",
"分かっていれば、すぐに\x01",
"家に帰っていたんだがなぁ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"まあただ、こんないい部屋に\x01",
"無償で通してくれたことは\x01",
"すごくラッキーだったけどね。\x02",
)
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_23_4D32 end
def Function_24_4DDB(): pass
label("Function_24_4DDB")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"ホテルに避難できたのは\x01",
"ほんと不幸中の幸いね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"でもこの状況……\x01",
"一体いつまで続くのかしら?\x02",
)
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_24_4DDB end
def Function_25_4E46(): pass
label("Function_25_4E46")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"えへへ、このお部屋おっきーね♪\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_25_4E46 end
def Function_26_4E71(): pass
label("Function_26_4E71")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 5)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_4EA5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 6)), scpexpr(EXPR_END)), "loc_4EA2")
Call(0, 29)
Jump("loc_4EA5")
label("loc_4EA2")
Call(0, 28)
label("loc_4EA5")
Return()
# Function_26_4E71 end
def Function_27_4EA6(): pass
label("Function_27_4EA6")
TalkBegin(0xFF)
Sound(807, 0, 100, 0)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"扉には鍵がかかっている。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
TalkEnd(0xFF)
Return()
# Function_27_4EA6 end
def Function_28_4ED8(): pass
label("Function_28_4ED8")
EventBegin(0x0)
FadeToDark(500, 0, -1)
OP_0D()
SetChrFlags(0xA, 0x80)
EndChrThread(0xA, 0x0)
LoadChrToIndex("chr/ch32300.itc", 0x1E)
ClearChrFlags(0x17, 0x80)
ClearChrBattleFlags(0x17, 0x8000)
SetChrChipByIndex(0x17, 0x1E)
SetChrSubChip(0x17, 0x0)
SetChrPos(0x17, 68000, 0, 12400, 315)
OP_68(68140, 1500, 9270, 0)
MoveCamera(312, 19, 0, 0)
OP_6E(400, 0)
SetCameraDistance(21270, 0)
SetChrPos(0x101, 67400, 0, 9530, 0)
SetChrPos(0x102, 68780, 0, 9180, 0)
SetChrPos(0x103, 66670, 0, 8520, 0)
SetChrPos(0x104, 68370, 0, 8240, 0)
SetChrPos(0x109, 67410, 0, 7270, 0)
SetChrPos(0x105, 69430, 0, 7040, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
FadeToBright(1000, 0)
OP_0D()
ChrTalk(
0x17,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"──ええ、それではまた明日。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"今後ともよろしくお願いします。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x105, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
SetChrPos(0x17, 68000, 0, 13400, 315)
OP_A7(0x17, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
ClearMapObjFlags(0x1, 0x10)
Sound(103, 0, 100, 0)
OP_71(0x1, 0x0, 0x10, 0x0, 0x0)
OP_79(0x1)
def lambda_50CD():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x17, 2, lambda_50CD)
def lambda_50DE():
OP_97(0xFE, 0x0, 0x0, 0xFFFFF79A, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_50DE)
WaitChrThread(0x17, 1)
OP_71(0x1, 0x10, 0x0, 0x0, 0x0)
OP_79(0x1)
Sound(104, 0, 100, 0)
OP_63(0x17, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x17,
"おや……あんたたちは。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fえっと、すみません。\x02\x03",
"アルモリカ村の\x01",
"デリックさんですよね?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"ああ、その通りだが……\x01",
"俺に何か用なのか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000F申し遅れました……\x01",
"警察の特務支援課の者です。\x02\x03",
"少し話をお聞かせ願えませんか?\x02",
)
)
CloseMessageWindow()
OP_63(0x17, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x17)
ChrTalk(
0x17,
"……なるほどな。\x02",
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"あんたたちは村長……\x01",
"親父の差し金だな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"警察まで呼ぶなんて……\x01",
"フン、ご苦労なことだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00105Fえ、えっとあの……\x02",
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"……大体見当はついてる。\x01",
"俺の最近の行動を\x01",
"洗おうって言うんだろう。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"別に後ろ暗いことを\x01",
"しているわけじゃないんだ、\x01",
"なんでも聞いてみろ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
"#10303F(ふむ……意外な反応だね。)\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F……では、単刀直入に聞きます。\x02\x03",
"#00001Fここ数日、あなたは\x01",
"ミンネスさんという方と\x01",
"付き合いがあるそうですが……\x02\x03",
"一体、どういう目的が?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"……まあ、いいだろう。\x01",
"いまさら知ったところで\x01",
"親父にはどうにもできまい。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"少し前から、ミンネスさんには\x01",
"あることについて世話になっている。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
"主に、村の改革についてな。\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x105, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x109,
"#10105Fむ、村の改革ですか……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fそ、そんな大事なことを\x01",
"村長さんに黙って\x01",
"進めているんですか?\x02\x03",
"#00006Fいくらなんでも、\x01",
"それはよくないような……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"村長……親父には\x01",
"今まで何度も話したさ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"だが、返す言葉は決まって\x01",
"『あるべき姿を見失うな』だの\x01",
"『急激な変化はよくない』だの……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"だが、現状を維持しても\x01",
"あんな田舎の村に\x01",
"未来があるとは思えない。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"村を存続させるには、\x01",
"改革が絶対に必要なんだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"親父はそこのところを、\x01",
"分かってないんだ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00203Fなるほど……\x01",
"そんな中、そのミンネスという\x01",
"人物に出会ったわけですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"……彼は、親父と違って\x01",
"俺の相談に乗ってくれた。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"そして、アルモリカ村の\x01",
"養蜂業に大きな可能性を\x01",
"見出してくれたらしくてな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"近々、彼と協力して\x01",
"大きな事業を立ち上げる\x01",
"計画もあるんだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306Fな、なんつーか\x01",
"途方もねえ話だなあ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"フン……\x01",
"俺が話せるのはこの位だ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"もういいだろう?\x01",
"そろそろ村に帰らせてもらうぞ。\x02",
)
)
CloseMessageWindow()
def lambda_58B1():
OP_95(0xFE, 74620, 0, 5690, 2000, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_58B1)
Sleep(2000)
def lambda_58CE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_58CE)
Sleep(50)
def lambda_58DE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_58DE)
Sleep(50)
def lambda_58EE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_58EE)
Sleep(50)
def lambda_58FE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_58FE)
Sleep(50)
def lambda_590E():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 1, lambda_590E)
Sleep(50)
def lambda_591E():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_591E)
WaitChrThread(0x17, 1)
SetChrFlags(0x17, 0x80)
OP_0D()
ChrTalk(
0x102,
"#00105Fあっ……\x02",
)
CloseMessageWindow()
ChrTalk(
0x109,
"#10106F行ってしまいましたね……\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_68(68210, 1500, 8580, 2000)
OP_6F(0x1)
ChrTalk(
0x101,
(
"#00003Fとにかく……\x01",
"折角ここまできたんだ。\x02\x03",
"#00000Fここは一つ、\x01",
"ミンネスという男に、\x01",
"直接会ってみよう。\x02",
)
)
CloseMessageWindow()
def lambda_5A07():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x102, 1, lambda_5A07)
Sleep(50)
def lambda_5A17():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x103, 1, lambda_5A17)
Sleep(50)
def lambda_5A27():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x104, 1, lambda_5A27)
Sleep(50)
def lambda_5A37():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x109, 1, lambda_5A37)
Sleep(50)
def lambda_5A47():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x105, 1, lambda_5A47)
ChrTalk(
0x104,
(
"#00303Fなるほど……\x01",
"色々と分かるかも知れねえな。\x02\x03",
"#00300Fよっしゃ、そんじゃ\x01",
"早速突入してみるとするか。\x02",
)
)
CloseMessageWindow()
FadeToDark(500, 0, -1)
OP_0D()
SetScenarioFlags(0x174, 6)
OP_29(0x82, 0x1, 0x6)
OP_D7(0x1E)
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
ClearChrFlags(0xA, 0x80)
SetChrPos(0xA, 50740, 0, 9750, 90)
BeginChrThread(0xA, 0, 0, 1)
SetChrPos(0x0, 68510, 0, 9710, 0)
OP_69(0xFF, 0x0)
EventEnd(0x5)
Return()
# Function_28_4ED8 end
def Function_29_5B19(): pass
label("Function_29_5B19")
EventBegin(0x0)
FadeToDark(500, 0, -1)
OP_0D()
SetChrFlags(0xA, 0x80)
EndChrThread(0xA, 0x0)
OP_4B(0xC, 0xFF)
OP_68(68560, 1500, 10330, 0)
MoveCamera(315, 26, 0, 0)
OP_6E(400, 0)
SetCameraDistance(20260, 0)
SetChrPos(0x101, 67900, 0, 11200, 0)
SetChrPos(0x102, 69690, 0, 9980, 315)
SetChrPos(0x103, 66720, 0, 10430, 45)
SetChrPos(0x104, 68370, 0, 9740, 0)
SetChrPos(0x109, 67410, 0, 8770, 0)
SetChrPos(0x105, 69430, 0, 8540, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
FadeToBright(1000, 0)
OP_0D()
ChrTalk(
0x101,
(
"#00001Fそれじゃあ……\x01",
"早速入ってみるぞ。\x02",
)
)
CloseMessageWindow()
Sleep(600)
Sound(808, 0, 100, 0)
Sleep(1000)
SetMessageWindowPos(330, 20, -1, -1)
SetChrName("中年の声")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"おや……\x01",
"どちらさまですかな?\x02\x03",
"ルームサービスを\x01",
"頼んだ覚えはありませぬが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fミンネスさん……ですね?\x02\x03",
"#00004F突然すみません、\x01",
"クロスベル警察・\x01",
"特務支援課の者です。\x02\x03",
"#00000F2、3、お聞きしたいことが\x01",
"あるのですが……\x02",
)
)
CloseMessageWindow()
SetChrName("中年の声")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"おやおや、\x01",
"警察の方がわざわざ……\x02\x03",
"そういうことなら\x01",
"どうぞ、お入りください。\x01",
"鍵は開いておりますゆえ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00305F(な、なんだかえらくあっさり\x01",
" 入れてくれるんだな。)\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F(俺たちが考えている以上の\x01",
" やり手なのかもしれないな……)\x02\x03",
"#00005Fえっと……\x01",
"それでは、失礼します。\x02",
)
)
CloseMessageWindow()
ClearMapObjFlags(0x1, 0x10)
Sound(103, 0, 100, 0)
OP_71(0x1, 0x0, 0x10, 0x0, 0x0)
OP_79(0x1)
FadeToDark(500, 0, -1)
OP_0D()
OP_71(0x1, 0x10, 0x0, 0x0, 0x0)
OP_79(0x1)
OP_68(169250, 1500, 2800, 0)
MoveCamera(311, 16, 0, 0)
OP_6E(400, 0)
SetCameraDistance(19200, 0)
SetChrPos(0xC, 168410, 0, 5520, 180)
SetChrPos(0x101, 168960, 0, -2080, 0)
SetChrPos(0x102, 168960, 0, -2080, 0)
SetChrPos(0x103, 168960, 0, -2080, 0)
SetChrPos(0x104, 168960, 0, -2080, 0)
SetChrPos(0x109, 168960, 0, -2080, 0)
SetChrPos(0x105, 168960, 0, -2080, 0)
OP_A7(0x101, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x102, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x103, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x104, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x109, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x105, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
FadeToBright(500, 0)
OP_0D()
OP_68(169610, 1500, 3430, 3000)
BeginChrThread(0x101, 3, 0, 30)
Sleep(500)
BeginChrThread(0x102, 3, 0, 31)
Sleep(500)
BeginChrThread(0x103, 3, 0, 32)
Sleep(500)
BeginChrThread(0x104, 3, 0, 33)
Sleep(500)
BeginChrThread(0x109, 3, 0, 34)
Sleep(500)
BeginChrThread(0x105, 3, 0, 35)
WaitChrThread(0x105, 3)
OP_6F(0x1)
ChrTalk(
0xC,
(
"#11Pお初にお目にかかります。\x01",
"私がミンネスにございますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P本日はどういった\x01",
"ご用件でしょう?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F先ほども言った通り……\x01",
"いくつか質問をさせて\x01",
"いただこうと思います。\x02\x03",
"#00001Fご協力いただけますか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pもちろんですとも。\x01",
"私に協力できることなら何なりと……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pなにか、この辺りで\x01",
"事件でも起こりましたかな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fいえ……\x01",
"聞きたいことというのは\x01",
"あなたについてです。\x02\x03",
"あなたがどういった人物なのか、\x01",
"アルモリカ村でなにをしようと\x01",
"しているのか……\x02\x03",
"#00001F一通り、お聞かせ願いたいのですが。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pほう……?\x02",
)
CloseMessageWindow()
StopBGM(0xBB8)
ChrTalk(
0xC,
(
"#11Pまあいいでしょう。\x01",
"それくらいは詮無きことです。\x02",
)
)
CloseMessageWindow()
WaitBGM()
Sleep(10)
PlayBGM("ed7111", 0)
ChrTalk(
0xC,
(
"#11Pコホン……私はある会社で役員を\x01",
"させてもらっている者でしてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P仕事内容は、商品開発から\x01",
"営業まで幅広くしております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pアルモリカ村へは、わが社……\x01",
"『クインシー社』の重要な取引きのため\x01",
"訪問させていただいた次第です。\x02",
)
)
CloseMessageWindow()
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
ChrTalk(
0x102,
(
"#00105Fえ……ええっ!\x01",
"あのクインシー社ですか?\x02",
)
)
CloseMessageWindow()
def lambda_6361():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x101, 1, lambda_6361)
Sleep(50)
def lambda_6371():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x103, 1, lambda_6371)
Sleep(50)
def lambda_6381():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x104, 1, lambda_6381)
Sleep(50)
def lambda_6391():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x109, 1, lambda_6391)
Sleep(300)
ChrTalk(
0x104,
(
"#00305F初めて聞く名前だが……\x01",
"お嬢は知ってるのかよ?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x102, 0x104, 500)
Sleep(300)
ChrTalk(
0x102,
(
"#00105Fえっと……クインシー社というのは、\x01",
"外国の有名なお菓子メーカーなの。\x02\x03",
"#00104F製菓業界でもかなりの大企業で、\x01",
"確か、クロスベルにも\x01",
"商品が輸入されてたと思うわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fああ、そういえば子供の頃、\x01",
"そんなメーカーのチョコレートを\x01",
"よく買って食べてたような……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00303Fうーん、メーカーなんぞ\x01",
"あまり意識して見ないからなあ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pふふ、それもまた\x01",
"仕方のないことでありましょう。\x02",
)
)
CloseMessageWindow()
def lambda_656C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_656C)
Sleep(50)
def lambda_657C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_657C)
Sleep(50)
def lambda_658C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_658C)
Sleep(50)
def lambda_659C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_659C)
Sleep(50)
def lambda_65AC():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_65AC)
Sleep(300)
ChrTalk(
0xC,
(
"#11P私自身、この立場にはいますが\x01",
"甘い物は苦手でしてねぇ。\x01",
"昔は本当に疎いものでした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P長年営業方面で活躍したおかげで\x01",
"力を認められ、今の地位に\x01",
"つかせてもらったわけですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P……おっと、\x01",
"話が逸れてしまいましたかな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fあ……い、いえ。\x01",
"こちらこそ失礼しました。\x02\x03",
"#00003F……コホン。\x01",
"先ほど、アルモリカ村で\x01",
"『取引き』と仰いましたね。\x02\x03",
"#00001Fその『取引き』とは……\x01",
"村長の息子、デリックさんに\x01",
"関係のあることなんですね?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00100F何でも、村の発展に\x01",
"関係のあることのようですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pおや……\x01",
"そこまで知っておいででしたか。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pふむ、デリックさん自ら\x01",
"情報を解禁したというのなら、\x01",
"隠す意味はありませんな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pふふ、彼とは友好的な関係を\x01",
"築かせていただいております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00203Fやはり……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00001F詳しく聞かせていただけますか?\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pふふ、いいでしょう。\x02",
)
CloseMessageWindow()
OP_68(167980, 1500, 3640, 3000)
def lambda_68F2():
OP_95(0xFE, 164960, 0, 5520, 2000, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_68F2)
Sleep(500)
def lambda_690F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_690F)
Sleep(50)
def lambda_691F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_691F)
Sleep(50)
def lambda_692F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_692F)
Sleep(50)
def lambda_693F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_693F)
Sleep(50)
def lambda_694F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 1, lambda_694F)
Sleep(50)
def lambda_695F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_695F)
WaitChrThread(0xC, 1)
OP_6F(0x1)
ChrTalk(
0xC,
(
"#5P我がクインシー社は、\x01",
"製菓業界の未来の為、\x01",
"日々、研鑽を重ねています。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pそんな中、私は本社より\x01",
"ある使命を賜って参りました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pそれは、このクロスベルへの\x01",
"クインシー社の進出、\x01",
"その足がかりを模索することです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00105Fつまり……\x01",
"クインシー社の子会社を\x01",
"クロスベルに?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#5Pふふ、その通りです。\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pそして、手始めに市内の百貨店に\x01",
"ヒントを探しに行った所で……\x01",
"私は出会ったのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pかのアルモリカ村で作られるという、\x01",
"大変質のよい『蜂蜜』をね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x109,
(
"#10100F蜂蜜……アルモリカの\x01",
"レンゲ畑で作られるアレですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fハロルドさんもその質は\x01",
"保証していたっけ……\x02",
)
)
CloseMessageWindow()
OP_68(169610, 1500, 3430, 3000)
def lambda_6BD6():
OP_95(0xFE, 168410, 0, 5520, 2000, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_6BD6)
Sleep(500)
def lambda_6BF3():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_6BF3)
Sleep(50)
def lambda_6C03():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_6C03)
Sleep(50)
def lambda_6C13():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_6C13)
Sleep(50)
def lambda_6C23():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_6C23)
Sleep(50)
def lambda_6C33():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 1, lambda_6C33)
Sleep(50)
def lambda_6C43():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_6C43)
WaitChrThread(0xC, 1)
def lambda_6C54():
OP_93(0xFE, 0xB4, 0x1F4)
ExitThread()
QueueWorkItem(0xC, 1, lambda_6C54)
OP_6F(0x1)
Sleep(300)
ChrTalk(
0xC,
(
"#11P豊かな自然のもと\x01",
"代々受け継がれてきた\x01",
"レンゲ畑によって生まれる蜂蜜。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pそれを見たとき、天啓の如く\x01",
"新たな製菓ブランドを立ち上げる\x01",
"一つの計画が生まれたのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pその計画名こそ……\x01",
"『アルモリカ・ハニーカンパニー』。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x109,
"#10105Fアルモリカ・ハニーカンパニー……\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
"#00306Fな、なにやら凄そうな響きだな。\x02",
)
CloseMessageWindow()
TurnDirection(0xC, 0x101, 500)
ChrTalk(
0xC,
(
"#11Pつまりは、アルモリカ村の蜂蜜を\x01",
"ふんだんに使用したお菓子を\x01",
"提供していくわけです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pしかし、そのためには現地の、\x01",
"アルモリカ村の方々の協力が\x01",
"必要不可欠でした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pそこで私は、アルモリカ村の\x01",
"次期村長であるデリックさんに、\x01",
"この話を持ちかけたのでございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P製菓工場の建造、そして\x01",
"この新会社の経営をしてみないか、とね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fデリックさんに\x01",
"クインシー社の子会社を……!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P無論、そのノウハウや販売ラインは\x01",
"我が社で用意し、以降、レンゲ畑は\x01",
"こちらのスタッフで管理する……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P一切のお手を煩わせない、\x01",
"そして村人たちの苦労を減らすという\x01",
"条件を提示させていただきました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00105Fでも、工場なんて……\x01",
"いったいどこに建設する\x01",
"おつもりなのですか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pそれに関しては今までの取引きで、\x01",
"村の私有地を貸していただけることに\x01",
"相成りましてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pもともと物置程度にしか\x01",
"使っておられなかったそうなので、\x01",
"快諾していただきました次第です。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#10304F確かにその条件なら、\x01",
"話に乗ってくれる可能性は\x01",
"かなり高いだろうね。\x02\x03",
"村の改革を願うデリック君ならば\x01",
"なおさら……\x02\x03",
"#10302Fまさにあなたにとっても、\x01",
"デリック君にとっても\x01",
"悪い話じゃなかったわけだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pフフ、その通り。\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P実際、彼の才能と強い責任感は\x01",
"それに値するものと感じましたから。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P……ふふ、私の話はこんなところです。\x01",
"ご理解いただけましたかな?\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x103, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x104, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x109, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x105, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_64(0x102)
OP_64(0x103)
OP_64(0x104)
OP_64(0x109)
OP_64(0x105)
ChrTalk(
0x101,
(
"#00003F……お話を聞かせていただき\x01",
"ありがとうございます。\x02\x03",
"#00000Fおかげさまで色々と\x01",
"分からなかった部分に\x01",
"答えが見出せそうです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pおや、もう話はいいのですかな?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fええ、お時間をとらせて\x01",
"申し訳ありませんでした。\x02\x03",
"自分たちはこれで\x01",
"失礼させていただきます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pいえいえ、何のこれしき。\x01",
"またいつでも\x01",
"いらっしゃってください。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pどうかお気をつけて\x01",
"帰られますよう。\x02",
)
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
StopBGM(0x7D0)
OP_68(68880, 1500, 9870, 0)
MoveCamera(315, 26, 0, 0)
OP_6E(400, 0)
SetCameraDistance(21000, 0)
SetChrPos(0x101, 68000, 0, 13400, 180)
SetChrPos(0x102, 68000, 0, 13400, 180)
SetChrPos(0x103, 68000, 0, 13400, 180)
SetChrPos(0x104, 68000, 0, 13400, 180)
SetChrPos(0x109, 68000, 0, 13400, 180)
SetChrPos(0x105, 68000, 0, 13400, 180)
OP_A7(0x101, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x102, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x103, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x104, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x109, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x105, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
WaitBGM()
Sleep(10)
PlayBGM("ed7113", 0)
FadeToBright(1000, 0)
OP_0D()
ClearMapObjFlags(0x1, 0x10)
Sound(103, 0, 100, 0)
OP_71(0x1, 0x0, 0x10, 0x0, 0x0)
OP_79(0x1)
BeginChrThread(0x105, 3, 0, 41)
Sleep(500)
BeginChrThread(0x109, 3, 0, 40)
Sleep(500)
OP_68(69520, 1500, 7610, 3000)
BeginChrThread(0x104, 3, 0, 39)
Sleep(500)
BeginChrThread(0x103, 3, 0, 38)
Sleep(500)
BeginChrThread(0x102, 3, 0, 37)
Sleep(500)
BeginChrThread(0x101, 3, 0, 36)
WaitChrThread(0x101, 3)
OP_71(0x1, 0x10, 0x0, 0x0, 0x0)
OP_79(0x1)
Sound(104, 0, 100, 0)
OP_6F(0x1)
ChrTalk(
0x102,
(
"#00106Fふう……\x01",
"なんていうか……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#10300Fフフ、なんだか凄い話を\x01",
"聞かされてしまったね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00203Fあのミンネスという男……\x01",
"予想以上の凄腕だったようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306F話は小難しかったが、\x01",
"確かに儲かりそうな話だったし……\x02\x03",
"#00301Fしかし、ありゃあ……\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x103, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x104, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x109, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x105, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_64(0x102)
OP_64(0x103)
OP_64(0x104)
OP_64(0x109)
OP_64(0x105)
ChrTalk(
0x109,
(
"#10101F……でも、これで一通りの情報は\x01",
"手に入れられましたね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fああ……\x01",
"一旦アルモリカ村に戻ろう。\x02\x03",
"#00001Fトルタ村長に報告しなきゃな。\x02",
)
)
CloseMessageWindow()
FadeToDark(500, 0, -1)
OP_0D()
StopBGM(0xBB8)
WaitBGM()
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
SetScenarioFlags(0x22, 1)
NewScene("t0010", 0, 0, 0)
IdleLoop()
Return()
# Function_29_5B19 end
def Function_30_7861(): pass
label("Function_30_7861")
def lambda_7866():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_7866)
def lambda_7877():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_7877)
WaitChrThread(0x101, 1)
OP_95(0x101, 167730, 0, 2860, 2000, 0x0)
OP_93(0x101, 0x0, 0x1F4)
Return()
# Function_30_7861 end
def Function_31_78AC(): pass
label("Function_31_78AC")
def lambda_78B1():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 2, lambda_78B1)
def lambda_78C2():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_78C2)
WaitChrThread(0x102, 1)
OP_95(0x102, 169150, 0, 2870, 2000, 0x0)
Return()
# Function_31_78AC end
def Function_32_78F0(): pass
label("Function_32_78F0")
def lambda_78F5():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 2, lambda_78F5)
def lambda_7906():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_7906)
WaitChrThread(0x103, 1)
OP_95(0x103, 170230, 0, 1900, 2000, 0x0)
OP_93(0x103, 0x0, 0x1F4)
Return()
# Function_32_78F0 end
def Function_33_793B(): pass
label("Function_33_793B")
def lambda_7940():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 2, lambda_7940)
def lambda_7951():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_7951)
WaitChrThread(0x104, 1)
OP_95(0x104, 167400, 0, 1860, 2000, 0x0)
OP_93(0x104, 0x0, 0x1F4)
Return()
# Function_33_793B end
def Function_34_7986(): pass
label("Function_34_7986")
def lambda_798B():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 2, lambda_798B)
def lambda_799C():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x109, 1, lambda_799C)
WaitChrThread(0x109, 1)
OP_95(0x109, 168250, 0, 1200, 2000, 0x0)
OP_93(0x109, 0x0, 0x1F4)
Return()
# Function_34_7986 end
def Function_35_79D1(): pass
label("Function_35_79D1")
def lambda_79D6():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 2, lambda_79D6)
def lambda_79E7():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 1, lambda_79E7)
WaitChrThread(0x105, 1)
OP_95(0x105, 169670, 0, 1220, 2000, 0x0)
OP_93(0x105, 0x0, 0x1F4)
Return()
# Function_35_79D1 end
def Function_36_7A1C(): pass
label("Function_36_7A1C")
def lambda_7A21():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_7A21)
def lambda_7A32():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_7A32)
WaitChrThread(0x101, 1)
OP_95(0x101, 68440, 0, 10210, 2000, 0x0)
OP_93(0x101, 0xB4, 0x1F4)
Return()
# Function_36_7A1C end
def Function_37_7A67(): pass
label("Function_37_7A67")
def lambda_7A6C():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 2, lambda_7A6C)
def lambda_7A7D():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_7A7D)
WaitChrThread(0x102, 1)
OP_95(0x102, 67120, 0, 8910, 2000, 0x0)
OP_93(0x102, 0x5A, 0x1F4)
Return()
# Function_37_7A67 end
def Function_38_7AB2(): pass
label("Function_38_7AB2")
def lambda_7AB7():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 2, lambda_7AB7)
def lambda_7AC8():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_7AC8)
WaitChrThread(0x103, 1)
OP_95(0x103, 70510, 0, 9150, 2000, 0x0)
OP_93(0x103, 0xE1, 0x1F4)
Return()
# Function_38_7AB2 end
def Function_39_7AFD(): pass
label("Function_39_7AFD")
def lambda_7B02():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 2, lambda_7B02)
def lambda_7B13():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_7B13)
WaitChrThread(0x104, 1)
OP_95(0x104, 67540, 0, 7130, 2000, 0x0)
OP_93(0x104, 0x2D, 0x1F4)
Return()
# Function_39_7AFD end
def Function_40_7B48(): pass
label("Function_40_7B48")
def lambda_7B4D():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 2, lambda_7B4D)
def lambda_7B5E():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x109, 1, lambda_7B5E)
WaitChrThread(0x109, 1)
OP_95(0x109, 69250, 0, 6220, 2000, 0x0)
OP_93(0x109, 0x0, 0x1F4)
Return()
# Function_40_7B48 end
def Function_41_7B93(): pass
label("Function_41_7B93")
def lambda_7B98():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 2, lambda_7B98)
def lambda_7BA9():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 1, lambda_7BA9)
WaitChrThread(0x105, 1)
OP_95(0x105, 70730, 0, 7540, 2000, 0x0)
OP_93(0x105, 0x10E, 0x1F4)
Return()
# Function_41_7B93 end
def Function_42_7BDE(): pass
label("Function_42_7BDE")
TalkBegin(0x9)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x198, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_7D75")
ChrTalk(
0x9,
"さて、お掃除お掃除っと……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F(彼女なら『メイド』枠で\x01",
" ミスコンに出場できそうだな。)\x02\x03",
"#00000Fあの、すみません。\x01",
"ちょっと相談なのですが……\x02",
)
)
CloseMessageWindow()
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"チャリティイベントの\x01",
"ミスコンへの参加を頼んでみた。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
ChrTalk(
0x9,
"ミ、ミスコン……ですか?\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"あ、あの、すみません……\x01",
"お気持ちはうれしいのですが\x01",
"仕事を抜けられないもので……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fそうですか……\x01",
"いえ、失礼しました。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x198, 7)
Jump("loc_7DD9")
label("loc_7D75")
ChrTalk(
0x9,
(
"ミスコンへのお誘いは\x01",
"ちょっと……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"お気持ちはうれしいのですが\x01",
"仕事を抜けられないもので……\x02",
)
)
CloseMessageWindow()
label("loc_7DD9")
TalkEnd(0x9)
Return()
# Function_42_7BDE end
SaveToFile()
Try(main)
| [
"[email protected]"
] | |
30723c2e851a6064831ceee31779a2e0923f132d | 8de2a78facbdedb033e349692c71e33ce6f47315 | /string_format.py | bda55de3713cf8d1cf8a87976aba26d564aa51b8 | [] | no_license | KshitjMaheshwari/python38-GLA | 230e4ce96c4416bbc7b11477772a827ee0d62a46 | 4d29d377ab38f75510f995293f62b7c59229423b | refs/heads/master | 2022-04-17T05:17:15.205216 | 2020-02-18T19:21:18 | 2020-02-18T19:21:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | '''
str.format() is one of the string formatting methods in Python3,
which allows multiple substitutions and value formatting.
This method lets us concatenate elements within a string through positional formatting.
'''
a = 10
b = 30
c = a + b
temp = 'result is %d of %d and %d'
f = temp % (c, a, b)
print(f)
# f-string formatting (formatted string literals, Python 3.6+)
a = 10
b = 30
c = a + b
temp = f"result is {c} of {a} and {b} 😙"
print(temp) # result is 40 of 10 and 30
f = f'result is {c} of {a} {b}'
print(f) # result is 40 of 10 30
dh = 'result is {} of {} {}'
f = dh.format(c, a, b)
print(f) # result is 40 of 10 30
f = 'result is %d of %d %d' % (c, a, b)
print(f)
k = 'my name is and my record is {:10d}'.format(22223)
print(k)
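# A small extra illustration (not part of the original exercise): str.format() also
# accepts positional indexes and keyword names, which is what "multiple substitutions"
# in the docstring above refers to. The names used here are for illustration only.
print('result is {0} of {1} and {2} (and {1} again)'.format(c, a, b))   # index-based
print('result is {total} of {x} and {y}'.format(total=c, x=a, y=b))     # keyword-based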
| [
"[email protected]"
] | |
3d635f23f15d180a8acda2ef07e91f7f9fb3984e | 9818262abff066b528a4c24333f40bdbe0ae9e21 | /Day 60/TheBomberMan.py | 46f6d9fdaed89da0f250aff715ff45b108c9a598 | [
"MIT"
] | permissive | skdonepudi/100DaysOfCode | 749f62eef5826cb2ec2a9ab890fa23e784072703 | af4594fb6933e4281d298fa921311ccc07295a7c | refs/heads/master | 2023-02-01T08:51:33.074538 | 2020-12-20T14:02:36 | 2020-12-20T14:02:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,211 | py | ''''
Bomberman lives in a rectangular grid. Each cell in the grid either contains a bomb or nothing at all.
Each bomb can be planted in any cell of the grid but once planted, it will detonate after exactly 3 seconds. Once a bomb detonates, it's destroyed, along with anything in its four neighboring cells. This means that if a bomb detonates in cell (i, j), any valid cells (i +/- 1, j) and (i, j +/- 1) are cleared. If there is a bomb in a neighboring cell, the neighboring bomb is destroyed without detonating, so there's no chain reaction.
Bomberman is immune to bombs, so he can move freely throughout the grid. Here's what he does:
Initially, Bomberman arbitrarily plants bombs in some of the cells, the initial state.
After one second, Bomberman does nothing.
After one more second, Bomberman plants bombs in all cells without bombs, thus filling the whole grid with bombs. No bombs detonate at this point.
After one more second, any bombs planted exactly three seconds ago will detonate. Here, Bomberman stands back and observes.
Bomberman then repeats steps 3 and 4 indefinitely.
Note that during every second Bomberman plants bombs, the bombs are planted simultaneously (i.e., at the exact same moment), and any bombs planted at the same time will detonate at the same time.
Given the initial configuration of the grid with the locations of Bomberman's first batch of planted bombs, determine the state of the grid after n seconds.
For example, if the initial grid looks like:
...
.O.
...
it looks the same after the first second. After the second second, Bomberman has placed all his charges:
OOO
OOO
OOO
At the third second, the bomb in the middle blows up, emptying all surrounding cells:
...
...
...
Function Description
Complete the bomberMan function in the editor below. It should return an array of strings that represent the grid in its final state.
bomberMan has the following parameter(s):
n: an integer, the number of seconds to simulate
grid: an array of strings that represents the grid
Input Format
The first line contains three space-separated integers r, c, and n: the number of rows, the number of columns, and the number of seconds to simulate.
Each of the next r lines contains a row of the matrix as a single string of c characters. The . character denotes an empty cell, and the O character (ascii 79) denotes a bomb.
Constraints
1 <= r, c <= 200
1 <= n <= 10^9
Subtask
1 <= n <= 200 for 40% of the maximum score.
Output Format
Print the grid's final state. This means r lines where each line contains c characters, and each character is either a . or an O (ascii 79). This grid must represent the state of the grid after n seconds.
Sample Input
6 7 3
.......
...O...
....O..
.......
OO.....
OO.....
Sample Output
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
'''
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the bomberMan function below.
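# Commentary on the solution below (added; not part of the original problem text):
# once the first detonation has happened the board becomes periodic. At every even
# second the grid has just been completely refilled with bombs, and the odd-second
# detonation patterns repeat with period 4 from t = 3 onwards. That is why n == 0 or 1
# returns the input unchanged, any even n returns a grid full of 'O', and an odd n only
# needs a handful of simulated steps (up to 4 + n % 4) instead of n full steps.
# Note that the function reads the grid dimensions from the globals r and c set in __main__.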
def bomberMan(n, grid):
result = [[i for i in r] for r in grid]
passed = 1
coords = [[x, y] for x in range(r) for y in range(c) if grid[x][y]=="O"]
if n in [0, 1]: return grid
elif n % 2 == 0: return ['O' * len(x) for x in grid]
while passed < 4+n%4:
passed += 1
if passed%2 == 0:
result = [["O" for i in range(c)] for j in range(r)]
elif passed%2 == 1:
for coord in coords:
row, col = coord[0], coord[1]
result[row][col] = "."
if 0<=row-1<=r-1:
result[row-1][col] = "."
if 0<=row+1<=r-1:
result[row+1][col] = "."
if 0<=col-1<=c-1:
result[row][col-1] = "."
if 0<=col+1<=c-1:
result[row][col+1] = "."
coords = [[x, y] for x in range(r) for y in range(c) if result[x][y]=="O"]
for i in range(r):
result[i] = ''.join(result[i])
return result
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
rcn = input().split()
r = int(rcn[0])
c = int(rcn[1])
n = int(rcn[2])
grid = []
for _ in range(r):
grid_item = input()
grid.append(grid_item)
result = bomberMan(n, grid)
fptr.write('\n'.join(result))
fptr.write('\n')
fptr.close()
| [
"[email protected]"
] | |
6773f61b800ed243653848153717040551b46c5c | 56789f51d1feb757171b151b56c59143e74c6fe1 | /projects/examples/dragon_button_relay_push_only/pinButton.py | f0fc6e7fb8a5a84c6121ba1877377927a1833a31 | [] | no_license | aid402/micropython_project | 235926120e8a78033572386b9407a5eb6e7f473e | 9111398492f0cf511da8e6f83b34d8e4e4f90278 | refs/heads/master | 2020-07-21T10:29:16.935739 | 2018-11-02T22:07:14 | 2018-11-02T22:07:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,843 | py |
from machine import Pin
import time
import relay
class PinButton:
'''
    MicroPython helper that polls a push button on a GPIO pin through a generator
    and switches a relay on while the button is held down (push-only behaviour).
'''
# init
def __init__(self, pinNum, Pull, debug=False, relay_control=None):
self._pin = Pin(pinNum, Pin.IN, Pull )
self.debug = debug
self.status = 0
self.value = None
self._value = None
self.relay = relay.RELAY(relay_control)
self.button = None # Generator instance
# self.button = self.makebutton() # Generator instance
#
# next(self.button)
# for _ in range(128):
# next(self.button)
# time.sleep_ms(1)
def makebutton(self):
delays = -25 # mS delay
while True:
self._value = self._pin.value()
t_start = time.ticks_ms()
self.status = 1
if self._value == 0:
while time.ticks_diff(t_start, time.ticks_ms()) <= delays:
self.status = 10
yield None
self.relay.set_state(1)
self.value = self._value
self.status = 11
else:
self.value = 1
self.relay.set_state(0)
self.status = 12
yield None
def start(self):
self.button = self.makebutton() # Generator instance
next(self.button)
def stop(self):
self.button = None # Generator instance
@property
def push(self):
'''
        Poll the button once: advances the internal generator and returns the latest
        pin reading (0 while pressed with a pull-up, 1 otherwise); returns -255 if the
        generator has stopped and -1 if no reading is available yet.
'''
try:
next(self.button)
except StopIteration:
if self.debug:
print("StopIteration")
return -255
value = self.value
if self.status == 0:
value = -1
return value
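# Minimal usage sketch (illustrative only: the button pin, the pull mode and the value
# passed through relay_control to relay.RELAY are assumptions about the board wiring,
# which is not defined in this module):
if __name__ == "__main__":
    btn = PinButton(0, Pin.PULL_UP, debug=True, relay_control=4)
    btn.start()
    while True:
        print(btn.push)    # reading .push polls the pin and drives the relay
        time.sleep_ms(10)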
| [
"[email protected]"
] | |
a293a4c0f1bef50f86231c141441a29c0ea77f66 | b51fcaacf7a43cfc4e378b27090c652ed5bd8ee2 | /pyfx/tests/test_spreadhandler.py | 4dad32859c0dafba258a980ee780e00e99c632b1 | [] | no_license | tetocode/fxarb | 56526308eb91616eb60b13152ad03dab73de7ca4 | 00261dc6832047375499363af2db44efa2d36008 | refs/heads/master | 2022-10-18T16:45:51.971435 | 2020-06-03T16:19:39 | 2020-06-03T16:19:39 | 269,136,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,846 | py | import copy
from collections import deque
from datetime import datetime, timedelta
import gevent
import pytest
import pytz
from pyfx.pricehandler import PriceHandler, Price
from pyfx.spreadhandler import SpreadHandler, Spread
def test_spread_handler():
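    # Walk the handler through two batches of prices and check that spreads are stored
    # pairwise per (bid source, ask source) pair, including same-source pairs.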
h = SpreadHandler(PriceHandler())
assert h.prices == {}
now = datetime.utcnow().replace(tzinfo=pytz.utc)
now2 = now + timedelta(minutes=1, seconds=1)
prices = [Price('xxx', 'USD/JPY', now, 0.01, 0.02)]
h.handle(prices=copy.deepcopy(prices))
assert h.prices == {'xxx': {'USD/JPY': deque(prices)}}
expected = {
('xxx', 'xxx'): {
'USD/JPY': deque([
Spread(('xxx', 'xxx'), 'USD/JPY', now, 0.01, 0.02)
])
}
}
assert h.spreads == expected
prices = [
Price('xxx', 'USD/JPY', now2, 0.01, 0.03),
Price('xxx', 'EUR/JPY', now, 0.03, 0.05),
Price('yyy', 'EUR/JPY', now2, 0.06, 0.08),
]
h.handle(prices=copy.deepcopy(prices))
expected = {
('xxx', 'xxx'): {
'USD/JPY': deque([
Spread(('xxx', 'xxx'), 'USD/JPY', now, 0.01, 0.02),
Spread(('xxx', 'xxx'), 'USD/JPY', now2, 0.01, 0.03)
]),
'EUR/JPY': deque([
Spread(('xxx', 'xxx'), 'EUR/JPY', now, 0.03, 0.05),
])
},
('xxx', 'yyy'): {
'EUR/JPY': deque([
Spread(('xxx', 'yyy'), 'EUR/JPY', now2, 0.03, 0.08)
])
},
('yyy', 'xxx'): {
'EUR/JPY': deque([
Spread(('yyy', 'xxx'), 'EUR/JPY', now2, 0.06, 0.05)
])
},
('yyy', 'yyy'): {
'EUR/JPY': deque([
Spread(('yyy', 'yyy'), 'EUR/JPY', now2, 0.06, 0.08)
])
}
}
assert h.spreads == expected
| [
"_"
] | _ |
c950deb33595ab7513145a259c0dad0684cff22f | e5255d7588b117f000c8e11a57127d7bbb63a6e6 | /collection/j1/01_getImageJson.py | 6d1e626ddc17536930faed75ea9b0610302058d2 | [] | no_license | nakamura196/toyo_images | 4134e9ae7d5790e04c157195ecdea10f952dbbf2 | 60c71b23b6028c639c4f9b1ee3083c083421a336 | refs/heads/master | 2020-08-25T13:46:25.334222 | 2019-10-25T03:15:06 | 2019-10-25T03:15:06 | 216,973,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,596 | py | import urllib.request
from bs4 import BeautifulSoup
from time import sleep
import json
import hashlib
import os
from PIL import Image
import requests
import shutil
import urllib.parse
def download_img(url, file_name):
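    # Download the image at url to file_name, streaming the body straight to disk.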
print("img="+url)
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(file_name, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
def dwn(url):
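    # Fetch the page, locate its first <img> tag and mirror that image locally,
    # recreating the server's directory layout under ../../ if it is not there yet.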
html = requests.get(url).text
soup = BeautifulSoup(html, "html.parser")
img = soup.find("img")
src = urllib.parse.urljoin(url, img.get("src"))
opath = src.replace("http://124.33.215.236/", "../../")
if not os.path.exists(opath):
tmp = os.path.split(opath)
os.makedirs(tmp[0], exist_ok=True)
download_img(src, opath)
url = "http://124.33.215.236/gazou/index_img.php?tg=J1"
html = urllib.request.urlopen(url)
soup = BeautifulSoup(html, "html.parser")
aas = soup.find_all("a")
urls = []
for a in aas:
href = urllib.parse.urljoin(url, a.get("href"))
urls.append(href)
for url0 in sorted(urls):
if "201511" in url0:
print("url0="+url0)
id = url0.split("lstdir=")[1].split("&")[0]
try:
html = requests.get(url0).text
except Exception as e:
print(e)
continue
soup = BeautifulSoup(html, "html.parser")
dwn(url0)
aas = soup.find_all("a")
for a in aas:
href = urllib.parse.urljoin(url0, a.get("href"))
if "201511.php" in href:
dwn(href) | [
"[email protected]"
] | |
c5025700fd6858b320117ab2a06db5014ae2496a | 0e94b21a64e01b992cdc0fff274af8d77b2ae430 | /python/022_Objective.py | 8b3d80fef29ab63035d097dd75d51e71daa5b828 | [] | no_license | yangnaGitHub/LearningProcess | 1aed2da306fd98f027dcca61309082f42b860975 | 250a8b791f7deda1e716f361a2f847f4d12846d3 | refs/heads/master | 2020-04-15T16:49:38.053846 | 2019-09-05T05:52:04 | 2019-09-05T05:52:04 | 164,852,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,045 | py | # Class attributes can be accessed through the class name
# Class objects support two operations: attribute references (obj.name) and instantiation
# To create an object with an initial state, define the special method __init__() (the constructor; it is called automatically)
# Use the def keyword to define a method inside a class; every method must take self as its first parameter
# Inheritance is supported: base classes are listed in parentheses, and when several bases define the same method name the lookup scans them from left to right
# A name that starts with two underscores is private and cannot be used or accessed directly from outside the class
class people:
name = ""
age = 0
    __weight = 0  # private attribute; cannot be accessed directly from outside the class
def __init__(self, name, age, weight):
self.name = name
self.age = age
self.__weight = weight
def speak(self):
print("%s ==> %d" % (self.name, self.age))
class student(people):
grade = 0
def __init__(self, name, age, weight, grade):
people.__init__(self, name, age, weight)
self.grade = grade
def speak(self):
print("%s ==> %d ==> %d" % (self.name, self.age, self.grade))
stu = student("natasha", 22, 58, 2)
stu.speak()
# Overriding: a subclass redefines a method of its parent class
class Parent:
def method(self):
print("Parent")
class Child(Parent):
def method(self):
print("Child")
child = Child()
child.method()
# Special (double-underscore) methods of a class
# __init__ : constructor, called when an object is created
# __del__ : destructor
# __repr__ : printable representation
# __setitem__ : assign a value by index
# __getitem__ : fetch a value by index
# __len__ : length
# __cmp__ : comparison (Python 2 only; Python 3 uses __lt__, __eq__ and friends)
# __call__ : make an instance callable like a function
# __add__ : addition
# __sub__ : subtraction
# __mul__ : multiplication
# __div__ : division (Python 2; Python 3 uses __truediv__)
# __mod__ : modulo
# __pow__ : exponentiation
# Operator overloading is supported
class Vector:
def __init__(self, val1, val2):
self.val1 = val1
self.val2 = val2
def __str__(self):
return "Vector(%d, %d)" % (self.val1, self.val2)
def __add__(self, other):
return Vector(self.val1 + other.val1, self.val2 + other.val2)
v1 = Vector(2, 10)
v2 = Vector(5, -2)
print(v1 + v2)
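# A short extra illustration (not in the original file): the same special-method list
# above also covers indexing and length, via __getitem__ and __len__.
class Pair:
    def __init__(self, val1, val2):
        self.items = [val1, val2]
    def __len__(self):
        return len(self.items)
    def __getitem__(self, index):
        return self.items[index]
p = Pair(7, 8)
print(len(p), p[0], p[1])  # 2 7 8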
| [
"[email protected]"
] | |
e5c95f65e2d375ab804087caa24c1424a0aba734 | 291f0aa9a40eeca26fb08106c952b9347db7dba7 | /nz_crawl_demo/day2/requests/biquge.py | 4436df5628c3550c69cfc0f0492fb0cc28404bae | [
"Apache-2.0"
] | permissive | gaohj/nzflask_bbs | fad10b93f8f495a94d5d6db6f5c60d85c1c85518 | 36a94c380b78241ed5d1e07edab9618c3e8d477b | refs/heads/master | 2022-12-12T21:43:17.417294 | 2020-03-20T10:28:22 | 2020-03-20T10:28:22 | 239,702,874 | 0 | 2 | Apache-2.0 | 2022-12-08T03:50:07 | 2020-02-11T07:34:01 | JavaScript | UTF-8 | Python | false | false | 569 | py | import requests
url = "http://www.xbiquge.la/login.php?jumpurl=http://www.xbiquge.la/"
data = {
"LoginForm[username]":"kangbazi666",
"LoginForm[password]":'kangbazi666',
}
headers = {
'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0"
}
# Log in
session = requests.Session()  # instantiate a Session object (it keeps the login cookies for later requests)
session.post(url,data=data,headers=headers)
res = session.get("http://www.xbiquge.la/modules/article/bookcase.php")
with open('biquge.html','w',encoding='utf-8') as fp:
fp.write(res.content.decode('utf-8'))
| [
"[email protected]"
] | |
4ad8fd01c03a6ae1a29510b7ddaba5625e4d100c | 2b398353f5b0529ac666ef180e9dc966474a70c0 | /vspk/v6/nunetworkperformancebinding.py | 0c35e1c30289cec4db7f7ef8fd9e2d6a7936ffec | [
"BSD-3-Clause"
] | permissive | nuagenetworks/vspk-python | e0c4570be81da2a4d8946299cb44eaf9559e0170 | 9a44d3015aa6424d0154c8c8a42297669cce11f9 | refs/heads/master | 2023-06-01T01:12:47.011489 | 2023-05-12T19:48:52 | 2023-05-12T19:48:52 | 53,171,411 | 21 | 18 | BSD-3-Clause | 2020-12-16T12:36:58 | 2016-03-04T23:10:58 | Python | UTF-8 | Python | false | false | 12,223 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUNetworkPerformanceBinding(NURESTObject):
""" Represents a NetworkPerformanceBinding in the VSD
Notes:
Association of Network Performance Measurement policies enable the measurement of path SLA metrics between NSGs in the domain.
"""
__rest_name__ = "networkperformancebinding"
__resource_name__ = "networkperformancebindings"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a NetworkPerformanceBinding instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> networkperformancebinding = NUNetworkPerformanceBinding(id=u'xxxx-xxx-xxx-xxx', name=u'NetworkPerformanceBinding')
>>> networkperformancebinding = NUNetworkPerformanceBinding(data=my_dict)
"""
super(NUNetworkPerformanceBinding, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._last_updated_date = None
self._read_only = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._priority = None
self._associated_network_measurement_id = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="read_only", remote_name="readOnly", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=True)
self.expose_attribute(local_name="associated_network_measurement_id", remote_name="associatedNetworkMeasurementID", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def read_only(self):
""" Get read_only value.
Notes:
Determines whether this entity is read only. Read only objects cannot be modified or deleted.
This attribute is named `readOnly` in VSD API.
"""
return self._read_only
@read_only.setter
def read_only(self, value):
""" Set read_only value.
Notes:
Determines whether this entity is read only. Read only objects cannot be modified or deleted.
This attribute is named `readOnly` in VSD API.
"""
self._read_only = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def priority(self):
""" Get priority value.
Notes:
Priority of the associated Network Performance Measurement
"""
return self._priority
@priority.setter
def priority(self, value):
""" Set priority value.
Notes:
Priority of the associated Network Performance Measurement
"""
self._priority = value
@property
def associated_network_measurement_id(self):
""" Get associated_network_measurement_id value.
Notes:
Associated Network Performance Measurement ID
This attribute is named `associatedNetworkMeasurementID` in VSD API.
"""
return self._associated_network_measurement_id
@associated_network_measurement_id.setter
def associated_network_measurement_id(self, value):
""" Set associated_network_measurement_id value.
Notes:
Associated Network Performance Measurement ID
This attribute is named `associatedNetworkMeasurementID` in VSD API.
"""
self._associated_network_measurement_id = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
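    # Illustrative sketch (added example; the attribute values below are hypothetical,
    # the attribute names come from the expose_attribute() calls above):
    #   binding = NUNetworkPerformanceBinding(priority=10,
    #                                         associated_network_measurement_id="abc-123")
    #   binding.priority                            # -> 10
    #   binding.associated_network_measurement_id   # -> "abc-123"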
| [
"[email protected]"
] | |
4edc4a4117a2f5785f06ed7c041ecc6251e057d3 | 13f900b9dc0c3e838ff788febaa59514b97d1128 | /Proyecto/apps.py | 40c7b3b40f6d31687e5ba04a1ee90b01b19feb2f | [] | no_license | JorgitoR/App-Proyectos-Slabcode | 68439c5fe0dbe58a004b9f04be807f6756d84a7f | 173ea655bf00f8b5ae7fb0eb4ee0cf0ed5e6f3a7 | refs/heads/main | 2023-04-12T21:52:16.339073 | 2021-04-10T21:02:57 | 2021-04-10T21:02:57 | 356,660,392 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | from django.apps import AppConfig
class ProyectoConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'Proyecto'
| [
"[email protected]"
] | |
e9bb27222c38f40ffe7f88c5cf3722d5dd47c363 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/10/usersdata/124/24836/submittedfiles/testes.py | 5775a4b04ac6e07e20b13628e10307df3311b756 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,107 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
def vabsol(x):
if x < 0:
x = -1*x
return x
def calculopi(y):
c = 3
d = 2
for i in range (0, y, 1):
if i%2 != 0:
c = c - (4/(d*(d+1)*(d+2)))
elif i%2 == 0:
c = c + (4/(d*(d+1)*(d+2)))
d = d + 2
return c
def cos(z, epsilon):
    # Taylor series: cos(z) = 1 - z**2/2! + z**4/4! - ...
    # keeps adding terms until the current term drops below epsilon
    cosz = 1
    v = 2
    fat = 2
    cont = 0
    d = (z**v)/fat
    while epsilon <= d:
        if cont%2 != 0:
            cosz = cosz + d
        elif cont%2 == 0:
            cosz = cosz - d
        v = v + 2
        fat = 1
        for i in range (v, 0, -1):
            fat = fat*i
        d = (z**v)/fat
        cont = cont + 1
    return cosz
def razaurea(m, epsilon):
pi = calculopi(m)
fi = 2*cos(pi/5, epsilon)
return fi
m = int(input('Digite o número m de termos da fórmula de pi: '))
epsilon = input('Digite o epsilon para o cálculo da razão áurea: ')
m = vabsol(m)
print('Valor aproximado de pi: %.15f' %calculopi(m))
print('Valor aproximado da razão áurea: %.15f' %razaurea(m, epsilon)) | [
"[email protected]"
] | |
aa293e1ff78c775da8ee0d65c93d61dbe77e9ece | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_212/ch38_2020_06_18_18_19_09_636520.py | a1acd720111d63b2d1b433ca15896300cc635a3a | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | def quantos_uns (n):
soma = 0
i=0
num = str(n)
    while i < len(num):  # visit every digit exactly once
if num[i] == '1':
soma += 1
i +=1
return soma
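# Illustrative check (added; the calls below are assumptions, not part of the
# original submission):
#   quantos_uns(121)   # -> 2, two of the digits are '1'
#   quantos_uns(345)   # -> 0, no digit is '1'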
| [
"[email protected]"
] | |
9165fa645530445bd62b1dd6a0a62069ada7bff7 | 06e34e2dface0b87fa785cab7e65422a5f20ba18 | /Solutions/900-RLE-Iterator/python.py | df44e067f90f609efe109d47495f2673b48fe69d | [] | no_license | JerryHu1994/LeetCode-Practice | c9841b0ce70451c19c8a429a3898c05b6233e1d4 | b0ce69985c51a9a794397cd98a996fca0e91d7d1 | refs/heads/master | 2022-02-10T04:42:28.033364 | 2022-01-02T04:44:22 | 2022-01-02T04:44:22 | 117,118,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | class RLEIterator(object):
def __init__(self, A):
"""
:type A: List[int]
"""
self.li = A
def next(self, n):
"""
:type n: int
:rtype: int
"""
if len(self.li) == 0: return -1
cnt = n
while cnt > 0:
if len(self.li) == 0: return -1
if cnt <= self.li[0]:
ret = self.li[1]
self.li[0] -= cnt
return ret
else:
cnt -= self.li[0]
self.li.pop(0)
self.li.pop(0)
return -1
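# Illustrative walk-through (added; uses the widely known sample input for this
# problem, which is an assumption and not part of the original file):
#   it = RLEIterator([3, 8, 0, 9, 2, 5])   # three 8s, zero 9s, two 5s
#   it.next(2)   # -> 8   (one 8 left)
#   it.next(1)   # -> 8   (8s exhausted)
#   it.next(1)   # -> 5   (the empty run of 9s is skipped)
#   it.next(2)   # -> -1  (only one 5 remained)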
# Your RLEIterator object will be instantiated and called as such:
# obj = RLEIterator(A)
# param_1 = obj.next(n) | [
"[email protected]"
] | |
818d347d5ad5029e8246fe46f97504bcf6646510 | 8a42be3f930d8a215394a96ad2e91c95c3b7ff86 | /Build/Instalation/GeneralDb/Marathon/MarathonTests_3.5.2/HSQL_RecordEditor1/TestCases/SaveAs/SaveAsXml1.py | 7e17d8d59bd69361fc57951c63a851daf3fe52ae | [] | no_license | java-tools/jrec | 742e741418c987baa4350390d126d74c0d7c4689 | 9ece143cdd52832804eca6f3fb4a1490e2a6f891 | refs/heads/master | 2021-09-27T19:24:11.979955 | 2017-11-18T06:35:31 | 2017-11-18T06:35:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,710 | py | #{{{ Marathon
from default import *
#}}} Marathon
from Modules import commonBits
def test():
set_java_recorded_version("1.6.0_22")
if frame(' - Open File:0'):
select('File', commonBits.sampleDir() + 'DTAR020_tst1.bin')
click('Edit')
close()
if window('Record Editor'):
click('Export')
if frame('Export - DTAR020_tst1.bin:0'):
## select('JTabbedPane_16', 'Xml')
select('File Name_2', 'Xml')
select('Edit Output File', 'true')
click('save file')
close()
if frame('Tree View - DTAR020_tst1.bin.xml:0'):
select('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', 'rows:[9],columns:[Xml~Namespace]')
assert_content('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', [ ['', '', 'UTF-8', '1.0', 'false', '', '', '', '', '', '', ''],
['', '', '', '', '', '', '', '', '', '', '', ''],
['', '', '', '', '63604808', '20', '40118', '170', '1', '4.87', 'True', ''],
['', '', '', '', '69684558', '20', '40118', '280', '1', '19.00', 'True', ''],
['', '', '', '', '69684558', '20', '40118', '280', '-1', '-19.00', 'True', ''],
['', '', '', '', '69694158', '20', '40118', '280', '1', '5.01', 'True', ''],
['', '', '', '', '62684671', '20', '40118', '685', '1', '69.99', 'True', ''],
['', '', '', '', '62684671', '20', '40118', '685', '-1', '-69.99', 'True', ''],
['', '', '', '', '61664713', '59', '40118', '335', '1', '17.99', 'True', ''],
['', '', '', '', '61664713', '59', '40118', '335', '-1', '-17.99', 'True', ''],
['', '', '', '', '61684613', '59', '40118', '335', '1', '12.99', 'True', ''],
['', '', '', '', '68634752', '59', '40118', '410', '1', '8.99', 'True', ''],
['', '', '', '', '60694698', '59', '40118', '620', '1', '3.99', 'True', ''],
['', '', '', '', '60664659', '59', '40118', '620', '1', '3.99', 'True', ''],
['', '', '', '', '60614487', '59', '40118', '878', '1', '5.95', 'True', ''],
['', '', '', '', '68654655', '166', '40118', '60', '1', '5.08', 'True', ''],
['', '', '', '', '69624033', '166', '40118', '80', '1', '18.19', 'True', ''],
['', '', '', '', '60604100', '166', '40118', '80', '1', '13.30', 'True', ''],
['', '', '', '', '68674560', '166', '40118', '170', '1', '5.99', 'True', '']
])
select('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', 'rows:[9],columns:[Xml~Namespace]')
click('Close')
## select('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', 'rows:[9],columns:[Xml~Namespace]')
close()
select_menu('Window>>DTAR020_tst1.bin>>Table: ')
## window_closed('Record Editor')
close()
pass
| [
"bruce_a_martin@b856f413-25aa-4700-8b60-b3441822b2ec"
] | bruce_a_martin@b856f413-25aa-4700-8b60-b3441822b2ec |
6fd2f9cac3bf22b97948b2a190ce4a65f9c488ae | 4554f8d3ab1a6267b17dad2b4d2c47b0abe8d746 | /benchmarking/lab_driver.py | 03e7770f8347f387876b15dba21e7f83f446d948 | [
"Apache-2.0"
] | permissive | jteller/FAI-PEP | 44fead3ca26f4844067d455c86ac8c5bfaf79a14 | 73b8a08815675135e9da7d68375d1218cbd04eaa | refs/heads/master | 2020-04-29T06:04:19.197966 | 2019-03-15T23:32:54 | 2019-03-15T23:32:54 | 175,904,011 | 0 | 0 | Apache-2.0 | 2019-03-15T23:30:04 | 2019-03-15T23:30:04 | null | UTF-8 | Python | false | false | 5,441 | py | #!/usr/bin/env python
##############################################################################
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
##############################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import json
import os
from download_benchmarks.download_benchmarks import DownloadBenchmarks
from run_remote import RunRemote
from run_lab import RunLab
from harness import BenchmarkDriver
from repo_driver import RepoDriver as OSS_RepoDriver
from utils.custom_logger import getLogger, setLoggerLevel
parser = argparse.ArgumentParser(description="Download models from dewey")
parser.add_argument("--app_id",
help="The app id you use to upload/download your file for everstore")
parser.add_argument("-b", "--benchmark_file",
help="Specify the json file for the benchmark or a number of benchmarks")
parser.add_argument("--lab", action="store_true",
help="Indicate whether the run is lab run.")
parser.add_argument("--logger_level", default="warning",
choices=["info", "warning", "error"],
help="Specify the logger level")
parser.add_argument("--remote", action="store_true",
help="Submit the job to remote devices to run the benchmark.")
parser.add_argument("--root_model_dir", required=True,
help="The root model directory if the meta data of the model uses "
"relative directory, i.e. the location field starts with //")
parser.add_argument("--token",
help="The token you use to upload/download your file for everstore")
parser.add_argument("-c", "--custom_binary",
help="Specify the custom binary that you want to run.")
parser.add_argument("--pre_built_binary",
help="Specify the pre_built_binary to bypass the building process.")
parser.add_argument("--user_string",
help="If set, use this instead of the $USER env variable as the user string.")
class LabDriver(object):
def __init__(self, raw_args=None):
self.args, self.unknowns = parser.parse_known_args(raw_args)
setLoggerLevel(self.args.logger_level)
def run(self):
if not self.args.lab and not self.args.remote:
assert self.args.benchmark_file, \
"--benchmark_file (-b) must be specified"
if self.args.benchmark_file:
getLogger().info("Checking benchmark files to download")
dbench = DownloadBenchmarks(self.args,
getLogger())
dbench.run(self.args.benchmark_file)
if self.args.remote:
unique_args = [
"--app_id", self.args.app_id,
"--token", self.args.token,
]
if self.args.benchmark_file:
unique_args.extend([
"--benchmark_file", self.args.benchmark_file,
])
if self.args.pre_built_binary:
unique_args.extend([
"--pre_built_binary", self.args.pre_built_binary,
])
if self.args.user_string:
unique_args.extend([
"--user_string", self.args.user_string,
])
# hack to remove --repo from the argument list since python2
# argparse doesn't support allow_abbrev to be False, and it is
# the prefix of --repo_dir
if '--repo' in self.unknowns:
index = self.unknowns.index('--repo')
new_unknowns = self.unknowns[:index]
new_unknowns.extend(self.unknowns[index + 2:])
self.unknowns = new_unknowns
app_class = RunRemote
elif self.args.lab:
unique_args = [
"--app_id", self.args.app_id,
"--token", self.args.token,
]
app_class = RunLab
elif self.args.custom_binary or self.args.pre_built_binary:
if self.args.custom_binary:
binary = self.args.custom_binary
else:
binary = self.args.pre_built_binary
repo_info = {
"treatment": {
"program": binary, "commit": "-1", "commit_time": 0
}
}
unique_args = [
"--info \'", json.dumps(repo_info) + '\'',
"--benchmark_file", self.args.benchmark_file,
]
app_class = BenchmarkDriver
else:
if self.args.user_string:
usr_string = self.args.user_string
else:
usr_string = os.environ["USER"]
unique_args = [
"--benchmark_file", self.args.benchmark_file,
"--user_string", usr_string,
]
app_class = OSS_RepoDriver
raw_args = []
raw_args.extend(unique_args)
raw_args.extend(["--root_model_dir", self.args.root_model_dir])
raw_args.extend(["--logger_level", self.args.logger_level])
raw_args.extend(self.unknowns)
getLogger().info("Running {} with raw_args {}".format(app_class, raw_args))
app = app_class(raw_args=raw_args)
app.run()
if __name__ == "__main__":
raw_args = None
app = LabDriver(raw_args=raw_args)
app.run()
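# Example invocation (added illustration; the file paths are hypothetical, while the
# flags are the ones defined by the argparse parser above):
#   python lab_driver.py --benchmark_file /path/to/benchmark.json \
#       --root_model_dir /path/to/models --logger_level info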
| [
"[email protected]"
] | |
9b1cc198ba049ed2a1e88ee56531681e0b4e438a | f4aec883b8073c4139046590d03907a751db6ab8 | /tests/snippets/pipeline/pipeline.py | 3d18442921f639077263c258ec8797f616f848ce | [] | no_license | turnhq/nucling | 1699d2a19154c4332c9836eace03ee21ae72ed41 | 56426954c6ca48e4f6d5314f9a7807dac986bce9 | refs/heads/master | 2020-03-28T06:56:30.360598 | 2019-04-10T21:10:33 | 2019-04-10T21:10:33 | 147,871,208 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,429 | py | import unittest
from nucling.snippet.pipelines import (
Pipeline, Pipeline_manager, Transform_keys_camel_case_to_snake,
Remove_nones,
)
class Pipeline_with_FUN( Pipeline ):
def FUN( p, x ):
return x + 15
class Test_pipeline( unittest.TestCase ):
def setUp( self ):
self.p = Pipeline()
self.q = Pipeline()
def test_when_the_class_dont_have_fun_should_raise_no_implemented( self ):
with self.assertRaises( NotImplementedError ):
Pipeline().process( {} )
def test_when_the_instance_is_assing_fun_should_run_the_function( self ):
result = Pipeline( fun=lambda x: x + 10 ).process( 10 )
self.assertEqual( result, 20 )
def test_when_the_pipiline_have_FUN_should_run_the_function( self ):
result = Pipeline_with_FUN().process( 40 )
self.assertEqual( result, 55 )
def test_when_combine_with_another_thing_should_return_a_manaager( self ):
result = self.p | self.q
self.assertIsInstance( result, Pipeline_manager )
def test_the_new_manager_should_contain_the_pipeline_and_the_other( self ):
result = self.p | self.q
self.assertIs( result.children[0], self.p )
self.assertIs( result.children[1], self.q )
def test_do_or_to_the_class_should_be_a_manager_with_both_class( self ):
result = Pipeline | Pipeline
self.assertIsInstance( result, Pipeline_manager )
self.assertIsInstance( result.children[0], type )
self.assertIsInstance( result.children[1], type )
class Test_camel_case( unittest.TestCase ):
def setUp( self ):
self.prev_dict = { 'HelloWorld': 'hello_world' }
self.result_dict = { 'hello_world': 'hello_world' }
    def test_transform_key_to_camel_to_snake_should_transform_the_keys( self ):
result = Transform_keys_camel_case_to_snake().process( self.prev_dict )
self.assertDictEqual( result, self.result_dict )
class Test_remove_nones( unittest.TestCase ):
def setUp( self ):
self.prev_dict = { 'nones': None, 'hello_world': 'hello_world' }
self.result_dict = { 'hello_world': 'hello_world' }
def test_remove_nones_should_no_return_a_none( self ):
result = Remove_nones().process(
{ 'day': None, 'month': None, 'year': '100' } )
result = Remove_nones().process( self.prev_dict )
self.assertDictEqual( result, self.result_dict )
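# Illustrative usage of the pipelines exercised above (added; the dict literals are
# hypothetical, only behaviour demonstrated by the tests is assumed):
#   Transform_keys_camel_case_to_snake().process({'HelloWorld': 1})   # -> {'hello_world': 1}
#   Remove_nones().process({'a': None, 'b': 2})                       # -> {'b': 2}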
| [
"[email protected]"
] | |
dd6f5f85aed5431dcc5d6f54a167b1a3ad008d74 | 07daef15cc1cfe45712811c83a771a044dd03ebf | /myvenv/Scripts/django-admin.py | 98020e79406622606dcd802efa3e5ce85fc40b4c | [] | no_license | rkdtmddnjs97/kmu_likelion_lesson_8th | c29024ccac54f623cd6cbf7ee3921ded54204eb5 | e9b2992b233c1d8a2e00f33d6716a6042ac49a19 | refs/heads/master | 2022-06-20T05:56:33.998526 | 2020-05-14T06:37:55 | 2020-05-14T06:37:55 | 263,804,501 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | #!c:\users\rkdtm\desktop\kmu_likelion8th\myvenv\scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| [
"[email protected]"
] | |
2cc6fe236c84cda705a1b8fec0493df1b53fd497 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03026/s973955145.py | a2924f6209bebcf3663a5e647cd0aaf7dd7eaa40 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | #実験用の写経
N = int(input())
g = {i: [] for i in range(N)}
for i in range(N - 1):
a, b = map(int, input().split())
g[a - 1].append(b - 1)
g[b - 1].append(a - 1)
c = list(map(int, input().split()))
c.sort()
print(sum(c[:-1]))
nums = [0] * N
stack = [0]
while stack:
d = stack.pop()
nums[d] = c.pop()
for node in g[d]:
if nums[node] == 0:
stack.append(node)
print(' '.join(map(str, nums)))
| [
"[email protected]"
] | |
3412bc422fb6235564f0b57b12435dcbc6b538bf | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/cloud/kms/v1/kms-v1-py/google/cloud/kms_v1/services/key_management_service/__init__.py | 728218e181e680e1d75ce85d1b9e2142874feb86 | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 795 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import KeyManagementServiceClient
from .async_client import KeyManagementServiceAsyncClient
__all__ = (
'KeyManagementServiceClient',
'KeyManagementServiceAsyncClient',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
de3e7c7a99d56bbcd47d532b61dd9abdb705c05a | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nncuthbert.py | 68a51f0d1e582e2bf577eed1be5fba6c8208bebd | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 139 | py | ii = [('PettTHE.py', 1), ('ClarGE2.py', 2), ('BuckWGM.py', 2), ('DibdTRL2.py', 1), ('WadeJEB.py', 1), ('MartHRW.py', 1), ('BrewDTO.py', 1)] | [
"[email protected]"
] | |
01aba594e0438ffdd0367eefacb37bc81bbda437 | ff91e5f5815b97317f952038e19af5208ef12d84 | /square2.py | 98eb0412527458a11fcc5211790ef83d9f7ee25a | [] | no_license | BryanPachas-lpsr/class-samples | c119c7c1280ca2a86f24230d85f6c712f18d9be8 | a194201dce28299bd522295110814c045927ef5b | refs/heads/master | 2021-01-17T07:03:42.968452 | 2016-06-12T22:35:54 | 2016-06-12T22:35:54 | 48,007,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | #square.py
import turtle
#make our turtle
buzz = turtle.Turtle()
#buzz makes a square
lines = 0
while lines < 4:
buzz.forward(150)
buzz.left(90)
lines = lines + 1
turtle.exitonclick()
| [
"lps@lps-1011PX.(none)"
] | lps@lps-1011PX.(none) |
23603c3747093f0f01f514546c24ce3bad2ff880 | fe6f6d11dde2a3205ae9758c7d4eb1f824b84102 | /venv/lib/python2.7/site-packages/logilab/common/test/unittest_ureports_html.py | c849c4f82d85d7321cc94b30f3be83ecd578cec2 | [
"MIT"
] | permissive | mutaihillary/mycalculator | ebf12a5ac90cb97c268b05606c675d64e7ccf8a6 | 55685dd7c968861f18ae0701129f5af2bc682d67 | refs/heads/master | 2023-01-10T14:56:11.780045 | 2016-09-20T12:30:21 | 2016-09-20T12:30:21 | 68,580,251 | 0 | 0 | MIT | 2022-12-26T20:15:21 | 2016-09-19T07:27:48 | Python | UTF-8 | Python | false | false | 2,918 | py | # copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
'''unit tests for ureports.html_writer
'''
__revision__ = "$Id: unittest_ureports_html.py,v 1.3 2005-05-27 12:27:08 syt Exp $"
from utils import WriterTC
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.ureports.html_writer import *
class HTMLWriterTC(TestCase, WriterTC):
def setUp(self):
self.writer = HTMLWriter(1)
# Section tests ###########################################################
section_base = '''<div>
<h1>Section title</h1>
<p>Section\'s description.
Blabla bla</p></div>
'''
section_nested = '''<div>\n<h1>Section title</h1>\n<p>Section\'s description.\nBlabla bla</p><div>\n<h2>Subsection</h2>\n<p>Sub section description</p></div>\n</div>\n'''
# List tests ##############################################################
list_base = '''<ul>\n<li>item1</li>\n<li>item2</li>\n<li>item3</li>\n<li>item4</li>\n</ul>\n'''
nested_list = '''<ul>
<li><p>blabla<ul>
<li>1</li>
<li>2</li>
<li>3</li>
</ul>
</p></li>
<li>an other point</li>
</ul>
'''
# Table tests #############################################################
table_base = '''<table>\n<tr class="odd">\n<td>head1</td>\n<td>head2</td>\n</tr>\n<tr class="even">\n<td>cell1</td>\n<td>cell2</td>\n</tr>\n</table>\n'''
field_table = '''<table class="field" id="mytable">\n<tr class="odd">\n<td>f1</td>\n<td>v1</td>\n</tr>\n<tr class="even">\n<td>f22</td>\n<td>v22</td>\n</tr>\n<tr class="odd">\n<td>f333</td>\n<td>v333</td>\n</tr>\n</table>\n'''
advanced_table = '''<table class="whatever" id="mytable">\n<tr class="header">\n<th>field</th>\n<th>value</th>\n</tr>\n<tr class="even">\n<td>f1</td>\n<td>v1</td>\n</tr>\n<tr class="odd">\n<td>f22</td>\n<td>v22</td>\n</tr>\n<tr class="even">\n<td>f333</td>\n<td>v333</td>\n</tr>\n<tr class="odd">\n<td> <a href="http://www.perdu.com">toi perdu ?</a></td>\n<td> </td>\n</tr>\n</table>\n'''
# VerbatimText tests ######################################################
verbatim_base = '''<pre>blablabla</pre>'''
if __name__ == '__main__':
unittest_main()
| [
"[email protected]"
] | |
f7ecb98c52d86587f015570263ac5a20bdfbe240 | 0567fcd808397a7024b5009cc290de1c414eff06 | /src/1658.minimum-operations-to-reduce-x-to-zero.py | 7f3176eb03955d6bbc0e2d39d5a8afa61e2fd290 | [] | no_license | tientheshy/leetcode-solutions | d3897035a7fd453b9f47647e95f0f92a03bff4f3 | 218a8a97e3926788bb6320dda889bd379083570a | refs/heads/master | 2023-08-23T17:06:52.538337 | 2021-10-03T01:47:50 | 2021-10-03T01:47:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,481 | py | #
# @lc app=leetcode id=1658 lang=python3
#
# [1658] Minimum Operations to Reduce X to Zero
#
# @lc code=start
# TAGS: Greedy, Sliding Window
class Solution:
    # TLE. Time and Space O(N^2).
def minOperations(self, nums: List[int], x: int) -> int:
q = [(x, 0, len(nums) - 1)]
visited = {}
depth = 0
while q:
cur = []
for x, left, right in q:
if x == 0: return depth
if (left, right) in visited and visited[(left, right)] <= depth: continue
visited[(left, right)] = depth
if x < 0 or left > right:
continue
cur.append((x - nums[left], left + 1, right))
cur.append((x - nums[right], left, right - 1))
depth += 1
q = cur
return -1
# Think in reverse, instead of finding the minmum prefix + suffix, we can find the subarray with maximum length
def minOperations(self, nums: List[int], x: int) -> int:
prefix_sum = [0]
for num in nums:
prefix_sum.append(prefix_sum[-1] + num)
y = prefix_sum[-1] - x
ans = -1
visited = {}
for i, num in enumerate(prefix_sum):
if y + num not in visited:
visited[y + num] = i
if num in visited:
ans = max(ans, i - visited[num])
if ans == -1: return -1
return len(nums) - ans
# @lc code=end
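# Example (added illustration; uses the standard sample for this problem, which is
# an assumption and not part of the original file):
#   Solution().minOperations([1, 1, 4, 2, 3], 5)   # -> 2 (remove the trailing 3 and 2)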
| [
"[email protected]"
] | |
61b401769d07af9b8953298be222d7c0e8eef4b8 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_287/ch6_2020_03_09_20_09_01_051283.py | 43555c7f67b482084ac43022e30452bb1cd602a2 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | def celsius_para_fahrenheit(C):
F = 9/5*C+32
return F | [
"[email protected]"
] |