blob_id (string, length 40) | directory_id (string, length 40) | path (string, length 3 to 281) | content_id (string, length 40) | detected_licenses (list, 0 to 57 items) | license_type (string, 2 classes) | repo_name (string, length 6 to 116) | snapshot_id (string, length 40) | revision_id (string, length 40) | branch_name (string, 313 classes) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 18.2k to 668M, nullable) | star_events_count (int64, 0 to 102k) | fork_events_count (int64, 0 to 38.2k) | gha_license_id (string, 17 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (string, 107 classes) | src_encoding (string, 20 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 4 to 6.02M) | extension (string, 78 classes) | content (string, length 2 to 6.02M) | authors (list, 1 item) | author (string, length 0 to 175) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5e66744ad4708e85441db3bb050153bb35222b9a
|
8a013bce3506b5ca627de50f61b51a11ac48df05
|
/finances/fm_qiankang/IC/IC_JRQ/IC_HouseLoanCnt.py
|
a2d21a7ff1f92fecf980aa67db6808d5ae830e65
|
[] |
no_license
|
bigdata234/prefect-python
|
b8e327b5bd94db31c3366ed45ab6e67c5b0f8dbd
|
b3b69367d20a41c3fbf3105b5ede2ee173544321
|
refs/heads/master
| 2020-03-16T14:24:27.796994 | 2018-08-10T10:23:48 | 2018-08-10T10:23:48 | 132,715,618 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,118 |
py
|
# -*- coding: utf-8 -*-
# @Time : 2018/4/21 15:46
# @Author : Jiao Ruiqiang
# @Email : [email protected]
# @File : IC_HouseLoanCnt.py
# @Software: PyCharm
'''eof
name: Number of personal house-purchase loans (个人购房贷款笔数)
code:IC_HouseLoanCnt
tableName:
columnName:
groups: Personal credit report
dependencies:IC
type: Common indicator
datasourceType: Online indicator
description:
eof'''
import sys
import pandas as pd
import datetime
reload(sys)
sys.setdefaultencoding('utf-8')
'''
Number of personal house-purchase loans
Extracted from the HouseLoanCnt field of the credit cue block (ICRCreditCue)
'''
def IC_HouseLoanCnt():
null_type_list = ['', None, 'null', '/', 'Null']
try:
if IC.has_key('ICRCreditCue') and \
IC['ICRCreditCue'] not in null_type_list and \
len(IC['ICRCreditCue']) == 1 and \
IC['ICRCreditCue'][0].has_key('HouseLoanCnt') and \
IC['ICRCreditCue'][0]['HouseLoanCnt'] not in null_type_list:
return int(IC['ICRCreditCue'][0]['HouseLoanCnt'])
else:
return u'缺失值'
except:
return u'缺失值'
result = IC_HouseLoanCnt()
|
[
"[email protected]"
] | |
20e10dee2476cb446eac70e4873787dc387fa6a6
|
c8b535407ddf3551ca16d21bd9f2c4f991028010
|
/assesment/settings.py
|
6b2f049d4a5c29ccaef367a74a85ed6c6e8fe050
|
[] |
no_license
|
manish3135/pre-assesment
|
148355708a387ba456ce6a0c0a8a5bbfd79f1018
|
0ffc96a900e58ec06862333c7ab98d5f1cdcc049
|
refs/heads/master
| 2020-03-28T01:07:59.915836 | 2018-09-05T11:12:53 | 2018-09-05T11:12:53 | 147,479,279 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,381 |
py
|
"""
Django settings for assesment project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR,'templates')
STATIC_DIR = os.path.join(BASE_DIR,'static')
MEDIA_DIR = os.path.join(BASE_DIR,'media')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dvm7ar1a($q6g^j=k$3t65p!l_^5ajq1=3)c)c#8wni4=bd1l^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'login',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'assesment.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'assesment.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [STATIC_DIR,]
MEDIA_ROOT = MEDIA_DIR
MEDIA_URL = '/media/'
LOGIN_URL = '/login/user_login/'
|
[
"[email protected]"
] | |
e8a66deaca4a86cd89f40a33e293daf79a905ad0
|
117eeab22c41e0df7880783b7ad439ab66a6ff7d
|
/03/scripts/PDC_3.py
|
6e73f91fdd1667dcb2d30de2a8ddbb8f877db8a3
|
[] |
no_license
|
simonwicky/TCP-Labs
|
8242cedea16a3fbbc674910bde394398058e60b1
|
2e24fa80c54dc36837e33474d0a61511e370e120
|
refs/heads/master
| 2023-01-31T02:52:08.490641 | 2020-12-16T16:19:07 | 2020-12-16T16:19:07 | 296,391,973 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 960 |
py
|
import socket
import argparse
import time
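# Send the RESET command over both an IPv4 and an IPv6 UDP socket roughly once
# per second (the sockets are non-blocking) and stop as soon as either one
# receives a reply, printing the decoded response.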
parser = argparse.ArgumentParser()
parser.add_argument(
"server", help="the IPv4 address of the server or its domain name.",
)
parser.add_argument(
"port", type=int, help="server port number.",
)
results = parser.parse_args()
command = "RESET:20"
sock4 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock6 = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock4.settimeout(0)
sock6.settimeout(0)
while 1:
sock4.sendto(command.encode(), (results.server, results.port))
sock6.sendto(command.encode(), (results.server, results.port))
time.sleep(1)
try:
data4, addr4 = sock4.recvfrom(100)
except BlockingIOError:
print("No data from ipv4")
else:
print(data4.decode())
break
try:
data6, addr6 = sock6.recvfrom(100)
except BlockingIOError:
print("No data from ipv6")
else:
print(data6.decode())
break
|
[
"[email protected]"
] | |
f0457b814ef72bf357cd55551afddde24bb8f179
|
9cbc458ae2fa1f2be6eeb6fb4f4dfc49db464f1b
|
/financial/productgroup/migrations/0001_initial.py
|
14fd26e06c60016d2d9401b4c4f5ffac79deec65
|
[] |
no_license
|
reykennethdmolina/projectfinsys
|
45f8bd3248ad4b11c78cee6beefab040e6d58343
|
a8604b9450b890e26b8f59f6acd76d64c415ccce
|
refs/heads/master
| 2021-01-11T17:36:01.648840 | 2017-01-23T11:21:04 | 2017-01-23T11:21:04 | 79,797,274 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,745 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2017-01-17 06:07
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Productgroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(max_length=10, unique=True)),
('description', models.CharField(max_length=250)),
('status', models.CharField(choices=[('A', 'Active'), ('I', 'Inactive'), ('C', 'Cancelled'), ('O', 'Posted'), ('P', 'Printed')], default='A', max_length=1)),
('enterdate', models.DateTimeField(auto_now_add=True)),
('modifydate', models.DateTimeField(default=datetime.datetime(2017, 1, 17, 14, 7, 34, 668000))),
('isdeleted', models.IntegerField(default=0)),
('enterby', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='productgroup_enter', to=settings.AUTH_USER_MODEL)),
('modifyby', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='productgroup_modify', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-pk'],
'db_table': 'productgroup',
'permissions': (('view_productgroup', 'Can view productgroup'),),
},
),
]
|
[
"[email protected]"
] | |
d003323768ea7f4519c90921a33b9eb198732852
|
69033ac834a34f10df535f102197d3af05e5ee69
|
/cmstack/codegen/tvmgen/tvm_translation.py
|
94d23726e2acc6b4c753f6d6f9920df4d7801b75
|
[
"Apache-2.0"
] |
permissive
|
he-actlab/cdstack
|
126c3699074bf6ef30f9f9246704069d27e9e614
|
38f605cfa299bf97b5875a19f9fd811a2671d56f
|
refs/heads/master
| 2023-04-10T10:42:10.199207 | 2019-10-03T02:12:49 | 2019-10-03T02:12:49 | 354,713,812 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 19,582 |
py
|
import tvm
import numpy as np
from hdfg import hdfgutils
from hdfg import load_store
from tvm.contrib import graph_runtime
from tvm.relay import op as _op
from hdfg.passes.flatten import flatten_graph, is_literal, is_number
from hdfg.passes.node_mapping import map_nodes
from codegen.codegen_utils import CMLANG_CAST_MAP
from tvm import relay
from tvm.relay.testing.init import Xavier
import codegen as c
from tvm.relay.testing import layers, init
from hdfg.hdfg_pb2 import Component, Program
from hdfg.visualize import *
import inspect
import json
import importlib
import logging
import os
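# benchmark_execution: build the Relay module for the LLVM CPU target, run it
# through the graph runtime on random input, and (when measure=True) report the
# mean and standard deviation of 20 timed inference runs.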
def benchmark_execution(mod,
params,
measure=True,
data_shape=(1, 3, 224, 224),
out_shape=(1, 1000),
dtype='float32'):
def get_tvm_output(mod, data, params, target, ctx, dtype='float32'):
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod, target, params=params)
m = graph_runtime.create(graph, lib, ctx)
# set inputs
m.set_input("data", data)
m.set_input(**params)
m.run()
out = m.get_output(0, tvm.nd.empty(out_shape, dtype))
if measure:
print("Evaluate graph runtime inference time cost...")
ftimer = m.module.time_evaluator("run", ctx, number=1, repeat=20)
# Measure in millisecond.
prof_res = np.array(ftimer().results) *1000
print("Mean inference time (std dev): %.2f ms (%.2f ms)" %
(np.mean(prof_res), np.std(prof_res)))
return out.asnumpy()
# random input
data = np.random.uniform(size=data_shape).astype(dtype)
target = "llvm"
ctx = tvm.cpu(0)
tvm_out = get_tvm_output(mod, tvm.nd.array(data.astype(dtype)), params,
target, ctx, dtype)
class TvmTranslation(object):
def __init__(self, onnx_proto, run_async=False):
self.input_proto = onnx_proto
self.output_dir, self.output_file = os.path.split(self.input_proto)
self.proto_name = self.output_file.split('.')[0]
self.program = load_store.load_program(self.input_proto)
self.graph = self.program.graph
self.templates = self.program.templates
self.components = {}
self.includes = []
self.functions = []
self.structs = []
self.signature_map = {}
self.initializer = None
self.header = []
self.exec = []
self.run_async = run_async
self.load_config()
self.tvm_passes()
self.create_tvm_graph()
self.execute_graph()
def load_config(self):
config_path = os.path.dirname(os.path.realpath(__file__)) + "/tvm_config.json"
with open(config_path, 'r') as config_file:
config_data = config_file.read()
self.translator_config = json.loads(config_data)
def execute_graph(self):
mod, params = init.create_workload(self.tvm_func)
benchmark_execution(mod, params, data_shape=(1, 3, 416, 416), out_shape=(1, 125, 14, 14))
# benchmark_execution(mod, params)
# print(f"Module: {mod}")
# #
# # for p in params.keys():
# # print(f"Key: {p}, shape: {params[p].shape}")
def tvm_passes(self):
self.tvm_graph = Component(name="tvm_graph_" + str(self.proto_name))
edge_node_ids = {'edges': {},
'nodes': {}}
self.tvm_graph.statement_graphs.extend([])
map_nodes(self.graph, self.templates, [], self.translator_config)
#
flatten_graph(self.tvm_graph, self.graph, self.templates, '', edge_node_ids, {})
flattened_graph_attr = hdfgutils.make_attribute('flattened', self.tvm_graph)
self.program.attributes['flattened_graph'].CopyFrom(flattened_graph_attr)
def create_tvm_graph(self):
self.graph_variables = {}
output_id = None
assert len(self.tvm_graph.input) == 1
input_name = self.tvm_graph.input[0]
input_shape = self.get_arg_attribute("shape", input_name)
self.graph_variables[input_name] = self.get_func('tvm.relay.var')(input_name, shape=input_shape)
for n in self.tvm_graph.sub_graph:
op_cat = hdfgutils.get_attribute_value(n.attributes['op_cat'])
if op_cat == 'mapped_node':
op_context = str(n.name).rsplit("/", 1)
if len(op_context) > 1 and op_context[0] != 'main':
scope = op_context[0] + '/'
else:
scope = ''
op_config = self.translator_config['ops'][n.op_type]
op_func = self.get_func(op_config['op_name'])
args, kwargs, output_id = self.create_op_args(n.op_type, n, self.templates[n.op_type], scope)
if len(output_id) == 1:
self.graph_variables[output_id[0]] = op_func(*args, **kwargs)
if output_id[0] in list(self.tvm_graph.edge_info):
iedge = self.tvm_graph.edge_info[output_id[0]]
if iedge.name != output_id[0]:
self.graph_variables[str(iedge.name)] = self.graph_variables[output_id[0]]
else:
temp = op_func(*args, **kwargs)
if not hasattr(temp, '__len__'):
logging.error(f"Size mismatch between output of {n.op_type} which has length 1 output"
f"Supplied config outputs: {output_id}")
exit(1)
elif len(temp) != len(output_id):
logging.error(f"Size mismatch between output of {n.op_type} which has length {len(temp)} output"
f"Supplied config outputs: {output_id}")
exit(1)
for i in range(len(temp)):
self.graph_variables[output_id[i]] = temp[i]
if output_id[i] in list(self.tvm_graph.edge_info):
iedge = self.tvm_graph.edge_info[output_id[i]]
if iedge.name != output_id[i]:
self.graph_variables[str(iedge.name)] = self.graph_variables[output_id[i]]
if not output_id:
logging.error(f"No nodes mapped for graph")
exit(1)
elif len(output_id) != 1:
logging.error(f"More than one output supplied for graph: {output_id}")
exit(1)
self.tvm_func = relay.Function(relay.analysis.free_vars(self.graph_variables[output_id[0]]), self.graph_variables[output_id[0]])
def create_op_args(self, op_name, node, node_signature, scope):
op_config = self.translator_config['ops'][op_name]
instance_args = hdfgutils.get_attribute_value(node.attributes['ordered_args'])
signature_args = hdfgutils.get_attribute_value(node_signature.attributes['ordered_args'])
default_map = self.create_default_map(self.templates[op_name])
for i in range(len(instance_args)):
instance_args[i] = scope + instance_args[i]
args = self.get_ordered_args(op_config, signature_args, instance_args, default_map, op_name, scope)
kwargs = self.get_kwargs(op_config, signature_args, instance_args,default_map, op_name, scope)
output_keys = self.get_output_keys(op_config, signature_args, instance_args, op_name, scope)
return args, kwargs, output_keys
def get_ordered_args(self, op_config, signature_args, instance_args,default_map, op, scope):
args = []
for a in op_config['positional_arguments']:
if a not in op_config['arg_map'].keys():
logging.error(f"{a} not found in argument map for op {op}. Please check config")
exit(1)
arg = op_config['arg_map'][a]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
if default_map[signature_args[idx]] is None:
logging.error(f"Error! No default argument for unspecified parameter {arg} in {op}, name: {signature_args[idx]}")
exit(1)
if op_config['arg_map'][a]['init_func']:
var = self.init_var(op_config['arg_map'][a], default_map[signature_args[idx]], literal=True)
elif op_config['arg_map'][a]['type'] in CMLANG_CAST_MAP.keys():
var = default_map[signature_args[idx]]
else:
logging.error(f"Unable to resolve argument {default_map[signature_args[idx]]} for keyword {a}={signature_args[arg]}")
var = None
exit(1)
else:
instance_arg = instance_args[idx]
if instance_arg in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_arg]
ename = edge.name
else:
ename = instance_arg
if ename in self.graph_variables.keys() and instance_arg not in self.graph_variables.keys():
var = self.graph_variables[ename]
elif instance_arg not in self.graph_variables.keys():
if op_config['arg_map'][a]['init_func']:
var = self.init_var(op_config['arg_map'][a], instance_arg)
if op_config['arg_map'][a]['arg_type'] != 'parameter':
self.graph_variables[instance_arg] = var
elif op_config['arg_map'][a]['type'] in CMLANG_CAST_MAP.keys():
var = CMLANG_CAST_MAP[op_config['arg_map'][a]['type']](instance_arg)
else:
logging.error(f"Unable to resolve argument {instance_arg} for keyword {a}={signature_args[arg]}")
var = None
exit(1)
else:
var = self.graph_variables[instance_arg]
args.append(var)
return args
def get_kwargs(self, op_config, signature_args, instance_args,default_map, op, scope):
kwargs = {}
for k in op_config['keyword_arguments'].keys():
if op_config['keyword_arguments'][k] not in op_config['arg_map'].keys():
logging.error(f"Key id {k} with value {op_config['keyword_arguments'][k]} not found in argument map for op {op}."
f" Please check config")
exit(1)
id = op_config['keyword_arguments'][k]
arg = op_config['arg_map'][id]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
if default_map[signature_args[idx]] is None:
logging.error(f"Error! No default argument for unspecified parameter {arg} in {op}, name: {signature_args[idx]}")
exit(1)
if op_config['arg_map'][id]['init_func']:
var = self.init_var(op_config['arg_map'][id], default_map[signature_args[idx]], literal=True)
elif op_config['arg_map'][id]['type'] in CMLANG_CAST_MAP.keys():
var = default_map[signature_args[idx]]
else:
logging.error(f"Unable to resolve argument {default_map[signature_args[idx]]} for keyword {id}={signature_args[arg]}")
var = None
exit(1)
else:
instance_arg = instance_args[idx]
if instance_arg in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_arg]
ename = edge.name
else:
ename = instance_arg
if ename in self.graph_variables.keys() and instance_arg not in self.graph_variables.keys():
var = self.graph_variables[ename]
elif instance_arg not in self.graph_variables.keys():
if op_config['arg_map'][id]['init_func']:
var = self.init_var(op_config['arg_map'][id], instance_arg)
if op_config['arg_map'][id]['arg_type'] != 'parameter':
self.graph_variables[instance_arg] = var
elif op_config['arg_map'][id]['type'] in CMLANG_CAST_MAP.keys():
var = CMLANG_CAST_MAP[op_config['arg_map'][id]['type']](instance_arg)
else:
logging.error(f"Unable to resolve argument {instance_arg} for keyword {id}={signature_args[arg]}")
exit(1)
else:
var = self.graph_variables[instance_arg]
kwargs[k] = var
return kwargs
def get_output_keys(self, op_config, signature_args, instance_args, op, scope):
output_keys = []
for o in op_config['op_output']:
if o not in op_config['arg_map'].keys():
logging.error(f"Key id {o} with value {op_config['keyword_arguments'][o]} not found in argument map for op {op}."
f" Please check config")
exit(1)
arg = op_config['arg_map'][o]['key']
if arg not in signature_args:
logging.error(f"Argument {arg} not found in signature list {signature_args} for op {op}")
exit(1)
idx = signature_args.index(arg)
if idx >= len(instance_args):
logging.error(f"Error! Cannot assign output {o} to unspecified parameter {signature_args[idx]}")
exit(1)
output_keys.append(instance_args[idx])
return output_keys
def create_default_map(self, template):
default_map = {}
ordered_args = hdfgutils.get_attribute_value(template.attributes['ordered_args'])
for a in ordered_args:
if a not in list(template.edge_info):
logging.error(f"Argument {a} not found in edges for {template.op_type}")
edge = template.edge_info[a]
if 'default' in list(edge.attributes):
dtype = hdfgutils.get_attribute_value(edge.attributes['type'])
default_map[a] = CMLANG_CAST_MAP[dtype](hdfgutils.get_attribute_value(edge.attributes['default']))
else:
default_map[a] = None
return default_map
def init_var(self, var, instance_name, literal=False):
args = []
kwargs = {}
arg_type = var['arg_type']
if isinstance(instance_name, str):
id = instance_name.rsplit('/', 1)
if len(id) > 1:
id = id[-1]
else:
id = id[0]
else:
id = str(instance_name).rsplit('/', 1)
if len(id) > 1:
id = id[-1]
else:
id = id[0]
if arg_type == 'parameter' and not literal and not is_literal(id):
if instance_name not in list(self.tvm_graph.edge_info):
logging.error(f"Unable to get value for parameter {instance_name}")
exit(1)
edge = self.tvm_graph.edge_info[instance_name]
if 'value' not in list(edge.attributes):
logging.error(f"Could not find literal for parameter argument {instance_name}.\n"
f"Possible attributes: {list(edge.attributes)}")
exit(1)
value = hdfgutils.get_attribute_value(edge.attributes['value'])
elif is_literal(id) and isinstance(instance_name, str):
if id in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[id]
value = hdfgutils.get_attribute_value(edge.attributes['value'])
elif instance_name in list(self.tvm_graph.edge_info):
edge = self.tvm_graph.edge_info[instance_name]
value = hdfgutils.get_attribute_value(edge.attributes['value'])
else:
logging.error(f"Could not find literal for parameter argument {instance_name} with id {id}.\n"
f"var: {var['key']}")
exit(1)
else:
value = instance_name
for a in var['init_func_args']:
arg_result = self.get_arg_attribute(a, value, literal=literal)
args.append(arg_result)
for k in var['init_func_kw'].keys():
kwargs[k] = self.get_arg_attribute(var['init_func_kw'][k], value, literal=literal)
if len(kwargs.keys()) == 0:
var = self.get_func(var['init_func'])(*args)
else:
var = self.get_func(var['init_func'])(*args, **kwargs)
return var
def get_arg_attribute(self, key, instance_name, literal=False):
if isinstance(key, list):
arg = []
for k in key:
arg.append(self.get_arg_attribute(k, instance_name, literal=literal))
return arg
elif key == 'name':
return instance_name
elif key == 'shape':
if literal:
logging.error(f"Cannot get shape for literal value {instance_name} as attribute")
exit(1)
edge = self.tvm_graph.edge_info[instance_name]
if 'dimensions' not in list(edge.attributes):
logging.error(f"No dimensions for edge {instance_name}")
tuple_dims = ()
else:
dimensions = hdfgutils.get_attribute_value(edge.attributes['dimensions'])
tuple_dims = tuple(int(d) if is_number(d) else d for d in dimensions)
return tuple_dims
elif key == 'type':
if literal:
return type(instance_name).__name__
edge = self.tvm_graph.edge_info[instance_name]
if 'type' not in list(edge.attributes):
logging.error(f"No type for edge {instance_name}")
dtype = 'float32'
else:
dtype = hdfgutils.get_attribute_value(edge.attributes['type'])
return dtype
elif instance_name in self.graph_variables.keys():
return self.graph_variables[instance_name]
else:
logging.error(f"Could not create attribute for {instance_name} with key {key}.")
exit(1)
def get_args(self, names, vars):
args = []
for n in names:
if n not in vars.keys():
logging.error(f"Operation argument {n} not in created variables: {vars.keys()}")
else:
args.append(vars[n])
return args
def arg_conversion(self, instance_arg, target_arg):
if isinstance(target_arg, tuple):
result = tuple(instance_arg for _ in range(len(target_arg)))
return result
else:
return instance_arg
def get_func(self, function_name):
mod_id, func_id = function_name.rsplit('.', 1)
mod = importlib.import_module(mod_id)
func = getattr(mod, func_id)
return func
|
[
"[email protected]"
] | |
ec662f925b59e24fde024e4243aba389f33e0432
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/pa3/benchmarks/sieve-14.py
|
52bce3a36228b57f2739edf857ed492498c0ab0c
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,587 |
py
|
# A resizable list of integers
class Vector(object):
items: [$ID] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
# Data
v:Vector = None
i:int = 0
# Crunch
v = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
|
[
"[email protected]"
] | |
baf708e314909392f9b21a7438729f05ede2a3fe
|
bf3d5dd23f46091aa7b76eef97b394a9664193b8
|
/FaceD.py
|
fb97ccc0007f19193b7ab154730788cdf4cb2610
|
[] |
no_license
|
DEV-AB-03/FaceRecognition-using-SVM-and-OpenCV
|
d717a1e939a2c8f48f7face4b5d6ec74ce6c166e
|
1b46aebca36831d4eedfd06fc33334accd3e0cf8
|
refs/heads/master
| 2023-07-25T06:20:34.000461 | 2021-09-06T08:45:34 | 2021-09-06T08:45:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 899 |
py
|
import cv2
import numpy as np
import os
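# Capture webcam frames, detect faces with a Haar cascade, and on the 'c' key
# store a 100x100 grayscale face crop labelled with the entered name ('q' quits).
# The flattened samples are stacked and appended to face_data.npy.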
cam = cv2.VideoCapture(0)
cl = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
name = input("Enter your Name: ")
frames = []
outputs = []
while True:
ret, frame = cam.read()
if ret:
faces = cl.detectMultiScale(frame)
for x,y,w,h in faces:
cut = frame[y:y+h,x:x+w]
fix = cv2.resize(cut,(100,100))
gray = cv2.cvtColor(fix, cv2.COLOR_BGR2GRAY)
cv2.imshow("Cut", gray)
if cv2.waitKey(1) == ord("c"):
frames.append(gray.flatten())
outputs.append([name])
if cv2.waitKey(1) == ord("q"):
break
X = np.array(frames)
y = np.array(outputs)
data = np.hstack([y, X])
f_name = "face_data.npy"
if os.path.exists(f_name):
old = np.load(f_name)
data = np.vstack([old, data])
np.save(f_name, data)
cam.release()
cv2.destroyAllWindows()
|
[
"[email protected]"
] | |
334020818b42ba8d14977db789dc6055115ea242
|
9bb6883e170e822a011096a993c8efa9e6af290a
|
/venv/bin/django-admin
|
8a9d97684661f3e6e6f0043d19524a2d89192669
|
[] |
no_license
|
alexandersilvera/Centro
|
ab4182d8334e16c97a0e91d4230f298b9135cd9f
|
1562e210d0788cca8f13c02b504523355667a3fa
|
refs/heads/master
| 2023-04-29T09:06:35.033052 | 2020-07-26T23:00:05 | 2020-07-26T23:00:05 | 224,926,832 | 0 | 0 | null | 2023-04-21T20:41:40 | 2019-11-29T21:54:44 |
Python
|
UTF-8
|
Python
| false | false | 304 |
#!/home/alexander/PycharmProjects/Centro/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
|
[
"[email protected]"
] | ||
ba1688de52c9aed52049beab16fcbf7d463add7d
|
a04296ba7b09f3a7b7540a14e8ef6fcf683ed392
|
/common/mergelist.py
|
dac26a7dac26dd95d77cfed036796fb50f267e7f
|
[
"MIT"
] |
permissive
|
Hasi-liu/FXTest
|
0a3acf9d27d9f784f378fc9f9c13deb9e678adbe
|
150012f87021b6b8204fd342c62538c10d8dfa85
|
refs/heads/master
| 2023-05-11T00:27:57.224448 | 2020-05-10T02:58:11 | 2020-05-10T02:58:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 551 |
py
|
"""
@author: lileilei
@file: mergelist.py
@time: 2018/1/29 13:55
"""
"""
Merging of lists
"""
def hebinglist(list1:list):
new=[]
for m in list1:
for h in m:
new.append(h)
return new
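# listmax: cast each element to float (non-numeric entries count as 0) and
# return the maximum, minimum and mean of the list.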
def listmax(list2:list):
list_int=[]
for i in list2:
try:
list_int.append(float(i))
except:
list_int.append(0)
nsm=0
for j in range(len(list_int)):
nsm+=float(list_int[j])
ma=max(list_int)
minx=min(list_int)
pingjun=nsm/(len(list_int))
return ma,minx,pingjun
|
[
"[email protected]"
] | |
1a0c04adcfd792bf56fda3703659c4610f36e3cf
|
e440f1e8136f79f59c472ecf450d6676395dbb05
|
/modules/tasks/led_task.py
|
d8a43e77d5acc43b68746fbc8291a3f336ac8b72
|
[] |
no_license
|
henrynester/FlightComputer-sw
|
5e56a3a25d8986b625229254f8ea45ed9ca204d3
|
c9254e340e53022bfd9ebdaf783900124a17ebc0
|
refs/heads/master
| 2023-07-02T20:45:57.408041 | 2021-08-12T14:05:38 | 2021-08-12T14:05:38 | 376,691,813 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 934 |
py
|
from modules.tasks.task import Task
from modules.mcl.system_state import SystemState, State
# import threading
from modules.drivers.led import LEDDriver, LEDColor
# class KeyboardThread(threading.Thread):
# def __init__(self, input_cbk=None, name='keyboard-input-thread'):
# self.input_cbk = input_cbk
# super(KeyboardThread, self).__init__(name=name)
# self.start()
# def run(self):
# while True:
# try:
# self.input_cbk(input()) # waits to get input + Return
# except (EOFError):
# return
class LEDTask(Task):
def __init__(self, state: SystemState):
# self.next_phase: Phase = None
self.driver = LEDDriver()
super().__init__('LED', state)
def actuate(self):
# print(self.state.phase)
self.driver.color = LEDColor.RED
def deinitialize(self):
self.driver.deinitialize()
|
[
"[email protected]"
] | |
e2a354f7de78bb119094313ee9b25118e374ca6c
|
ba2d449486c58578581b8de7b2b6f21074be6274
|
/02 Linked Lists/2-8-Loop-Detection.py
|
6af5ea7f052ea96436e98812922ad1180e7fa7bb
|
[] |
no_license
|
theoliao1998/Cracking-the-Coding-Interview
|
4e0abef8659a0abf33e09ee78ce2f445f8b5d591
|
814b9163f68795238d17aad5b91327fbceadf49e
|
refs/heads/master
| 2020-12-09T12:46:10.845579 | 2020-07-25T05:39:19 | 2020-07-25T05:39:19 | 233,306,927 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,334 |
py
|
# Loop Detection: Given a circular linked list, implement an algorithm that returns the node at the
# beginning of the loop.
# DEFINITION
# Circular linked list: A (corrupt) linked list in which a node's next pointer points to an earlier node, so
# as to make a loop in the linked list.
# EXAMPLE
# Input: A -> B -> C - > D -> E -> C [the same C as earlier]
# Output: C
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
def append(self, x):
n = self
while n.next:
n = n.next
n.next = ListNode(x)
def loopDectection(n):
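# Phase 1: advance slow/fast pointers until they meet; the iteration count is a
# multiple of the cycle length. Phase 2: move one pointer that many steps ahead,
# then advance both in lockstep; they meet at the node where the loop begins.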
def getLoopLength(n):
slow, fast = n, n.next
length = 0
while fast:
length += 1
if slow == fast:
return length
slow = slow.next
if not fast.next:
return 0
fast = fast.next.next
l = getLoopLength(n)
if not l:
return None
slow = n
fast = n
for _ in range(l):
fast = fast.next
while slow != fast:
slow, fast = slow.next, fast.next
return slow
# A = ListNode(1)
# B = ListNode(2)
# C = ListNode(3)
# D = ListNode(4)
# E = ListNode(5)
# A.next = B
# B.next = C
# C.next = D
# D.next = E
# E.next = C
# print(loopDectection(A).val)
|
[
"[email protected]"
] | |
238e5c216e8f2186525a25554986f4209c1d13f8
|
f1aafebdef5195b3654d9df533de8b28278ce087
|
/Metodos-de-encadenamiento.py
|
6f7a31787b813b3cdb473aafe33515844ebea57e
|
[] |
no_license
|
clauchile/poo-python
|
57347684eb6d34868ba01d1c4de3f06fb605bd31
|
57e42ba5fe584799cd59b57e707b58f7a4b4c8b8
|
refs/heads/master
| 2023-06-05T12:34:33.316931 | 2021-06-30T03:32:28 | 2021-06-30T03:32:28 | 377,326,567 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,215 |
py
|
class Usuario:
def __init__(self,name,apellido):
self.name = name
self.apellido = apellido
self.account_balance = 0
def __str__(self):
return f"El nombre del Usario es: {self.name}"
def make_deposit(self, amount):
self.account_balance += amount
return self
def make_withdrawal(self, amount):
self.account_balance-=amount
return self
def display_user_balance(self):
print(f'Usuario: {self.name} {self.apellido}, Saldo: {self.account_balance}')
return self
# def transfer_money(self, other_usuario, amount):
# other_usuario.make_deposit = other_usuario.account_balance +=amount
# Instance 1
UsuarioOne = Usuario("Claudio","Bravo")
UsuarioOne.make_deposit(500).make_deposit(5000).make_deposit(3300).make_withdrawal(2000).display_user_balance()
# Instance 2
UsuarioTwo= Usuario("Charles","Aranguiz")
UsuarioTwo.make_deposit(5900).make_deposit(5000).make_withdrawal(2000).make_withdrawal(2780).display_user_balance()
# Instance 3
UsuarioThree= Usuario("Eugenio","Mena")
UsuarioThree.make_deposit(15200).make_deposit(2000).make_withdrawal(1780).make_withdrawal(2780).display_user_balance()
|
[
"[email protected]"
] | |
c9e8030c514263ec296fa2e2a9458e304eb83d2d
|
66b3bef39a9e9c4b3b1cfedb456ab40060a29bcf
|
/crawler.py
|
939cf26873791fcb8819dc2bfb3e64723e7b3f67
|
[] |
no_license
|
MiniHanWang/NASDAQ-crawler
|
e9f45e85c3c793c12d65a6989bf8f8d3f0062388
|
79072e8db46d94015caf5020ee45f1918bfc1e88
|
refs/heads/main
| 2023-03-30T04:13:42.757793 | 2021-03-08T09:16:02 | 2021-03-08T09:16:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,396 |
py
|
#@Time: 3/8/2021 2:19 PM
#@Author: Mini (Wang Han)
#@Site:
#@File: crawler.py
'''
Goal: scrape https://www.cnyes.com/usastock/stocks/NASDAQ.html
NASDAQ: uses the day's opening price (Open); High: the day's highest price; Low: the day's lowest price; Close: the day's closing price; trading volume (Volume), adjusted share price (Adjust);
'''
import urllib.request
from bs4 import BeautifulSoup
import pymysql.cursors
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.proxy import ProxyType
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
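# Walk through the 207 result pages of the QuoteMedia history widget, scroll
# each page so the full table renders, parse it with BeautifulSoup, and insert
# every row into the `nasdaq` MySQL table.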
for m in range(1,208):
url3 = 'https://app.quotemedia.com/quotetools/clientForward?targetURL=https%3A%2F%2Fstage.cnyes.com%2Fusastock%2FHistoryQM.aspx%3Fcode%3DCOMP&targetsym=&targettype=&targetex=&qmpage=true&action=showHistory&symbol=COMP&page=' + str(
m) + '&startDay=1&startMonth=0&startYear=2001&endDay=31&endMonth=11&endYear=2020&perPage=25 '
bs = webdriver.Chrome(r"G:\projects\Po\crawler\chrome\chromedriver.exe")
bs.get(url3)
bs.maximize_window()
for i in range(0, 1):
bs.execute_script(
"window.scrollTo(0, document.body.scrollHeight); var lenOfPage=document.body.scrollHeight; return lenOfPage;")
time.sleep(1)
bs.execute_script(
"window.scrollTo(0, document.body.scrollHeight); var lenOfPage=document.body.scrollHeight; return lenOfPage;")
time.sleep(1)
bs.execute_script(
"window.scrollTo(0, document.body.scrollHeight); var lenOfPage=document.body.scrollHeight; return lenOfPage;")
time.sleep(1)
bs.execute_script(
"window.scrollTo(0, document.body.scrollHeight); var lenOfPage=document.body.scrollHeight; return lenOfPage;")
time.sleep(1)
bs.execute_script(
"window.scrollTo(0, document.body.scrollHeight); var lenOfPage=document.body.scrollHeight; return lenOfPage;")
time.sleep(1)
print("finish scroll to the end ", str(i), "times")
# html3 = urllib.request.urlopen(url3).read().decode('utf-8')
html3 = bs.page_source
soup3 = BeautifulSoup(html3, 'lxml')
# Extract the data from the page
result32 = soup3.find_all(attrs={"class": "qm_history_historyContent"})
result32 = str(result32)
soup32 = BeautifulSoup(result32, 'lxml')
result33 = soup32.find_all(attrs={"class": "qm_maintext"})
item = {}
item['date'] = []
item['open'] = []
item['high'] = []
item['low'] = []
item['close'] = []
item['volume'] = []
item['adjust'] = []
for j in range(1, 26):
i = j * 11
item['date'].append(result33[i].string.replace(" ", ""))
item['open'].append(result33[i + 1].string.replace(" ", ""))
item['high'].append(result33[i + 2].string.replace(" ", ""))
item['low'].append(result33[i + 3].string.replace(" ", ""))
item['close'].append(result33[i + 4].string.replace(" ", ""))
item['volume'].append(result33[i + 10].string.replace(" ", ""))
item['adjust'].append(result33[i + 8].string.replace(" ", ""))
print(item)
'''
Database operations
'''
for i in range(0, len(item["date"])):
date = item['date'][i]
open = item['open'][i]
high = item['high'][i]
low = item['low'][i]
close = item['close'][i]
volume = item['volume'][i]
adjust = item['adjust'][i]
try:
# Open the database connection
connection = pymysql.connect(host="DESKTOP-D1AIO16", port=3307, user="mini", passwd="wangmianny111",
db="stock_manager", charset='utf8')
# Get a session cursor
with connection.cursor() as cursor:
# Build the SQL statement
sql = "insert into nasdaq (date,open,high,low,close,volume,adjust) values (%s,%s,%s,%s,%s,%s,%s)"
# Execute the SQL statement
cursor.execute(sql, (
date, open, high, low, close, volume, adjust))
# Commit to the database
connection.commit()
finally:
connection.close()
print("success with page",m)
|
[
"[email protected]"
] | |
9337e099bf9ff81091912bb90e98f59afe773fe5
|
d7ca36f20465870e67e7d6832f8e1b8348af12fc
|
/test/test_linear.py
|
ca9b35b8ac8886a9b4d3721c6f3eb6f8eb94d575
|
[] |
no_license
|
hlcr/LanguageNetworkAnalysis
|
c109e670534367c782fb71697a92a3ca95aba098
|
65f6c8086f3e4282b15359cc99cf57a682e6b814
|
refs/heads/master
| 2020-04-24T07:40:04.100213 | 2020-04-17T09:02:05 | 2020-04-17T09:02:05 | 171,805,371 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 546 |
py
|
from sklearn import linear_model
import numpy as np
import matplotlib.pyplot as plt
# Linear regression
clf = linear_model.LinearRegression()
# Train the model
np.array([1, 2, 3])
clf.fit(np.array([2, 4, 6]).reshape(-1,1), np.array([0, 2, 4]).reshape(-1,1))
# Regression parameters (coefficient and intercept)
a, b = clf.coef_, clf.intercept_
print(a)
print(b)
# # Plotting
# # 1. The actual data points
# plt.scatter(df['square_feet'], df['price'], color='blue')
#
# # 2. The fitted line
# plt.plot(df['square_feet'], regr.predict(df['square_feet'].reshape(-1,1)), color='red', linewidth=4)
#
# plt.show()
|
[
"[email protected]"
] | |
eec679bc8e8a903c116e1c4a9cc0fcfed3bde0af
|
f38c30595023f4f272121576b9e62ed2adbed7de
|
/contact_list.py
|
8c047db9d0fd3e5d8d18d73a7614d2fe2b25233d
|
[] |
no_license
|
devArist/contact_app_project
|
f1f19ed2cb4a9261575e5f182e4dcb28ba44e082
|
81d1d639d2e7a362490397d334345ce24a154789
|
refs/heads/main
| 2023-07-12T22:49:19.816760 | 2021-08-30T15:56:44 | 2021-08-30T15:56:44 | 401,285,071 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 161 |
py
|
from kivymd.uix.boxlayout import MDBoxLayout
from kivy.lang import Builder
Builder.load_file('contact_list.kv')
class ContactBoxLayout(MDBoxLayout):
pass
|
[
"[email protected]"
] | |
967b9b7a488621c301d0a13a06ac329ee1c59a89
|
3de732d6807286a0b9175b21f7c468482d5f656a
|
/nmr_analysis/nmran/dataio.py
|
112f13cad9ac6debea7d3cd2beeede6f5db1da08
|
[] |
no_license
|
serjinio/python_tutors
|
bfc94c22ddf4f2bb93d94a4f28c0287dafe1f9eb
|
ee3919d24c2e7073c35491a1dfb00edea20af4be
|
refs/heads/master
| 2021-01-17T10:32:31.194560 | 2016-04-20T12:36:56 | 2016-04-20T12:36:56 | 24,996,954 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,096 |
py
|
"""
Module with read & write procedures for datasets.
"""
import os
import pandas as pd
import numpy as np
import logging
import common
KNOWN_FORMATS = ['.smd', '.dat']
def loaddir(path):
"""Loads all known file formats in the specified directory. """
files = listdir(path)
dataobjs = {}
logging.debug('loading all data files from "{}"'
.format(path))
for f in files:
dataobjs[os.path.basename(f)] = load(f)
return dataobjs
def listdir(path):
files = [f for f in os.listdir(path) if
os.path.isfile(os.path.join(path, f))]
known_files = []
for f in files:
for ext in KNOWN_FORMATS:
if f.endswith(ext):
known_files.append(os.path.join(path, f))
continue
return known_files
def load(filepath):
"""General purpose load procedure for datasets of different formats."""
if not any(filepath.endswith(fmt) for fmt in KNOWN_FORMATS):
raise ValueError(('Input file seems to be of unknown format. '
'supported formats are: {}'.format(KNOWN_FORMATS)))
if filepath.endswith(KNOWN_FORMATS[0]):
return load_smd(filepath)
elif filepath.endswith(KNOWN_FORMATS[1]):
return load_dat(filepath)
def load_smd(filepath):
logging.debug('reading data file: "{}"'.format(filepath))
if not os.path.exists(filepath):
raise ValueError('Given file path: "{}" does not exist.'
.format(filepath))
df = pd.read_csv(filepath, sep='\s+', header=4)
# header params
with file(filepath, 'rb') as input_file:
npoints, dwell, sf1 = [input_file.readline()
for i in range(0, 3)]
npoints, dwell, sf1 = int(npoints[6:]), float(dwell[3:]), float(sf1[4:])
logging.debug('header data: npoints={}; dwell={}, sf1={}'
.format(npoints, dwell, sf1))
# name
df.columns = ['Re', 'Im']
# put data into complex values
df['fid'] = df['Re'] + df['Im'] * 1j
del df['Re']
del df['Im']
# make up time column (us)
df['t'] = [dwell * n for n in range(len(df))]
# index by time
df = df.set_index('t')
logging.debug('read dataframe of length: {}'.format(len(df)))
return df
def load_dat(filepath):
logging.debug('reading data file: "{}"'.format(filepath))
if not os.path.exists(filepath):
raise ValueError('Given file path: "{}" does not exist.'
.format(filepath))
df = pd.read_csv(filepath, sep='\s+', header=None, skipfooter=5)
# remove unnecessary columns
del df[5]
del df[4]
del df[3]
# name
df.columns = ['t', 'Re', 'Im']
# time in us
df['t'] = df['t'] * 1e6
# put data into complex values
df['fid'] = df['Re'] + df['Im'] * 1j
del df['Re']
del df['Im']
# index by time
df = df.set_index('t')
logging.debug('read dataframe of length: {}'.format(len(df)))
return df
if __name__ == '__main__':
common.configure_logging()
|
[
"[email protected]"
] | |
957ff1653c85cfecd70d02ebd89f8eacdd44670d
|
edadb81a126c0e683143ccd3e672978486077670
|
/tf_eager/models/alexnet.py
|
065a1a9e328cc3ed86236b3a0c45698c6de9e265
|
[
"MIT"
] |
permissive
|
SimmonsZhang/imagenet
|
8ea3c662a667a2b3bea57340d527624ea0e2b94b
|
3aba6440fc59296676b56fe25e5b8cb79b4af61e
|
refs/heads/master
| 2020-08-25T01:00:56.557792 | 2019-10-24T11:58:00 | 2019-10-24T11:58:00 | 216,938,512 | 0 | 0 |
MIT
| 2019-10-23T01:04:27 | 2019-10-23T01:04:26 | null |
UTF-8
|
Python
| false | false | 2,660 |
py
|
"""
Written by Matteo Dunnhofer - 2017
Definition of AlexNet architecture
"""
import tensorflow as tf
import tensorflow.contrib.eager as tfe
class AlexNet(tfe.Network):
def __init__(self, cfg, training):
super(AlexNet, self).__init__()
self.cfg = cfg
self.training = training
# convolutional layers
conv_init = tf.contrib.layers.xavier_initializer_conv2d()
self.conv1 = self.track_layer(tf.layers.Conv2D(96, 11, 4, 'SAME',
activation=tf.nn.relu,
kernel_initializer=conv_init))
self.pool1 = self.track_layer(tf.layers.MaxPooling2D(3, 2, 'VALID'))
self.conv2 = self.track_layer(tf.layers.Conv2D(256, 5, 1, 'SAME',
activation=tf.nn.relu,
kernel_initializer=conv_init))
self.pool2 = self.track_layer(tf.layers.MaxPooling2D(3, 2, 'VALID'))
self.conv3 = self.track_layer(tf.layers.Conv2D(384, 3, 1, 'SAME',
activation=tf.nn.relu,
kernel_initializer=conv_init))
self.conv4 = self.track_layer(tf.layers.Conv2D(384, 3, 1, 'SAME',
activation=tf.nn.relu,
kernel_initializer=conv_init))
self.conv5 = self.track_layer(tf.layers.Conv2D(256, 3, 1, 'SAME',
activation=tf.nn.relu,
kernel_initializer=conv_init))
self.pool5 = self.track_layer(tf.layers.MaxPooling2D(3, 2, 'VALID'))
# fully connected layers
fc_init = tf.contrib.layers.xavier_initializer()
self.fc1 = self.track_layer(tf.layers.Dense(4096,
activation=tf.nn.relu,
kernel_initializer=fc_init))
self.drop1 = self.track_layer(tf.layers.Dropout(self.cfg.DROPOUT))
self.fc2 = self.track_layer(tf.layers.Dense(4096,
activation=tf.nn.relu,
kernel_initializer=fc_init))
self.drop2 = self.track_layer(tf.layers.Dropout(self.cfg.DROPOUT))
self.out = self.track_layer(tf.layers.Dense(self.cfg.NUM_CLASSES,
kernel_initializer=fc_init))
def call(self, x):
""" Function that executes the model """
output = self.conv1(x)
output = tf.nn.lrn(output, depth_radius=2, bias=1.0, alpha=2e-05, beta=0.75)
output = self.pool1(output)
output = self.conv2(output)
output = tf.nn.lrn(output, depth_radius=2, bias=1.0, alpha=2e-05, beta=0.75)
output = self.pool2(output)
output = self.conv3(output)
output = self.conv4(output)
output = self.conv5(output)
output = self.pool5(output)
output = tf.layers.flatten(output)
output = self.fc1(output)
if self.training:
output = self.drop1(output)
output = self.fc2(output)
if self.training:
output = self.drop2(output)
output = self.out(output)
return output
|
[
"[email protected]"
] | |
4dba97d9673dcdda5542db94d65b9043335784e1
|
e7ad0a29b006bae9bd39818631b1ceb27e617783
|
/contrib/bitrpc/bitrpc.py
|
103bc5694bcaf577208d8ded734bb626c6bdfacc
|
[
"MIT"
] |
permissive
|
underline-project/underline-master
|
5526d69b6481b1426237df020ff002d013cf9395
|
922cdcd36f3bb41bf645cbba5a393d5bd6e10ac4
|
refs/heads/master
| 2021-04-03T05:07:13.771086 | 2018-03-11T16:40:58 | 2018-03-11T16:40:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,840 |
py
|
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:2332")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:2332")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Underline address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Underline address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
|
[
"[email protected]"
] | |
f8fd819578cdf0c4ba25044707bcb670940df771
|
954c28ccc46aa1f50dd67e35a468321a8592bcf6
|
/client/verta/verta/deployment/__init__.py
|
21d01d9d293f8b5ad1c9326a7a2f59074b0cefa9
|
[
"Apache-2.0"
] |
permissive
|
takeo91/modeldb
|
b697604da5d396628c1da5b32905bc09fd97f8e6
|
696e9d62157a5dfbd5db28d149b135d34479d154
|
refs/heads/master
| 2022-11-23T14:56:25.048959 | 2020-07-24T23:38:56 | 2020-07-24T23:38:56 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 131 |
py
|
# -*- coding: utf-8 -*-
from .deployedmodel import (
DeployedModel,
prediction_input_unpack,
prediction_io_cleanup,
)
|
[
"[email protected]"
] | |
3fac1ab5ba74db726d6c882728732192f1436cb1
|
d4ae54e561ae64285366279c420d86684e04aa37
|
/setup.py
|
f671c6f47778ed291797d818bab3c340eb794abc
|
[] |
no_license
|
GrayDevs/Python4Security
|
f4886e022e07339cedcd0a5c86eefb5db6e25993
|
f118f1547e16ef285e5af770d51d97356bcb19d3
|
refs/heads/master
| 2022-11-06T02:27:12.902588 | 2020-06-21T08:50:05 | 2020-06-21T08:50:05 | 263,555,914 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 518 |
py
|
from setuptools import setup, find_packages
setup(
name='python4Security',
version='2.1.11',
description='Searches through git repositories for high entropy strings, digging deep into commit history.',
url='https://github.com/GrayDevs/Python4Security',
author='Zakaria DJAZARI, Antoine PINON',
license='GNU',
packages=['python4Security'],
install_requires=[
#
],
entry_points = {
'console_scripts': ['python4Security = Python4Security.python4Security:main'],
},
)
|
[
"[email protected]"
] | |
b26c45b8a0ae7b082b96599a14f020f230ee3eca
|
8b16bd61c79113ff575def261e12f0e2125e4d90
|
/browser/migrations/0004_search_history.py
|
44305eecb73f1f97b11f14b0373386db7ecccd80
|
[] |
no_license
|
alvarantson/veebikaabits2.0
|
88f99e2fff8d0ef76daec3d3d3f4d6e19ed6d274
|
412d7d2fdc35582ba7210ea6108087a0d5ac9d7e
|
refs/heads/master
| 2020-12-02T10:59:05.173248 | 2020-01-03T10:00:36 | 2020-01-03T10:00:36 | 230,990,687 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 999 |
py
|
# Generated by Django 2.1.3 on 2018-11-28 11:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('browser', '0003_blacklist'),
]
operations = [
migrations.CreateModel(
name='search_history',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('search_item', models.CharField(max_length=999)),
('time_elapsed', models.CharField(max_length=999)),
('search_datetime', models.CharField(max_length=999)),
('items_total', models.CharField(max_length=999)),
('items_okidoki', models.CharField(max_length=999)),
('items_osta', models.CharField(max_length=999)),
('items_soov', models.CharField(max_length=999)),
('items_kuldnebors', models.CharField(max_length=999)),
],
),
]
|
[
"[email protected]"
] | |
55be3d9790defafee827f8d11c0671d535c13396
|
302f142abcb9cd8cc9a6f87cda1a3580cce8ad21
|
/.env/lib/python3.6/site-packages/pygments/lexers/unicon.py
|
c27b7fe0cf8f2b46b226365542c31220719cf12e
|
[
"Apache-2.0"
] |
permissive
|
Najafova/hls4ml_test
|
7229a85d5759eac4b177a47f9af7c6b4899dd006
|
21f9951b3f0ca88cc2312a8863977c1477b9233e
|
refs/heads/master
| 2023-01-19T15:11:18.861261 | 2020-11-22T17:17:51 | 2020-11-22T17:17:51 | 289,086,269 | 0 | 0 |
Apache-2.0
| 2020-11-22T17:17:52 | 2020-08-20T18:59:09 |
Python
|
UTF-8
|
Python
| false | false | 17,817 |
py
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.unicon
~~~~~~~~~~~~~~~~~~~~~~
Lexers for the Icon and Unicon languages, including ucode VM.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, words, using, this
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['IconLexer', 'UcodeLexer', 'UniconLexer']
class UniconLexer(RegexLexer):
"""
For Unicon source code.
.. versionadded:: 2.4
"""
name = 'Unicon'
aliases = ['unicon']
filenames = ['*.icn']
mimetypes = ['text/unicon']
flags = re.MULTILINE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'#.*?\n', Comment.Single),
(r'[^\S\n]+', Text),
(r'class|method|procedure', Keyword.Declaration, 'subprogram'),
(r'(record)(\s+)(\w+)',
bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
(r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
(r'(&null|&fail)\b', Keyword.Constant),
(r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
             r'&cset|&current|&dateline|&date|&digits|&dump|'
r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
r'&eventcode|&eventvalue|&eventsource|&e|'
r'&features|&file|&host|&input|&interval|&lcase|&letters|'
r'&level|&line|&ldrag|&lpress|&lrelease|'
r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
r'&phi|&pick|&pi|&pos|&progname|'
             r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|'
r'&shift|&source|&storage|&subject|'
r'&time|&trace|&ucase|&version|'
r'&window|&x|&y', Keyword.Reserved),
(r'(by|of|not|to)\b', Keyword.Reserved),
(r'(global|local|static|abstract)\b', Keyword.Reserved),
(r'package|link|import', Keyword.Declaration),
(words((
'break', 'case', 'create', 'critical', 'default', 'end', 'all',
'do', 'else', 'every', 'fail', 'if', 'import', 'initial',
'initially', 'invocable', 'next',
'repeat', 'return', 'suspend',
'then', 'thread', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
(words((
'Abort', 'abs', 'acos', 'Active', 'Alert', 'any', 'Any', 'Arb',
'Arbno', 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
'Bal', 'bal', 'Bg', 'Break', 'Breakx',
'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
'classname', 'Clip', 'Clone', 'close', 'cofail', 'collect',
'Color', 'ColorValue', 'condvar', 'constructor', 'copy',
'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
'dbcolumns', 'dbdriver', 'dbkeys', 'dblimits', 'dbproduct',
'dbtables', 'delay', 'delete', 'detab', 'display', 'DrawArc',
'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
'DrawTorus', 'dtor',
'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
'Fail', 'fcntl', 'fdup', 'Fence', 'fetch', 'Fg', 'fieldnames',
'filepair', 'FillArc', 'FillCircle', 'FillPolygon',
'FillRectangle', 'find', 'flock', 'flush', 'Font', 'fork',
'FreeColor', 'FreeSpace', 'function',
'get', 'getch', 'getche', 'getegid', 'getenv', 'geteuid',
'getgid', 'getgr', 'gethost', 'getpgrp', 'getpid', 'getppid',
'getpw', 'getrusage', 'getserv', 'GetSpace', 'gettimeofday',
'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
'kbhit', 'key', 'keyword', 'kill',
'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
'lock', 'log', 'Lower', 'lstat',
'many', 'map', 'match', 'MatrixMode', 'max', 'member',
'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
'MultMatrix', 'mutex',
'name', 'NewColor', 'Normals', 'NotAny', 'numeric',
'open', 'opencl', 'oprec', 'ord', 'OutPort',
'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
'PlayAudio', 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
'PushTranslate', 'put',
'QueryPointer',
'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
'Rtab', 'rtod', 'runerr',
'save', 'Scale', 'seek', 'select', 'send', 'seq',
'serial', 'set', 'setenv', 'setgid', 'setgrent',
'sethostent', 'setpgrp', 'setpwent', 'setservent',
'setuid', 'signal', 'sin', 'sort', 'sortf', 'Span',
'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
'StopAudio', 'string', 'structure', 'Succeed', 'Swi',
'symlink', 'sys_errstr', 'system', 'syswrite',
'Tab', 'tab', 'table', 'tan',
'Texcoord', 'Texture', 'TextWidth', 'Translate',
'trap', 'trim', 'truncate', 'trylock', 'type',
'umask', 'Uncouple', 'unlock', 'upto', 'utime',
'variable', 'VAttrib',
'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
'write', 'WriteImage', 'writes', 'WSection',
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
(r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|'
r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'\^', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
(r"[\[\]]", Punctuation),
(r"<>|=>|[()|:;,.'`{}%&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
(r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
(r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
(r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
],
'subprogram': [
(r'\(', Punctuation, ('#pop', 'formal_part')),
(r';', Punctuation, '#pop'),
(r'"[^"]+"|\w+', Name.Function),
include('root'),
],
'type_def': [
(r'\(', Punctuation, 'formal_part'),
],
'formal_part': [
(r'\)', Punctuation, '#pop'),
(r'\w+', Name.Variable),
(r',', Punctuation),
(r'(:string|:integer|:real)\b', Keyword.Reserved),
include('root'),
],
}
class IconLexer(RegexLexer):
"""
Lexer for Icon.
.. versionadded:: 1.6
"""
name = 'Icon'
aliases = ['icon']
filenames = ['*.icon', '*.ICON']
mimetypes = []
flags = re.MULTILINE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'#.*?\n', Comment.Single),
(r'[^\S\n]+', Text),
(r'class|method|procedure', Keyword.Declaration, 'subprogram'),
(r'(record)(\s+)(\w+)',
bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
(r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
(r'(&null|&fail)\b', Keyword.Constant),
(r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
             r'&cset|&current|&dateline|&date|&digits|&dump|'
r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
r'&eventcode|&eventvalue|&eventsource|&e|'
r'&features|&file|&host|&input|&interval|&lcase|&letters|'
r'&level|&line|&ldrag|&lpress|&lrelease|'
r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
r'&phi|&pick|&pi|&pos|&progname|'
             r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|'
r'&shift|&source|&storage|&subject|'
r'&time|&trace|&ucase|&version|'
r'&window|&x|&y', Keyword.Reserved),
(r'(by|of|not|to)\b', Keyword.Reserved),
(r'(global|local|static)\b', Keyword.Reserved),
(r'link', Keyword.Declaration),
(words((
'break', 'case', 'create', 'default', 'end', 'all',
'do', 'else', 'every', 'fail', 'if', 'initial',
'invocable', 'next',
'repeat', 'return', 'suspend',
'then', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
(words((
'abs', 'acos', 'Active', 'Alert', 'any',
'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
'bal', 'Bg',
'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
'Clip', 'Clone', 'close', 'cofail', 'collect',
'Color', 'ColorValue', 'condvar', 'copy',
'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
'delay', 'delete', 'detab', 'display', 'DrawArc',
'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
'DrawTorus', 'dtor',
'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
'fcntl', 'fdup', 'fetch', 'Fg', 'fieldnames',
'FillArc', 'FillCircle', 'FillPolygon',
'FillRectangle', 'find', 'flock', 'flush', 'Font',
'FreeColor', 'FreeSpace', 'function',
'get', 'getch', 'getche', 'getenv',
'GetSpace', 'gettimeofday',
'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
'kbhit', 'key', 'keyword', 'kill',
'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
'lock', 'log', 'Lower', 'lstat',
'many', 'map', 'match', 'MatrixMode', 'max', 'member',
'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
'MultMatrix', 'mutex',
'name', 'NewColor', 'Normals', 'numeric',
'open', 'opencl', 'oprec', 'ord', 'OutPort',
'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
'PushTranslate', 'put',
'QueryPointer',
'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
'rtod', 'runerr',
'save', 'Scale', 'seek', 'select', 'send', 'seq',
'serial', 'set', 'setenv',
'setuid', 'signal', 'sin', 'sort', 'sortf',
'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
'string', 'structure', 'Swi',
'symlink', 'sys_errstr', 'system', 'syswrite',
'tab', 'table', 'tan',
'Texcoord', 'Texture', 'TextWidth', 'Translate',
'trap', 'trim', 'truncate', 'trylock', 'type',
'umask', 'Uncouple', 'unlock', 'upto', 'utime',
'variable',
'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
'write', 'WriteImage', 'writes', 'WSection',
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
(r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|'
r':=:|:=|<->|<-|\+:=|\|\||\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
(r"[\[\]]", Punctuation),
(r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
(r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
(r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
(r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
],
'subprogram': [
(r'\(', Punctuation, ('#pop', 'formal_part')),
(r';', Punctuation, '#pop'),
(r'"[^"]+"|\w+', Name.Function),
include('root'),
],
'type_def': [
(r'\(', Punctuation, 'formal_part'),
],
'formal_part': [
(r'\)', Punctuation, '#pop'),
(r'\w+', Name.Variable),
(r',', Punctuation),
(r'(:string|:integer|:real)\b', Keyword.Reserved),
include('root'),
],
}
class UcodeLexer(RegexLexer):
"""
Lexer for Icon ucode files.
.. versionadded:: 2.4
"""
name = 'ucode'
aliases = ['ucode']
filenames = ['*.u', '*.u1', '*.u2']
mimetypes = []
flags = re.MULTILINE
tokens = {
'root': [
(r'(#.*\n)', Comment),
(words((
'con', 'declend', 'end',
'global',
'impl', 'invocable',
'lab', 'link', 'local',
'record',
'uid', 'unions',
'version'),
prefix=r'\b', suffix=r'\b'),
Name.Function),
(words((
'colm', 'filen', 'line', 'synt'),
prefix=r'\b', suffix=r'\b'),
Comment),
(words((
'asgn',
'bang', 'bscan',
'cat', 'ccase', 'chfail',
'coact', 'cofail', 'compl',
'coret', 'create', 'cset',
'diff', 'div', 'dup',
'efail', 'einit', 'end', 'eqv', 'eret',
'error', 'escan', 'esusp',
'field',
'goto',
'init', 'int', 'inter',
'invoke',
'keywd',
'lconcat', 'lexeq', 'lexge',
'lexgt', 'lexle', 'lexlt', 'lexne',
'limit', 'llist', 'lsusp',
'mark', 'mark0', 'minus', 'mod', 'mult',
'neg', 'neqv', 'nonnull', 'noop', 'null',
'number', 'numeq', 'numge', 'numgt',
'numle', 'numlt', 'numne',
'pfail', 'plus', 'pnull', 'pop', 'power',
'pret', 'proc', 'psusp', 'push1', 'pushn1',
'random', 'rasgn', 'rcv', 'rcvbk', 'real',
'refresh', 'rswap',
'sdup', 'sect', 'size', 'snd', 'sndbk',
'str', 'subsc', 'swap',
'tabmat', 'tally', 'toby', 'trace',
'unmark',
'value', 'var'), prefix=r'\b', suffix=r'\b'),
Keyword.Declaration),
(words((
'any',
'case',
'endcase', 'endevery', 'endif',
'endifelse', 'endrepeat', 'endsuspend',
'enduntil', 'endwhile', 'every',
'if', 'ifelse',
'repeat',
'suspend',
'until',
'while'),
prefix=r'\b', suffix=r'\b'),
Name.Constant),
(r'\d+(\s*|\.$|$)', Number.Integer),
(r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
(r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
(r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation),
(r'\s+\b', Text),
(r'[\w-]+', Text),
],
}
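# A minimal usage sketch: the lexers above follow the standard Pygments RegexLexer
# contract, so they can be passed to pygments.highlight() like any built-in lexer.
# `highlight` and `TerminalFormatter` are standard Pygments helpers; the sample
# Unicon snippet below is illustrative only.
if __name__ == '__main__':  # pragma: no cover
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = 'procedure main()\n   write("hello, world")\nend\n'
    print(highlight(sample, UniconLexer(), TerminalFormatter()))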
|
[
"[email protected]"
] | |
5acd280678c9ecc4c64d14ff1131b8accbebe375
|
68850abdd5061455db54865817292a3362b1b3c7
|
/tests/test_feature_file_writer.py
|
c9e552763d085b6a9b259a102c33a5c7f2db6506
|
[
"MIT"
] |
permissive
|
finbourne/lusid-features-python
|
ae87964d36e96ffb2e03a9939365908e909dcde8
|
3a9843ddc14e732ca14b88b2aa679131c7511010
|
refs/heads/master
| 2023-06-02T18:17:07.723321 | 2021-06-16T12:58:33 | 2021-06-16T12:58:33 | 295,369,464 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 862 |
py
|
import unittest
from lusidfeature.feature_extractor import extract_all_features_from_package
from lusidfeature.features_temp_file_manager import FeaturesTempFileManager
from lusidfeature.get_project_root import get_project_root
class FeatureFileWriterTests(unittest.TestCase):
def test_if_writer_writes_test_features_correctly(self):
package = "tests.dummyfiles.valid"
feature_list_from_function = "\n".join(extract_all_features_from_package(package, get_project_root()))
feature_list_temp = FeaturesTempFileManager.create_temp_file(feature_list_from_function)
feature_list_from_file = feature_list_temp.read()
FeaturesTempFileManager.delete_temp_file(feature_list_temp)
self.assertGreater(len(feature_list_from_function), 0)
self.assertEqual(feature_list_from_function, feature_list_from_file)
|
[
"[email protected]"
] | |
46b00cb0f15c6822be74df51275fb2d9ba9d9311
|
bf50a0d18521c904aa5f069cac7f16416b6dd93a
|
/DeSpeelhal-dag.py
|
7498a057e6d1b9236f498875bf1e942780067f96
|
[] |
no_license
|
Alexcode100/van-input-naar-output
|
a5fbf792ae0d777f07369b3aaeea1bddb008a407
|
11e61dad4e512f1c2d58b79cd8955e57458082d5
|
refs/heads/main
| 2023-08-08T03:54:12.382141 | 2021-09-27T08:06:55 | 2021-09-27T08:06:55 | 404,743,094 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 187 |
py
|
toeganskaartjes = 7.45*3
VIPVR = (45/5)*0.37
answer = (toeganskaartjes + VIPVR)
print("Dit geweldige dagje-uit met 4 mensen in de Speelhal met 45 minuten VR kost je maar" + str(answer))
|
[
"[email protected]"
] | |
0266fc2e290229ee4fb5b79ceec76bc0a22a0e42
|
facb8b9155a569b09ba66aefc22564a5bf9cd319
|
/wp2/merra_scripts/01_netCDF_extraction/merra902TG/128-tideGauge.py
|
a22e221a52b78d90b03e7e76dfe5eb0acc6f5054
|
[] |
no_license
|
moinabyssinia/modeling-global-storm-surges
|
13e69faa8f45a1244a964c5de4e2a5a6c95b2128
|
6e385b2a5f0867df8ceabd155e17ba876779c1bd
|
refs/heads/master
| 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,075 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 01 10:00:00 2020
MERRAv2 netCDF extraction script - template
To create an extraction script for each tide gauge
@author: Michael Tadesse
"""
import os
import pandas as pd
from d_merra_define_grid import Coordinate, findPixels, findindx
from c_merra_read_netcdf import readnetcdf
from f_merra_subset import subsetter
def extract_data(delta= 3):
"""
This is the master function that calls subsequent functions
to extract uwnd, vwnd, slp for the specified
tide gauges
delta: distance (in degrees) from the tide gauge
"""
print('Delta = {}'.format(delta), '\n')
#defining the folders for predictors
dir_in = "/lustre/fs0/home/mtadesse/MERRAv2/data"
surge_path = "/lustre/fs0/home/mtadesse/obs_surge"
csv_path = "/lustre/fs0/home/mtadesse/merraLocalized"
#cd to the obs_surge dir to get TG information
os.chdir(surge_path)
tg_list = os.listdir()
#cd to the obs_surge dir to get TG information
os.chdir(dir_in)
years = os.listdir()
#################################
#looping through the year folders
#################################
#to mark the first csv
firstCsv = True;
for yr in years:
os.chdir(dir_in)
#print(yr, '\n')
os.chdir(os.path.join(dir_in, yr))
####################################
#looping through the daily .nc files
####################################
for dd in os.listdir():
os.chdir(os.path.join(dir_in, yr)) #back to the predictor folder
print(dd, '\n')
#########################################
#get netcdf components - predictor file
#########################################
nc_file = readnetcdf(dd)
lon, lat, time, predSLP, predU10, predV10 = \
nc_file[0], nc_file[1], nc_file[2], nc_file[3], nc_file[4]\
, nc_file[5]
x = 128
y = 129
#looping through individual tide gauges
for t in range(x, y):
#the name of the tide gauge - for saving purposes
# tg = tg_list[t].split('.mat.mat.csv')[0]
tg = tg_list[t]
#extract lon and lat data from surge csv file
#print(tg, '\n')
os.chdir(surge_path)
if os.stat(tg).st_size == 0:
print('\n', "This tide gauge has no surge data!", '\n')
continue
surge = pd.read_csv(tg, header = None)
#surge_with_date = add_date(surge)
#define tide gauge coordinate(lon, lat)
tg_cord = Coordinate(surge.iloc[0,0], surge.iloc[0,1])
#find closest grid points and their indices
close_grids = findPixels(tg_cord, delta, lon, lat)
ind_grids = findindx(close_grids, lon, lat)
#loop through preds#
#subset predictor on selected grid size
predictors = {'slp':predSLP, 'wnd_u':predU10, \
'wnd_v':predV10}
for xx in predictors.keys():
pred_new = subsetter(dd, predictors[xx], ind_grids, time)
if xx == 'slp':
if firstCsv:
finalSLP = pred_new
else:
finalSLP = pd.concat([finalSLP, pred_new], axis = 0)
print(finalSLP.shape)
elif xx == 'wnd_u':
if firstCsv:
finalUwnd = pred_new
else:
finalUwnd = pd.concat([finalUwnd, pred_new], axis = 0)
elif xx == 'wnd_v':
if firstCsv:
finalVwnd = pred_new
firstCsv = False;
else:
finalVwnd = pd.concat([finalVwnd, pred_new], axis = 0)
#create directories to save pred_new
os.chdir(csv_path)
#tide gauge directory
tg_name_old = tg.split('.mat.mat.csv')[0]
tg_name = '-'.join([str(t), tg_name_old])
try:
os.makedirs(tg_name)
os.chdir(tg_name) #cd to it after creating it
except FileExistsError:
#directory already exists
os.chdir(tg_name)
#save as csv
finalSLP.to_csv('slp.csv')
finalUwnd.to_csv('wnd_u.csv')
finalVwnd.to_csv('wnd_v.csv')
#run script
extract_data(delta= 3)
|
[
"[email protected]"
] | |
0b4c45ac94af4811b5d7aa498bdbb91498487d6e
|
63c5d6d9a3a224a2668ea5cf7ed22024d4352057
|
/Lab1/Task1/calculator.py
|
85a8677a79ea296bc79a26a8c3ea1196b04d5bf4
|
[] |
no_license
|
ianlaiky/SITUniPython1002
|
973ff72a7b0e3eaaa0f6760a19524e5bf0b7d725
|
1245ea6a9213208594bce9ab8aead72212174303
|
refs/heads/master
| 2020-04-16T17:07:17.366569 | 2019-04-05T11:03:58 | 2019-04-05T11:03:58 | 165,763,402 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 305 |
py
|
import sys
error = False
avg = float(0.0)
a = 0.0
b = 0.0
c = 0.0
try:
a = float(sys.argv[1])
b = float(sys.argv[2])
c = float(sys.argv[3])
except ValueError:
error = True
print "Your input is invalid!"
if not error:
avg = (float(a) + b + c) / 3
print "Average:%.2f" % avg
|
[
"[email protected]"
] | |
41a80caf90f70b43ffaf80ca74feb0b2d953f1af
|
c89e74b8ff66f9dbbadd2a683e0b50b7637edd46
|
/src/datasets.py
|
9558c69280ab2e46872718c3d97fb458e1fece1f
|
[] |
no_license
|
mbran142/trump_tweet_generator
|
37ebd390c6c1d4ba8f8df24383e745fc3e207068
|
3beafb8178d937c155d6ea515e80700eefff8511
|
refs/heads/main
| 2023-03-31T13:38:36.199177 | 2021-04-09T22:23:58 | 2021-04-09T22:23:58 | 325,672,467 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,585 |
py
|
import torch
import json
from torch.utils.data import Dataset, Subset
import numpy as np
import os
# dataset class. Used to obtain total dataset at each epoch of training.
class tweet_dataset(Dataset):
def __init__(self, tweet_list):
self.data = tweet_list
def __len__(self):
return len(self.data)
def __getitem__(self, item):
return self.data[item]
# build training and validation datasets
def build_tweet_datasets(tweet_filename, ratio = 0.2):
# load file
with open(tweet_filename) as file:
tweet_data = json.load(file)
# convert to tensor format
tweet_data = [string_to_tensor(tweet) for tweet in tweet_data]
size = len(tweet_data)
indices = np.arange(len(tweet_data))
np.random.shuffle(indices)
val_i = indices[: int(size * ratio)]
train_i = indices[int(size * ratio) :]
val = Subset(tweet_data, val_i)
train = Subset(tweet_data, train_i)
return train, val
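# For a concrete sense of the split (illustrative numbers only): with 1,000 tweets
# in the JSON file and the default ratio of 0.2, `val` receives 200 randomly chosen
# encoded tweets and `train` the remaining 800, both as torch Subset views over the
# same list of tensors.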
# convert string into tensor. Values go from 0-31.
def string_to_tensor(input, pad = True):
# encoding:
# [a-z] = 0-25
# space = 26
# period = 27 ! and ? are replaced with .
# comma = 28
# '#' symbol = 29 all digits are replaced with #
# <start> = 30
# <pad> = 31 tweets are padded to length 290
out_list = [ 30 ]
for char in input:
if char in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ':
out_list.append(ord(char) - ord('A'))
elif char in 'abcdefghijklmnopqrstuvwxyz':
out_list.append(ord(char) - ord('a'))
elif char == ' ':
out_list.append(26)
elif char in '.!?':
out_list.append(27)
elif char == ',':
out_list.append(28)
elif char in '0123456789':
out_list.append(29)
elif char == '&':
out_list += [0, 13, 3] # 'and'
if pad:
padding = [ 31 ] * (290 - len(out_list))
out_list += padding
return torch.tensor(out_list)
# convert tensor to string based on above format
def tensor_to_string(input):
output = ''
for val in input:
if val < 26:
output += chr(val + ord('a'))
elif val == 26:
output += ' '
elif val == 27:
output += '.'
elif val == 28:
output += ','
elif val == 29:
output += '#'
elif val == 31:
break
return output
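# A minimal round-trip sketch of the encoding described above (assumes this module
# is run directly; the sample sentence is purely illustrative):
if __name__ == '__main__':
    encoded = string_to_tensor('Hello, world.', pad=False)
    # <start> token 30, then letters 0-25, ' '->26, '.'->27, ','->28
    print(encoded.tolist())            # [30, 7, 4, 11, 11, 14, 28, 26, 22, 14, 17, 11, 3, 27]
    print(tensor_to_string(encoded))   # 'hello, world.' (case and the <start> token are dropped)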
|
[
"[email protected]"
] | |
9886045608f2213f99a41a0af0b8b79aa8486538
|
69a4db25d9f7d4e67cf2bcfe005e5cba9915180a
|
/examprep.py
|
4eae0ad01d13431e655ff277605755e813e07ef2
|
[] |
no_license
|
riley-csp-2019-20/final-exam-semester-1-taylor77205
|
ca3211a606819eab48d118bb6e5dc08dcf190b9c
|
ee37ca47c1090b8a23a6d3ed01448ed1494d9183
|
refs/heads/master
| 2020-11-25T12:22:28.536638 | 2019-12-19T16:39:33 | 2019-12-19T16:39:33 | 228,657,543 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 827 |
py
|
import turtle as trtl
shape = "arrow"
player = trtl.Turtle(shape = shape)
player.speed(0)
player.penup()
shape = "circle"
circle = trtl.Turtle(shape = shape)
circle.penup()
def up():
player.setheading(90)
player.forward(10)
circle.setheading(90)
circle.forward(10)
def down():
player.setheading(270)
player.forward(10)
circle.setheading(270)
circle.forward(10)
def right():
player.setheading(00)
player.forward(10)
circle.setheading(00)
circle.forward(10)
def left():
player.setheading(180)
player.forward(10)
circle.setheading(180)
circle.forward(10)
wn=trtl.Screen()
wn.onkeypress(up,"Up")
wn.onkeypress(down,"Down")
wn.onkeypress(right,"Right")
wn.onkeypress(left,"Left")
wn.listen()
wn.mainloop()
|
[
"[email protected]"
] | |
42a948dbd46123b3c3dd592eb6d4fe160f02fae2
|
16e7f0588f0ac9f3f9c7e1272c82478ecbb14293
|
/manage.py
|
4f86d73217766ec133f68abda873a839feedd5db
|
[] |
no_license
|
deena09/listOfBooks
|
8ffc90edaa79b156487c7aac7258a7f326094f7b
|
8cc692f83fe796b9290893a0aa6ba5c7b983f758
|
refs/heads/master
| 2020-07-28T23:35:04.756290 | 2014-03-31T08:38:14 | 2014-03-31T08:38:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 254 |
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "listOfBooks.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
[
"[email protected]"
] | |
8dc206ecc05711beff9e20fc9c645ee81ed652dd
|
1abcd4686acf314a044a533d2a541e83da835af7
|
/backjoon_level_python/1701.py
|
4112638d3382c51244a90f87bed9c9d769c2e387
|
[] |
no_license
|
HoYoung1/backjoon-Level
|
166061b2801514b697c9ec9013db883929bec77e
|
f8e49c8d2552f6d62be5fb904c3d6548065c7cb2
|
refs/heads/master
| 2022-05-01T05:17:11.305204 | 2022-04-30T06:01:45 | 2022-04-30T06:01:45 | 145,084,813 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 567 |
py
|
def get_failure_array(text):
failure = [0] * len(text)
j = 0
for i in range(1, len(text)):
while j > 0 and text[i] != text[j]:
j = failure[j - 1]
if text[i] == text[j]:
failure[i] = j + 1
j += 1
return failure
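# Worked example (illustrative): for text = "abcabc" this returns
# [0, 0, 0, 1, 2, 3] -- entry i is the length of the longest proper prefix of
# text[:i+1] that is also its suffix, i.e. the standard KMP failure table.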
def solve(input_text):
max_value = 0
for i in range(len(input_text)-1):
failure = get_failure_array(input_text[i:])
max_value = max(max_value, max(failure))
return max_value
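# Worked example (illustrative): solve("banana") returns 3, since the longest
# substring occurring more than once (overlaps allowed) is "ana"; the maximum is
# taken over the failure tables of every suffix of the input.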
if __name__ == '__main__':
input_text = input()
print(solve(input_text))
|
[
"[email protected]"
] | |
6350e7fa49e2ce61ba60225c83d5827aa6da7a6f
|
7703a8b5ebdcdf38ce296d84899392213c741236
|
/ush/hafs/ww3.py
|
ad9e7da05a45678e0037d4f5b80de4811ef8b363
|
[] |
no_license
|
hafs-community/HAFS
|
b1add5a86b18e00e6a13830dfaa770a3560fd8ab
|
cba6b3649eb7a25bb8be392db1901f47d3287c93
|
refs/heads/develop
| 2023-08-16T21:25:31.668385 | 2023-07-28T20:24:05 | 2023-07-28T20:24:05 | 176,363,600 | 22 | 48 | null | 2023-08-31T22:06:20 | 2019-03-18T20:23:54 |
Fortran
|
UTF-8
|
Python
| false | false | 44,649 |
py
|
#! /usr/bin/env python3
"""This module handles WW3 related scripts for HAFS system."""
__all__ = ['WW3Init', 'WW3Post']
import os, sys, re
import produtil.datastore, produtil.fileop, produtil.cd, produtil.run, produtil.log
import produtil.dbnalert
import tcutil.numerics
import hafs.hafstask, hafs.exceptions
import hafs.namelist, hafs.input
import hafs.launcher, hafs.config
from produtil.datastore import FileProduct, RUNNING, COMPLETED, FAILED, UpstreamFile
from produtil.fileop import make_symlink, deliver_file, wait_for_files
from produtil.dbnalert import DBNAlert
from produtil.cd import NamedDir, TempDir
from produtil.run import mpi, mpirun, run, runstr, checkrun, exe, bigexe, alias
from tcutil.numerics import to_datetime, to_datetime_rel, to_fraction, to_timedelta
from hafs.exceptions import WW3InputError
prodnames={
'mod_def': ( './mod_def.ww3', '{intercom}/ww3/mod_def.ww3' ),
'ww3_mesh': ( './ww3_mesh.nc', '{intercom}/ww3/ww3_mesh.nc' ),
'wind': ( './wind.ww3', '{intercom}/ww3/wind.ww3' ),
'current': ( './current.ww3', '{intercom}/ww3/current.ww3' ),
'restart': ( './restart.ww3', '{intercom}/ww3/restart_init.ww3' ) }
# 'ww3_shel': ( './ww3_shel.inp', '{intercom}/ww3/ww3_shel.inp' ) }
########################################################################
class WW3Init(hafs.hafstask.HAFSTask):
def __init__(self,dstore,conf,section,taskname=None,fcstlen=126,
outstep=21600, pntstep=21600, rststep=21600, **kwargs):
"""Creates a WW3Init
dstore - the produtil.datastore.Datastore to use
conf - the HAFSConfig to use
section - the section name for this task
taskname - the task name. Default: section
fcstlen - the forecast length in hours
outstep - the output step in seconds
pntstep - the pntout step in seconds
rststep - the restart output step in seconds
Other keyword arguments are passed to the superclass constructor."""
super(WW3Init,self).__init__(dstore,conf,section,taskname=taskname,**kwargs)
self._make_products()
self.fcstlen=float(fcstlen)
self.outstep=int(outstep)
self.pntstep=int(pntstep)
self.rststep=int(rststep)
def _make_products(self):
"""Creates FileProduct objects for all output files. The
outdir is the directory to which the WW3 package output its
final files."""
self._products=dict()
atime=tcutil.numerics.to_datetime(self.conf.cycle)
ww3_bdy=self.confstr('ww3_bdy','no')
if ww3_bdy == 'yes':
prodnames['nest']=( './nest.ww3', '{intercom}/ww3/nest.ww3' )
with self.dstore.transaction():
for prodname,filepaths in prodnames.items():
(localpath,compath)=filepaths
prod=produtil.datastore.FileProduct(
self.dstore,prodname,self.taskname)
prod.location=self.timestr(compath,atime,atime)
prod['localpath'] = localpath
self._products[prodname]=( prod,localpath )
def products(self,name=None,**kwargs):
"""Iterate over all products."""
for prodname,stuff in self._products.items():
(prod,localpath)=stuff
if name is None or name==prod.prodname:
yield prod
def inputiter(self):
atime=to_datetime(self.conf.cycle)
etime=to_datetime_rel(3600*self.fcstlen,atime)
interval=to_fraction(self.confint('input_step',6*3600))
dataset=self.confstr('gfs_dataset','gfs')
item=self.confstr('gfs_item','gfs')
epsilon=to_timedelta(interval/10)
ende=to_datetime_rel(epsilon,etime)
when=atime
while when<ende:
yield dict(self.taskvars,dataset=dataset,item=item,ftime=when,atime=atime)
when=to_datetime_rel(interval,when)
ww3_bdy=self.confstr('ww3_bdy','no')
if ww3_bdy == 'yes':
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
dataset=self.confstr('gfswave_dataset','gfswave')
item=self.confstr('ww3bdy_item','ww3bdy_ibp')
when=wtime
yield dict(self.taskvars,dataset=dataset,item=item,ftime=when,atime=when,optional=True)
ww3_rst=self.confstr('ww3_rst','no')
print('ww3_rst=%s'%(ww3_rst))
if ww3_rst == 'yes' or ww3_rst == 'always':
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
dataset=self.confstr('gdaswave_dataset','gdaswave')
item=self.confstr('ww3rst_item','ww3rst_gnh_10m')
when=wtime
yield dict(self.taskvars,dataset=dataset,item=item,ftime=when,atime=when,optional=True)
def gfsgrib2iter(self):
logger=self.log()
atime=to_datetime(self.conf.cycle) # sim start time
etime=to_datetime_rel(self.fcstlen*3600,atime) # sim end time
interval=to_fraction(self.confint('input_step',6*3600))
dataset=self.confstr('gfs_dataset','gfs')
item=self.confstr('gfs_item','gfs')
hd=self.confstr('catalog','hafsdata')
dc=hafs.input.DataCatalog(self.conf,hd,atime)
epsilon=to_timedelta(interval/10)
ende=to_datetime_rel(epsilon,etime)
when=atime
fhour=0
maxwait=self.confint('max_grib_wait',900)
sleeptime=self.confint('grib_sleep_time',20)
min_size=self.confint('min_grib_size',1)
min_mtime_age=self.confint('min_grib_age',30)
while when<ende:
thefile=dc.locate(dataset=dataset,item=item,ftime=when,atime=atime,**self.taskvars)
waited=wait_for_files(
[thefile],logger,maxwait=maxwait,sleeptime=sleeptime,
min_size=min_size,min_mtime_age=min_mtime_age)
if not waited:
msg='FATAL ERROR: %s: did not exist or was too small after %d seconds'%(
thefile,maxwait)
self.log().error(msg)
raise hafs.exceptions.WW3InputError(msg)
sys.exit(2)
yield thefile
fhour=fhour+interval/3600
when=to_datetime_rel(interval,when)
def deliver_products(self):
logger=self.log()
for prodname,stuff in self._products.items():
(prod,localpath)=stuff
prod.deliver(frominfo=localpath,keep=True,logger=logger)
def run(self):
"""Runs the WW3 initialization"""
logger=self.log()
dummycurr=True
usegfswind=self.confstr('usegfswind','yes')
if usegfswind == 'yes':
dummywind=False
elif usegfswind == 'no':
dummywind=True
else:
# Wrong usegfswind value
logger.warning('Wrong usegfswind value: %s. Assume usegfswind=yes.'
'Set dummywind to False.'%(usegfswind,))
usegfswind='yes'
dummywind=False
ww3_bdy=self.confstr('ww3_bdy','no')
ww3_rst=self.confstr('ww3_rst','no')
try:
self.state=RUNNING
redirect=self.confbool('redirect',True)
with NamedDir(self.workdir,keep=not self.scrub,logger=logger,rm_first=True) as d:
# Run ww3_grid
def link(s,t):
make_symlink(s,t,force=True,logger=logger)
deliver_file(self.icstr('{grid_inp}'),'ww3_grid.inp',keep=True,logger=logger)
link(self.icstr('{grid_bot}'),'.')
if ww3_bdy == 'yes':
link(self.icstr('{grid_msk2}'),self.icstr('./ww3_grid_{vit[basin1lc]}.msk'))
else:
link(self.icstr('{grid_msk}'),'.')
link(self.icstr('{grid_obr}'),'.')
link(self.getexe('ww3_grid'),'ww3_grid')
#checkrun(exe(self.getexe('ww3_grid'))>='ww3_grid.log',logger=logger)
cmd=exe('./ww3_grid')
if redirect: cmd = cmd>='ww3_grid.log'
checkrun(cmd,logger=logger)
# Copy over the pre-generated grid mesh file
deliver_file(self.icstr('{grid_mesh}'),'./ww3_mesh.nc')
if usegfswind == 'yes':
# Extract gfs wind from gfs grib2 data
ncfile='gfs.uvgrd10m.nc'
produtil.fileop.remove_file(ncfile,logger=logger)
cmd=alias(bigexe(self.getexe('wgrib2','wgrib2')))
for f in self.gfsgrib2iter():
logger.info('Extracting wind at 10 m from %s'%(f))
subset=''
for line in runstr(cmd[f],logger=logger).splitlines(True):
if re.search(':[UV]GRD:10 m above ground:',line):
subset+=line
runme=cmd[f,'-i', '-append', '-netcdf', ncfile] << subset
checkrun(runme, logger=logger)
if produtil.fileop.isnonempty(ncfile):
dummywind=False
else:
dummywind=True
produtil.log.jlogger.warning(
'ww3init: will use dummy wind because %s is missing '
'or empty.'%(ncfile,))
if dummywind:
# Run ww3_prep for dummy wind
deliver_file(self.icstr('{wind_inp}'),'ww3_prep.inp',keep=True,logger=logger)
link(self.getexe('ww3_prep'),'ww3_prep')
#checkrun(exe(self.getexe('ww3_prep'))>='ww3_prep_wind.log',logger=logger)
cmd=exe('./ww3_prep')
if redirect: cmd = cmd>='ww3_prep_wind.log'
checkrun(cmd,logger=logger)
else:
# Run ww3_prnc for prep gfs wind
deliver_file(self.icstr('{prnc_inp_gfswind}'),'ww3_prnc.inp',keep=True,logger=logger)
link(self.getexe('ww3_prnc'),'ww3_prnc')
cmd=exe('./ww3_prnc')
if redirect: cmd = cmd>='ww3_prnc_wind.log'
checkrun(cmd,logger=logger)
if dummycurr:
# Run ww3_prep for dummy current
deliver_file(self.icstr('{curr_inp}'),'ww3_prep.inp',keep=True,logger=logger)
# Prepare the curdummy.dat
with open('./curdummy.dat','w') as of:
for x in range(6):
of.write('0. 0. 0.\n')
link(self.getexe('ww3_prep'),'ww3_prep')
#checkrun(exe(self.getexe('ww3_prep'))>='ww3_prep_curr.log')
cmd=exe('./ww3_prep')
if redirect: cmd = cmd>='ww3_prep_curr.log'
checkrun(cmd,logger=logger)
else:
# Extract current from global ocean model
logger.error('Not implemented yet')
have_restart=False
if os.environ.get('ww3_force_cold_start','no').lower() == 'yes':
logger.warning('ww3_force_cold_start is yes and will generate restart.ww3.')
else:
oldrst='(unknown)'
oldconffile=self.icstr('{oldcom}/{old_out_prefix}.{RUN}.conf')
if produtil.fileop.isnonempty(oldconffile):
logger.info('%s: prior cycle exists.'%(oldconffile,))
oldconf=hafs.launcher.HAFSLauncher()
oldconf.read(oldconffile)
oldrst=self.icstr('{oldcom}/{old_out_prefix}.{RUN}.ww3.restart.f006')
if not oldconf.getbool('config','run_wave'):
logger.info('restart.ww3: will generate restart.ww3 because prior cycle did not run wave.')
elif not oldconf.getstr('config','wave_model').lower() == 'ww3':
logger.info('restart.ww3: will generate restart.ww3 because prior cycle did not run WW3.')
elif produtil.fileop.isnonempty(oldrst):
produtil.fileop.deliver_file(oldrst,'restart.ww3',logger=logger)
have_restart=True
logger.info('%s: warm start from prior cycle 6-h output restart file.'%(oldrst,))
else:
logger.critical('FATAL ERROR: exiting because piror cycle %s is missing or empty.'%(oldrst,))
logger.critical('FATAL ERROR: if desired, set force_cold_start or ww3_force_cold_start = yes can bypass this failure.')
sys.exit(2)
else:
logger.info('restart.ww3: will generate restart.ww3 because prior cycle does not exist.')
if (not have_restart and ww3_rst == 'yes') or ww3_rst == 'always':
try:
with NamedDir('ww3gint',keep=True,logger=logger) as nameddir:
logger.info('ww3_grid: generating mod_def.ww3 for gnh_10m gridi from gdaswave')
make_symlink('../mod_def.ww3','mod_def.hafs_ww3',force=True,logger=logger)
make_symlink(self.getexe('ww3_grid'),'ww3_grid',force=True,logger=logger)
deliver_file(self.icstr('{grid_gnh_10m_inp}'),'ww3_grid.inp',keep=True,logger=logger)
cmd=exe('./ww3_grid')
if redirect: cmd = cmd>='ww3_grid.log'
checkrun(cmd,logger=logger)
deliver_file('./mod_def.ww3','./mod_def.gnh_10m',keep=False,logger=logger)
logger.info('ww3_gint: generating restart.ww3 by using ww3_gint with restart files from gdaswave')
make_symlink(self.getexe('ww3_gint'),'ww3_gint',force=True,logger=logger)
#Get restart.gnh_10m
self.get_ww3rst_inputs()
#Prepare the namelist
self.make_gint_inp(logger)
#run ww3_gint
cmd=exe('./ww3_gint')
if redirect: cmd = cmd>='ww3_gint.log'
checkrun(cmd,logger=logger)
deliver_file('./restart.hafs_ww3','../restart.ww3',keep=False,logger=logger)
if produtil.fileop.isnonempty('restart.ww3'):
have_restart=True
except Exception as ee:
produtil.log.jlogger.warning(
'restart.ww3: will generate dummy because ww3_gint '
'did not run successfully.',exc_info=True)
if not have_restart:
logger.info('restart.ww3: generating dummy with ww3_strt')
# Run ww3_strt
deliver_file(self.icstr('{strt_inp}'),'ww3_strt.inp',keep=True,logger=logger)
link(self.getexe('ww3_strt'),'ww3_strt')
cmd=exe('./ww3_strt')
if redirect: cmd = cmd>='ww3_strt.log'
checkrun(cmd,logger=logger)
if ww3_bdy == 'yes':
try:
logger.info('ww3_bound: generating ww3 boundary condition')
self.get_ww3bdy_inputs()
# Run ww3_bound
deliver_file(self.icstr('{bound_inp}'),'ww3_bound.inp',keep=True,logger=logger)
link(self.getexe('ww3_bound'),'ww3_bound')
cmd=exe('./ww3_bound')
if redirect: cmd = cmd>='ww3_bound.log'
checkrun(cmd,logger=logger)
except Exception as ee:
self._products.pop('nest',None)
prodnames.pop('nest',None)
produtil.log.jlogger.warning(
'ww3_bound: will run without input boundary condition because ww3_bound '
'did not run successfully.',exc_info=True)
#if redirect: self._copy_log()
## Prepare ww3_shel.inp
#ni=hafs.namelist.NamelistInserter(self.conf,self.section)
#shel_inp=self.icstr('{shel_inp}')
#atime=to_datetime(self.conf.cycle) # sim start time
#etime=to_datetime_rel(self.fcstlen*3600,atime) # sim end time
#flddt=int(self.outstep)
#pntdt=int(self.pntstep)
##flddt=self.conf.getint('forecast_products','ww3_output_step',10800)
##pntdt=self.conf.getint('forecast_products','ww3_pntout_step',10800)
#if pntdt > 0:
# # Point output requested, need to provide buoy information
# buoy_inp=self.icstr('{buoy_inp}')
# with open(buoy_inp,'r') as bf:
# #Read the file content and take out the eof character in the end.
# buoyfile=bf.read()[:-1]
#elif pntdt == 0:
# # Point output no requested, no further info needed
# buoyfile='$'
#else:
# # Wrong pntdt value
# logger.warning('Wrong ww3_pntout_step value: %d. Set ww3_pntout_step = 0'%(pntdt,))
# pntdt=0
# self.pntout=0
# buoyfile='$'
#ci=self.conf.getfloat('config','cycling_interval',6)
#retime=to_datetime_rel(ci*3600*1,atime) # restart end time
#invars=dict()
#invars.update(RUN_BEG=atime.strftime('%Y%m%d %H%M%S'),
# RUN_END=etime.strftime('%Y%m%d %H%M%S'),
# FLD_BEG=atime.strftime('%Y%m%d %H%M%S'),
# FLD_END=etime.strftime('%Y%m%d %H%M%S'),
# FLD_DT=int(flddt),
# PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
# PNT_END=etime.strftime('%Y%m%d %H%M%S'),
# PNT_DT=int(pntdt),
# BUOY_FILE=buoyfile,
# RST_BEG=atime.strftime('%Y%m%d %H%M%S'),
# RST_END=retime.strftime('%Y%m%d %H%M%S'),
# RST_DT=int(self.rststep) )
#with open(shel_inp,'rt') as nf:
# with open('ww3_shel.inp','wt') as of:
# of.write(ni.parse(nf,logger=logger,source=shel_inp,
# raise_all=True,atime=self.conf.cycle,**invars))
self.deliver_products()
self.state=COMPLETED
except Exception as e:
logger.error('Unhandled exception in wave init: %s'
%(str(e),),exc_info=True)
self.state=FAILED
#self._copy_log()
raise
def _copy_log(self):
logger=self.log()
for lf in [ 'ww3_grid.log', 'ww3_prep_wind.log', 'ww3_prep_curr.log',
'ww3_strt.log', 'ww3_untarbdy.log', 'ww3_bound.log' ]:
comloc=self.icstr('{com}/{out_prefix}.{RUN}.{lf}.ww3',lf=lf)
if os.path.exists(lf):
deliver_file(lf,comloc,keep=True,logger=logger)
def get_ww3bdy_inputs(self):
"""!Obtains WW3 input boundary condition data, links or copies to ww3init dir.
WW3 input boundary data comes from previous cycle's gfswave."""
logger=self.log()
redirect=self.confbool('redirect',True)
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
ww3catalog=self.confstr('catalog','hafsdata')
ww3dc=hafs.input.DataCatalog(self.conf,ww3catalog,wtime)
dataset=self.confstr('gfswave_dataset','gfswave')
item=self.confstr('ww3bdy_item','ww3bdy_ibp')
when=wtime
for itry in range(3):
when=to_datetime_rel(-6*3600*itry,wtime)
ww3bdyfile=ww3dc.locate(dataset,item,atime=when,logger=logger)
if not ww3bdyfile:
logger.info('%s: cannot decide data location for this time.'%(
when.strftime('%Y%m%d%H'),))
ok=True
(L,S) = produtil.fileop.lstat_stat(ww3bdyfile)
if S is None:
logger.info('%s: does not exist'%(ww3bdyfile,))
ok=False
            elif S.st_size<10000:
logger.info('%s: too small (should be >=%d bytes)'%(
ww3bdyfile,10000))
ok=False
if not ok: continue
# We get here if the ww3bdyfile exists and is big enough.
ww3bdyspectar='gfs.t'+when.strftime('%H')+'z.ibp_tar'
make_symlink(ww3bdyfile,ww3bdyspectar,force=True,logger=logger)
ww3bdyfbase=self.icstr('./gfswave.HWRF{vit[basin1lc]}*')
#cmd=exe('tar')['-zxvf', ww3bdyspectar, '--wildcards', ww3bdyfbase]
cmd=exe('tar')['-xvf', ww3bdyspectar, '--wildcards', ww3bdyfbase]
if redirect: cmd = cmd>='ww3_untarbdy.log'
checkrun(cmd,logger=logger)
return
def get_ww3rst_inputs(self):
"""!Obtains global gdaswave restart file, links or copies to ww3init dir.
WW3 input restart file comes from current cycle's gdaswave."""
logger=self.log()
atime=to_datetime(self.conf.cycle)
wtime=to_datetime_rel(-6*3600,atime)
ww3catalog=self.confstr('catalog','hafsdata')
ww3dc=hafs.input.DataCatalog(self.conf,ww3catalog,atime)
dataset=self.confstr('gdaswave_dataset','gdaswave')
item=self.confstr('ww3rst_item','ww3rst_gnh_10m')
when=wtime
ww3rstfile=ww3dc.locate(dataset,item,atime=when,logger=logger)
if not ww3rstfile:
logger.info('%s: cannot decide data location for this time.'%(
when.strftime('%Y%m%d%H'),))
ok=True
(L,S) = produtil.fileop.lstat_stat(ww3rstfile)
if S is None:
            logger.info('%s: does not exist'%(ww3rstfile,))
            ok=False
        elif S.st_size<10000:
logger.info('%s: too small (should be >=%d bytes)'%(
ww3rstfile,10000))
ok=False
if not ok:
logger.warning('%s: ww3rst file from gdaswave not ok for this time.'%(
when.strftime('%Y%m%d%H'),))
# We get here if the ww3rstfile exists and is big enough.
make_symlink(ww3rstfile,'restart.gnh_10m',force=True,logger=logger)
return
def make_gint_inp(self,logger):
# Prepare ww3_gint.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
gint_inp=self.confstr('gint_inp','')
if not gint_inp: gint_inp=self.icstr('{PARMww3}/ww3_gint.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(gint_inp,'rt') as nf:
with open('ww3_gint.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=gint_inp,
raise_all=True,atime=self.conf.cycle,**invars))
########################################################################
ww3postprodnames={
'ww3outgrd': ( './out_grd.ww3', '{com}/{out_prefix}.{RUN}.out_grd.ww3' ),
'ww3grb2': ( './gribfile', '{com}/{out_prefix}.{RUN}.ww3.grb2' ),
'ww3grb2idx': ( './gribfile.idx', '{com}/{out_prefix}.{RUN}.ww3.grb2.idx' ),
'ww3ounf': ( './ww3.%Y.nc', '{com}/{out_prefix}.{RUN}.ww3_ounf.nc' ),
'ww3outpnt': ( './out_pnt.ww3', '{com}/{out_prefix}.{RUN}.out_pnt.ww3' ),
'ww3ounpspec': ( './ww3.%Y_spec.nc', '{com}/{out_prefix}.{RUN}.ww3_ounp_spec.nc' ),
'ww3outpbull': ( './ww3_bull.tar', '{com}/{out_prefix}.{RUN}.ww3_bull.tar' ),
'ww3outpcbull': ( './ww3_cbull.tar', '{com}/{out_prefix}.{RUN}.ww3_cbull.tar' ),
'ww3outpcsbull': ( './ww3_csbull.tar', '{com}/{out_prefix}.{RUN}.ww3_csbull.tar' ),
'ww3outpspec': ( './ww3_spec.tar', '{com}/{out_prefix}.{RUN}.ww3_spec.tar' ) }
class WW3Post(hafs.hafstask.HAFSTask):
"""Run WW3 post-process."""
def __init__(self,dstore,conf,section,fcstlen=126,outstep=10800,pntstep=10800,**kwargs):
super(WW3Post,self).__init__(dstore,conf,section,**kwargs)
self.fcstlen=float(fcstlen)
self.outstep=int(outstep)
self.pntstep=int(pntstep)
self._make_products()
self._ncks_path=False
def _make_products(self):
"""Creates FileProduct objects for all WW3Post output files. The
outdir is the directory to which the WW3Post package output its
final files."""
self._products=dict()
atime=tcutil.numerics.to_datetime(self.conf.cycle)
with self.dstore.transaction():
for prodname,filepaths in ww3postprodnames.items():
(localpath,compath)=filepaths
localpath=self.conf.cycle.strftime(localpath)
prod=produtil.datastore.FileProduct(
self.dstore,prodname,self.taskname)
prod.location=self.timestr(compath,atime,atime)
prod['localpath'] = localpath
self._products[prodname]=( prod,localpath )
def products(self,name=None):
"""Iterate over all products."""
for prodname,stuff in self._products.items():
(prod,localpath)=stuff
if name is None or name==prod.prodname:
yield prod
def __copy_ncks(self,source,target,ignore):
ncks=self.ncks_path
logger=self.log()
produtil.fileop.remove_file(target,logger=logger)
checkrun(bigexe(ncks)['-4','-L','6',source,target]<'/dev/null',
logger=logger)
@property
def ncks_path(self):
"""Returns the path to ncks. Returns None if ncks cannot be
found. This function will only search for ncks once, and will
cache the result. Set self._ncks_path=False to force a
recheck."""
if self._ncks_path is False:
ncks=self.getexe('ncks','')
            if not ncks:
ncks=produtil.fileop.find_exe('ncks',raise_missing=False)
assert(ncks is None or
(isinstance(ncks,str) and ncks!=''))
self._ncks_path=ncks
return self._ncks_path
def run(self):
"""Run the WW3 post."""
logger=self.log()
redirect=self.confbool('redirect',True)
self.state=RUNNING
# The line below makes a DBNAlert object, which can be reused for the later alerts.
alerter=produtil.dbnalert.DBNAlert(['MODEL','{type}','{job}','{location}'])
modelrun=self.icstr('{RUN}').upper()
try:
with NamedDir(self.workdir,keep=True,logger=logger,rm_first=True) as d:
# Prepare mod_def.ww3
ww3moddef=self.icstr('{intercom}/ww3/mod_def.ww3')
if not os.path.exists(ww3moddef):
logger.error('%s: mod_def.ww3 not yet available from forecast'%(
ww3moddef,))
deliver_file(ww3moddef,'mod_def.ww3',force=True,logger=logger)
# Prepare and deliver out_grd.ww3
if self.outstep>0:
ww3out=self.icstr('{WORKhafs}/forecast/out_grd.ww3')
if not os.path.exists(ww3out):
logger.error('%s: out_grd.ww3 not yet available from forecast'%(
ww3out,))
deliver_file(ww3out,'out_grd.ww3',force=True,logger=logger)
(prod,localpath)=self._products['ww3outgrd']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
# Prepare and deliver out_pnt.ww3
if self.pntstep>0:
ww3pnt=self.icstr('{WORKhafs}/forecast/out_pnt.ww3')
if not os.path.exists(ww3pnt):
logger.error('%s: out_pnt.ww3 not yet available from forecast'%(
ww3pnt,))
deliver_file(ww3pnt,'out_pnt.ww3',force=True,logger=logger)
(prod,localpath)=self._products['ww3outpnt']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
# For field output in grib2 format
ww3_grib_post=self.confstr('ww3_grib_post','yes',section='ww3post')
if ww3_grib_post == 'yes' and self.outstep>0:
make_symlink(self.getexe('ww3_grib'),'ww3_grib',force=True,logger=logger)
# Prepare the namelist
self.make_grib_inp(logger)
cmd=exe('./ww3_grib')
if redirect: cmd = cmd>='ww3_grib.log'
checkrun(cmd,logger=logger)
indexfile='gribfile.idx'
wgrib2=self.getexe('wgrib2')
logger.info('ww3post: Generating grib idx file for gribfile')
checkrun(bigexe(wgrib2)['-s','gribfile'] > indexfile,logger=logger)
os.system('chmod -x '+indexfile) # Remove the -x permission
(prod,localpath)=self._products['ww3grb2']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3GB2')
(prod,localpath)=self._products['ww3grb2idx']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3GB2_WIDX')
# For point output ww3_outp
ww3_outp_bull_post=self.confstr('ww3_outp_bull_post','yes',section='ww3post')
ww3_outp_spec_post=self.confstr('ww3_outp_spec_post','yes',section='ww3post')
if self.pntstep>0:
make_symlink(self.getexe('ww3_outp'),'ww3_outp',force=True,logger=logger)
# Need to get information about the total number of buoys and their IDs
self.make_outp_info_inp(logger)
cmd=exe('./ww3_outp')
cmd = cmd>='ww3_outp_info.log'
checkrun(cmd,logger=logger)
fname='ww3_outp_info.log'
with open(fname) as f:
ww3_outp_info = f.readlines()
indices = [i for i, elem in enumerate(ww3_outp_info) if '----------' in elem]
buoys=ww3_outp_info[indices[0]+1:indices[1]-2]
# For point bullitin output
if ww3_outp_bull_post == 'yes':
filebull=[]
filecbull=[]
filecsbull=[]
filelog=[]
commands=list()
for i, buoy in enumerate(buoys):
ipnt=i+1
buoyid=buoy.split()[0]
buoylon=buoy.split()[1]
buoylat=buoy.split()[2]
logger.info('ww3_outp_bull for buoy: %i, %s, %s, %s'%(ipnt,buoyid,buoylon,buoylat))
with NamedDir('ww3outpbull.%s'%(buoyid,),keep=True,logger=logger) as nameddir:
self.make_outp_bull_inp(ipnt,logger)
make_symlink('../mod_def.ww3','mod_def.ww3',force=True,logger=logger)
make_symlink('../out_pnt.ww3','out_pnt.ww3',force=True,logger=logger)
make_symlink(self.getexe('ww3_outp'),'ww3_outp',force=True,logger=logger)
buoybull=buoyid+'.bull'
buoycbull=buoyid+'.cbull'
buoycsv=buoyid+'.csv'
buoycsbull=buoyid+'.csbull'
buoylog='ww3_outp_bull_'+buoyid+'.log'
filebull.append(buoybull)
filecbull.append(buoycbull)
filecsbull.append(buoycsbull)
filelog.append(buoylog)
cmd=('cd '+nameddir.dirname+' && '+
'./ww3_outp > ../'+buoylog+' && '+
'mv '+buoybull+' ../ && '+
'mv '+buoycbull+' ../ && '+
'mv '+buoycsv+' ../'+buoycsbull+' && '+
'cd ../')
commands.append(cmd)
cmdfname='command.file.ww3outpbull'
with open(cmdfname,'wt') as cfpf:
cfpf.write('\n'.join(commands))
threads=os.environ['TOTAL_TASKS']
logger.info('ww3_outp_bull total threads: %s ',threads)
mpiserial_path=os.environ.get('MPISERIAL','*MISSING*')
if mpiserial_path=='*MISSING*':
mpiserial_path=self.getexe('mpiserial')
cmd2=mpirun(mpi(mpiserial_path)['-m',cmdfname],allranks=True)
checkrun(cmd2)
# Tar the outputs and diliver to com dir
cmd=exe('tar')['-cvf', 'ww3_bull.tar'][filebull]
checkrun(cmd,logger=logger)
cmd=exe('tar')['-cvf', 'ww3_cbull.tar'][filecbull]
checkrun(cmd,logger=logger)
cmd=exe('tar')['-cvf', 'ww3_csbull.tar'][filecsbull]
checkrun(cmd,logger=logger)
cmd=exe('cat')[filelog] >> 'ww3_outp_bull.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3outpbull']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
(prod,localpath)=self._products['ww3outpcbull']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
(prod,localpath)=self._products['ww3outpcsbull']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
# For point spec output
if ww3_outp_spec_post == 'yes':
fileout=[]
filelog=[]
commands=list()
ww3tstr=self.conf.cycle.strftime('%y%m%d%H')
for i, buoy in enumerate(buoys):
ipnt=i+1
buoyid=buoy.split()[0]
buoylon=buoy.split()[1]
buoylat=buoy.split()[2]
logger.info('ww3_outp_spec for buoy: %i, %s, %s, %s'%(ipnt,buoyid,buoylon,buoylat))
with NamedDir('ww3outpspec.%s'%(buoyid,),keep=True,logger=logger) as nameddir:
self.make_outp_spec_inp(ipnt,logger)
make_symlink('../mod_def.ww3','mod_def.ww3',force=True,logger=logger)
make_symlink('../out_pnt.ww3','out_pnt.ww3',force=True,logger=logger)
make_symlink(self.getexe('ww3_outp'),'ww3_outp',force=True,logger=logger)
buoyspc='ww3.'+ww3tstr+'.spc'
buoyout=buoyid+'.spc'
buoylog='ww3_outp_spec_'+buoyid+'.log'
fileout.append(buoyout)
filelog.append(buoylog)
cmd=('cd '+nameddir.dirname+' && '+
'./ww3_outp > ../'+buoylog+' && '+
'mv '+buoyspc+' ../'+buoyout+' && '+
'cd ../')
commands.append(cmd)
cmdfname='command.file.ww3outpspec'
with open(cmdfname,'wt') as cfpf:
cfpf.write('\n'.join(commands))
threads=os.environ['TOTAL_TASKS']
logger.info('ww3_outp_spec total threads: %s ',threads)
mpiserial_path=os.environ.get('MPISERIAL','*MISSING*')
if mpiserial_path=='*MISSING*':
mpiserial_path=self.getexe('mpiserial')
cmd2=mpirun(mpi(mpiserial_path)['-m',cmdfname],allranks=True)
checkrun(cmd2)
# Tar the outputs and deliver to com dir
cmd=exe('tar')['-cvf', 'ww3_spec.tar'][fileout]
checkrun(cmd,logger=logger)
cmd=exe('cat')[filelog] >> 'ww3_outp_spec.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3outpspec']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=None)
alerter(location=prod.location, type=modelrun+'_WW3TAR')
# Additional ww3post products
# For field output in netcdf format
ww3_ounf_post=self.confstr('ww3_ounf_post','yes',section='ww3post')
if ww3_ounf_post == 'yes' and self.outstep>0:
make_symlink(self.getexe('ww3_ounf'),'ww3_ounf',force=True,logger=logger)
# Prepare the namelist
self.make_ounf_inp(logger)
# Run ww3_ounf
cmd=exe('./ww3_ounf')
if redirect: cmd = cmd>='ww3_ounf.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3ounf']
logger.info('Delivering ww3ounf from %s to %s'%(localpath,prod.location))
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=self.__copy_ncks)
# For point spec output in netcdf format
ww3_ounp_spec_post=self.confstr('ww3_ounp_spec_post','yes',section='ww3post')
if ww3_ounp_spec_post == 'yes' and self.pntstep>0:
make_symlink(self.getexe('ww3_ounp'),'ww3_ounp',force=True,logger=logger)
# Prepare the namelist
self.make_ounp_spec_inp(logger)
# Run ww3_ounp
cmd=exe('./ww3_ounp')
if redirect: cmd = cmd>='ww3_ounp.log'
checkrun(cmd,logger=logger)
(prod,localpath)=self._products['ww3ounpspec']
prod.deliver(frominfo=localpath,location=prod.location,logger=logger,copier=self.__copy_ncks)
self.state=COMPLETED
except Exception as e:
self.state=FAILED
logger.error("WW3 post failed: %s"%(str(e),),exc_info=True)
raise
def make_grib_inp(self,logger):
# Prepare ww3_grib.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
grib_inp=self.confstr('grib_inp','')
if not grib_inp: grib_inp=self.icstr('{PARMww3}/ww3_grib.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(FLD_BEG=atime.strftime('%Y%m%d %H%M%S'),
FLD_DT=int(self.outstep),
RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(grib_inp,'rt') as nf:
with open('ww3_grib.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=grib_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_ounf_inp(self,logger):
# Prepare ww3_ounf.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
ounf_inp=self.confstr('ounf_inp','')
if not ounf_inp: ounf_inp=self.icstr('{PARMww3}/ww3_ounf.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(FLD_BEG=atime.strftime('%Y%m%d %H%M%S'),
FLD_DT=int(self.outstep))
with open(ounf_inp,'rt') as nf:
with open('ww3_ounf.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=ounf_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_ounp_spec_inp(self,logger):
# Prepare ww3_ounp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
ounp_spec_inp=self.confstr('ounp_spec_inp','')
if not ounp_spec_inp: ounp_spec_inp=self.icstr('{PARMww3}/ww3_ounp_spec.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep))
with open(ounp_spec_inp,'rt') as nf:
with open('ww3_ounp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=ounp_spec_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_outp_info_inp(self,logger):
# Prepare ww3_outp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
outp_info_inp=self.confstr('outp_info_inp','')
if not outp_info_inp: outp_info_inp=self.icstr('{PARMww3}/ww3_outp_info.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep))
with open(outp_info_inp,'rt') as nf:
with open('ww3_outp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=outp_info_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_outp_bull_inp(self,ipnt,logger):
# Prepare ww3_outp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
outp_bull_inp=self.confstr('outp_bull_inp','')
if not outp_bull_inp: outp_bull_inp=self.icstr('{PARMww3}/ww3_outp_bull.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep),
PNT_NUM=int(ipnt),
RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(outp_bull_inp,'rt') as nf:
with open('ww3_outp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=outp_bull_inp,
raise_all=True,atime=self.conf.cycle,**invars))
def make_outp_spec_inp(self,ipnt,logger):
# Prepare ww3_outp.inp
ni=hafs.namelist.NamelistInserter(self.conf,self.section)
outp_spec_inp=self.confstr('outp_spec_inp','')
if not outp_spec_inp: outp_spec_inp=self.icstr('{PARMww3}/ww3_outp_spec.inp_tmpl')
atime=to_datetime(self.conf.cycle) # sim start time
invars=dict()
invars.update(PNT_BEG=atime.strftime('%Y%m%d %H%M%S'),
PNT_DT=int(self.pntstep),
PNT_NUM=int(ipnt),
RUN_BEG=atime.strftime('%Y%m%d %H%M%S'))
with open(outp_spec_inp,'rt') as nf:
with open('ww3_outp.inp','wt') as of:
of.write(ni.parse(nf,logger=logger,source=outp_spec_inp,
raise_all=True,atime=self.conf.cycle,**invars))
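    # Illustrative sketch (not part of the original module): each make_*_inp
    # method above repeats the same template-fill pattern, so it could be
    # factored into a single helper along these lines.  The method name and
    # argument list here are hypothetical; only calls already used above
    # (NamelistInserter, the template paths, ni.parse) are relied upon.
    def _fill_inp_sketch(self,template,outname,invars,logger):
        ni=hafs.namelist.NamelistInserter(self.conf,self.section)
        with open(template,'rt') as nf:
            with open(outname,'wt') as of:
                of.write(ni.parse(nf,logger=logger,source=template,
                                  raise_all=True,atime=self.conf.cycle,**invars))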
|
[
"[email protected]"
] | |
2ddcbebaa4ee0eaa393fdb136aa5b664b5bac984
|
d52e51f473bf4eb62956cebdccd956ac4df6cccd
|
/Python_Part6/converters.py
|
7cb6f915dab1bdbd54aa7b7205259a6f1e795f81
|
[] |
no_license
|
Tduderocks/PracticeProblems
|
74ccafd50e6900a6ae138bb6c4d758735ee452e1
|
eca1be589df9e513e4e0a6121245f4e8c9a3c214
|
refs/heads/master
| 2022-11-18T06:59:17.323121 | 2020-07-18T02:39:09 | 2020-07-18T02:39:09 | 265,915,337 | 0 | 1 | null | 2020-07-18T02:39:10 | 2020-05-21T17:51:11 |
C++
|
UTF-8
|
Python
| false | false | 227 |
py
|
from streams import Processor
class Uppercase(Processor):
def converter(self, data):
return data.upper()
if __name__=="__main__":
import sys
obj = Uppercase(open('trispam.txt'), sys.stdout)
obj.process()
|
[
"[email protected]"
] | |
38837fa30d68450ab5537a582f3c8dded81d532b
|
f4017c78dbf2c134eb32ebd6c9e787c7fa0bc2d5
|
/07_django/base_prj/sns/urls.py
|
531b75db595ae02d451805a5d20830b51f4aaf86
|
[] |
no_license
|
raccooncho/TIL-1
|
394a156935dbc7528ee4cf990db6a61f6a377cdb
|
f757178136bfafbc704b15d2891cec0302b33b2f
|
refs/heads/master
| 2020-04-22T19:40:14.393893 | 2019-02-11T09:00:11 | 2019-02-11T09:00:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 532 |
py
|
from django.urls import path
from . import views
app_name = 'sns'
urlpatterns = [
path('', views.posting_list, name='posting_list'),
path('<int:posting_id>/', views.posting_detail, name='posting_detail'),
path('new/', views.create_posting, name='create_posting'),
path('<int:posting_id>/edit/', views.edit_posting, name='edit_posting'),
path('<int:posting_id>/delete/', views.delete_posting, name='delete_posting'),
path('<int:posting_id>/comments/create/', views.create_comment, name='create_comment'),
]
|
[
"[email protected]"
] | |
b7dc54b2539acc9351aa7b5e664d819614a5d304
|
727f1bc2205c88577b419cf0036c029b8c6f7766
|
/out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/debug/lib/profiling.py
|
1d60d22ff7008d67f2fdc68c9e659f0c0474b8a5
|
[
"Apache-2.0"
] |
permissive
|
rasalt/fhir
|
55cf78feed3596a3101b86f9e9bbf6652c6ed4ad
|
d49883cc4d4986e11ca66058d5a327691e6e048a
|
refs/heads/master
| 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 |
Apache-2.0
| 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null |
UTF-8
|
Python
| false | false | 181 |
py
|
/home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/debug/lib/profiling.py
|
[
"[email protected]"
] | |
377bae42543144719de068e821985cbb2ae30779
|
bfc0f551b5a58a7ca84cb0e69290a39f3b944d10
|
/as/as/urls.py
|
88bdc54f6daeabc279d79b9a5daeee84081fba0d
|
[] |
no_license
|
saraswati789/skk
|
07f8fc01c5dd6946205b4d4f4b4fe4fa23819756
|
8fb9c9651e1a987fc88e4e949bf7989bfd0f8161
|
refs/heads/master
| 2021-04-15T15:36:26.283994 | 2018-03-28T16:56:28 | 2018-03-28T16:56:28 | 126,667,779 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 813 |
py
|
"""as URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('admin/', admin.site.urls),
path('polls/',include('polls.urls')),
]
|
[
"[email protected]"
] | |
58c2eabab1455d6a5cabb4a86e37fce6b704531b
|
b1c7e19d412ee049a60da0fe3fdf3da19ea4afa9
|
/gipsyClasses/Ui_regrid.py
|
82c7a1651475b20ad8639fa18ba604a00eeb6104
|
[] |
no_license
|
susanasanchez/GUIPSY
|
01af8f285fe86c07a3cb5f3d440f1ed34d0fb67a
|
710abf18f312381a4903ec0d6ab9c0bb601a0050
|
refs/heads/master
| 2016-09-05T21:21:24.003911 | 2015-03-04T13:29:11 | 2015-03-04T13:29:11 | 9,743,813 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,636 |
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'regrid.ui'
#
# Created: Wed Jun 1 13:15:38 2011
# by: PyQt4 UI code generator 4.7.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_regrid(object):
def setupUi(self, regrid):
regrid.setObjectName("regrid")
regrid.resize(400, 188)
regrid.setFrameShape(QtGui.QFrame.StyledPanel)
regrid.setFrameShadow(QtGui.QFrame.Raised)
self.gridLayout = QtGui.QGridLayout(regrid)
self.gridLayout.setObjectName("gridLayout")
self.label = QtGui.QLabel(regrid)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.axnameBox = QtGui.QComboBox(regrid)
self.axnameBox.setObjectName("axnameBox")
self.gridLayout.addWidget(self.axnameBox, 0, 1, 1, 1)
spacerItem = QtGui.QSpacerItem(152, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)
self.label_2 = QtGui.QLabel(regrid)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.cdeltLine = QtGui.QLineEdit(regrid)
self.cdeltLine.setObjectName("cdeltLine")
self.gridLayout.addWidget(self.cdeltLine, 1, 1, 1, 1)
self.unitLabel = QtGui.QLabel(regrid)
self.unitLabel.setEnabled(False)
self.unitLabel.setObjectName("unitLabel")
self.gridLayout.addWidget(self.unitLabel, 1, 2, 1, 1)
self.label_3 = QtGui.QLabel(regrid)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 2, 0, 1, 1)
self.ipolBox = QtGui.QComboBox(regrid)
self.ipolBox.setObjectName("ipolBox")
self.ipolBox.addItem("")
self.ipolBox.addItem("")
self.ipolBox.addItem("")
self.ipolBox.addItem("")
self.gridLayout.addWidget(self.ipolBox, 2, 1, 1, 1)
self.label_4 = QtGui.QLabel(regrid)
self.label_4.setObjectName("label_4")
self.gridLayout.addWidget(self.label_4, 3, 0, 1, 1)
self.widthLine = QtGui.QLineEdit(regrid)
self.widthLine.setObjectName("widthLine")
self.gridLayout.addWidget(self.widthLine, 3, 1, 1, 1)
self.retranslateUi(regrid)
QtCore.QMetaObject.connectSlotsByName(regrid)
def retranslateUi(self, regrid):
regrid.setWindowTitle(QtGui.QApplication.translate("regrid", "Frame", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("regrid", "AXNAME", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("regrid", "CDELT", None, QtGui.QApplication.UnicodeUTF8))
self.unitLabel.setText(QtGui.QApplication.translate("regrid", "UNIT", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("regrid", "IPOL", None, QtGui.QApplication.UnicodeUTF8))
self.ipolBox.setItemText(0, QtGui.QApplication.translate("regrid", "Linear", None, QtGui.QApplication.UnicodeUTF8))
self.ipolBox.setItemText(1, QtGui.QApplication.translate("regrid", "Sinc", None, QtGui.QApplication.UnicodeUTF8))
self.ipolBox.setItemText(2, QtGui.QApplication.translate("regrid", "Spline", None, QtGui.QApplication.UnicodeUTF8))
self.ipolBox.setItemText(3, QtGui.QApplication.translate("regrid", "Average", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("regrid", "WIDTH", None, QtGui.QApplication.UnicodeUTF8))
|
[
"[email protected]"
] | |
7f559d2862ef1e3f93bcde50464d07a9767ac80e
|
3d88748960deb31c674525df2bd9d79ba1d2db1a
|
/pythonlib/bin/pyfftr
|
4127562195c91f40b757688d14d9e521c09d2ba6
|
[
"BSD-2-Clause"
] |
permissive
|
johnkerl/scripts-math
|
1a0eb6ce86fd09d593c82540638252af5036c535
|
cb29e52fec10dd00b33c3a697dec0267a87ab8bb
|
refs/heads/main
| 2022-01-31T17:46:05.002494 | 2022-01-17T20:40:31 | 2022-01-17T20:40:31 | 13,338,494 | 5 | 3 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,546 |
#!/usr/bin/python -Wall
# ----------------------------------------------------------------
# John Kerl
# [email protected]
# 2006-03-20
# (Ported to Python 2006-03-06)
#
# This is a radix-2 fast Fourier transform. Example:
#
# Real-to-complex transform: the real input is packed into a half-length
# complex sequence, transformed, and the spectrum is recombined afterwards
# (cf. the Numerical Recipes treatment of real FFTs).
# ----------------------------------------------------------------
from __future__ import division # 1/2 = 0.5, not 0.
import sys
import pyfft_m
import pyrcio_m
from math import *
# ----------------------------------------------------------------
def usage():
print >> sys.stderr, "Usage:", sys.argv[0], "[options] [file name]"
print >> sys.stderr, "If the file name is omitted, input is taken from standard input."
print >> sys.stderr, "Format is in decimal real, one sample per line. E.g."
print >> sys.stderr, " 1.0"
print >> sys.stderr, " 2.0"
print >> sys.stderr, " 3.0"
print >> sys.stderr, " 4.0"
print >> sys.stderr, "Options:"
print >> sys.stderr, " -fi: input folding"
print >> sys.stderr, " -nfi: no input folding"
print >> sys.stderr, " -fo: output folding"
print >> sys.stderr, " -nfo: no output folding"
print >> sys.stderr, " -fwd: forward FFT (exp(-i 2 pi k/N) kernel)"
print >> sys.stderr, " -rev: reverse FFT (exp( i 2 pi k/N) kernel)"
print >> sys.stderr, " -s: scaling"
print >> sys.stderr, " -ns: no scaling"
print >> sys.stderr, " -dft: Use DFT. Allows N not to be a power of 2."
sys.exit(1)
# ================================================================
# Start of program
fold_in = 0
fold_out = 0
forward = 1
scale = 1
use_dft = 0
file_name = "-"
argc = len(sys.argv)
argi = 1
while (argi < argc):
arg = sys.argv[argi]
if (arg[0] != '-'):
break
if (arg == "-fi"):
fold_in = 1
elif (arg == "-nfi"):
fold_in = 0
elif (arg == "-fo"):
fold_out = 1
elif (arg == "-nfo"):
fold_out = 0
elif (arg == "-fwd"):
forward = 1
elif (arg == "-rev"):
forward = 0
elif (arg == "-s"):
scale = 1
elif (arg == "-ns"):
scale = 0
elif (arg == "-dft"):
use_dft = 1
elif (arg == "-ndft"):
use_dft = 0
else:
usage()
argi += 1
if ((argc - argi) == 1):
file_name = sys.argv[argi]
elif ((argc - argi) == 0):
file_name = "-"
else:
usage()
# real input f_j: j = 0 .. N-1
# split: fe_j, fo_j: j = 0 .. N/2-1
# h_j = fe_j + i fo_j: j = 0 .. N/2-1
# By linearity: H_k = Fe_k + i Fo_k: k = 0 .. N/2-1
# Fe_k = sum_{j=0}^{N/2-1} f_{2j} w_N^2 [note w_N^2 = w_{N/2}]
# Fo_k = sum_{j=0}^{N/2-1} f_{2j+1} w_N^2
# F_k = Fe_k + w_N^k Fo_k
# F_k = 1/2(H_k + H_{N/2-k}^*) - i/2(H_k - H_{N/2-k}^*) w_N^k
# Save only 1st half of F_k: k = 0 .. N/2-1
# Need H_{N/2}: but = H_0. (Why?)
# -- Inverse --
# Fe_k = 1/2(F_k + F_{N/2-k}^*)
# "peel" F_{N/2} "from" F_0
# Fo_k = 1/2 w_N^{-k}(F_k - F_{N/2-k}^*)
# H_k = Fe_k + i Fo_k
f = pyrcio_m.read_real_vector(file_name)
print "f:"
pyrcio_m.print_real_vector(f)
print
N = len(f)
N2 = int(N/2)
print "N =", N
print "N2 =", N2
h = []
for j in range(0, N2):
h.append(f[2*j] + 1j*f[2*j+1])
print "h:"
pyrcio_m.print_complex_vector(h)
print
if (use_dft):
H = pyfft_m.dft(h, fold_in, fold_out, forward, scale)
else:
H = pyfft_m.fft(h, fold_in, fold_out, forward, scale)
H.append(H[0]) # Append H[N/2]
print "H:"
pyrcio_m.print_complex_vector(H)
print
w_N = complex(cos(2*pi/N), sin(2*pi/N))
F = []
for k in range(0, N2+1):
Hnegkbar = H[N2-k].conjugate()
Fek = 0.5*(H[k] + Hnegkbar)
Fok = 0.5*(H[k] - Hnegkbar) * (-1j)
F.append(Fek + Fok * w_N**k)
print "F:"
pyrcio_m.print_complex_vector(F)
print
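# Optional diagnostic (illustrative sketch, not part of the original script):
# when run with no folding and no scaling (-nfi -nfo -ns -fwd), F[0..N/2]
# should match the first N/2+1 bins of a direct N-point DFT of the real input,
# assuming consistent kernel conventions inside pyfft_m.  This only prints the
# largest deviation and does not alter the output above.
if forward and not (fold_in or fold_out or scale):
    full = pyfft_m.dft([complex(x) for x in f], fold_in, fold_out, forward, scale)
    maxdiff = max([abs(F[k] - full[k]) for k in range(0, N2+1)])
    print "max |F[k] - direct DFT[k]| =", maxdiff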
|
[
"[email protected]"
] | ||
ffef5fe55cc0e74428eb17f91e5360d2181dec9f
|
6016473a1b3ca2398d7418fa39d04f3c86903b90
|
/color_distillation/utils/draw_curve.py
|
fe38e220631b6d196db630d87fd072a3136d911a
|
[] |
no_license
|
isaak4/color_distillation_inference_example
|
ac9e23dcb3f95f85773c5c803447d029f5da227d
|
829b03ce5848186f6a060106466c65b30eadb6d9
|
refs/heads/main
| 2023-06-15T10:45:06.857371 | 2021-07-02T11:32:02 | 2021-07-02T11:32:02 | 382,314,037 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,223 |
py
|
import matplotlib.pyplot as plt
def draw_curve(path, x_epoch, train_loss, train_prec, og_test_loss, og_test_prec,
masked_test_loss=None, masked_test_prec=None, prec_labels=['train', 'test (og)', 'test (masked)'],
loss_labels=None):
fig = plt.figure()
ax0 = fig.add_subplot(121, title="loss")
ax1 = fig.add_subplot(122, title="prec")
if loss_labels is None:
loss_labels = prec_labels
ax0.plot(x_epoch, train_loss, 'bo-', label=loss_labels[0] + ': {:.3f}'.format(train_loss[-1]))
ax1.plot(x_epoch, train_prec, 'bo-', label=prec_labels[0] + ': {:.3f}'.format(train_prec[-1]))
ax0.plot(x_epoch, og_test_loss, 'ro-', label=loss_labels[1] + ': {:.3f}'.format(og_test_loss[-1]))
ax1.plot(x_epoch, og_test_prec, 'ro-', label=prec_labels[1] + ': {:.3f}'.format(og_test_prec[-1]))
if masked_test_loss is not None:
ax0.plot(x_epoch, masked_test_loss, 'go-', label=loss_labels[2] + ': {:.3f}'.format(masked_test_loss[-1]))
if masked_test_prec is not None:
ax1.plot(x_epoch, masked_test_prec, 'go-', label=prec_labels[2] + ': {:.3f}'.format(masked_test_prec[-1]))
ax0.legend()
ax1.legend()
fig.savefig(path)
plt.close(fig)
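# Illustrative usage sketch (not part of the original module): the epoch
# indices, losses, and precisions below are made-up numbers, only meant to show
# the expected argument shapes; the figure is written to "curve_example.png",
# assuming a working matplotlib backend.
if __name__ == "__main__":
    epochs = [1, 2, 3]
    draw_curve("curve_example.png", epochs,
               train_loss=[1.0, 0.7, 0.5], train_prec=[0.50, 0.60, 0.70],
               og_test_loss=[1.1, 0.8, 0.6], og_test_prec=[0.45, 0.55, 0.65])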
|
[
"[email protected]"
] | |
9421c7c230bd5255857e0c1b374614d0eb3f2649
|
c4567902c0142f0b5cbf13c12d5ac6825af4d0af
|
/experiments/experiment.py
|
df46428889b8f5f95476c1a9414edfa048103358
|
[] |
no_license
|
jlwalke2/deep-reinforcement-learning
|
ec945334261b01de4eb03eb958883d6e00067e4e
|
74ca4316fc995528409589fe5114e26e1f244543
|
refs/heads/master
| 2021-01-16T18:22:49.025946 | 2018-07-10T18:20:08 | 2018-07-10T18:20:08 | 100,067,311 | 0 | 0 | null | 2018-07-10T18:20:09 | 2017-08-11T20:18:24 |
Python
|
UTF-8
|
Python
| false | false | 4,113 |
py
|
from deeprl.utils.async import ModelManager
import multiprocessing
import os, os.path
from warnings import warn
from deeprl.utils import History
import logging, logging.handlers
import pickle
import matplotlib.pyplot as plt
def _run_agent(func, agent, queue):
# Get the root logger in the agent's process
logger = logging.getLogger()
# Delete existing loggers and force everything through the queue
logger.handlers.clear()
logger.setLevel(logging.INFO)
logger.addHandler(logging.handlers.QueueHandler(queue))
# Run the agent
try:
logger.info(f'Process {multiprocessing.current_process().name} is starting...')
return func(agent)
finally:
logger.info(f'Process {multiprocessing.current_process().name} terminated.')
class Experiment(object):
def __init__(self,
name: str,
agents: list):
"""Define an experiment to be executed.
:param name: user-friendly name for the experiment. Results will also be stored in a subdir with this name.
"""
self.name = name
# Assume agent is list of (agent, num_copies, init_func)
self._agents = agents
self._HISTORY_PATH = f'{self.name}/history.h5'
def run(self, force_rerun: bool =False):
if force_rerun and os.path.exists(self.name):
os.rmdir(self.name)
# If the results directory and the history data is present, then don't rerun
if os.path.isdir(self.name) and os.path.exists(self._HISTORY_PATH):
warn(f'Experiment not run because {os.path.abspath(self._HISTORY_PATH)} already exists.')
return
# Create a directory to store results in
if not os.path.isdir(self.name):
os.mkdir(self.name)
manager = ModelManager()
manager.start()
history = manager.Monitor()
log_queue = manager.Queue()
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('[%(asctime)s|%(levelname)s] %(message)s'))
listener = logging.handlers.QueueListener(log_queue, handler, respect_handler_level=True)
listener.start()
processes = []
for agent, num_copies, func in self._agents:
# Clone the agent N times
config = pickle.dumps(agent)
for i in range(num_copies):
instance = pickle.loads(config)
instance.name += f'_{i}'
instance.history = history
# Fork separate processes for each agent to run in
processes.append(
multiprocessing.Process(target=_run_agent, args=(func, instance, log_queue), name=instance.name))
for p in processes:
p.start()
for p in processes:
p.join()
listener.stop()
history.save(self._HISTORY_PATH)
def get_plot(self, df, metric: str, intervals: bool =True, **kwargs: 'passed to Pandas .plot()'):
if 'title' not in kwargs:
kwargs['title'] = metric
df = df.pivot(columns='sender', values=metric).groupby(lambda colname: colname.rsplit('_', maxsplit=1)[0], axis=1)
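        # Assuming the history's 'sender' column carries the instance names set
        # in run() (e.g. "<agent>_0", "<agent>_1"), this groupby collapses the
        # per-copy columns back into one group per agent name.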
mean = df.mean()
p = mean.plot(**kwargs)
if intervals:
min = df.min()
max = df.max()
for col in mean.columns:
p.fill_between(mean.index, min[col], max[col], alpha=0.2)
def get_plots(self, metrics: list, shape: tuple =None):
if shape is None:
shape = (1, len(metrics))
else:
            assert shape[0] * shape[1] >= len(metrics), 'Subplot layout of {} does not provide enough subplots for {} metrics.'.format(shape, len(metrics))
history = History.load(self._HISTORY_PATH)
episode_df = history.get_episode_metrics().set_index('episode', inplace=False)
fig, axes = plt.subplots(nrows=shape[0], ncols=shape[1], figsize=(8, 4))
for i in range(len(metrics)):
self.get_plot(episode_df, metrics[i], ax=axes[i], title=metrics[i])
return fig
|
[
"[email protected]"
] | |
571e4ffadec73e14c60280b824a93bdd21068930
|
23b280daf002610f05bc59bc09e24ce2c41594d5
|
/Chapter 4/4_1_Pizzas.py
|
12852e849a1d6e686934e19a1b9e473cd37c84b9
|
[] |
no_license
|
juliannepeeling/class-work
|
d4dfb0bc12989b31f91db975fcba0a48558c25c1
|
b7114d9096fff380d1b2cfac6352df2ea1cf51de
|
refs/heads/master
| 2020-04-10T03:41:54.128023 | 2018-12-07T06:04:06 | 2018-12-07T06:04:06 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 248 |
py
|
pizzas = ['margherita', 'mushroom', 'spinach']
for pizza in pizzas:
print(pizza)
for pizza in pizzas:
print("I like " + pizza + " pizza.")
print("I like pizza a lot." + "\nIn fact, I like it so much I have to avoid it." + "\nPizza is addictive.")
|
[
"[email protected]"
] | |
2447183c7ac3209683adb61de74f7f5c95cdd11f
|
dfa4f521a5aa0f828421dfc1fce6f568bad36b24
|
/Piscine/02/ex04/first_child.py
|
6703931c249b0b1c200421c799a75cfd1c6e38d2
|
[] |
no_license
|
1337hunter/42Datascience
|
f467980fad23f3ea9222bfc5a98f56d9fcac5f0a
|
5689426a64162b2e870b6c11a51a66ea84a631fb
|
refs/heads/main
| 2023-06-09T10:54:20.528186 | 2021-06-30T15:31:28 | 2021-06-30T15:31:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,761 |
py
|
import sys
from random import randint
class Research:
class Calculations:
def __init__(self, data):
self.data = data
def counts(self):
heads = 0
tails = 0
for peace in self.data:
heads += peace[0]
tails += peace[1]
return heads, tails
def fractions(self):
heads, tails = self.counts()
return heads * 100 / (heads + tails), tails * 100/ (heads + tails)
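        # Worked example (illustrative): for data = [[0, 1], [1, 0], [0, 1]],
        # counts() returns (1, 2) and fractions() returns roughly (33.33, 66.67).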
def __init__(self, file_name):
self.file_name = file_name
def check_file_header(self, origin, line):
check = line.split(',')
if not line or len(check) != 2:
raise Exception('Bad file header')
if origin[0][-1].isspace() or origin[0][0].isspace() or (len(origin[1]) > 1 and origin[1][0].isspace()) or origin[1][-1] != '\n':
raise Exception('Bad file content', "Bad header alignment")
if len(origin[1]) > 2 and origin[1][-2].isspace():
raise Exception('Bad file content', "Bad header alignment")
def check_content_alignent(self, line):
if len(line[0]) > 1 or (len(line[1]) > 2 and line[1][1] != '\n'):
raise Exception('Bad file content', "Bad data alignment")
def validate_content(self, content):
if len(content) != 2:
            raise Exception('Bad file content', 'Not enough data content')
if not content[0].isnumeric() or not content[1].isnumeric():
print(content[0], not content[0].isdigit(), content[1], not content[1].isdigit())
            raise Exception('Bad file content', "Table data must be numeric")
if int(content[0]) not in (1, 0) or int(content[1]) not in (1, 0):
raise Exception('Bad file content', "Data content must be 0 or 1")
if int(content[0]) == int(content[1]):
            raise Exception('Bad file content', "(contains equal numbers)")
return [int(content[0]), int(content[1])]
def file_reader(self, has_header=True):
with open(self.file_name, "r") as file:
line = file.readline()
if line and has_header:
self.check_file_header(line.split(','), line.strip())
line = file.readline()
strs = 0
data = []
while line:
strs += 1
self.check_content_alignent(line.split(','))
data += [self.validate_content(line.strip().split(','))]
line = file.readline()
if strs < 1:
raise Exception('File has no content')
return data
class Analitics(Research.Calculations):
def predict_random(self, n):
i = 0
predictions = []
while i < n:
predict = randint(0, 1)
predictions += [[predict, 0 if predict == 1 else 1]]
i += 1
return predictions
def predict_last(self):
return self.data[-1]
def CheckArguments(av):
if len(av) != 1:
raise Exception('Wrong argument', "The argument number is not one")
def CreateDatatable(path):
with open(path, 'w+') as file:
file.write('head,tail\n0,1\n1,0\n0,1\n1,0\n0,1\n0,1\n0,1\n1,0\n1,0\n0,1\n1,0\n')
def main(av):
CheckArguments(av)
CreateDatatable(av[0])
class_inst = Research(av[0])
data = class_inst.file_reader()
calculations = class_inst.Calculations(data)
print(data)
heads, tails = calculations.counts()
print(heads, tails)
heads, tails = calculations.fractions()
print(heads, tails)
analitics = Analitics(data)
print(analitics.predict_random(10))
print(analitics.predict_last())
if __name__ == '__main__':
main(sys.argv[1:])
|
[
"[email protected]"
] | |
30723c2e851a6064831ceee31779a2e0923f132d
|
8de2a78facbdedb033e349692c71e33ce6f47315
|
/string_format.py
|
bda55de3713cf8d1cf8a87976aba26d564aa51b8
|
[] |
no_license
|
KshitjMaheshwari/python38-GLA
|
230e4ce96c4416bbc7b11477772a827ee0d62a46
|
4d29d377ab38f75510f995293f62b7c59229423b
|
refs/heads/master
| 2022-04-17T05:17:15.205216 | 2020-02-18T19:21:18 | 2020-02-18T19:21:18 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 720 |
py
|
'''
str.format() is one of the string formatting methods in Python3,
which allows multiple substitutions and value formatting.
This method lets us concatenate elements within a string through positional formatting.
'''
a = 10
b = 30
c = a + b
temp = 'result is %d of %d and %d'
f = temp % (c, a, b)
print(f)
# tag f format
a = 10
b = 30
c = a + b
temp = f"result is {c} of {a} and {b} 😙"
print(temp) # result is 40 of 10 and 30
f = f'result is {c} of {a} {b}'
print(f) # result is 40 of 10 30
dh = 'result is {} of {} {}'
f = dh.format(c, a, b)
print(f) # result is 40 of 10 30
f = 'result is %d of %d %d' % (c, a, b)
print(f)
k = 'my name is and my record is {:10d}'.format(22223)
print(k)
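# A couple of extra str.format() forms (illustrative additions, not part of the
# original notes): positional indexes can be reused and keyword names work too.
print('{0} and {1} make {2}; again: {0}'.format(a, b, c))
print('result is {total} of {x} and {y}'.format(total=c, x=a, y=b))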
|
[
"[email protected]"
] | |
338783eada3f8db553ed010a6cffd666dbb8ac77
|
4d95b102cea659cb24a58165a16238ce639dfd70
|
/backend.py
|
a698433cac0b1f38624f89ea006564cf9d0c1dd5
|
[] |
no_license
|
apurv100/library-app-
|
63f2eddeb9687cc60f78c2be8d105236de4554ae
|
92516f3c003105feab43c7311b520eb217d3beba
|
refs/heads/master
| 2022-11-22T03:49:12.046378 | 2020-07-22T07:48:02 | 2020-07-22T07:48:02 | 281,609,271 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,437 |
py
|
import sqlite3
def connect():
conn=sqlite3.connect("books.db")
cur=conn.cursor()
    cur.execute("CREATE TABLE IF NOT EXISTS book (id INTEGER PRIMARY KEY , title text, author text, year integer, isbn integer)")
conn.commit()
conn.close()
def insert(title,author,year,isbn):
conn=sqlite3.connect("books.db")
cur=conn.cursor()
##id is auto incremented so noneed to paas id
cur.execute("INSERT INTO book VALUES (NULL,?,?,?,?)",(title,author,year,isbn))
conn.commit()
conn.close()
def view():
conn=sqlite3.connect("books.db")
cur=conn.cursor()
cur.execute("SELECT * FROM book")
rows=cur.fetchall()
conn.close()
return rows
def search(title="",author="",year="",isbn=""):
conn=sqlite3.connect("books.db")
cur=conn.cursor()
cur.execute("SELECT * FROM book WHERE title=? OR author=? OR Year=? OR isbn=?",(title,author,year,isbn))
rows=cur.fetchall()
conn.close()
return rows
def delete(id):
conn=sqlite3.connect("books.db")
cur=conn.cursor()
cur.execute("DELETE FROM book WHERE id=?",(id,))
conn.commit()
conn.close()
def update(id,title,author,year,isbn):
conn=sqlite3.connect("books.db")
cur=conn.cursor()
cur.execute("UPDATE book SET title=?, author=?, year=?, isbn=? WHERE id=?",(title,author,year,isbn,id))
conn.commit()
conn.close()
connect()
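# Illustrative usage sketch (not part of the original module); the book details
# below are made-up sample values, only meant to show the call signatures.
if __name__ == "__main__":
    insert("Sample Title", "Sample Author", 1999, 1234567890)
    print(view())
    print(search(title="Sample Title"))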
|
[
"[email protected]"
] | |
bc33f82575efbebdfbe5f39f868ecd13e3a38d05
|
835886390ff946a32ce34f09da8f37830e3f8991
|
/pipeline/welddettection/computervision/linedetection.py
|
7bd9e1da571e3ab8b070d08da6559c21be80305e
|
[] |
no_license
|
sauravag/marpi16
|
7180921885969258b8694838d0e6cd433078c088
|
f58f9b481778ad401e34e76afaaad081c85f8090
|
refs/heads/master
| 2021-01-12T17:07:27.707233 | 2017-01-16T16:10:36 | 2017-01-16T16:10:36 | 71,513,518 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,304 |
py
|
import cv2
import numpy as np
from matplotlib import pyplot as plt
import sys
import os
def line_detect(filepath):
print filepath
#Read the input image
# Input: image as .jpg
img = cv2.imread(filepath)
# make a copy of the image
# Input 1: image
# Input 2: top border width in number of pixels
# Input 3: bottom border width in number of pixels
# Input 4: left border width in number of pixels
# Input 5: right border width in number of pixels
# Input 6: type of border added
imgcopy = cv2.copyMakeBorder(img,0,0,0,0,cv2.BORDER_REPLICATE)
# convert input image to grayscale
# Input 1: image
# Input 2: color space conversion code
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# blur gray image to remove noise
# Input 1: image
    # Input 2: Gaussian kernel size as (width, height); both must be odd and positive
    # Input 3: sigmaX - Gaussian standard deviation in X (0 lets OpenCV derive it from the kernel size)
blur = cv2.GaussianBlur(gray,(5,5),0)
# detect edges in image (image is now gray and noise removed)
# Input 1: image
    # Input 2: minVal (pixels with intensity gradient below this are discarded)
    # Input 3: maxVal (pixels with intensity gradient above this are immediately edges)
# Input 4: size of Sobel kernel
edges = cv2.Canny(blur,150,200,apertureSize = 3)
# Detect Lines and draw lines on image copy
lines = cv2.HoughLines(edges,1,np.pi/90,110)
# What if there was no line
    if lines is None:
print "No line detected"
exit()
for rho,theta in lines[0]:
if (np.pi/70 <= theta <= np.pi/7) or (2.056 < theta < 4.970) or (1.570 <= theta <= 1.600): #(2,6 <=theta <= 26) or (theta >118 and theta <= 285)
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
cv2.line(imgcopy,(x1,y1),(x2,y2),(0,255,0),2)
return imgcopy
'''#plot original input image
plt.subplot(2,2,1),plt.imshow(img,cmap = 'gray')
plt.title('Original'), plt.xticks([]), plt.yticks([])
#plot grey image with noise removed
plt.subplot(2,2,2),plt.imshow(blur,cmap = 'gray')
plt.title('Blurred'), plt.xticks([]), plt.yticks([])
#plot image of detected lines
plt.subplot(2,2,3),plt.imshow(edges,cmap = 'gray')
plt.title('Edges'), plt.xticks([]), plt.yticks([])
#plot original image with dected lines drawn on top
plt.subplot(2,2,4),plt.imshow(imgcopy,cmap = 'gray')
plt.title('Lines'), plt.xticks([]), plt.yticks([])
plt.show()'''
if __name__ == "__main__":
basePath = '/home/elijah/Documents/AggieChallenge/WeldSamples/'
for fn in os.listdir(basePath):
#print os.path.isfile(fn)
print fn
if os.path.isfile(basePath + fn) == True:
try:
# Attempt to open an image file
img = line_detect(basePath + fn)
(name, extension) = os.path.splitext(fn)
cv2.imwrite(basePath+name+'_detection.jpg',img)
except IOError, e:
# Report error, and then skip to the next argument
print "Problem opening", fn, ":", e
continue
|
[
"[email protected]"
] | |
24456658a8e458ffa2be70f0c0307983658f39e6
|
e1a97361711186574b44de6373f44cd3be59203e
|
/code/model/GATest.py
|
3e546a1f33a089a506cc1c6f4f0b397bb69ac6e1
|
[] |
no_license
|
wawltor/Preudential
|
ba6e7673b4cf850f8a1d238b56ff94ae6730f8fe
|
942d75265ca434bd926a858916b56787e192addf
|
refs/heads/master
| 2021-01-10T01:39:49.683178 | 2016-03-06T02:19:41 | 2016-03-06T02:19:41 | 53,233,412 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 13,374 |
py
|
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder, StandardScaler
from scipy.optimize import minimize
from ml_metrics import *
from utils import *
def MungeData(train, test):
le = LabelEncoder()
train.Product_Info_2.fillna('Z0', inplace=True)
test.Product_Info_2.fillna('Z0', inplace=True)
train.insert(0, 'Product_Info_2_N', train.Product_Info_2.str[1:])
train.insert(0, 'Product_Info_2_C', train.Product_Info_2.str[0])
train.drop('Product_Info_2', inplace=True, axis=1)
test.insert(0, 'Product_Info_2_N', test.Product_Info_2.str[1:])
test.insert(0, 'Product_Info_2_C', test.Product_Info_2.str[0])
test.drop('Product_Info_2', inplace=True, axis=1)
le.fit(list(train.Product_Info_2_C)+list(test.Product_Info_2_C))
train.Product_Info_2_C = le.transform(train.Product_Info_2_C)
test.Product_Info_2_C = le.transform(test.Product_Info_2_C)
trainids = train.Id
testids = test.Id
train.drop('Id', axis=1, inplace=True)
test.drop('Id', axis=1, inplace=True)
responses = train.Response.values
train.drop('Response', inplace=True, axis=1)
train = train.astype(float)
test = test.astype(float)
train.insert(0,
'SumMedicalKeywords',
train[train.columns[train.columns.str.contains('Medical_Keyword')]]
.sum(axis=1, skipna=True))
test.insert(0,
'SumMedicalKeywords',
test[test.columns[test.columns.str.contains('Medical_Keyword')]]
.sum(axis=1, skipna=True))
train.insert(0,
'SumEmploymentInfo',
train[train.columns[train.columns.str.contains('InsuredInfo')]]
.sum(axis=1, skipna=True))
test.insert(0,
'SumEmploymentInfo',
test[test.columns[test.columns.str.contains('InsuredInfo')]]
.sum(axis=1, skipna=True))
train.insert(0,
'SumMedicalHistory',
train[train.columns[train.columns.str.contains('Medical_History')]]
.sum(axis=1, skipna=True))
test.insert(0,
'SumMedicalHistory',
test[test.columns[test.columns.str.contains('Medical_History')]]
.sum(axis=1, skipna=True))
train.fillna(-1, inplace=True)
test.fillna(-1, inplace=True)
features = train.columns
ss = StandardScaler()
train[features] = ss.fit_transform(train[features].values)
test[features] = ss.transform(test[features].values)
train['Response'] = responses
train.insert(0, 'Id', trainids)
test.insert(0, 'Id', testids)
return train, test
def Response1(data):
p = ((np.sin(np.cosh(data["Medical_History_23"])) + (((1.0/(1.0 + np.exp(- (data["Product_Info_4"] + data["Medical_History_4"])))) * 2.0) - np.maximum( (data["BMI"]), (((((data["SumMedicalKeywords"] + (data["Ins_Age"] - data["Medical_History_40"]))/2.0) + data["Medical_Keyword_3"])/2.0))))) +
(((((((data["Insurance_History_5"] - (data["Insurance_History_2"] + data["Medical_History_30"])) - np.maximum( (data["InsuredInfo_5"]), (data["Medical_History_5"]))) + data["Medical_History_13"])/2.0) / 2.0) + ((np.minimum( (data["InsuredInfo_6"]), ((data["Medical_History_15"] + data["Medical_History_4"]))) + data["Medical_History_15"])/2.0))/2.0) +
np.tanh(np.minimum( (data["Medical_History_20"]), ((np.minimum( (np.ceil((np.round(((data["Product_Info_4"] + data["Medical_History_7"])/2.0)) * 2.0))), ((((data["Medical_History_27"] - ((data["Medical_Keyword_38"] + data["Product_Info_4"])/2.0)) + data["Medical_History_11"])/2.0))) - data["InsuredInfo_7"])))) +
np.tanh(((data["Medical_History_31"] + (((((data["Medical_Keyword_15"] + (((data["Family_Hist_2"] + np.maximum( (data["Medical_Keyword_41"]), (data["Medical_History_3"])))/2.0) - data["Medical_History_18"]))/2.0) + ((data["Medical_Keyword_25"] + data["Medical_History_1"])/2.0)) + np.sin(np.ceil(data["Product_Info_2_N"])))/2.0))/2.0)) +
np.minimum( (np.minimum( ((data["InsuredInfo_2"] * data["Medical_History_35"])), (np.floor(np.cos(np.minimum( (data["Medical_History_35"]), (np.sinh((data["Medical_History_15"] * 5.4285697937))))))))), (np.cos(np.maximum( (data["Ins_Age"]), ((data["Medical_History_28"] * data["Employment_Info_3"])))))) +
np.minimum( (np.ceil((data["Medical_Keyword_22"] + np.ceil((data["Medical_History_24"] + np.maximum( (0.0588234998), ((np.floor(data["Product_Info_2_C"]) + data["Medical_Keyword_33"])))))))), (np.minimum( (np.cos(np.ceil(data["Medical_History_11"]))), (np.maximum( (data["SumMedicalKeywords"]), (data["Medical_History_39"])))))) +
(0.0588234998 * ((data["Medical_History_6"] + (((-(data["InsuredInfo_1"])) - (data["Product_Info_4"] + data["Insurance_History_8"])) + data["Medical_History_17"])) + (((data["InsuredInfo_6"] + data["Family_Hist_4"]) - data["Medical_History_40"]) + data["Medical_History_33"]))) +
(-((((data["SumMedicalKeywords"] - np.cos(data["Medical_History_11"])) / 2.0) * (-(np.minimum( (np.cos((1.0/(1.0 + np.exp(- (data["SumMedicalKeywords"] - data["Medical_Keyword_3"])))))), (np.round(((((data["Medical_Keyword_3"] + data["Medical_History_30"])/2.0) + data["Medical_History_5"])/2.0))))))))) +
((np.sinh(np.minimum( (0.1384620070), (np.cos(data["Ins_Age"])))) + np.minimum( (np.tanh(np.minimum( ((data["Medical_Keyword_9"] * data["Medical_Keyword_37"])), ((data["Medical_History_19"] * data["InsuredInfo_2"]))))), (((data["Employment_Info_1"] + (1.0/(1.0 + np.exp(- data["Medical_History_33"]))))/2.0))))/2.0) +
(((((((data["Medical_Keyword_34"] + data["Medical_Keyword_12"])/2.0) - (data["Product_Info_2_C"] + np.maximum( (0.0943396017), (data["Medical_History_1"])))) + data["InsuredInfo_7"])/2.0) + (data["Medical_History_13"] * ((data["Medical_History_28"] + data["Medical_History_23"])/2.0))) * 0.0943396017))
return p+4.5
def Response2(data):
p = ((np.cos(((data["Medical_Keyword_3"] + (-(data["BMI"])))/2.0)) - ((data["BMI"] + ((((data["SumMedicalKeywords"] + data["InsuredInfo_5"])/2.0) - (np.cos(data["Medical_History_40"]) + np.minimum( (data["Product_Info_4"]), (data["Medical_History_23"])))) - data["Medical_History_4"]))/2.0)) +
np.round(np.tanh((((data["Medical_History_15"] - ((data["Insurance_History_2"] + data["InsuredInfo_7"])/2.0)) + np.ceil(np.minimum( (data["Product_Info_4"]), (np.ceil(np.floor((data["Medical_History_15"] + np.round((data["Medical_History_27"] - np.tanh(data["Medical_History_15"]))))))))))/2.0))) +
np.tanh((data["Medical_History_20"] + (((-(((((data["Medical_History_18"] + data["Medical_Keyword_3"])/2.0) + np.maximum( (data["Medical_History_5"]), (data["Medical_History_30"])))/2.0))) + ((((data["InsuredInfo_6"] + data["SumMedicalKeywords"])/2.0) + ((data["Family_Hist_4"] + data["Medical_History_13"])/2.0))/2.0))/2.0))) +
np.tanh(((((np.minimum( (data["Medical_History_11"]), ((data["Medical_History_31"] + (data["Medical_History_28"] * data["SumEmploymentInfo"])))) + ((data["Insurance_History_5"] + (data["Medical_Keyword_41"] - data["Ins_Age"]))/2.0))/2.0) + (((data["Medical_History_40"] + data["InsuredInfo_5"])/2.0) - data["Medical_Keyword_38"]))/2.0)) +
((data["Medical_History_35"] * data["InsuredInfo_2"]) + np.minimum( (np.cos(data["Ins_Age"])), ((((((((data["Medical_History_3"] + np.ceil(data["Medical_Keyword_15"]))/2.0) + np.minimum( (data["Medical_History_7"]), (data["Medical_History_1"])))/2.0) / 2.0) + np.tanh(np.ceil(data["Medical_History_24"])))/2.0)))) +
np.sin(np.minimum( (data["Medical_History_33"]), (((data["Medical_Keyword_25"] + (((((np.floor(data["Ins_Age"]) * np.floor(((data["BMI"] * 2.0) + data["Medical_History_5"]))) + ((data["Medical_Keyword_22"] + data["Medical_History_17"])/2.0))/2.0) + np.cos(data["BMI"]))/2.0))/2.0)))) +
np.sin((np.minimum( (data["Medical_History_11"]), (np.minimum( (np.floor(np.maximum( (data["SumMedicalKeywords"]), (np.maximum( (data["Medical_History_39"]), (data["InsuredInfo_6"])))))), ((0.0943396017 * (((data["InsuredInfo_6"] + data["BMI"]) + (data["Medical_History_1"] - data["InsuredInfo_1"]))/2.0)))))) * 2.0)) +
(np.sin(np.maximum( ((data["Medical_History_5"] + np.sin((np.ceil(data["Medical_History_15"]) * 2.0)))), (np.maximum( (np.sinh(data["Medical_Keyword_33"])), (np.minimum( (0.1384620070), (((data["Ins_Age"] + np.ceil(data["Product_Info_2_N"]))/2.0)))))))) / 2.0) +
(np.minimum( (np.cos((data["Wt"] + np.round(data["Medical_History_4"])))), (np.ceil(((1.0/(1.0 + np.exp(- data["Medical_Keyword_3"]))) - np.cosh(np.minimum( (data["Product_Info_4"]), (np.sinh((data["InsuredInfo_2"] * data["Medical_Keyword_9"]))))))))) / 2.0) +
(0.0588234998 * ((((np.ceil(data["Employment_Info_2"]) - data["Product_Info_4"]) + (np.maximum( (data["Family_Hist_2"]), (data["Medical_History_21"])) + data["Family_Hist_5"])) + (data["BMI"] - data["Insurance_History_8"])) + np.minimum( (data["Medical_History_6"]), (data["Product_Info_2_N"])))))
return p+4.5
def Response3(data):
p = (((data["Medical_History_4"] + (np.cos(data["BMI"]) - ((data["Medical_Keyword_3"] + (data["BMI"] - (1.6304299831 + np.minimum( (data["Product_Info_4"]), (((data["Medical_History_40"] + data["Medical_History_13"])/2.0)))))) - np.ceil(data["Medical_History_23"]))))/2.0) +
((((data["Medical_History_15"] + ((data["InsuredInfo_6"] + ((data["Medical_History_20"] - data["Insurance_History_2"]) - np.maximum( (data["InsuredInfo_5"]), (np.maximum( (data["Medical_History_30"]), (data["Medical_History_5"]))))))/2.0))/2.0) + np.minimum( (np.ceil(data["Product_Info_4"])), ((data["InsuredInfo_7"] * data["Medical_History_18"]))))/2.0) +
(((((data["Family_Hist_2"] + (data["Medical_History_27"] - data["Medical_Keyword_38"]))/2.0) + ((((data["BMI"] * data["Medical_Keyword_3"]) - data["Ins_Age"]) + (data["Insurance_History_5"] + (data["Medical_Keyword_15"] + (data["BMI"] * data["Ins_Age"]))))/2.0))/2.0) / 2.0) +
(np.tanh(data["Medical_History_11"]) + np.floor((np.sin((0.1384620070 * np.minimum( (np.round(((604) * np.minimum( (data["Medical_History_15"]), (np.maximum( (data["Employment_Info_2"]), ((-(data["Medical_History_28"]))))))))), (0.1384620070)))) / 2.0))) +
np.tanh((data["Medical_History_31"] + ((np.minimum( (np.ceil(data["Medical_History_24"])), ((np.minimum( (data["Medical_History_4"]), (data["Wt"])) * (-(np.cos(data["Ins_Age"])))))) + np.round(((data["Medical_Keyword_41"] + np.minimum( (data["Medical_History_7"]), (data["Product_Info_4"])))/2.0)))/2.0))) +
((data["Medical_History_35"] * data["InsuredInfo_2"]) + np.sin(np.minimum( (data["Medical_History_17"]), ((((((np.maximum( (data["Medical_History_3"]), (data["Medical_History_1"])) + ((data["Medical_Keyword_25"] + data["Medical_History_33"])/2.0))/2.0) + np.ceil(np.minimum( (data["Medical_History_11"]), (data["Product_Info_2_N"]))))/2.0) / 2.0))))) +
np.minimum( ((0.1384620070 * ((data["Medical_History_6"] + (data["Family_Hist_3"] + ((((data["Medical_History_15"] - data["Medical_History_19"]) - data["Medical_Keyword_9"]) - (data["Medical_History_40"] - data["Family_Hist_4"])) + data["Medical_Keyword_33"])))/2.0))), (np.cos(data["Ins_Age"]))) +
np.minimum( ((np.floor(np.maximum( (data["Product_Info_2_C"]), ((data["SumMedicalKeywords"] * np.round((((((data["Medical_Keyword_38"] + data["InsuredInfo_2"])/2.0) / 2.0) + (data["Medical_Keyword_3"] / 2.0))/2.0)))))) / 2.0)), (np.sin(np.maximum( (data["BMI"]), ((1.0/(1.0 + np.exp(- data["Medical_Keyword_3"])))))))) +
(0.6029409766 - (1.0/(1.0 + np.exp(- ((((data["Product_Info_2_C"] + np.maximum( ((np.maximum( (data["Medical_Keyword_23"]), (data["Medical_Keyword_37"])) + (data["Insurance_History_8"] + data["Medical_History_18"]))), ((data["Medical_History_30"] + data["Medical_History_5"]))))/2.0) + np.maximum( (data["Medical_History_23"]), (data["InsuredInfo_1"])))/2.0))))) +
(((np.minimum( (data["Medical_History_32"]), ((data["Employment_Info_1"] + np.maximum( (data["InsuredInfo_6"]), (data["Medical_History_39"]))))) / 2.0) + np.minimum( (0.3183098733), (((np.maximum( (data["Medical_History_21"]), (data["Medical_History_1"])) + (1.0/(1.0 + np.exp(- np.minimum( (data["SumEmploymentInfo"]), (data["InsuredInfo_6"]))))))/2.0))))/2.0))
return p+4.5
if __name__ == "__main__":
train = pd.read_csv('../../data/train.csv')
test = pd.read_csv('../../data/test.csv')
train, test = MungeData(train, test)
p1 = Response1(train)
p2 = Response2(train)
p3 = Response3(train)
t1 = Response1(test)
t2 = Response2(test)
t3 = Response3(test)
trainResponse = 0.32018*(p1-p3)+0.35505*(p2-p3)+p3
testResponse = 0.32018*(t1-t3)+0.35505*(t2-t3)+t3
for i in range(1,4):
cutpoints = np.array([2.781097, 3.846915, 4.294624, 4.994817, 5.540523, 6.221271, 6.574580])
res = minimize(minimize_quadratic_weighted_kappa,cutpoints,(trainResponse,train['Response']),method='Nelder-Mead')
cutpoints = np.sort(res.x)
cutpoints = np.concatenate([[-99999999999999999],cutpoints,[999999999999999]])
y_test = pd.cut(testResponse,bins=cutpoints,labels=[1,2,3,4,5,6,7,8])
data = pd.DataFrame({'Id':test['Id'],'Response':y_test})
data.to_csv("ga.csv",index=False)
|
[
"[email protected]"
] | |
2389bb8407f091ab646d6bfd9d0808422f80767c
|
9abba9c3f3df3a79abcf437ae164387bbe70304c
|
/nb_author_id.py
|
652771b5ff51260f9c72a339f56030987aad5e04
|
[] |
no_license
|
Amanchouhan192/MachineLearningProject1
|
da3aecfdecffe35285ebe668f32c2f757b732bfd
|
44f0ddf85207a652a9e2ac76338f4ed92adb9d95
|
refs/heads/master
| 2021-05-09T05:01:24.251888 | 2018-02-02T21:51:02 | 2018-02-02T21:51:02 | 119,297,210 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,094 |
py
|
#!/usr/bin/python
"""
This is the code to accompany the Lesson 1 (Naive Bayes) mini-project.
Use a Naive Bayes Classifier to identify emails by their authors
authors and labels:
Sara has label 0
Chris has label 1
"""
import sys
from time import time
sys.path.append("../tools/")
from email_preprocess import preprocess
### features_train and features_test are the features for the training
### and testing datasets, respectively
### labels_train and labels_test are the corresponding item labels
features_train, features_test, labels_train, labels_test = preprocess()
#########################################################
### your code goes here ###
from sklearn import datasets
iris = datasets.load_iris()
from sklearn.naive_bayes import GaussianNB
gnb = GaussianNB()
y_pred = gnb.fit(iris.data, iris.target).predict(iris.data)
print("Number of mislabeled points out of a total %d points : %d" % (iris.data.shape[0],(iris.target != y_pred).sum()))
#########################################################
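### Hedged sketch (not part of the original file): the docstring above asks for
### a classifier fit on the preprocessed email features, so something like the
### following would complete the exercise.  accuracy_score is an assumed
### standard sklearn import, not something the original file pulls in.
from sklearn.metrics import accuracy_score
clf = GaussianNB()
t0 = time()
clf.fit(features_train, labels_train)
print("training time: %.3fs" % (time() - t0))
pred = clf.predict(features_test)
print("email author accuracy: %.4f" % accuracy_score(labels_test, pred))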
|
[
"[email protected]"
] | |
1c7839058256268dba6c645505924064d175cd11
|
713fc732a037447897092722647e28cb7a9711a8
|
/manage.py
|
2bab7faec9fd6bbfa9821bc1234c21c80064773a
|
[] |
no_license
|
jkachhadia/StatsBoy
|
9612eec07b44cf34f76c63eddbb085daa7869640
|
ad9bb1f921dcb4c74b1ba842b015445c1e0abe33
|
refs/heads/master
| 2021-01-18T00:46:20.848151 | 2016-07-26T22:09:10 | 2016-07-26T22:09:10 | 64,026,809 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 688 |
py
|
import os
from app import create_app,db
from app.models import User, Role
from flask.ext.script import Manager,Shell
from flask.ext.migrate import Migrate, MigrateCommand
app=create_app(os.getenv('BLOGPOLE_CONFIG') or 'default')
manager=Manager(app)
migrate=Migrate(app,db)
def make_shell_context():
return dict(app=app,db=db,User=User,Role=Role)
manager.add_command("shell",Shell(make_context=make_shell_context))
manager.add_command("db",MigrateCommand)
@manager.command
def test():
"""Run the Unit Tests."""
import unittest
tests=unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__=='__main__':
manager.run()
|
[
"[email protected]"
] | |
9fff28963650cf8d535041789a1b98c4bb5fc753
|
2d68ea9ea5311748254d2d4c3ba84da82b3171a1
|
/mysite/settings.py
|
3a978f09c5cd9fd39efbcdb7bb6916a2633be1d3
|
[] |
no_license
|
pm-3/mon-nouveau-blog
|
4b8c6bb6bec7687f7e8a63db1c08050cc74c31b6
|
a3f1d6ce15dd89a94f31345621f8ef4a9787dcc1
|
refs/heads/master
| 2022-06-11T16:02:54.347311 | 2020-05-09T15:36:37 | 2020-05-09T15:36:37 | 262,520,171 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,270 |
py
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.2.12.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6a#qd^ck1%-id_u^_a2d@qxf$e1!x8ag%dw+cbd4dcjmu#!yk9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog.apps.BlogConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
TIME_ZONE = 'UTC'
TIME_ZONE = 'Europe/Paris'
LANGUAGE_CODE = 'fr-fr'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR,'static')
ALLOWED_HOSTS = ['127.0.0.1', '.pythonanywhere.com']
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
|
[
"[email protected]"
] | |
563296302dbd9a726b0a6847c0d8a46d8ecf273c
|
1e18eb6d58dd66cf4d9a8933b80d9dd6c5359c71
|
/HELLOPYTHON/team1_api_email/common.py
|
5606e9f62a14f57237597da9025a141384190352
|
[] |
no_license
|
YeogangLee/python
|
3d1ca9e5724e9281965282f40d908b86b1c0b7d6
|
a94cd540eb2b6ff9ec3cac9e46a73fcc6bbe1c72
|
refs/heads/master
| 2023-06-20T06:30:55.561942 | 2021-07-19T09:40:39 | 2021-07-19T09:40:39 | 381,004,759 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,736 |
py
|
import os
import pickle
# Gmail API utils
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# Request all access (permission to read/send/receive emails, manage the inbox, and more)
SCOPES = ['https://mail.google.com/']
our_email = '[email protected]'
def gmail_authenticate():
creds = None
# the file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first time
if os.path.exists("token.pickle"):
with open("token.pickle", "rb") as token:
creds = pickle.load(token)
    # if there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# save the credentials for the next run
with open("token.pickle", "wb") as token:
pickle.dump(creds, token)
return build('gmail', 'v1', credentials=creds)
def search_messages(service, query):
result = service.users().messages().list(userId='me',q=query).execute()
messages = [ ]
if 'messages' in result:
messages.extend(result['messages'])
while 'nextPageToken' in result:
page_token = result['nextPageToken']
result = service.users().messages().list(userId='me',q=query, pageToken=page_token).execute()
if 'messages' in result:
messages.extend(result['messages'])
return messages
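# Illustrative usage sketch (not part of the original module): authenticate and
# list the ids of messages matching a Gmail search query.  "is:unread" is just
# an example query string; any Gmail search operator expression works here.
if __name__ == "__main__":
    service = gmail_authenticate()
    for msg in search_messages(service, "is:unread"):
        print(msg["id"])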
|
[
"[email protected]"
] | |
250c628eb53cff177552f32732050b5b6bfc9d05
|
68e4826da5a714abb2c71b214db27ad6286acff1
|
/ml_12_nlp/src/train.py
|
ca7399d1624364d18a6d6c8ddb4555be79987d93
|
[] |
no_license
|
danhphan/ApMLPs
|
ce3162c83d3c64a57a8f35e16caf4a9891e3f0ff
|
cf4f26ce5081b63827453f72e2d0339e27774f35
|
refs/heads/main
| 2023-03-16T01:43:41.227199 | 2021-03-05T07:44:11 | 2021-03-05T07:44:11 | 338,702,957 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,331 |
py
|
import io
import numpy as np
import pandas as pd
import torch
from torch.utils.data import DataLoader
import tensorflow as tf
from sklearn import metrics
import config, dataset, engine, lstm
def load_vectors(fname):
# Taken from: fasttext.cc/docs/en/english-vectors.html
fin = io.open(fname, 'r', encoding='utf-8', newline='\n', errors='ignore')
n, d = map(int, fin.readline().split())
data = {}
for line in fin:
tokens = line.rstrip().split(' ')
data[tokens[0]] = list(map(float, tokens[1:]))
return data
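# Format note (illustrative): the .vec file is plain text with a header line
# "<vocab_size> <dim>" followed by one "<word> v1 v2 ... v300" line per word,
# which is how fastText distributes crawl-300d-2M.vec.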
def create_embedding_matrix(word_index, embedding_dict):
"""
This function creates the embedding matrix
:param word_index: a dictionary with word:index_value
:param embedding_dict: a dictionary with word:embedding_vector
:return: a numpy array with embedding vectors for all known words
"""
embedding_matrix = np.zeros((len(word_index) + 1, 300))
for word, i in word_index.items():
if word in embedding_dict:
embedding_matrix[i] = embedding_dict[word]
return embedding_matrix
def run(df, fold):
train_df = df[df.kfold != fold].reset_index(drop=True)
valid_df = df[df.kfold == fold].reset_index(drop=True)
print("Fitting tokenizer")
# Use tokenizer from tf.keras
tokenizer = tf.keras.preprocessing.text.Tokenizer()
tokenizer.fit_on_texts(df.review.values.tolist())
# Convert data to sequences
xtrain = tokenizer.texts_to_sequences(train_df.review.values)
xvalid = tokenizer.texts_to_sequences(valid_df.review.values)
# Zero pad given the maximum length
xtrain = tf.keras.preprocessing.sequence.pad_sequences(xtrain, maxlen=config.MAX_LEN)
xvalid = tf.keras.preprocessing.sequence.pad_sequences(xvalid, maxlen=config.MAX_LEN)
# Generate dataset and dataloader
train_ds = dataset.ImdbDataset(reviews=xtrain, targets=train_df.sentiment.values)
train_dl = DataLoader(train_ds, batch_size=config.TRAIN_BATCH_SIZE, num_workers=4)
valid_ds = dataset.ImdbDataset(reviews=xvalid, targets=valid_df.sentiment.values)
valid_dl = DataLoader(valid_ds, batch_size=config.VALID_BATCH_SIXE, num_workers=4)
print("Loading embedding")
embedding_dict = load_vectors("../data/crawl-300d-2M.vec")
embedding_matrix = create_embedding_matrix(tokenizer.word_index, embedding_dict)
# Create device, and model
device = torch.device("cuda")
model = lstm.LSTM(embedding_matrix)
model.to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
print("Training model")
best_accuracy = 0
early_stop_counter = 0
for epoch in range(config.EPOCHS):
engine.train(train_dl, model, optimizer, device)
outputs, targets = engine.evaluate(valid_dl, model, device)
# using 0.5 threshold after sigmoid
outputs = np.array(outputs) >= 0.5
accuracy = metrics.accuracy_score(targets, outputs)
# Store best accuracy
if accuracy > best_accuracy:
best_accuracy = accuracy
print(f"Fold={fold}, Epoch={epoch}, Accuracy={accuracy}")
else:
early_stop_counter += 1
if early_stop_counter > 2:
break
if __name__ == "__main__":
df = pd.read_csv("../data/imdb_folds.csv")
for fold_ in range(5):
run(df, fold_)
|
[
"[email protected]"
] | |
b2b236a9c1db58d83bed685b97be121cf5684d1c
|
11fe289ef112dba477399034458b7bdec061098f
|
/Avocados/Train_avocado.py
|
1abc89fa9c431211be2297076e010f937b2aab77
|
[] |
no_license
|
HSIYJND/CSCI547_HSIProject
|
34399c936d374f8e931b4858c33c536466d69bf1
|
63eb7c7a444290e548f6d1ee8f06f0e3fe72a383
|
refs/heads/master
| 2022-11-16T15:45:25.451851 | 2020-07-14T08:54:00 | 2020-07-14T08:54:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,985 |
py
|
from keras.callbacks import ModelCheckpoint
# from keras.optimizers import Adam
from sklearn.model_selection import StratifiedKFold
import numpy as np
from operator import truediv
import h5py
import cv2
from sklearn.metrics import precision_recall_fscore_support, accuracy_score, cohen_kappa_score, confusion_matrix
import pickle
from scipy.signal import find_peaks
from Avocados.networks import *
import keras.backend as k
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
Data augmentation functions
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
def add_rotation_flip(x, y):
x = np.reshape(x, (x.shape[0], x.shape[1], x.shape[2], x.shape[3], 1))
# Flip horizontally
x_h = np.flip(x[:, :, :, :, :], 1)
# Flip vertically
x_v = np.flip(x[:, :, :, :, :], 2)
# Flip horizontally and vertically
x_hv = np.flip(x_h[:, :, :, :, :], 2)
# Concatenate
x = np.concatenate((x, x_h, x_v, x_hv))
y = np.concatenate((y, y, y, y))
return x, y
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
LOAD HDF5 FILE
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
hdf5_file = h5py.File('avocado_dataset_w64.hdf5', "r")
train_x = np.array(hdf5_file["train_img"][...])
train_y = np.array(hdf5_file["train_labels"][...])
# Average consecutive bands
img2 = np.zeros((train_x.shape[0], int(train_x.shape[1] / 2), int(train_x.shape[2] / 2), int(train_x.shape[3] / 2)))
for n in range(0, train_x.shape[0]):
xt = cv2.resize(np.float32(train_x[n, :, :, :]), (32, 32), interpolation=cv2.INTER_CUBIC)
for i in range(0, train_x.shape[3], 2):
img2[n, :, :, int(i / 2)] = (xt[:, :, i] + xt[:, :, i + 1]) / 2.
train_x = img2
# Select most relevant bands
nbands = 5
count = 0
with open('avocadoselection.p', 'rb') as f:
saliency = pickle.load(f)
peaks, _ = find_peaks(saliency, height=5, distance=5)
saliency = np.flip(np.argsort(saliency))
indexes = []
for i in range(0, len(saliency)):
if saliency[i] in peaks:
indexes.append(saliency[i])
indexes = indexes[0:nbands]
indexes.sort()
temp = np.zeros((train_x.shape[0], train_x.shape[1], train_x.shape[2], nbands))
for nb in range(0, nbands):
temp[:, :, :, nb] = train_x[:, :, :, indexes[nb]]
train_x = temp
train_x, train_y = add_rotation_flip(train_x, train_y)
print(train_x.shape)
# train_x = np.reshape(train_x, (train_x.shape[0], train_x.shape[1], train_x.shape[2], train_x.shape[3]))
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
TRAIN PROPOSED NETWORK
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
windowSize = train_x.shape[1]
classes = 2
kfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=7)
cvoa = []
cvaa = []
cvka = []
cvpre = []
cvrec = []
cvf1 = []
cva1 = []
cva2 = []
cva3 = []
def categorical_accuracy(y_true, y_pred):
return k.cast(k.equal(k.argmax(y_true, axis=-1),
k.argmax(y_pred, axis=-1)),
k.floatx())
def AA_andEachClassAccuracy(confusion_m):
list_diag = np.diag(confusion_m)
list_raw_sum = np.sum(confusion_m, axis=1)
each_ac = np.nan_to_num(truediv(list_diag, list_raw_sum))
average_acc = np.mean(each_ac)
return each_ac, average_acc
data = 'AVOCADO'
# Load model
print("Loading model...")
model = hyper3dnet2(img_shape=(windowSize, windowSize, train_x.shape[3], 1), classes=int(classes))
model.summary()
ntrain = 1
for train, test in kfold.split(train_x, train_y):
ytrain = train_y[train]
ytest = train_y[test]
xtrain = train_x[train]
xtest = train_x[test]
# Compile model
model = hyper3dnet2(img_shape=(windowSize, windowSize, train_x.shape[3], 1), classes=classes)
model.compile(optimizer='adadelta', loss='binary_crossentropy', metrics=['acc'])
# optimizer = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
# model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['acc'])
filepath = "weights5-hyper3dnet" + data + str(ntrain) + "-best_3layers_4filters.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]
ep = 600
# Train model on dataset
print(data + ": Training" + str(ntrain) + "begins...")
history = model.fit(x=xtrain, y=ytrain, validation_data=(xtest, ytest),
batch_size=16, epochs=ep, callbacks=callbacks_list)
# Evaluate network
model.load_weights("weights5-hyper3dnet" + data + str(ntrain) + "-best_3layers_4filters.h5")
model.compile(optimizer='adadelta', loss='binary_crossentropy', metrics=['acc'])
ypred = model.predict(xtest)
ypred = ypred.round()
# Calculate metrics
oa = accuracy_score(ytest, ypred)
confusion = confusion_matrix(ytest, ypred)
each_acc, aa = AA_andEachClassAccuracy(confusion)
kappa = cohen_kappa_score(ytest, ypred)
prec, rec, f1, support = precision_recall_fscore_support(ytest, ypred, average='macro')
# Add metrics to the list
cvoa.append(oa * 100)
cvaa.append(aa * 100)
cvka.append(kappa * 100)
cvpre.append(prec * 100)
cvrec.append(rec * 100)
cvf1.append(f1 * 100)
print("%s: %.3f%%" % (model.metrics_names[1], oa * 100))
file_name = "report_ntrain_selected" + str(ntrain) + ".txt"
with open(file_name, 'w') as x_file:
x_file.write("Overall accuracy%.3f%%" % (float(oa)))
ntrain += 1
bestindex = np.argmax(cvoa) + 1
model.load_weights("weights5-hyper3dnet" + data + str(bestindex) + "-best_3layers_4filters.h5")
model.save(data + "_hyper3dnet_4layers_8filters_selected5.h5")
file_name = "classification_report_" + data + ".txt"
with open(file_name, 'w') as x_file:
x_file.write("Overall accuracy%.3f%% (+/- %.3f%%)" % (float(np.mean(cvoa)), float(np.std(cvoa))))
x_file.write('\n')
x_file.write("Average accuracy%.3f%% (+/- %.3f%%)" % (float(np.mean(cvaa)), float(np.std(cvaa))))
x_file.write('\n')
x_file.write("Kappa accuracy%.3f%% (+/- %.3f%%)" % (float(np.mean(cvka)), float(np.std(cvka))))
x_file.write('\n')
x_file.write("Precision accuracy%.3f%% (+/- %.3f%%)" % (float(np.mean(cvpre)), float(np.std(cvpre))))
x_file.write('\n')
x_file.write("Recall accuracy%.3f%% (+/- %.3f%%)" % (float(np.mean(cvrec)), float(np.std(cvrec))))
x_file.write('\n')
x_file.write("F1 accuracy%.3f%% (+/- %.3f%%)" % (float(np.mean(cvf1)), float(np.std(cvf1))))
|
[
"[email protected]"
] | |
536b321d0b17e7122c19e06557caddcc1526eadb
|
1bd0f1d2a83c8ff477f11e5b5e733857a4843110
|
/news/BD.py
|
03db7d3cb893191c47c86e586217e494b79d1e53
|
[] |
no_license
|
Kashyap10/NewsScrapping
|
1034ab3c4b33682e22c8e0b13f89393a65bb1a6f
|
d099532920735f4283a35b63b60c257c5c464b89
|
refs/heads/master
| 2023-01-07T00:37:57.329450 | 2020-10-28T06:53:18 | 2020-10-28T06:53:18 | 299,408,729 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,855 |
py
|
from os import path
import sys
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import datetime
from helper import Helper
import requests
from bs4 import BeautifulSoup
from crawler import *
from DbOps import DbOperations,QueryType
import hashlib
import logging
import urllib3
urllib3.disable_warnings()
class BD(object):
def __init__(self, url, body=None, headers=None, logger=None):
"""
Set initial paramaeters
:param url: scraping url
:param body: scraping url body
:param headers: scraping url header
:param logger: logger object
"""
self.url = url
self.body = body
self.headers = headers
self.news_collection = Helper.getNewsCollection()
self.logger = logger
def crawler(self):
try:
counter = 1
data = []
while True:
response = crawler.MakeRequest(self.url,"Get")
soup = BeautifulSoup(response.content, "html.parser")
if response.status_code == 200:
boxs = soup.find_all("div",{"class":'item'})
for box in boxs:
date = Helper.parse_date(box.find("p",{"class":"fade"}).text)
if date:
if date.year < datetime.datetime.now().year:
break
url = "https://www.bd.com/" + box.find("a")['href']
# Check if already present
unqUrl = hashlib.md5(url.encode()).hexdigest()
chkIsExists = DbOperations.GetData(self.news_collection, {"news_url_uid": str(unqUrl)},
{}, QueryType.one)
if (chkIsExists):
print("Already saved. url - ( " + url + " )")
continue
datadict = Helper.get_news_dict()
datadict.update({"url":"https://www.bd.com/" + box.find("a")['href']})
description = self.fetchDescription("https://www.bd.com/" + box.find("a")['href'])
datadict.update({
"date": Helper.parse_date(box.find("p",{"class":"fade"}).text),
"news_provider": "Becton, Dickinson and Company",
"formatted_sub_header": box.find("a").text.strip(),
"publishedAt": Helper.parse_date(box.find("p",{"class":"fade"}).text),
"description": description,
"title": box.find("a").text.strip(),
"news_title_uid": hashlib.md5(box.find("a").text.strip().encode()).hexdigest(),
"link": url,
"text":description,
"ticker": "bd_scrapped", "industry_name": "Becton, Dickinson and Company",
"company_id" : "Becton, Dickinson and Company",
"news_url_uid" : hashlib.md5(url.encode()).hexdigest()
})
data.append(datadict)
else:
break
DbOperations.InsertIntoMongo(self.news_collection,data)
except Exception as e:
self.logger.error(f"Error Occured : \n", exc_info=True)
def fetchDescription(self,url):
article = ''
try:
# print(url)
description = crawler.MakeRequest(url, "Get")
articlesoup = BeautifulSoup(description.content, 'html.parser')
# print(articlesoup)
articlesoupobj = articlesoup.find("div",{"class":"container"})
articles = articlesoupobj.find_all("p",attrs={'class': None})
for art in articles:
article += art.text + "\n"
except Exception as e:
self.logger.error(f"Error Occured : \n", exc_info=True)
return article
#Create and configure logger
logging.basicConfig(filename="news_scraping_logs.log",
format='%(asctime)s %(message)s',
filemode='a')
logger = logging.getLogger()
obj = BD('https://www.bd.com/en-us/company/news-and-media/press-releases?page=1',logger=logger)
obj.crawler()
news_collection = Helper.getNewsCollection()
processed_collection = Helper.getProcessNewsCollection()
news_log_collection = Helper.getLogCollection()
isInserted,rowCount = Helper.processNews(news_collection,processed_collection,'Becton, Dickinson and Company')
print('Total rows added Process collection => ' + str(rowCount))
# UPDATING LOG COLLECTION
if (isInserted):
Helper.makeLog(news_log_collection,processed_collection,'Becton, Dickinson and Company')
|
[
"[email protected]"
] | |
4dc29beff3a9cfeca71bdc1ab509235639e7ed7c
|
bc7ff82c5f86dace3a0ee5def97c95154ba8ae4e
|
/StreamlitDemo/summarizer.py
|
27b2a4a3e3b0c8c9d0771ee33288f18f0efa1c0d
|
[] |
no_license
|
peterson1999/AbstractGenerator
|
9c1dff9f2c5dfe6ca7014eb3e5ec2b260e269b00
|
d71ba19a4c9583880627edd337f04f8d3662577d
|
refs/heads/main
| 2023-05-28T10:22:47.082386 | 2021-06-10T04:08:43 | 2021-06-10T04:08:43 | 368,217,750 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,510 |
py
|
from transformers import PegasusForConditionalGeneration, PegasusTokenizer
import torch
from summa import keywords,summarizer
from sentence_splitter import SentenceSplitter, split_text_into_sentences
import numpy as np
class Summarizer:
model_name = 'tuner007/pegasus_paraphrase'
torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
model = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)
tokenizer = PegasusTokenizer.from_pretrained(model_name)
    @staticmethod  # textrank uses no instance state; this also keeps self.textrank(...) working however the class is driven
    def textrank(introduction,methodology,results,conclusion,ratio1,ratio2,ratio3,ratio4):
splitter = SentenceSplitter(language='en')
sentence_introduction = splitter.split(summarizer.summarize(introduction,ratio=ratio1/100))
sentence_methodology = splitter.split(summarizer.summarize(methodology,ratio=ratio2/100))
sentence_results = splitter.split(summarizer.summarize(results,ratio=ratio3/100))
sentence_conclusion = splitter.split(summarizer.summarize(conclusion,ratio=ratio4/100))
all_sentences = np.concatenate((np.array(sentence_introduction), np.array(sentence_methodology),np.array(sentence_results),np.array(sentence_conclusion)))
print(all_sentences)
return all_sentences
def Paraphrase(self,sentence_list):
paraphrase = []
for i in sentence_list:
a = self.get_response(self,i,1)
paraphrase.append(a)
paraphrase_string=[]
for j in paraphrase:
for x in j:
paraphrase_string.append(x)
s = ''.join(paraphrase_string)
return s
def joinString(self,paraphrased_list):
s = ''.join(paraphrased_list)
return s
def get_response(self,input_text,num_return_sequences):
batch = self.tokenizer.prepare_seq2seq_batch([input_text],truncation=True,padding='longest',max_length=60, return_tensors="pt").to(self.torch_device)
print("done")
translated = self.model.generate(**batch,min_length=10,max_length=60,num_beams=10, num_return_sequences=num_return_sequences, temperature=1.5)
print("done1")
tgt_text = self.tokenizer.batch_decode(translated, skip_special_tokens=True)
return tgt_text
def getSummary(self,introduction,ratio1,methodology,ratio2,results,ratio3,conclusion,ratio4):
sentence=self.textrank(introduction,methodology,results,conclusion,ratio1,ratio2,ratio3,ratio4)
paraphrase=self.Paraphrase(self,sentence)
return paraphrase
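# A hedged usage sketch: the methods above pass the class itself as `self`
# (e.g. self.Paraphrase(self, ...)), so the summarizer appears intended to be driven
# without instantiation.  The section texts and ratios below are placeholders, not
# values taken from the source:
#
#   abstract = Summarizer.getSummary(Summarizer, intro_text, 40, methods_text, 40,
#                                    results_text, 40, conclusion_text, 40)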
|
[
"[email protected]"
] | |
a43c61533385eb8038e78345c245682f7be310b5
|
02ea81644b2d68a08e2f33392a9e158f425a498b
|
/run.py
|
8a4995e7f50094bed891fbee4dbdf3da0a6dcb55
|
[] |
no_license
|
BillyMan00/EA_ST
|
58c53f9165139df19c29d64a3dd529bb6f9e8dad
|
9c92399f02262a8439573141cfc62e9381c6dc50
|
refs/heads/main
| 2023-04-24T07:31:51.225031 | 2021-05-12T09:10:29 | 2021-05-12T09:10:29 | 366,655,978 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 137 |
py
|
import os
from app import app, fask_sqlachemy
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000, debug=True)
|
[
"[email protected]"
] | |
6773f61b800ed243653848153717040551b46c5c
|
56789f51d1feb757171b151b56c59143e74c6fe1
|
/projects/examples/dragon_button_relay_push_only/pinButton.py
|
f0fc6e7fb8a5a84c6121ba1877377927a1833a31
|
[] |
no_license
|
aid402/micropython_project
|
235926120e8a78033572386b9407a5eb6e7f473e
|
9111398492f0cf511da8e6f83b34d8e4e4f90278
|
refs/heads/master
| 2020-07-21T10:29:16.935739 | 2018-11-02T22:07:14 | 2018-11-02T22:07:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,843 |
py
|
from machine import Pin
import time
import relay
class PinButton:
'''
B
'''
# init
def __init__(self, pinNum, Pull, debug=False, relay_control=None):
self._pin = Pin(pinNum, Pin.IN, Pull )
self.debug = debug
self.status = 0
self.value = None
self._value = None
self.relay = relay.RELAY(relay_control)
self.button = None # Generator instance
# self.button = self.makebutton() # Generator instance
#
# next(self.button)
# for _ in range(128):
# next(self.button)
# time.sleep_ms(1)
def makebutton(self):
delays = -25 # mS delay
while True:
self._value = self._pin.value()
t_start = time.ticks_ms()
self.status = 1
if self._value == 0:
while time.ticks_diff(t_start, time.ticks_ms()) <= delays:
self.status = 10
yield None
self.relay.set_state(1)
self.value = self._value
self.status = 11
else:
self.value = 1
self.relay.set_state(0)
self.status = 12
yield None
def start(self):
self.button = self.makebutton() # Generator instance
next(self.button)
def stop(self):
self.button = None # Generator instance
@property
def push(self):
'''
T
'''
try:
next(self.button)
except StopIteration:
if self.debug:
print("StopIteration")
return -255
value = self.value
if self.status == 0:
value = -1
return value
|
[
"[email protected]"
] | |
e2e25e295253bb7104fd7c529de8684d9f0f5e42
|
74a0c77fed1c2815fb57c86657334a39044d9e35
|
/draw-matches.py
|
7329578bf781f1a0ef9dfca6db00672a822f58d5
|
[] |
no_license
|
alexkhrystoforov/3d-reconstruction
|
f17822db96c9b37a2924d1301fb313ce5de7c5b2
|
44614fd62179ecee148eeee33ad9836bf7280c1c
|
refs/heads/main
| 2023-01-29T20:37:54.071759 | 2020-12-11T16:44:06 | 2020-12-11T16:44:06 | 320,629,917 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,122 |
py
|
import cv2
import numpy as np
class Matcher:
def __init__(self, algorithm, img0, img1):
self.algorithm = algorithm
self.kp1, self.des1 = algorithm.detectAndCompute(img0, None)
self.kp2, self.des2 = algorithm.detectAndCompute(img1, None)
self.norm_hamming = cv2.NORM_HAMMING2
def BF_matcher(self, mode):
if mode == 'SIFT' or mode == 'KAZE':
bf = cv2.BFMatcher(crossCheck=True)
if mode == 'ORB':
bf = cv2.BFMatcher(self.norm_hamming, crossCheck=True)
matches = bf.match(self.des2, self.des1)
matches = sorted(matches, key=lambda x: x.distance)
best_matches = find_best_matches(matches)
return best_matches
def find_best_matches(matches):
"""
Filter matches by distance
Args:
matches: list
Returns:
best_matches: list
"""
best_matches = []
for m in matches:
        if m.distance < 200:  # matching compares two descriptor vectors; matches are sorted by distance,
            # and the acceptable distance threshold here can be made larger or smaller
            best_matches.append(m)
return best_matches
def draw_matches(imgL, imgR, mode='SIFT'):
    MIN_MATCH_COUNT = 100  # minimum number of matched points required before drawing
if mode == 'ORB':
algorithm = cv2.ORB_create(nfeatures=10000, scoreType=cv2.ORB_FAST_SCORE)
if mode == 'SIFT':
algorithm = cv2.SIFT_create()
if mode == 'KAZE':
algorithm = cv2.KAZE_create()
matcher = Matcher(algorithm, imgL, imgR)
bf_matches = matcher.BF_matcher(mode)
    print(f'number of matched points after filtering, {mode}:', len(bf_matches))
if len(bf_matches) > MIN_MATCH_COUNT:
src_pts = np.float32([matcher.kp2[m.queryIdx].pt for m in bf_matches]).reshape(-1, 1, 2)
dst_pts = np.float32([matcher.kp1[m.trainIdx].pt for m in bf_matches]).reshape(-1, 1, 2)
matrix, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
matchesMask = mask.ravel().tolist()
draw_params = dict(outImg=None,
# matchColor=(0, 255, 0),
# matchesMask=matchesMask, # draw only inliers
flags=2)
result_mathing_bf = cv2.drawMatches(imgR, matcher.kp2, imgL, matcher.kp1, bf_matches, **draw_params)
cv2.namedWindow('draw_matches', cv2.WINDOW_NORMAL)
cv2.imshow('draw_matches', result_mathing_bf)
cv2.waitKey(0)
if mode == 'KAZE':
cv2.imwrite('res_kaze.png', result_mathing_bf)
if mode == 'SIFT':
cv2.imwrite('res_sift.png', result_mathing_bf)
if mode == 'ORB':
cv2.imwrite('res_orb.png', result_mathing_bf)
imgL = cv2.imread('sets/ringL.png')
imgR = cv2.imread('sets/ringR.png')
draw_matches(imgL, imgR, mode='SIFT')
# draw_matches(imgL, imgR, mode='KAZE')
# draw_matches(imgL, imgR, mode='ORB')
|
[
"[email protected]"
] | |
c5025700fd6858b320117ab2a06db5014ae2496a
|
0e94b21a64e01b992cdc0fff274af8d77b2ae430
|
/python/022_Objective.py
|
8b3d80fef29ab63035d097dd75d51e71daa5b828
|
[] |
no_license
|
yangnaGitHub/LearningProcess
|
1aed2da306fd98f027dcca61309082f42b860975
|
250a8b791f7deda1e716f361a2f847f4d12846d3
|
refs/heads/master
| 2020-04-15T16:49:38.053846 | 2019-09-05T05:52:04 | 2019-09-05T05:52:04 | 164,852,337 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,045 |
py
|
# Class attributes can be accessed through the class name
# A class object supports two operations ==> attribute reference (obj.name) and instantiation
# To create objects with an initial state, define a special method named __init__() (called automatically; the constructor)
# Use the def keyword to define a method inside a class; instance methods must take self as their first parameter
# Class inheritance is supported; base classes are listed in parentheses, and if several bases define the same method name, the bases are searched from left to right
# A name starting with two underscores declares the attribute or method private; it cannot be used or accessed directly from outside the class
class people:
name = ""
age = 0
    __weight = 0  # private attribute; cannot be accessed directly from outside the class
def __init__(self, name, age, weight):
self.name = name
self.age = age
self.__weight = weight
def speak(self):
print("%s ==> %d" % (self.name, self.age))
class student(people):
grade = 0
def __init__(self, name, age, weight, grade):
people.__init__(self, name, age, weight)
self.grade = grade
def speak(self):
print("%s ==> %d ==> %d" % (self.name, self.age, self.grade))
stu = student("natasha", 22, 58, 2)
stu.speak()
# Overriding: a subclass overrides a method of its parent class
class Parent:
def method(self):
print("Parent")
class Child(Parent):
def method(self):
print("Child")
child = Child()
child.method()
# Special ("dunder") methods of a class
# __init__     constructor, called when an object is created
# __del__      destructor
# __repr__     printable representation
# __setitem__  assign a value by index
# __getitem__  get a value by index
# __len__      length
# __cmp__      comparison
# __call__     call the object like a function
# __add__      addition
# __sub__      subtraction
# __mul__      multiplication
# __div__      division
# __mod__      modulo
# __pow__      exponentiation
# Operator overloading is supported
class Vector:
def __init__(self, val1, val2):
self.val1 = val1
self.val2 = val2
def __str__(self):
return "Vector(%d, %d)" % (self.val1, self.val2)
def __add__(self, other):
return Vector(self.val1 + other.val1, self.val2 + other.val2)
v1 = Vector(2, 10)
v2 = Vector(5, -2)
print(v1 + v2)
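# A small companion example for the indexing/length hooks listed above, illustrating
# __len__ and __getitem__:
class Bag:
    def __init__(self, items):
        self.items = list(items)
    def __len__(self):                # called by len(bag)
        return len(self.items)
    def __getitem__(self, index):     # called by bag[index]
        return self.items[index]
bag = Bag(["a", "b", "c"])
print(len(bag), bag[1])               # 3 b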
|
[
"[email protected]"
] | |
a293a4c0f1bef50f86231c141441a29c0ea77f66
|
b51fcaacf7a43cfc4e378b27090c652ed5bd8ee2
|
/pyfx/tests/test_spreadhandler.py
|
4dad32859c0dafba258a980ee780e00e99c632b1
|
[] |
no_license
|
tetocode/fxarb
|
56526308eb91616eb60b13152ad03dab73de7ca4
|
00261dc6832047375499363af2db44efa2d36008
|
refs/heads/master
| 2022-10-18T16:45:51.971435 | 2020-06-03T16:19:39 | 2020-06-03T16:19:39 | 269,136,511 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,846 |
py
|
import copy
from collections import deque
from datetime import datetime, timedelta
import gevent
import pytest
import pytz
from pyfx.pricehandler import PriceHandler, Price
from pyfx.spreadhandler import SpreadHandler, Spread
def test_spread_handler():
h = SpreadHandler(PriceHandler())
assert h.prices == {}
now = datetime.utcnow().replace(tzinfo=pytz.utc)
now2 = now + timedelta(minutes=1, seconds=1)
prices = [Price('xxx', 'USD/JPY', now, 0.01, 0.02)]
h.handle(prices=copy.deepcopy(prices))
assert h.prices == {'xxx': {'USD/JPY': deque(prices)}}
expected = {
('xxx', 'xxx'): {
'USD/JPY': deque([
Spread(('xxx', 'xxx'), 'USD/JPY', now, 0.01, 0.02)
])
}
}
assert h.spreads == expected
prices = [
Price('xxx', 'USD/JPY', now2, 0.01, 0.03),
Price('xxx', 'EUR/JPY', now, 0.03, 0.05),
Price('yyy', 'EUR/JPY', now2, 0.06, 0.08),
]
h.handle(prices=copy.deepcopy(prices))
expected = {
('xxx', 'xxx'): {
'USD/JPY': deque([
Spread(('xxx', 'xxx'), 'USD/JPY', now, 0.01, 0.02),
Spread(('xxx', 'xxx'), 'USD/JPY', now2, 0.01, 0.03)
]),
'EUR/JPY': deque([
Spread(('xxx', 'xxx'), 'EUR/JPY', now, 0.03, 0.05),
])
},
('xxx', 'yyy'): {
'EUR/JPY': deque([
Spread(('xxx', 'yyy'), 'EUR/JPY', now2, 0.03, 0.08)
])
},
('yyy', 'xxx'): {
'EUR/JPY': deque([
Spread(('yyy', 'xxx'), 'EUR/JPY', now2, 0.06, 0.05)
])
},
('yyy', 'yyy'): {
'EUR/JPY': deque([
Spread(('yyy', 'yyy'), 'EUR/JPY', now2, 0.06, 0.08)
])
}
}
assert h.spreads == expected
|
[
"_"
] |
_
|
01a4aa6337e6bc6ec2edab4573fca5afa99ee6a3
|
6144ef47ff27f761e92806d5dcf5e6bbe3c0e690
|
/app/app/apps.py
|
72cf624fbe41016985688fd409513d1562929766
|
[] |
no_license
|
dalai2/stellarpolaris
|
8e53323bdfa25331ce2e2e2ec68dd039ae3d883d
|
02f5f8c43fe7ab83dd879c96ca2a8db5dedc40a4
|
refs/heads/main
| 2023-04-11T08:46:35.152942 | 2021-04-21T03:38:05 | 2021-04-21T03:38:05 | 360,023,318 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 302 |
py
|
from django.apps import AppConfig
class MyAppConfig(AppConfig):
name = "app"
def ready(self):
from polaris.integrations import register_integrations
from .integrations import MyRailsIntegration
register_integrations(
rails=MyRailsIntegration()
)
|
[
"[email protected]"
] | |
3d635f23f15d180a8acda2ef07e91f7f9fb3984e
|
9818262abff066b528a4c24333f40bdbe0ae9e21
|
/Day 60/TheBomberMan.py
|
46f6d9fdaed89da0f250aff715ff45b108c9a598
|
[
"MIT"
] |
permissive
|
skdonepudi/100DaysOfCode
|
749f62eef5826cb2ec2a9ab890fa23e784072703
|
af4594fb6933e4281d298fa921311ccc07295a7c
|
refs/heads/master
| 2023-02-01T08:51:33.074538 | 2020-12-20T14:02:36 | 2020-12-20T14:02:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,211 |
py
|
''''
Bomberman lives in a rectangular grid. Each cell in the grid either contains a bomb or nothing at all.
Each bomb can be planted in any cell of the grid but once planted, it will detonate after exactly 3 seconds. Once a bomb detonates, it's destroyed, along with anything in its four neighboring cells. This means that if a bomb detonates in cell (i, j), any valid cells (i - 1, j), (i + 1, j), (i, j - 1) and (i, j + 1) are cleared. If there is a bomb in a neighboring cell, the neighboring bomb is destroyed without detonating, so there's no chain reaction.
Bomberman is immune to bombs, so he can move freely throughout the grid. Here's what he does:
Initially, Bomberman arbitrarily plants bombs in some of the cells, the initial state.
After one second, Bomberman does nothing.
After one more second, Bomberman plants bombs in all cells without bombs, thus filling the whole grid with bombs. No bombs detonate at this point.
After one more second, any bombs planted exactly three seconds ago will detonate. Here, Bomberman stands back and observes.
Bomberman then repeats steps 3 and 4 indefinitely.
Note that during every second Bomberman plants bombs, the bombs are planted simultaneously (i.e., at the exact same moment), and any bombs planted at the same time will detonate at the same time.
Given the initial configuration of the grid with the locations of Bomberman's first batch of planted bombs, determine the state of the grid after n seconds.
For example, if the initial grid looks like:
...
.O.
...
it looks the same after the first second. After the second second, Bomberman has placed all his charges:
OOO
OOO
OOO
At the third second, the bomb in the middle blows up, emptying all surrounding cells:
...
...
...
Function Description
Complete the bomberMan function in the editor below. It should return an array of strings that represent the grid in its final state.
bomberMan has the following parameter(s):
n: an integer, the number of seconds to simulate
grid: an array of strings that represents the grid
Input Format
The first line contains three space-separated integers r, c, and n: the number of rows, columns and seconds to simulate.
Each of the next r lines contains a row of the matrix as a single string of c characters. The . character denotes an empty cell, and the O character (ascii 79) denotes a bomb.
Output Format
Print the grid's final state. This means r lines where each line contains c characters, and each character is either a . or an O (ascii 79). This grid must represent the state of the grid after n seconds.
Sample Input
6 7 3
.......
...O...
....O..
.......
OO.....
OO.....
Sample Output
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
'''
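# Reasoning sketch for the `4 + n % 4` loop bound in bomberMan() below: from the second
# second onward the grid alternates between "completely full of bombs" (even seconds) and
# one of two detonation patterns (odd seconds), so the sequence of states is periodic with
# period 4.  Simulating n modulo that cycle (after handling n <= 1 and even n as shortcuts)
# therefore produces the same final grid as simulating all n seconds.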
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the bomberMan function below.
def bomberMan(n, grid):
result = [[i for i in r] for r in grid]
passed = 1
coords = [[x, y] for x in range(r) for y in range(c) if grid[x][y]=="O"]
if n in [0, 1]: return grid
elif n % 2 == 0: return ['O' * len(x) for x in grid]
while passed < 4+n%4:
passed += 1
if passed%2 == 0:
result = [["O" for i in range(c)] for j in range(r)]
elif passed%2 == 1:
for coord in coords:
row, col = coord[0], coord[1]
result[row][col] = "."
if 0<=row-1<=r-1:
result[row-1][col] = "."
if 0<=row+1<=r-1:
result[row+1][col] = "."
if 0<=col-1<=c-1:
result[row][col-1] = "."
if 0<=col+1<=c-1:
result[row][col+1] = "."
coords = [[x, y] for x in range(r) for y in range(c) if result[x][y]=="O"]
for i in range(r):
result[i] = ''.join(result[i])
return result
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
rcn = input().split()
r = int(rcn[0])
c = int(rcn[1])
n = int(rcn[2])
grid = []
for _ in range(r):
grid_item = input()
grid.append(grid_item)
result = bomberMan(n, grid)
fptr.write('\n'.join(result))
fptr.write('\n')
fptr.close()
|
[
"[email protected]"
] | |
b3710ae9ec3af1c15b025ac8c04e99d880bafac7
|
b72e35e103abf704ea2dd0bae547aeb4c48da81c
|
/augmentation/filtering.py
|
bfb3693ba6d6c842e068621fbaaf76c3389660ce
|
[
"MIT"
] |
permissive
|
adrianB3/cv_labs
|
d1076eca4139446cdc35dedafea0e9e4c8689b32
|
38d9a28fd67f3e7e5e4247efd777c1bbaa6e552e
|
refs/heads/master
| 2023-07-09T21:07:18.004040 | 2021-08-08T11:12:47 | 2021-08-08T11:12:47 | 298,360,580 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,460 |
py
|
import cv2
import numpy as np
from augmentation.data_types import Augmentation, Data
from ast import literal_eval as make_tuple
class Blur(Augmentation):
def __init__(self, params):
self.params = params
def process(self, data: Data):
kernel_size = make_tuple(self.params['kernel_size'])
img = data.data['image']
if self.params['type'] == 'box':
img = cv2.boxFilter(src=img, ddepth=0, ksize=kernel_size)
if self.params['type'] == 'gaussian':
img = cv2.GaussianBlur(src=img, ksize=kernel_size, sigmaX=0)
if self.params['type'] == 'median':
img = cv2.medianBlur(src=img, ksize=kernel_size[0])
data.data['image'] = img
class BilateralFilter(Augmentation):
def __init__(self, params):
self.params = params
def process(self, data: Data):
img = data.data['image'].astype('uint8')
d = self.params['d']
sigma_color = self.params['sigma_color']
sigma_space = self.params['sigma_space']
img = cv2.bilateralFilter(src=img, d=d, sigmaColor=sigma_color, sigmaSpace=sigma_space)
data.data['image'] = img
class Sharpen(Augmentation):
def process(self, data: Data):
img = data.data['image']
kernel = np.array([[-1, -1, -1],
[-1, 9, -1],
[-1, -1, -1]])
img = cv2.filter2D(img, -1, kernel)
data.data['image'] = img
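# A minimal usage sketch (assumptions: `Data` exposes a `data` dict holding the image under
# the 'image' key, and the params dict follows the string-tuple convention parsed with
# make_tuple above -- the concrete values are illustrative, not taken from the source):
#
#   blur = Blur({'type': 'gaussian', 'kernel_size': '(5, 5)'})
#   blur.process(sample)          # `sample` is a Data instance; its image is blurred in place
#   Sharpen().process(sample)     # Sharpen takes no params and applies a fixed 3x3 kernel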
|
[
"[email protected]"
] | |
85e07113f27abb67535f11386589b525a89bcc03
|
85c499ad1daca3d9f40c4ab19fdef36622722393
|
/ControlFarmacia2/Aplicaciones/clientes/migrations/0002_auto_20180520_1241.py
|
7bef073168b3878c7b44c8402f4d513435b08348
|
[] |
no_license
|
eriksuarezsoto/controlinventario2
|
06247c6232ad7535b67d12e30cffce5c94991459
|
1d7fc1becbad0e347a0b1d8165f99f260bbff6dc
|
refs/heads/master
| 2020-03-18T04:41:05.275094 | 2018-05-21T17:08:54 | 2018-05-21T17:08:54 | 134,300,334 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,675 |
py
|
# Generated by Django 2.0.4 on 2018-05-20 16:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('clientes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Clientes2',
fields=[
('rut', models.PositiveIntegerField(default=0, primary_key=True, serialize=False)),
('digitoverificador', models.CharField(default=0, max_length=1)),
('nombres', models.CharField(default='', max_length=100)),
('apellidos', models.CharField(default='', max_length=100)),
('direccion', models.CharField(default='', max_length=100)),
('correo', models.EmailField(default='', max_length=254)),
('telefono', models.PositiveSmallIntegerField(default=0)),
('fechacreacion', models.DateField(auto_now_add=True)),
('Sexo', models.CharField(choices=[('F', 'Femenino'), ('M', 'Masculino')], default='F', max_length=1)),
],
),
migrations.CreateModel(
name='Registro_ventas2',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vale', models.IntegerField(default=0)),
('rut', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='clientes.Clientes2')),
],
),
migrations.DeleteModel(
name='Clientes',
),
migrations.DeleteModel(
name='Registro_ventas',
),
]
|
[
"[email protected]"
] | |
f2d8d7ebc25d7b92a0ebe190b9bc4b2c5175a61f
|
ffce0e6cb6fd13e23b3ec71a2cb119bce10cb4da
|
/examples/mnist/train_mnist.py
|
4273795a24322e31ab0bcd680993bffebe1fc1f9
|
[
"MIT"
] |
permissive
|
EricCJoyce/Neuron-C
|
303e49423f0e0d2dbc897de0609ae192439032cb
|
314c23612d486ac1bdac562cb7ad1d0dbad8472f
|
refs/heads/master
| 2022-02-13T19:32:20.953472 | 2022-01-04T18:28:04 | 2022-01-04T18:28:04 | 254,206,027 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,993 |
py
|
'''
Eric C. Joyce, Stevens Institute of Technology, 2020
Train a convolutional neural network to classify hand-written digits using Keras and the MNIST data set.
Notice that if we are to convert weights learned in Keras to an independent C program, then we have to
control the arrangements of outputs. That is the function of the Lambda layers below. Output from
convolution must be flattened, and then the flattened output must be re-routed to match the format of
Neuron-C's Accumulation Layer type.
'''
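# A toy sketch of the re-routing idea described above (the filter count and spatial size
# here are made up for illustration, not taken from the model below).  Keras' Flatten() on
# an (H, W, F) feature map interleaves the F filters, so slicing x[:, k::F] gathers filter
# k's activations contiguously -- the per-filter ordering an accumulation-style layer expects.
def _filter_reordering_sketch():
    """Show how the Lambda(lambda x: x[:, k::F]) slices below regroup a flattened feature map."""
    import numpy as np
    feat = np.arange(2 * 2 * 3).reshape(1, 2, 2, 3)   # (batch, H=2, W=2, F=3)
    flat = feat.reshape(1, -1)                        # interleaved: f0, f1, f2, f0, f1, f2, ...
    per_filter = [flat[:, k::3] for k in range(3)]    # one contiguous block per filter
    return np.concatenate(per_filter, axis=1)         # mirrors the concatenate() in main()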
from keras import models
from keras.layers import Input, Dense, Flatten, Conv2D, Lambda, concatenate, Dropout
from keras.callbacks import ModelCheckpoint
from keras.datasets import mnist
from keras.utils import to_categorical
import numpy as np
import matplotlib.pyplot as plt
epochs = 20
batchSize = 128
def main():
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
train_images = train_images.reshape( (60000, 28, 28, 1) ) # Reshape, clean up
train_images = train_images.astype('float32') / 255.0
test_images = test_images.reshape( (10000, 28, 28, 1) )
test_images = test_images.astype('float32') / 255.0
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
imgInput = Input(shape=(28, 28, 1)) # (height, width, channels)
# 8 filters, h, w h, w
conv3x3 = Conv2D(8, (3, 3), activation='relu', padding='valid', input_shape=(28, 28, 1), name='conv3x3')(imgInput)
# 16 filters, h, w h, w
conv5x5 = Conv2D(16, (5, 5), activation='relu', padding='valid', input_shape=(28, 28, 1), name='conv5x5')(imgInput)
flat3x3 = Flatten(name='flat3x3')(conv3x3) # length = 8 * (28 - 3 + 1) * (28 - 3 + 1) = 5408
lambda3x3_0 = Lambda(lambda x: x[:, 0::8])(flat3x3) # 8 filters: take every 8th from [0]
lambda3x3_1 = Lambda(lambda x: x[:, 1::8])(flat3x3) # 8 filters: take every 8th from [1]
lambda3x3_2 = Lambda(lambda x: x[:, 2::8])(flat3x3) # 8 filters: take every 8th from [2]
lambda3x3_3 = Lambda(lambda x: x[:, 3::8])(flat3x3) # 8 filters: take every 8th from [3]
lambda3x3_4 = Lambda(lambda x: x[:, 4::8])(flat3x3) # 8 filters: take every 8th from [4]
lambda3x3_5 = Lambda(lambda x: x[:, 5::8])(flat3x3) # 8 filters: take every 8th from [5]
lambda3x3_6 = Lambda(lambda x: x[:, 6::8])(flat3x3) # 8 filters: take every 8th from [6]
lambda3x3_7 = Lambda(lambda x: x[:, 7::8])(flat3x3) # 8 filters: take every 8th from [7]
flat5x5 = Flatten(name='flat5x5')(conv5x5) # length = 16 * (28 - 5 + 1) * (28 - 5 + 1) = 9216
lambda5x5_0 = Lambda(lambda x: x[:, 0::16])(flat5x5) # 16 filters: take every 16th from [0]
lambda5x5_1 = Lambda(lambda x: x[:, 1::16])(flat5x5) # 16 filters: take every 16th from [1]
lambda5x5_2 = Lambda(lambda x: x[:, 2::16])(flat5x5) # 16 filters: take every 16th from [2]
lambda5x5_3 = Lambda(lambda x: x[:, 3::16])(flat5x5) # 16 filters: take every 16th from [3]
lambda5x5_4 = Lambda(lambda x: x[:, 4::16])(flat5x5) # 16 filters: take every 16th from [4]
lambda5x5_5 = Lambda(lambda x: x[:, 5::16])(flat5x5) # 16 filters: take every 16th from [5]
lambda5x5_6 = Lambda(lambda x: x[:, 6::16])(flat5x5) # 16 filters: take every 16th from [6]
lambda5x5_7 = Lambda(lambda x: x[:, 7::16])(flat5x5) # 16 filters: take every 16th from [7]
lambda5x5_8 = Lambda(lambda x: x[:, 8::16])(flat5x5) # 16 filters: take every 16th from [8]
lambda5x5_9 = Lambda(lambda x: x[:, 9::16])(flat5x5) # 16 filters: take every 16th from [9]
lambda5x5_10 = Lambda(lambda x: x[:, 10::16])(flat5x5) # 16 filters: take every 16th from [10]
lambda5x5_11 = Lambda(lambda x: x[:, 11::16])(flat5x5) # 16 filters: take every 16th from [11]
lambda5x5_12 = Lambda(lambda x: x[:, 12::16])(flat5x5) # 16 filters: take every 16th from [12]
lambda5x5_13 = Lambda(lambda x: x[:, 13::16])(flat5x5) # 16 filters: take every 16th from [13]
lambda5x5_14 = Lambda(lambda x: x[:, 14::16])(flat5x5) # 16 filters: take every 16th from [14]
lambda5x5_15 = Lambda(lambda x: x[:, 15::16])(flat5x5) # 16 filters: take every 16th from [15]
# Output length = 14624
convConcat = concatenate([lambda3x3_0, lambda3x3_1, lambda3x3_2, lambda3x3_3, \
lambda3x3_4, lambda3x3_5, lambda3x3_6, lambda3x3_7, \
lambda5x5_0, lambda5x5_1, lambda5x5_2, lambda5x5_3, \
lambda5x5_4, lambda5x5_5, lambda5x5_6, lambda5x5_7, \
lambda5x5_8, lambda5x5_9, lambda5x5_10, lambda5x5_11, \
lambda5x5_12, lambda5x5_13, lambda5x5_14, lambda5x5_15 ])
dropout1 = Dropout(0.5)(convConcat)
dense100 = Dense(100, activation='relu', name='dense400')(dropout1)
dropout2 = Dropout(0.5)(dense100)
dense10 = Dense(10, activation='softmax', name='dense10')(dropout2)
    model = models.Model(inputs=imgInput, outputs=dense10)
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
model.summary() # Print the details
filepath = 'mnist_{epoch:02d}.h5'
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacksList = [checkpoint]
history = model.fit( [train_images], [train_labels], \
epochs=epochs, batch_size=batchSize, callbacks=callbacksList, \
validation_data=[test_images, test_labels] )
loss = history.history['loss']
val_loss = history.history['val_loss']
fig = plt.figure(figsize=(6, 4))
plt.plot(range(1, epochs + 1), loss, 'bo', label='Train.Loss')
plt.plot(range(1, epochs + 1), val_loss, 'r', label='Val.Loss')
plt.xlabel('Epochs')
plt.ylabel('Error')
plt.title('Training and Validation Loss')
plt.legend()
plt.tight_layout()
plt.show()
return
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
85d0e15806dc792d31a9296899a3bdbc2c0db268
|
0f8909782b5150783b738df3875c91509a92a33b
|
/scena/c0450.bin.py
|
94d0262c76e9b95ffec8999949ad301746850f0a
|
[] |
no_license
|
uyjulian/ao_text
|
e40cd982bcdd5ea9ffd6f0f2e97ce9b92749b63a
|
5cc5468aeb64fa97935f334a627357ec10e22307
|
refs/heads/master
| 2021-06-26T21:43:07.489898 | 2017-07-05T16:55:01 | 2017-07-05T16:55:01 | 96,562,461 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 139,980 |
py
|
from ScenarioHelper import *
def main():
CreateScenaFile(
"c0450.bin", # FileName
"c0450", # MapName
"c0450", # Location
0x0024, # MapIndex
"ed7113",
0x00002000, # Flags
("", "", "", "", "", ""), # include
0x00, # PlaceNameNumber
0x00, # PreInitFunctionIndex
b'\x00\xff\xff', # Unknown_51
# Information
[0, 0, -1000, 0, 0, 0, 24000, 500, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 36, 0, 6, 0, 7],
)
BuildStringList((
"c0450", # 0
"受付カイル", # 1
"ドリス", # 2
"アーロン", # 3
"レティシア支配人", # 4
"ミンネス", # 5
"観光客", # 6
"観光客", # 7
"市民", # 8
"女の子", # 9
"市民", # 10
"市民", # 11
"観光客", # 12
"市民", # 13
"市民", # 14
"市民", # 15
"デリック", # 16
))
AddCharChip((
"chr/ch45200.itc", # 00
"chr/ch22000.itc", # 01
"chr/ch25700.itc", # 02
"chr/ch27500.itc", # 03
"chr/ch27900.itc", # 04
"chr/ch33002.itc", # 05
"chr/ch32402.itc", # 06
"chr/ch22002.itc", # 07
"chr/ch22300.itc", # 08
"chr/ch24400.itc", # 09
"chr/ch21300.itc", # 0A
"chr/ch33000.itc", # 0B
"chr/ch21002.itc", # 0C
"chr/ch20302.itc", # 0D
"chr/ch23800.itc", # 0E
))
DeclNpc(65440, 0, 59970, 270, 261, 0x0, 0, 1, 0, 0, 0, 0, 13, 255, 0)
DeclNpc(4090, 9, 59900, 225, 261, 0x0, 0, 2, 0, 0, 2, 0, 15, 255, 0)
DeclNpc(50740, 0, 9750, 90, 261, 0x0, 0, 3, 0, 0, 1, 0, 14, 255, 0)
DeclNpc(4294963306, 0, 7000, 90, 261, 0x0, 0, 4, 0, 0, 0, 0, 11, 255, 0)
DeclNpc(168410, 0, 5519, 180, 389, 0x0, 0, 0, 0, 0, 0, 0, 9, 255, 0)
DeclNpc(60049, 150, 65010, 180, 389, 0x0, 0, 5, 0, 255, 255, 0, 16, 255, 0)
DeclNpc(61630, 150, 65010, 180, 389, 0x0, 0, 6, 0, 255, 255, 0, 17, 255, 0)
DeclNpc(189949, 500, 58349, 90, 389, 0x0, 0, 7, 0, 255, 255, 0, 18, 255, 0)
DeclNpc(190759, 0, 61840, 45, 389, 0x0, 0, 8, 0, 0, 4, 0, 19, 255, 0)
DeclNpc(153649, 0, 61220, 180, 389, 0x0, 0, 9, 0, 0, 0, 0, 20, 255, 0)
DeclNpc(153639, 0, 60250, 0, 389, 0x0, 0, 10, 0, 0, 0, 0, 21, 255, 0)
DeclNpc(115000, 0, 62779, 0, 389, 0x0, 0, 11, 0, 0, 0, 0, 22, 255, 0)
DeclNpc(112550, 500, 6699, 0, 389, 0x0, 0, 12, 0, 255, 255, 0, 23, 255, 0)
DeclNpc(112550, 500, 9300, 180, 389, 0x0, 0, 13, 0, 255, 255, 0, 24, 255, 0)
DeclNpc(115000, 0, 8409, 45, 389, 0x0, 0, 14, 0, 0, 5, 0, 25, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclActor(68130, 10, 11650, 1200, 68130, 1500, 11650, 0x007C, 0, 26, 0x0000)
DeclActor(4294963796, 0, 7000, 1500, 4294963306, 1500, 7000, 0x007E, 0, 10, 0x0000)
DeclActor(64300, 0, 59970, 1500, 65440, 1500, 59970, 0x007E, 0, 12, 0x0000)
DeclActor(117500, 0, 4000, 1200, 117500, 650, 4000, 0x007C, 0, 8, 0x0000)
DeclActor(68130, 10, 11650, 1200, 68130, 1500, 11650, 0x007C, 0, 27, 0x0000)
ChipFrameInfo(1008, 0) # 0
ScpFunction((
"Function_0_3F0", # 00, 0
"Function_1_4A0", # 01, 1
"Function_2_501", # 02, 2
"Function_3_52C", # 03, 3
"Function_4_557", # 04, 4
"Function_5_582", # 05, 5
"Function_6_5AD", # 06, 6
"Function_7_735", # 07, 7
"Function_8_8DA", # 08, 8
"Function_9_989", # 09, 9
"Function_10_B23", # 0A, 10
"Function_11_B27", # 0B, 11
"Function_12_1F34", # 0C, 12
"Function_13_1F38", # 0D, 13
"Function_14_3052", # 0E, 14
"Function_15_3E46", # 0F, 15
"Function_16_4873", # 10, 16
"Function_17_4985", # 11, 17
"Function_18_4A94", # 12, 18
"Function_19_4B20", # 13, 19
"Function_20_4B93", # 14, 20
"Function_21_4BC0", # 15, 21
"Function_22_4BE5", # 16, 22
"Function_23_4D32", # 17, 23
"Function_24_4DDB", # 18, 24
"Function_25_4E46", # 19, 25
"Function_26_4E71", # 1A, 26
"Function_27_4EA6", # 1B, 27
"Function_28_4ED8", # 1C, 28
"Function_29_5B19", # 1D, 29
"Function_30_7861", # 1E, 30
"Function_31_78AC", # 1F, 31
"Function_32_78F0", # 20, 32
"Function_33_793B", # 21, 33
"Function_34_7986", # 22, 34
"Function_35_79D1", # 23, 35
"Function_36_7A1C", # 24, 36
"Function_37_7A67", # 25, 37
"Function_38_7AB2", # 26, 38
"Function_39_7AFD", # 27, 39
"Function_40_7B48", # 28, 40
"Function_41_7B93", # 29, 41
"Function_42_7BDE", # 2A, 42
))
def Function_0_3F0(): pass
label("Function_0_3F0")
Switch(
(scpexpr(EXPR_RAND), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_IMOD), scpexpr(EXPR_END)),
(0, "loc_428"),
(1, "loc_434"),
(2, "loc_440"),
(3, "loc_44C"),
(4, "loc_458"),
(5, "loc_464"),
(6, "loc_470"),
(SWITCH_DEFAULT, "loc_47C"),
)
label("loc_428")
OP_A0(0xFE, 1450, 0x0, 0xFB)
Jump("loc_488")
label("loc_434")
OP_A0(0xFE, 1550, 0x0, 0xFB)
Jump("loc_488")
label("loc_440")
OP_A0(0xFE, 1600, 0x0, 0xFB)
Jump("loc_488")
label("loc_44C")
OP_A0(0xFE, 1400, 0x0, 0xFB)
Jump("loc_488")
label("loc_458")
OP_A0(0xFE, 1650, 0x0, 0xFB)
Jump("loc_488")
label("loc_464")
OP_A0(0xFE, 1350, 0x0, 0xFB)
Jump("loc_488")
label("loc_470")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_488")
label("loc_47C")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_488")
label("loc_488")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_49F")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_488")
label("loc_49F")
Return()
# Function_0_3F0 end
def Function_1_4A0(): pass
label("Function_1_4A0")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_500")
OP_95(0xFE, 72280, 0, 9750, 1000, 0x0)
OP_95(0xFE, 72280, 0, 5580, 1000, 0x0)
OP_95(0xFE, 50740, 0, 5580, 1000, 0x0)
OP_95(0xFE, 50740, 0, 9750, 1000, 0x0)
Jump("Function_1_4A0")
label("loc_500")
Return()
# Function_1_4A0 end
def Function_2_501(): pass
label("Function_2_501")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_52B")
OP_94(0xFE, 0x604, 0xD714, 0x17C0, 0xFB9A, 0x3E8)
Sleep(300)
Jump("Function_2_501")
label("loc_52B")
Return()
# Function_2_501 end
def Function_3_52C(): pass
label("Function_3_52C")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_556")
OP_94(0xFE, 0xFAD1, 0x141E, 0x11B66, 0x2652, 0x3E8)
Sleep(300)
Jump("Function_3_52C")
label("loc_556")
Return()
# Function_3_52C end
def Function_4_557(): pass
label("Function_4_557")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_581")
OP_94(0xFE, 0x2E158, 0xEA92, 0x2F5B2, 0xF604, 0x3E8)
Sleep(300)
Jump("Function_4_557")
label("loc_581")
Return()
# Function_4_557 end
def Function_5_582(): pass
label("Function_5_582")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5AC")
OP_94(0xFE, 0x1BEC2, 0x1E0A, 0x1C6E2, 0x2AD0, 0x3E8)
Sleep(300)
Jump("Function_5_582")
label("loc_5AC")
Return()
# Function_5_582 end
def Function_6_5AD(): pass
label("Function_6_5AD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_5BB")
Jump("loc_734")
label("loc_5BB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_624")
ClearChrFlags(0xF, 0x80)
ClearChrFlags(0x10, 0x80)
ClearChrFlags(0x11, 0x80)
ClearChrFlags(0x12, 0x80)
ClearChrFlags(0x13, 0x80)
ClearChrFlags(0x14, 0x80)
ClearChrFlags(0x15, 0x80)
ClearChrFlags(0x16, 0x80)
SetChrChipByIndex(0xF, 0x7)
SetChrSubChip(0xF, 0x0)
EndChrThread(0xF, 0x0)
SetChrBattleFlags(0xF, 0x4)
SetChrChipByIndex(0x14, 0xC)
SetChrSubChip(0x14, 0x0)
EndChrThread(0x14, 0x0)
SetChrBattleFlags(0x14, 0x4)
SetChrChipByIndex(0x15, 0xD)
SetChrSubChip(0x15, 0x0)
EndChrThread(0x15, 0x0)
SetChrBattleFlags(0x15, 0x4)
Jump("loc_734")
label("loc_624")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_632")
Jump("loc_734")
label("loc_632")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_640")
Jump("loc_734")
label("loc_640")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_64E")
Jump("loc_734")
label("loc_64E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_65C")
Jump("loc_734")
label("loc_65C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_66A")
Jump("loc_734")
label("loc_66A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_678")
Jump("loc_734")
label("loc_678")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_69B")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 0)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_696")
ClearChrFlags(0xC, 0x80)
label("loc_696")
Jump("loc_734")
label("loc_69B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_6A9")
Jump("loc_734")
label("loc_6A9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_6B7")
Jump("loc_734")
label("loc_6B7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_6C5")
Jump("loc_734")
label("loc_6C5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_6FF")
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
SetChrChipByIndex(0xD, 0x5)
SetChrSubChip(0xD, 0x0)
EndChrThread(0xD, 0x0)
SetChrBattleFlags(0xD, 0x4)
SetChrChipByIndex(0xE, 0x6)
SetChrSubChip(0xE, 0x0)
EndChrThread(0xE, 0x0)
SetChrBattleFlags(0xE, 0x4)
Jump("loc_734")
label("loc_6FF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_734")
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
SetChrChipByIndex(0xD, 0x5)
SetChrSubChip(0xD, 0x0)
EndChrThread(0xD, 0x0)
SetChrBattleFlags(0xD, 0x4)
SetChrChipByIndex(0xE, 0x6)
SetChrSubChip(0xE, 0x0)
EndChrThread(0xE, 0x0)
SetChrBattleFlags(0xE, 0x4)
label("loc_734")
Return()
# Function_6_5AD end
def Function_7_735(): pass
label("Function_7_735")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x12A, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_751")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x232), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_751")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x142, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_76D")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x7D), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_76D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_780")
Jump("loc_7CF")
label("loc_780")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_79C")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x97), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_79C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x181, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7B8")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x233), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7CF")
label("loc_7B8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x181, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7CF")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x97), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_7CF")
OP_65(0x0, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 5)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7FD")
ClearMapObjFlags(0x1, 0x10)
OP_66(0x0, 0x1)
label("loc_7FD")
OP_65(0x4, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_82C")
ClearMapObjFlags(0x1, 0x10)
OP_66(0x4, 0x1)
label("loc_82C")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_GE), scpexpr(EXPR_END)), "loc_84B")
OP_10(0x0, 0x0)
OP_10(0x12, 0x1)
OP_10(0x11, 0x0)
OP_10(0x13, 0x1)
Jump("loc_857")
label("loc_84B")
OP_10(0x0, 0x1)
OP_10(0x12, 0x0)
OP_10(0x11, 0x1)
OP_10(0x13, 0x0)
label("loc_857")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_89D")
SetMapObjFrame(0xFF, "hikari00", 0x0, 0x1)
SetMapObjFrame(0xFF, "c0450:Layer15", 0x0, 0x1)
Sound(128, 1, 50, 0)
label("loc_89D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 2)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_8D9")
OP_7D(0xD2, 0xD2, 0xE6, 0x0, 0x0)
SetMapObjFrame(0xFF, "hikari00", 0x0, 0x1)
SetMapObjFrame(0xFF, "c0450:Layer15", 0x0, 0x1)
label("loc_8D9")
Return()
# Function_7_735 end
def Function_8_8DA(): pass
label("Function_8_8DA")
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"『おいしい鍋料理 圧力鍋編』がある。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Jc((scpexpr(EXPR_EXEC_OP, "GetItemNumber('料理手册', 0x0)"), scpexpr(EXPR_END)), "loc_985")
Jc((scpexpr(EXPR_EXEC_OP, "OP_B2(0x6)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_985")
FadeToDark(300, 0, 100)
Sound(17, 0, 100, 0)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"『満腹寄せ鍋』\x07\x00",
"のレシピを覚えた。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
OP_0D()
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
label("loc_985")
TalkEnd(0xFF)
Return()
# Function_8_8DA end
def Function_9_989(): pass
label("Function_9_989")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_B1F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_A7C")
ChrTalk(
0xC,
(
"おや、皆様……\x01",
"まだ私に何か御用ですかな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"我がクインシー社とアルモリカ村の\x01",
"『アルモリカ・ハニーカンパニー』計画は\x01",
"徐々に進行しております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"今後の展望を、皆様も\x01",
"楽しみにしてくださると幸いですよ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 6)
Jump("loc_B1F")
label("loc_A7C")
ChrTalk(
0xC,
(
"我がクインシー社とアルモリカ村の\x01",
"『アルモリカ・ハニーカンパニー』計画は\x01",
"徐々に進行しております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"今後の展望を、皆様も\x01",
"楽しみにしてくださると幸いですよ。\x02",
)
)
CloseMessageWindow()
label("loc_B1F")
TalkEnd(0xFE)
Return()
# Function_9_989 end
def Function_10_B23(): pass
label("Function_10_B23")
Call(0, 11)
Return()
# Function_10_B23 end
def Function_11_B27(): pass
label("Function_11_B27")
TalkBegin(0xB)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1BF, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_DAE")
OP_63(0xB, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0xB,
"あら、皆様は警察の……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fええ、こちらの状況を\x01",
"聞かせてもらっていいですか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"はい、当ホテルでは只今\x01",
"元からの宿泊客を含め、\x01",
"避難者を多数受け入れております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"備蓄食料もございますので、\x01",
"1ヶ月程度は凌げる見込みですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とにかく、詳しい情報が\x01",
"入らないことが皆様の不安に\x01",
"つながっているという状態ですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"現状、最小限の混乱で済んでいますが\x01",
"これが続くとなると……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00104Fそうですか……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00001F俺たちは、これからすぐに\x01",
"事態収束のため行動を開始します。\x02\x03",
"なので、しばらくの間\x01",
"このまま様子を見て頂けますか。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"ええ、かしこまりました。\x01",
"皆様もお気を付け下さいませ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x1BF, 6)
label("loc_DAE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x136, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_LSS), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_F3B")
ChrTalk(
0xB,
"《ホテル・ミレニアム》へようこそ。\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"うふふ、当ホテルでは\x01",
"お客様の様々なニーズに\x01",
"お応えしておりますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"何かご所望がございましたら、\x01",
"いつでも仰ってくださいませ。\x02",
)
)
CloseMessageWindow()
OP_5A()
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
Sound(814, 0, 100, 0)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"ホテルや宿酒場に宿泊すると\x01",
"CPを回復する事ができます。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"通常の宿酒場ではCP100、\x01",
"高級ホテルではCP200が回復します。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_F38")
SetScenarioFlags(0x0, 0)
label("loc_F38")
SetScenarioFlags(0x136, 5)
label("loc_F3B")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_F45")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_1F30")
FadeToDark(300, 0, 100)
Menu(
0,
-1,
-1,
1,
(
"話をする\x01", # 0
"休憩をする\x01", # 1
"やめる\x01", # 2
)
)
MenuEnd(0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_FA1")
OP_60(0x0)
FadeToBright(300, 0)
OP_0D()
label("loc_FA1")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_FC1")
OP_AF(0x45)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_1F2B")
label("loc_FC1")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_FD5")
Jump("loc_1F2B")
label("loc_FD5")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_1F2B")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_1100")
ChrTalk(
0xB,
(
"大統領拘束の一報を受け、\x01",
"避難者の方々もそれぞれ\x01",
"ご自宅に戻られましたわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"大樹については詳細不明ですが……\x01",
"とりあえずモヤが晴れたことで\x01",
"街もそれなりに落ち着いた印象です。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とにかく、私たちは\x01",
"今の内に各種出来る準備を\x01",
"進めておかなければいけません。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1100")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_12CB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_123C")
ChrTalk(
0xB,
(
"戒厳令と外出禁止令の通告を受け、\x01",
"当ホテルでは避難者の受け入れを\x01",
"すぐに検討したのですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"流石に、あのモヤと\x01",
"人形兵士の出現は想定外でした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"どうやら、皆さんの大統領への不満も\x01",
"極限まで高まっているようですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今はどちらかというと、\x01",
"不安の方が大きいという印象ですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_12C6")
label("loc_123C")
ChrTalk(
0xB,
(
"どうやら、皆さんの大統領への不満も\x01",
"極限まで高まっているようですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今はどちらかというと、\x01",
"不安の方が大きいという印象ですね。\x02",
)
)
CloseMessageWindow()
label("loc_12C6")
Jump("loc_1F2B")
label("loc_12CB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_145F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_13A7")
ChrTalk(
0xB,
(
"演説の様子は\x01",
"ホテルの導力ネットを通じて\x01",
"拝見しましたが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"流石に外国からいらした\x01",
"お客様の動揺には\x01",
"凄まじいものがありました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"何とか皆さん、本国まで\x01",
"辿り着けるとよいのですが……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_145A")
label("loc_13A7")
ChrTalk(
0xB,
(
"導力鉄道は本日をもって\x01",
"運行を停止するそうですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とにかく、お客様方が\x01",
"帰路につけないのでは話になりません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"飛行船の運航状況を含め、\x01",
"徹底的に情報を集めませんと。\x02",
)
)
CloseMessageWindow()
label("loc_145A")
Jump("loc_1F2B")
label("loc_145F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_1519")
ChrTalk(
0xB,
(
"夕暮れと炎の色に染まる歓楽街……\x01",
"あの日の光景は、まさに\x01",
"悪夢としか言い様がありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"……ともかく、一刻も早く\x01",
"日常を取り戻せるよう\x01",
"出来ることを尽くしませんと。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1519")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_15A2")
ChrTalk(
0xB,
(
"マインツ方面では\x01",
"今も警備隊の皆さんが\x01",
"奮戦されているそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"本当に警備隊の皆さんは\x01",
"私たち市民の誇りですわ。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_15A2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_1762")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_16D4")
ChrTalk(
0xB,
(
"昨日は列車事故の影響で、\x01",
"鉄道のダイヤが大きく乱れたため……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"結局、クロスベル滞在を\x01",
"一日延長した人もいらっしゃいました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"とはいえ、何とか今朝までに\x01",
"完全に復旧できて幸いでしたわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"足止めされるになった\x01",
"お客様も、今朝には全員\x01",
"無事に送り出すことができましたので。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_175D")
label("loc_16D4")
ChrTalk(
0xB,
(
"足止めされることになった\x01",
"お客様も、今朝には全員\x01",
"無事に送り出すことができました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"これも全ては、\x01",
"警備隊の皆様のおかげですわね。\x02",
)
)
CloseMessageWindow()
label("loc_175D")
Jump("loc_1F2B")
label("loc_1762")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_1813")
ChrTalk(
0xB,
(
"何でも西クロスベル街道方面で\x01",
"列車事故が起きたそうですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今日これから帰路につく\x01",
"お客様がたを混乱させないためにも\x01",
"まずは情報を集めないといけませんね。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1813")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_19CF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1916")
ChrTalk(
0xB,
(
"導力ネットによる予約サービスも\x01",
"おかげ様で好評を頂いておりますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"現状まだまだ利用される方が\x01",
"少ないのも確かではありますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"更なるネットワーク拡充の折には、\x01",
"通信器によるご予約件数を\x01",
"必ず上回ると確信しておりますわ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_19CA")
label("loc_1916")
ChrTalk(
0xB,
(
"導力ネットの素晴らしい点は\x01",
"たとえ受付時間外であっても\x01",
"予約を頂ける所にありますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"通信器と違って、導力メールは\x01",
"24時間いつでも送受信することが\x01",
"可能でございますからね。\x02",
)
)
CloseMessageWindow()
label("loc_19CA")
Jump("loc_1F2B")
label("loc_19CF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_1A61")
ChrTalk(
0xB,
"国家独立の是非、でございますか……\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"大変難しい問題ではありますが、\x01",
"それを市民に問う事は\x01",
"非常に意義のある事だと思いますわ。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1A61")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_1ACB")
ChrTalk(
0xB,
(
"うふふ、ついに\x01",
"本会議が始まりますわね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"ディーター市長には\x01",
"頑張って頂きませんと。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1ACB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_1BA3")
ChrTalk(
0xB,
(
"私が支配人に就く以前から、\x01",
"当ホテルには過去に様々な要人の方を\x01",
"ご招待させて頂いた実績がございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"今回は話がなく残念でしたが、\x01",
"各国首脳の皆様にもいつかの折には\x01",
"ご宿泊して頂きたいものですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1BA3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_1C5D")
ChrTalk(
0xB,
(
"ホテル業に携わるものとして\x01",
"明日からの通商会議は\x01",
"いやでも注目してしまいますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"結ばれる協定の内容によっては\x01",
"今後の観光客の数などに\x01",
"影響も出て来るでしょうからね。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F2B")
label("loc_1C5D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_1DBE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1D19")
ChrTalk(
0xB,
"今日は雨でございますわね。\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"当ホテルでは、雨の日でも\x01",
"お楽しみ頂ける観光スポットを\x01",
"ご紹介致しております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"ぜひ、お気軽に\x01",
"お問い合わせくださいませ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1DB9")
label("loc_1D19")
ChrTalk(
0xB,
(
"基本的に、この歓楽街は\x01",
"雨の日でもお楽しみ頂ける\x01",
"場所がほとんどですわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"それでも外出が億劫な方には、\x01",
"当ホテルの各種ルームサービスも\x01",
"オススメですわよ。\x02",
)
)
CloseMessageWindow()
label("loc_1DB9")
Jump("loc_1F2B")
label("loc_1DBE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_1F2B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1E88")
ChrTalk(
0xB,
"《ホテル・ミレニアム》へようこそ。\x02",
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"うふふ、当ホテルでは\x01",
"お客様の様々なニーズに\x01",
"お応えしておりますわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"何かご要望がございましたら、\x01",
"いつでも仰ってくださいませ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1F2B")
label("loc_1E88")
ChrTalk(
0xB,
(
"エステや食事のルームサービス、\x01",
"各種ブッキングサービスに\x01",
"導力ネットを用いたご予約サービス……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xB,
(
"当ホテルでは、お客様の様々な\x01",
"ニーズにお応えしておりますわ。\x02",
)
)
CloseMessageWindow()
label("loc_1F2B")
Jump("loc_F45")
label("loc_1F30")
TalkEnd(0xB)
Return()
# Function_11_B27 end
def Function_12_1F34(): pass
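# Function_12: thin wrapper — talking to this entry point simply runs the front-desk handler (Function_13).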
label("Function_12_1F34")
Call(0, 13)
Return()
# Function_12_1F34 end
def Function_13_1F38(): pass
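# Function_13: front-desk clerk (chr 0x8) — shows a one-time CP/lodging tutorial, then loops a talk / rest / cancel menu; resting calls OP_AF, talking runs flag-gated small talk.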
label("Function_13_1F38")
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x136, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_LSS), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_20A9")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"《ホテル・ミレニアム》へようこそ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"当フロントでは当日の宿泊予約も\x01",
"受け付けておりますので、\x01",
"どうぞお気軽にお申し付け下さいませ。\x02",
)
)
CloseMessageWindow()
OP_5A()
FadeToDark(300, 0, 100)
SetMessageWindowPos(-1, -1, -1, -1)
Sound(814, 0, 100, 0)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"ホテルや宿酒場に宿泊すると\x01",
"CPを回復する事ができます。\x02",
)
)
CloseMessageWindow()
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"通常の宿酒場ではCP100、\x01",
"高級ホテルではCP200が回復します。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(14, 280, 60, 3)
OP_5A()
SetScenarioFlags(0x136, 5)
label("loc_20A9")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_20B3")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_304E")
FadeToDark(300, 0, 100)
Menu(
0,
-1,
-1,
1,
(
"話をする\x01", # 0
"休憩をする\x01", # 1
"やめる\x01", # 2
)
)
MenuEnd(0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_210F")
OP_60(0x0)
FadeToBright(300, 0)
OP_0D()
label("loc_210F")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_212F")
OP_AF(0x45)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_3049")
label("loc_212F")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_NEQ), scpexpr(EXPR_END)), "loc_2143")
Jump("loc_3049")
label("loc_2143")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_3049")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x63), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_227C")
ChrTalk(
0x8,
(
"支配人の指示で、有事のための\x01",
"備えをこれまで以上に\x01",
"強化することになりました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"ただし、各種物資に関しては\x01",
"自治州内の限りある商品を無闇に\x01",
"買い占めるワケにもいきませんからね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"政府への相談も視野に入れ、\x01",
"外国方面から買い集める手段を\x01",
"さっそく探り始めている所です。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_227C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_2379")
ChrTalk(
0x8,
(
"モヤの出現と同時にここへ\x01",
"駆け込んだ方々の様子は、\x01",
"それはもうパニック状態でした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"人形の兵士が市民を襲う事は\x01",
"一応ないと分かってからは、\x01",
"少しは落ち着きましたが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"とにかく、一刻も早くこの状況を\x01",
"何とかして頂きたいものですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_2379")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_247B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_23F0")
ChrTalk(
0x8,
(
"……今朝の演説には\x01",
"本当に驚かされました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"大統領の主張、\x01",
"理解はできるのですが……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2476")
label("loc_23F0")
ChrTalk(
0x8,
(
"……ふむ、とりあえず\x01",
"余計な事は口に出すべきでは\x01",
"ありませんね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"何はともあれ……\x01",
"しばらくは成り行きを\x01",
"見守るしかありません。\x02",
)
)
CloseMessageWindow()
label("loc_2476")
Jump("loc_3049")
label("loc_247B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_254D")
ChrTalk(
0x8,
(
"襲撃の日、当ホテルには幸いにも\x01",
"大した被害はありませんでしたが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"アルカンシェルは……\x01",
"本当に酷いものです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"何が目的かは知りませんが……\x01",
"このような所業、\x01",
"許されるはずがありません。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_254D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_2600")
ChrTalk(
0x8,
(
"マインツで起こっている事件は\x01",
"帝国の陰謀ではないかと\x01",
"考えておられる方も多いみたいですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"それがもし本当だとしたら……\x01",
"不戦条約は一体何だったんでしょうか?\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_2600")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_26B2")
ChrTalk(
0x8,
(
"昨日は事故の影響で\x01",
"宿泊をキャンセルしたいという\x01",
"連絡を多く受けたのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"大陸横断鉄道はまさに\x01",
"我々にとっても生命線……\x01",
"被害が最小限で済んで一安心ですよ。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_26B2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_272A")
ChrTalk(
0x8,
"ふむ、列車事故ですか……\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"クロスベルでは\x01",
"比較的珍しいことですが……\x01",
"一体原因は何なんでしょうね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3049")
label("loc_272A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_287B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_27FC")
ChrTalk(
0x8,
(
"最近ドリスさんの仕事ぶりが\x01",
"めきめき良くなっているのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"掃除の手際はもちろん、\x01",
"お客様からの評判も上々でしてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"これも教育係を務める\x01",
"アーロンさんの指導の賜物ですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2876")
label("loc_27FC")
ChrTalk(
0x8,
(
"最近ドリスさんの仕事ぶりが\x01",
"めきめき良くなっているのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"これも教育係を務める\x01",
"アーロンさんの指導の賜物ですね。\x02",
)
)
CloseMessageWindow()
label("loc_2876")
Jump("loc_3049")
label("loc_287B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_2A1F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_297B")
ChrTalk(
0x8,
(
"私はクロスベル人ですが、\x01",
"以前は帝国のホテルに\x01",
"勤めていた事があるのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"こう言っては何ですが、\x01",
"帝国にいた頃は貴族の方々に\x01",
"神経をすり減らす日々でしてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"こちらに来てからは、伸び伸び\x01",
"仕事をさせて頂いておりますよ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2A1A")
label("loc_297B")
ChrTalk(
0x8,
(
"帝国のホテルはサービス技術を\x01",
"学ぶには良い環境でしたが、\x01",
"その分気疲れも相当なものでした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"こちらに来てからは、伸び伸び\x01",
"仕事をさせて頂いておりますよ。\x02",
)
)
CloseMessageWindow()
label("loc_2A1A")
Jump("loc_3049")
label("loc_2A1F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_2B8D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2B04")
ChrTalk(
0x8,
(
"昨日、仕事帰りに\x01",
"遠目ながらオルキスタワーを\x01",
"拝見させて頂いたのですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"いやはや、あの迫力たるや\x01",
"話に聞いていた以上でしたね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"今度はぜひとも近くに行って、\x01",
"ビルを見上げてみたいものです。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2B88")
label("loc_2B04")
ChrTalk(
0x8,
(
"遠目ながら、オルキルタワーの\x01",
"迫力には本当に圧倒されました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"今度はぜひとも近くに行って、\x01",
"ビルを見上げてみたいものですね。\x02",
)
)
CloseMessageWindow()
label("loc_2B88")
Jump("loc_3049")
label("loc_2B8D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_2CDE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2C57")
ChrTalk(
0x8,
(
"お客様は除幕式の様子を\x01",
"ご見学されましたか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"何でも花火も打ち上げられ、\x01",
"大変見応えのある式典だったとか。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"オルキスタワーの威容……\x01",
"私も早く拝見したいものですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2CD9")
label("loc_2C57")
ChrTalk(
0x8,
(
"除幕式では花火も打ち上げられ、\x01",
"大変見応えがあったと聞きました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"オルキスタワーの威容……\x01",
"私も早く拝見したいものですね。\x02",
)
)
CloseMessageWindow()
label("loc_2CD9")
Jump("loc_3049")
label("loc_2CDE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_2E60")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2DD2")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"本日も良い天気でございますね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"街は警察による警戒体制が\x01",
"敷かれておりますが、\x01",
"観光日和には違いありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"どこか良いスポットを\x01",
"お探しでしたら、目的に合わせて\x01",
"ご案内させて頂きますよ?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2E5B")
label("loc_2DD2")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"本日も良い天気でございますね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"どこか良いスポットを\x01",
"お探しでしたら、目的に合わせて\x01",
"ご案内させて頂きますよ?\x02",
)
)
CloseMessageWindow()
label("loc_2E5B")
Jump("loc_3049")
label("loc_2E60")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_2FA3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2F2A")
ChrTalk(
0x8,
(
"おはようございます。\x01",
"本日もようこそいらっしゃいませ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"雨天時のお出掛けの際は\x01",
"仰っていただければ、\x01",
"傘のご提供もさせて頂きます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"どうぞお気軽にお申し付け下さい。\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2F9E")
label("loc_2F2A")
ChrTalk(
0x8,
(
"雨天時のお出掛けの際は\x01",
"仰っていただければ、\x01",
"傘のご提供もさせて頂きます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
"どうぞお気軽にお申し付け下さい。\x02",
)
CloseMessageWindow()
label("loc_2F9E")
Jump("loc_3049")
label("loc_2FA3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_3049")
ChrTalk(
0x8,
(
"いらっしゃいませ。\x01",
"《ホテル・ミレニアム》へようこそ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"当フロントでは当日の宿泊予約も\x01",
"受け付けておりますので、\x01",
"どうぞお気軽にお申し付け下さいませ。\x02",
)
)
CloseMessageWindow()
label("loc_3049")
Jump("loc_20B3")
label("loc_304E")
TalkEnd(0x8)
Return()
# Function_13_1F38 end
def Function_14_3052(): pass
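# Function_14: senior hotel staffer (Doris' mentor, per the front-desk dialogue) — flag-gated small talk about the hotel and current events.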
label("Function_14_3052")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_312D")
ChrTalk(
0xFE,
(
"先ほどレティシア支配人から\x01",
"当分は利益度外視で営業を行うとの\x01",
"意思表明がございました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"今はクロスベル全体が困難にある時……\x01",
"私もこれまでの経験を全て活かすつもりで\x01",
"全力で支配人を支える所存です。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_312D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_31E6")
ChrTalk(
0xFE,
(
"この度のホテルの無償提供で、\x01",
"支配人がミラではなく人を大切される\x01",
"方であることがよく分かりました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"そんな方の元で仕事が出来るのは\x01",
"本当に幸せなことだと思いますよ。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_31E6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_32AE")
ChrTalk(
0xFE,
(
"独立宣言以降、お客様の数は\x01",
"日に日に減ってはいたのですが……\x01",
"今朝の演説は決定的ですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"こういう時にする話でもありませんが……\x01",
"当ホテルも経営方針を見直さざるを\x01",
"得ないでしょう。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_32AE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_3415")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_338D")
ChrTalk(
0xFE,
(
"襲撃事件が街に残した爪痕は\x01",
"余りに大きいと言う他ありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"警備隊の被害は甚大、\x01",
"それにあのイリア嬢まで……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"このようなことは\x01",
"二度と起こってはならない……\x01",
"ただ、そう思うばかりです。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3410")
label("loc_338D")
ChrTalk(
0xFE,
(
"警備隊の被害は甚大、\x01",
"それにあのイリア嬢まで……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"このようなことは\x01",
"二度と起こってはならない……\x01",
"ただ、そう思うばかりです。\x02",
)
)
CloseMessageWindow()
label("loc_3410")
Jump("loc_3E42")
label("loc_3415")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_34A6")
ChrTalk(
0xFE,
(
"昨日起こった襲撃事件……\x01",
"まだまだ事態は収束に\x01",
"向かってくれないようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"マインツの住民の\x01",
"皆様のことが本当に心配です。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_34A6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_354D")
ChrTalk(
0xFE,
(
"脱線事故は不可思議な\x01",
"魔獣の仕業と聞きましたが……\x01",
"何とも不気味な話でございますね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"何か不吉なことの前触れ、\x01",
"などとは考えたくないものですが……\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_354D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_360A")
ChrTalk(
0xFE,
(
"そろそろチェックインのお客様が\x01",
"見え出す時間なのですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"どうやら列車事故の影響が\x01",
"さっそく出ているようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"皆さん、無事に\x01",
"到着して頂けるとよいのですが……\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_360A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_36B4")
ChrTalk(
0xFE,
(
"今の時期、デラックスルームが\x01",
"空室になることは\x01",
"そう珍しいことではありません。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"つまり、確実に\x01",
"ご宿泊になられたい方には\x01",
"今が狙い目ということですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_36B4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_389A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_37DB")
ChrTalk(
0xFE,
(
"国家独立の是非……\x01",
"基本的に賛成意見が多いものの\x01",
"様々な意見があるようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私のような年寄りは、\x01",
"どうしても2大国の脅威について\x01",
"ばかり考えてしまいますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"当ホテルがそうであったように、\x01",
"クロスベル自治州も今こそ変化が\x01",
"必要な時期なのかもしれませんね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3895")
label("loc_37DB")
ChrTalk(
0xFE,
(
"私のような年寄りは、\x01",
"どうしても2大国の脅威について\x01",
"ばかり考えてしまいますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"当ホテルがそうであったように、\x01",
"クロスベル自治州も今こそ変化が\x01",
"必要な時期なのかもしれませんね。\x02",
)
)
CloseMessageWindow()
label("loc_3895")
Jump("loc_3E42")
label("loc_389A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_3929")
ChrTalk(
0xFE,
(
"いよいよ通商会議の\x01",
"本会議が始まるわけですな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"いやはや、ディーター市長と\x01",
"マクダエル議長には\x01",
"期待せずにはおれませんね。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_3929")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_39C7")
ChrTalk(
0xFE,
(
"オルキスタワーには全てのフロアに\x01",
"導力ネットが引かれているそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"ふふ、当ホテルの導力ネット予約も\x01",
"ますます盛況になりそうですな。\x02",
)
)
CloseMessageWindow()
Jump("loc_3E42")
label("loc_39C7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_3B40")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3AA4")
ChrTalk(
0xFE,
(
"少し前まで頼りない所もあった\x01",
"ドリスさんですが、なかなかどうして\x01",
"最近は安心して見ていられますよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"後進の成長を肌で感じられる……\x01",
"教育係を任された者として\x01",
"これ以上の喜びはありませんな。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3B3B")
label("loc_3AA4")
ChrTalk(
0xFE,
(
"ドリスさんが成長してくれて\x01",
"本当に嬉しく思います。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"後進の成長を肌で感じられる……\x01",
"教育係を任された者として\x01",
"これ以上の喜びはありませんからね。\x02",
)
)
CloseMessageWindow()
label("loc_3B3B")
Jump("loc_3E42")
label("loc_3B40")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_3CEA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3C37")
ChrTalk(
0xFE,
(
"記念祭ほどの盛況さはないものの、\x01",
"当ホテルの客足は\x01",
"順調に推移してございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"支配人肝いりの導力ネットによる\x01",
"予約システムも反響は上々ですし……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"いやはや、《ホテル・ミレニアム》の\x01",
"未来は明るうございますな。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3CE5")
label("loc_3C37")
ChrTalk(
0xFE,
(
"伝統と革新の融合……\x01",
"それがレティシア支配人の目指す\x01",
"当ホテルのあり方でございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私も最初は戸惑いもしましたが、\x01",
"今では支配人のことを\x01",
"全面的に信頼しておりますよ。\x02",
)
)
CloseMessageWindow()
label("loc_3CE5")
Jump("loc_3E42")
label("loc_3CEA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_3E42")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3DBF")
ChrTalk(
0xFE,
(
"当ホテルは今年で開業60周年……\x01",
"ちなみに私がここで働き始めて\x01",
"早30年以上の歳月が経ちました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"気づけば一番の古株ですよ。\x01",
"いやはや、時代の流れというのは\x01",
"本当に早いものですね。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3E42")
label("loc_3DBF")
ChrTalk(
0xFE,
(
"このホテルで働き始めて\x01",
"早30年以上……\x01",
"気づけば一番の古株ですよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"いやはや、時代の流れというのは\x01",
"本当に早いものですね。\x02",
)
)
CloseMessageWindow()
label("loc_3E42")
TalkEnd(0xFE)
Return()
# Function_14_3052 end
def Function_15_3E46(): pass
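# Function_15: maid (Doris) — flag-gated small talk; one branch hands off to the contest-recruitment scene in Function_42 while that request is active.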
label("Function_15_3E46")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_3EEE")
ChrTalk(
0xFE,
(
"ようやく、避難者の方々を一通り\x01",
"お見送りすることができました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"こんな状況ですが……\x01",
"皆さんから、お礼の言葉をいただけて\x01",
"本当に嬉しかったです。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_3EEE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_3F95")
ChrTalk(
0xFE,
(
"状況がどうあれ……\x01",
"ホテルがここまで忙しくなるのは\x01",
"ずいぶん久しぶりです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"ホテル・ミレニアムの一員として、\x01",
"全力でサービスに努めさせて頂きます。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_3F95")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_40A3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_403F")
ChrTalk(
0xFE,
(
"私にはうまく事態を\x01",
"飲み込めないのですが……\x01",
"今は複雑な気持ちで一杯です。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"確かに私たちは投票によって、\x01",
"独立に賛成したわけですけど……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_409E")
label("loc_403F")
ChrTalk(
0xFE,
(
"……手を止めると、何だか\x01",
"色々考えちゃってダメですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"さてと……仕事に励みませんと。\x02",
)
CloseMessageWindow()
label("loc_409E")
Jump("loc_486F")
label("loc_40A3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_4184")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x198, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_40D5")
Call(0, 42)
Return()
label("loc_40D5")
ChrTalk(
0xFE,
(
"魔獣の咆哮に銃撃の音、\x01",
"それに警官隊の方々の怒号に悲鳴……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"……襲撃の日のことを思い出すと、\x01",
"今でも震えが止まりません……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"どうして、この街でこんなことが……\x02",
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_4184")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_41ED")
ChrTalk(
0xFE,
(
"マインツの事件……\x01",
"本当にとんでもない話ですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"一刻も早く解決して欲しいです……\x02",
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_41ED")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_42A5")
ChrTalk(
0xFE,
(
"昨日の列車事故では\x01",
"多くの怪我人が出たそうですが、\x01",
"幸い死者は出なかったそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"でも中にはかなり\x01",
"重傷の方もいたとか……\x01",
"とにかく、早く良くなって欲しいです。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_42A5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_4306")
ChrTalk(
0xFE,
(
"列車の事故だなんて……\x01",
"本当に恐ろしいですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"……乗客の方々が心配です。\x02",
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_4306")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_4414")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_438C")
ChrTalk(
0xFE,
"さてと、今日も頑張ってお仕事です。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"あっと、お客様への笑顔も\x01",
"忘れないようにしませんと。(ニコリ)\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_440F")
label("loc_438C")
ChrTalk(
0xFE,
(
"誰かのために汗を掻くのって\x01",
"本当に気持ち良いですよね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"何ていうか、\x01",
"自分って必要とされてるんだな、\x01",
"って実感できるんです。\x02",
)
)
CloseMessageWindow()
label("loc_440F")
Jump("loc_486F")
label("loc_4414")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_44B8")
ChrTalk(
0xFE,
(
"アルカンシェルの\x01",
"リニューアル公演の日が\x01",
"いよいよ近づいて来ましたね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"流石にチケットは\x01",
"取れませんでしたけど、\x01",
"どんな舞台になるか楽しみです。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_44B8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_4604")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4580")
ChrTalk(
0xFE,
(
"お客様、タイムズ百貨店の\x01",
"屋上へはもう行かれましたか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"何でもオルキスタワーを観る\x01",
"絶景スポットだそうですよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"もしまだでしたら、\x01",
"行ってみてはいかがでしょうか?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 3)
Jump("loc_45FF")
label("loc_4580")
ChrTalk(
0xFE,
(
"タイムズ百貨店の屋上は\x01",
"オルキスタワーを観る\x01",
"絶景スポットだそうですよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"お客様もぜひ、\x01",
"行ってみてはいかがでしょうか?\x02",
)
)
CloseMessageWindow()
label("loc_45FF")
Jump("loc_486F")
label("loc_4604")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_46BE")
ChrTalk(
0xFE,
(
"VIPの方々は除幕式の後、\x01",
"それぞれ色々な場所を\x01",
"ご訪問されるご予定だそうですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"どこかで拝見できたりすると\x01",
"嬉しいんですけど……\x01",
"ガードが固いから難しいですよね。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_46BE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_474C")
ChrTalk(
0xFE,
(
"最近アーロンさんに叱られることが\x01",
"少なくなって来たんです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私、成長してるんでしょうか?\x01",
"ふふ、だとしたら嬉しいですね。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_474C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_47F8")
ChrTalk(
0xFE,
(
"雨の日はどうしても\x01",
"泥汚れが付いてしまうので、\x01",
"カーペット掃除が大変なんです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"でも綺麗になって行く所が\x01",
"目に見えるのって、\x01",
"けっこう快感なんですよね。\x02",
)
)
CloseMessageWindow()
Jump("loc_486F")
label("loc_47F8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_486F")
ChrTalk(
0xFE,
(
"おはようございます。\x01",
"ご宿泊のお客様でしょうか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"外出の際は、お部屋の鍵を\x01",
"忘れずにお掛けくださいね。\x02",
)
)
CloseMessageWindow()
label("loc_486F")
TalkEnd(0xFE)
Return()
# Function_15_3E46 end
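# Functions 16-25: hotel guests and evacuees — short, flag-gated one-off lines.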
def Function_16_4873(): pass
label("Function_16_4873")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_48FC")
ChrTalk(
0xFE,
"ふむ、今日は何をして過ごそうか。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"支配人の言う通り、ホテルの\x01",
"サービスを味わい尽くすというのも\x01",
"案外いいかもな。\x02",
)
)
CloseMessageWindow()
Jump("loc_4981")
label("loc_48FC")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_4981")
ChrTalk(
0xFE,
(
"このホテル、部屋はもちろん\x01",
"サービスも一流だな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"ふむ、今後もクロスベル旅行の際は\x01",
"ぜひここを利用するようにしよう。\x02",
)
)
CloseMessageWindow()
label("loc_4981")
TalkEnd(0xFE)
Return()
# Function_16_4873 end
def Function_17_4985(): pass
label("Function_17_4985")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_4A00")
ChrTalk(
0xFE,
(
"ふふ、確かにホテルで\x01",
"過ごすのも悪くなさそうね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"私としては、カジノに\x01",
"入り浸りたいところだけど。\x02",
)
)
CloseMessageWindow()
Jump("loc_4A90")
label("loc_4A00")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_4A90")
ChrTalk(
0xFE,
(
"ふふ、今回は導力鉄道で\x01",
"来たのだけど、移動の疲れが\x01",
"すっかり取れたわよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"少し値段は張るけど、\x01",
"値段以上の価値があるのは確かね。\x02",
)
)
CloseMessageWindow()
label("loc_4A90")
TalkEnd(0xFE)
Return()
# Function_17_4985 end
def Function_18_4A94(): pass
label("Function_18_4A94")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"僕と妹は家に帰ってる途中、\x01",
"突然モヤに巻き込まれて\x01",
"このホテルに避難してきたんだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"……ホント、\x01",
"命からがらって気分だったよ。\x02",
)
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_18_4A94 end
def Function_19_4B20(): pass
label("Function_19_4B20")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"お兄ちゃん、モヤが出た時\x01",
"私のことをおんぶして\x01",
"走り回ってくれたんだよ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"えへへ、かっこよかったな噴\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_19_4B20 end
def Function_20_4B93(): pass
label("Function_20_4B93")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"今頃みんな心配してるだろうな……\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_20_4B93 end
def Function_21_4BC0(): pass
label("Function_21_4BC0")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"早くお家に帰りたいわ……\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_21_4BC0 end
def Function_22_4BE5(): pass
label("Function_22_4BE5")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4CCA")
ChrTalk(
0xFE,
(
"街を守っていた結界は\x01",
"一体どこへ行ったんだ……!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"それに、あの気味の悪い\x01",
"化物は一体なんなんだ……!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"大統領演説の日に故郷#4Rく に#に\x01",
"帰り損ねただけでこの仕打ち……\x01",
"もういい加減にしてくれたまえ。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 5)
Jump("loc_4D2E")
label("loc_4CCA")
ChrTalk(
0xFE,
(
"大統領演説の日に故郷#4Rく に#に\x01",
"帰り損ねただけでこの仕打ち……\x01",
"もういい加減にしてくれたまえ。\x02",
)
)
CloseMessageWindow()
label("loc_4D2E")
TalkEnd(0xFE)
Return()
# Function_22_4BE5 end
def Function_23_4D32(): pass
label("Function_23_4D32")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"こんな状況になると\x01",
"分かっていれば、すぐに\x01",
"家に帰っていたんだがなぁ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"まあただ、こんないい部屋に\x01",
"無償で通してくれたことは\x01",
"すごくラッキーだったけどね。\x02",
)
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_23_4D32 end
def Function_24_4DDB(): pass
label("Function_24_4DDB")
TalkBegin(0xFE)
ChrTalk(
0xFE,
(
"ホテルに避難できたのは\x01",
"ほんと不幸中の幸いね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"でもこの状況……\x01",
"一体いつまで続くのかしら?\x02",
)
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_24_4DDB end
def Function_25_4E46(): pass
label("Function_25_4E46")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"えへへ、このお部屋おっきーね♪\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_25_4E46 end
def Function_26_4E71(): pass
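# Function_26: side-quest dispatcher — with the request active and flag 0x174:5 set, it runs the Derick scene (Function_28) first, then the Minnes interview (Function_29) once flag 0x174:6 has been set by Function_28.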
label("Function_26_4E71")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x82, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 5)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_4EA5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x174, 6)), scpexpr(EXPR_END)), "loc_4EA2")
Call(0, 29)
Jump("loc_4EA5")
label("loc_4EA2")
Call(0, 28)
label("loc_4EA5")
Return()
# Function_26_4E71 end
def Function_27_4EA6(): pass
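# Function_27: locked-door interaction — plays a sound effect and shows "The door is locked."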
label("Function_27_4EA6")
TalkBegin(0xFF)
Sound(807, 0, 100, 0)
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"扉には鍵がかかっている。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
TalkEnd(0xFF)
Return()
# Function_27_4EA6 end
def Function_28_4ED8(): pass
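# Function_28: event — Derick of Armorica Village leaves the suite and is questioned by the party; sets flag 0x174:6 on completion.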
label("Function_28_4ED8")
EventBegin(0x0)
FadeToDark(500, 0, -1)
OP_0D()
SetChrFlags(0xA, 0x80)
EndChrThread(0xA, 0x0)
LoadChrToIndex("chr/ch32300.itc", 0x1E)
ClearChrFlags(0x17, 0x80)
ClearChrBattleFlags(0x17, 0x8000)
SetChrChipByIndex(0x17, 0x1E)
SetChrSubChip(0x17, 0x0)
SetChrPos(0x17, 68000, 0, 12400, 315)
OP_68(68140, 1500, 9270, 0)
MoveCamera(312, 19, 0, 0)
OP_6E(400, 0)
SetCameraDistance(21270, 0)
SetChrPos(0x101, 67400, 0, 9530, 0)
SetChrPos(0x102, 68780, 0, 9180, 0)
SetChrPos(0x103, 66670, 0, 8520, 0)
SetChrPos(0x104, 68370, 0, 8240, 0)
SetChrPos(0x109, 67410, 0, 7270, 0)
SetChrPos(0x105, 69430, 0, 7040, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
FadeToBright(1000, 0)
OP_0D()
ChrTalk(
0x17,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"──ええ、それではまた明日。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"今後ともよろしくお願いします。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x105, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
SetChrPos(0x17, 68000, 0, 13400, 315)
OP_A7(0x17, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
ClearMapObjFlags(0x1, 0x10)
Sound(103, 0, 100, 0)
OP_71(0x1, 0x0, 0x10, 0x0, 0x0)
OP_79(0x1)
def lambda_50CD():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x17, 2, lambda_50CD)
def lambda_50DE():
OP_97(0xFE, 0x0, 0x0, 0xFFFFF79A, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_50DE)
WaitChrThread(0x17, 1)
OP_71(0x1, 0x10, 0x0, 0x0, 0x0)
OP_79(0x1)
Sound(104, 0, 100, 0)
OP_63(0x17, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x17,
"おや……あんたたちは。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fえっと、すみません。\x02\x03",
"アルモリカ村の\x01",
"デリックさんですよね?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"ああ、その通りだが……\x01",
"俺に何か用なのか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000F申し遅れました……\x01",
"警察の特務支援課の者です。\x02\x03",
"少し話をお聞かせ願えませんか?\x02",
)
)
CloseMessageWindow()
OP_63(0x17, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x17)
ChrTalk(
0x17,
"……なるほどな。\x02",
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"あんたたちは村長……\x01",
"親父の差し金だな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"警察まで呼ぶなんて……\x01",
"フン、ご苦労なことだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#00105Fえ、えっとあの……\x02",
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"……大体見当はついてる。\x01",
"俺の最近の行動を\x01",
"洗おうって言うんだろう。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"別に後ろ暗いことを\x01",
"しているわけじゃないんだ、\x01",
"なんでも聞いてみろ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
"#10303F(ふむ……意外な反応だね。)\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F……では、単刀直入に聞きます。\x02\x03",
"#00001Fここ数日、あなたは\x01",
"ミンネスさんという方と\x01",
"付き合いがあるそうですが……\x02\x03",
"一体、どういう目的が?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"……まあ、いいだろう。\x01",
"いまさら知ったところで\x01",
"親父にはどうにもできまい。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"少し前から、ミンネスさんには\x01",
"あることについて世話になっている。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
"主に、村の改革についてな。\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x105, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x109,
"#10105Fむ、村の改革ですか……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fそ、そんな大事なことを\x01",
"村長さんに黙って\x01",
"進めているんですか?\x02\x03",
"#00006Fいくらなんでも、\x01",
"それはよくないような……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"村長……親父には\x01",
"今まで何度も話したさ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"だが、返す言葉は決まって\x01",
"『あるべき姿を見失うな』だの\x01",
"『急激な変化はよくない』だの……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"だが、現状を維持しても\x01",
"あんな田舎の村に\x01",
"未来があるとは思えない。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"村を存続させるには、\x01",
"改革が絶対に必要なんだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"親父はそこのところを、\x01",
"分かってないんだ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00203Fなるほど……\x01",
"そんな中、そのミンネスという\x01",
"人物に出会ったわけですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"……彼は、親父と違って\x01",
"俺の相談に乗ってくれた。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"そして、アルモリカ村の\x01",
"養蜂業に大きな可能性を\x01",
"見出してくれたらしくてな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"近々、彼と協力して\x01",
"大きな事業を立ち上げる\x01",
"計画もあるんだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306Fな、なんつーか\x01",
"途方もねえ話だなあ……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"フン……\x01",
"俺が話せるのはこの位だ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x17,
(
"もういいだろう?\x01",
"そろそろ村に帰らせてもらうぞ。\x02",
)
)
CloseMessageWindow()
def lambda_58B1():
OP_95(0xFE, 74620, 0, 5690, 2000, 0x0)
ExitThread()
QueueWorkItem(0x17, 1, lambda_58B1)
Sleep(2000)
def lambda_58CE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_58CE)
Sleep(50)
def lambda_58DE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_58DE)
Sleep(50)
def lambda_58EE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_58EE)
Sleep(50)
def lambda_58FE():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_58FE)
Sleep(50)
def lambda_590E():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 1, lambda_590E)
Sleep(50)
def lambda_591E():
OP_93(0xFE, 0x87, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_591E)
WaitChrThread(0x17, 1)
SetChrFlags(0x17, 0x80)
OP_0D()
ChrTalk(
0x102,
"#00105Fあっ……\x02",
)
CloseMessageWindow()
ChrTalk(
0x109,
"#10106F行ってしまいましたね……\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_68(68210, 1500, 8580, 2000)
OP_6F(0x1)
ChrTalk(
0x101,
(
"#00003Fとにかく……\x01",
"折角ここまできたんだ。\x02\x03",
"#00000Fここは一つ、\x01",
"ミンネスという男に、\x01",
"直接会ってみよう。\x02",
)
)
CloseMessageWindow()
def lambda_5A07():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x102, 1, lambda_5A07)
Sleep(50)
def lambda_5A17():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x103, 1, lambda_5A17)
Sleep(50)
def lambda_5A27():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x104, 1, lambda_5A27)
Sleep(50)
def lambda_5A37():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x109, 1, lambda_5A37)
Sleep(50)
def lambda_5A47():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x105, 1, lambda_5A47)
ChrTalk(
0x104,
(
"#00303Fなるほど……\x01",
"色々と分かるかも知れねえな。\x02\x03",
"#00300Fよっしゃ、そんじゃ\x01",
"早速突入してみるとするか。\x02",
)
)
CloseMessageWindow()
FadeToDark(500, 0, -1)
OP_0D()
SetScenarioFlags(0x174, 6)
OP_29(0x82, 0x1, 0x6)
OP_D7(0x1E)
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
ClearChrFlags(0xA, 0x80)
SetChrPos(0xA, 50740, 0, 9750, 90)
BeginChrThread(0xA, 0, 0, 1)
SetChrPos(0x0, 68510, 0, 9710, 0)
OP_69(0xFF, 0x0)
EventEnd(0x5)
Return()
# Function_28_4ED8 end
def Function_29_5B19(): pass
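# Function_29: event — the party enters the suite and interviews Minnes of the Quincy Company about the "Armorica Honey Company" plan, then the scene changes to t0010 (Armorica Village).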
label("Function_29_5B19")
EventBegin(0x0)
FadeToDark(500, 0, -1)
OP_0D()
SetChrFlags(0xA, 0x80)
EndChrThread(0xA, 0x0)
OP_4B(0xC, 0xFF)
OP_68(68560, 1500, 10330, 0)
MoveCamera(315, 26, 0, 0)
OP_6E(400, 0)
SetCameraDistance(20260, 0)
SetChrPos(0x101, 67900, 0, 11200, 0)
SetChrPos(0x102, 69690, 0, 9980, 315)
SetChrPos(0x103, 66720, 0, 10430, 45)
SetChrPos(0x104, 68370, 0, 9740, 0)
SetChrPos(0x109, 67410, 0, 8770, 0)
SetChrPos(0x105, 69430, 0, 8540, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
FadeToBright(1000, 0)
OP_0D()
ChrTalk(
0x101,
(
"#00001Fそれじゃあ……\x01",
"早速入ってみるぞ。\x02",
)
)
CloseMessageWindow()
Sleep(600)
Sound(808, 0, 100, 0)
Sleep(1000)
SetMessageWindowPos(330, 20, -1, -1)
SetChrName("中年の声")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"おや……\x01",
"どちらさまですかな?\x02\x03",
"ルームサービスを\x01",
"頼んだ覚えはありませぬが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fミンネスさん……ですね?\x02\x03",
"#00004F突然すみません、\x01",
"クロスベル警察・\x01",
"特務支援課の者です。\x02\x03",
"#00000F2、3、お聞きしたいことが\x01",
"あるのですが……\x02",
)
)
CloseMessageWindow()
SetChrName("中年の声")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"おやおや、\x01",
"警察の方がわざわざ……\x02\x03",
"そういうことなら\x01",
"どうぞ、お入りください。\x01",
"鍵は開いておりますゆえ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00305F(な、なんだかえらくあっさり\x01",
" 入れてくれるんだな。)\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F(俺たちが考えている以上の\x01",
" やり手なのかもしれないな……)\x02\x03",
"#00005Fえっと……\x01",
"それでは、失礼します。\x02",
)
)
CloseMessageWindow()
ClearMapObjFlags(0x1, 0x10)
Sound(103, 0, 100, 0)
OP_71(0x1, 0x0, 0x10, 0x0, 0x0)
OP_79(0x1)
FadeToDark(500, 0, -1)
OP_0D()
OP_71(0x1, 0x10, 0x0, 0x0, 0x0)
OP_79(0x1)
OP_68(169250, 1500, 2800, 0)
MoveCamera(311, 16, 0, 0)
OP_6E(400, 0)
SetCameraDistance(19200, 0)
SetChrPos(0xC, 168410, 0, 5520, 180)
SetChrPos(0x101, 168960, 0, -2080, 0)
SetChrPos(0x102, 168960, 0, -2080, 0)
SetChrPos(0x103, 168960, 0, -2080, 0)
SetChrPos(0x104, 168960, 0, -2080, 0)
SetChrPos(0x109, 168960, 0, -2080, 0)
SetChrPos(0x105, 168960, 0, -2080, 0)
OP_A7(0x101, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x102, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x103, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x104, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x109, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x105, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
FadeToBright(500, 0)
OP_0D()
OP_68(169610, 1500, 3430, 3000)
BeginChrThread(0x101, 3, 0, 30)
Sleep(500)
BeginChrThread(0x102, 3, 0, 31)
Sleep(500)
BeginChrThread(0x103, 3, 0, 32)
Sleep(500)
BeginChrThread(0x104, 3, 0, 33)
Sleep(500)
BeginChrThread(0x109, 3, 0, 34)
Sleep(500)
BeginChrThread(0x105, 3, 0, 35)
WaitChrThread(0x105, 3)
OP_6F(0x1)
ChrTalk(
0xC,
(
"#11Pお初にお目にかかります。\x01",
"私がミンネスにございますが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P本日はどういった\x01",
"ご用件でしょう?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F先ほども言った通り……\x01",
"いくつか質問をさせて\x01",
"いただこうと思います。\x02\x03",
"#00001Fご協力いただけますか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pもちろんですとも。\x01",
"私に協力できることなら何なりと……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pなにか、この辺りで\x01",
"事件でも起こりましたかな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fいえ……\x01",
"聞きたいことというのは\x01",
"あなたについてです。\x02\x03",
"あなたがどういった人物なのか、\x01",
"アルモリカ村でなにをしようと\x01",
"しているのか……\x02\x03",
"#00001F一通り、お聞かせ願いたいのですが。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pほう……?\x02",
)
CloseMessageWindow()
StopBGM(0xBB8)
ChrTalk(
0xC,
(
"#11Pまあいいでしょう。\x01",
"それくらいは詮無きことです。\x02",
)
)
CloseMessageWindow()
WaitBGM()
Sleep(10)
PlayBGM("ed7111", 0)
ChrTalk(
0xC,
(
"#11Pコホン……私はある会社で役員を\x01",
"させてもらっている者でしてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P仕事内容は、商品開発から\x01",
"営業まで幅広くしております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pアルモリカ村へは、わが社……\x01",
"『クインシー社』の重要な取引きのため\x01",
"訪問させていただいた次第です。\x02",
)
)
CloseMessageWindow()
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
ChrTalk(
0x102,
(
"#00105Fえ……ええっ!\x01",
"あのクインシー社ですか?\x02",
)
)
CloseMessageWindow()
def lambda_6361():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x101, 1, lambda_6361)
Sleep(50)
def lambda_6371():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x103, 1, lambda_6371)
Sleep(50)
def lambda_6381():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x104, 1, lambda_6381)
Sleep(50)
def lambda_6391():
TurnDirection(0xFE, 0x102, 500)
ExitThread()
QueueWorkItem(0x109, 1, lambda_6391)
Sleep(300)
ChrTalk(
0x104,
(
"#00305F初めて聞く名前だが……\x01",
"お嬢は知ってるのかよ?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x102, 0x104, 500)
Sleep(300)
ChrTalk(
0x102,
(
"#00105Fえっと……クインシー社というのは、\x01",
"外国の有名なお菓子メーカーなの。\x02\x03",
"#00104F製菓業界でもかなりの大企業で、\x01",
"確か、クロスベルにも\x01",
"商品が輸入されてたと思うわ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fああ、そういえば子供の頃、\x01",
"そんなメーカーのチョコレートを\x01",
"よく買って食べてたような……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00303Fうーん、メーカーなんぞ\x01",
"あまり意識して見ないからなあ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pふふ、それもまた\x01",
"仕方のないことでありましょう。\x02",
)
)
CloseMessageWindow()
def lambda_656C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_656C)
Sleep(50)
def lambda_657C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_657C)
Sleep(50)
def lambda_658C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_658C)
Sleep(50)
def lambda_659C():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_659C)
Sleep(50)
def lambda_65AC():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_65AC)
Sleep(300)
ChrTalk(
0xC,
(
"#11P私自身、この立場にはいますが\x01",
"甘い物は苦手でしてねぇ。\x01",
"昔は本当に疎いものでした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P長年営業方面で活躍したおかげで\x01",
"力を認められ、今の地位に\x01",
"つかせてもらったわけですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P……おっと、\x01",
"話が逸れてしまいましたかな?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fあ……い、いえ。\x01",
"こちらこそ失礼しました。\x02\x03",
"#00003F……コホン。\x01",
"先ほど、アルモリカ村で\x01",
"『取引き』と仰いましたね。\x02\x03",
"#00001Fその『取引き』とは……\x01",
"村長の息子、デリックさんに\x01",
"関係のあることなんですね?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00100F何でも、村の発展に\x01",
"関係のあることのようですが……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pおや……\x01",
"そこまで知っておいででしたか。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pふむ、デリックさん自ら\x01",
"情報を解禁したというのなら、\x01",
"隠す意味はありませんな。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pふふ、彼とは友好的な関係を\x01",
"築かせていただいております。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
"#00203Fやはり……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#00001F詳しく聞かせていただけますか?\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pふふ、いいでしょう。\x02",
)
CloseMessageWindow()
OP_68(167980, 1500, 3640, 3000)
def lambda_68F2():
OP_95(0xFE, 164960, 0, 5520, 2000, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_68F2)
Sleep(500)
def lambda_690F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_690F)
Sleep(50)
def lambda_691F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_691F)
Sleep(50)
def lambda_692F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_692F)
Sleep(50)
def lambda_693F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_693F)
Sleep(50)
def lambda_694F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 1, lambda_694F)
Sleep(50)
def lambda_695F():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_695F)
WaitChrThread(0xC, 1)
OP_6F(0x1)
ChrTalk(
0xC,
(
"#5P我がクインシー社は、\x01",
"製菓業界の未来の為、\x01",
"日々、研鑽を重ねています。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pそんな中、私は本社より\x01",
"ある使命を賜って参りました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pそれは、このクロスベルへの\x01",
"クインシー社の進出、\x01",
"その足がかりを模索することです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00105Fつまり……\x01",
"クインシー社の子会社を\x01",
"クロスベルに?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#5Pふふ、その通りです。\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pそして、手始めに市内の百貨店に\x01",
"ヒントを探しに行った所で……\x01",
"私は出会ったのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#5Pかのアルモリカ村で作られるという、\x01",
"大変質のよい『蜂蜜』をね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x109,
(
"#10100F蜂蜜……アルモリカの\x01",
"レンゲ畑で作られるアレですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fハロルドさんもその質は\x01",
"保証していたっけ……\x02",
)
)
CloseMessageWindow()
OP_68(169610, 1500, 3430, 3000)
def lambda_6BD6():
OP_95(0xFE, 168410, 0, 5520, 2000, 0x0)
ExitThread()
QueueWorkItem(0xC, 1, lambda_6BD6)
Sleep(500)
def lambda_6BF3():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 1, lambda_6BF3)
Sleep(50)
def lambda_6C03():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 1, lambda_6C03)
Sleep(50)
def lambda_6C13():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 1, lambda_6C13)
Sleep(50)
def lambda_6C23():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 1, lambda_6C23)
Sleep(50)
def lambda_6C33():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 1, lambda_6C33)
Sleep(50)
def lambda_6C43():
OP_93(0xFE, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 1, lambda_6C43)
WaitChrThread(0xC, 1)
def lambda_6C54():
OP_93(0xFE, 0xB4, 0x1F4)
ExitThread()
QueueWorkItem(0xC, 1, lambda_6C54)
OP_6F(0x1)
Sleep(300)
ChrTalk(
0xC,
(
"#11P豊かな自然のもと\x01",
"代々受け継がれてきた\x01",
"レンゲ畑によって生まれる蜂蜜。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pそれを見たとき、天啓の如く\x01",
"新たな製菓ブランドを立ち上げる\x01",
"一つの計画が生まれたのです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pその計画名こそ……\x01",
"『アルモリカ・ハニーカンパニー』。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x109,
"#10105Fアルモリカ・ハニーカンパニー……\x02",
)
CloseMessageWindow()
ChrTalk(
0x104,
"#00306Fな、なにやら凄そうな響きだな。\x02",
)
CloseMessageWindow()
TurnDirection(0xC, 0x101, 500)
ChrTalk(
0xC,
(
"#11Pつまりは、アルモリカ村の蜂蜜を\x01",
"ふんだんに使用したお菓子を\x01",
"提供していくわけです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pしかし、そのためには現地の、\x01",
"アルモリカ村の方々の協力が\x01",
"必要不可欠でした。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pそこで私は、アルモリカ村の\x01",
"次期村長であるデリックさんに、\x01",
"この話を持ちかけたのでございます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P製菓工場の建造、そして\x01",
"この新会社の経営をしてみないか、とね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00005Fデリックさんに\x01",
"クインシー社の子会社を……!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P無論、そのノウハウや販売ラインは\x01",
"我が社で用意し、以降、レンゲ畑は\x01",
"こちらのスタッフで管理する……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P一切のお手を煩わせない、\x01",
"そして村人たちの苦労を減らすという\x01",
"条件を提示させていただきました。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#00105Fでも、工場なんて……\x01",
"いったいどこに建設する\x01",
"おつもりなのですか?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pそれに関しては今までの取引きで、\x01",
"村の私有地を貸していただけることに\x01",
"相成りましてね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pもともと物置程度にしか\x01",
"使っておられなかったそうなので、\x01",
"快諾していただきました次第です。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#10304F確かにその条件なら、\x01",
"話に乗ってくれる可能性は\x01",
"かなり高いだろうね。\x02\x03",
"村の改革を願うデリック君ならば\x01",
"なおさら……\x02\x03",
"#10302Fまさにあなたにとっても、\x01",
"デリック君にとっても\x01",
"悪い話じゃなかったわけだ。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pフフ、その通り。\x02",
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P実際、彼の才能と強い責任感は\x01",
"それに値するものと感じましたから。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11P……ふふ、私の話はこんなところです。\x01",
"ご理解いただけましたかな?\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x103, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x104, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x109, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x105, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_64(0x102)
OP_64(0x103)
OP_64(0x104)
OP_64(0x109)
OP_64(0x105)
ChrTalk(
0x101,
(
"#00003F……お話を聞かせていただき\x01",
"ありがとうございます。\x02\x03",
"#00000Fおかげさまで色々と\x01",
"分からなかった部分に\x01",
"答えが見出せそうです。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
"#11Pおや、もう話はいいのですかな?\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00000Fええ、お時間をとらせて\x01",
"申し訳ありませんでした。\x02\x03",
"自分たちはこれで\x01",
"失礼させていただきます。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pいえいえ、何のこれしき。\x01",
"またいつでも\x01",
"いらっしゃってください。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xC,
(
"#11Pどうかお気をつけて\x01",
"帰られますよう。\x02",
)
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
StopBGM(0x7D0)
OP_68(68880, 1500, 9870, 0)
MoveCamera(315, 26, 0, 0)
OP_6E(400, 0)
SetCameraDistance(21000, 0)
SetChrPos(0x101, 68000, 0, 13400, 180)
SetChrPos(0x102, 68000, 0, 13400, 180)
SetChrPos(0x103, 68000, 0, 13400, 180)
SetChrPos(0x104, 68000, 0, 13400, 180)
SetChrPos(0x109, 68000, 0, 13400, 180)
SetChrPos(0x105, 68000, 0, 13400, 180)
OP_A7(0x101, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x102, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x103, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x104, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x109, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
OP_A7(0x105, 0xFF, 0xFF, 0xFF, 0xFF, 0x0)
WaitBGM()
Sleep(10)
PlayBGM("ed7113", 0)
FadeToBright(1000, 0)
OP_0D()
ClearMapObjFlags(0x1, 0x10)
Sound(103, 0, 100, 0)
OP_71(0x1, 0x0, 0x10, 0x0, 0x0)
OP_79(0x1)
BeginChrThread(0x105, 3, 0, 41)
Sleep(500)
BeginChrThread(0x109, 3, 0, 40)
Sleep(500)
OP_68(69520, 1500, 7610, 3000)
BeginChrThread(0x104, 3, 0, 39)
Sleep(500)
BeginChrThread(0x103, 3, 0, 38)
Sleep(500)
BeginChrThread(0x102, 3, 0, 37)
Sleep(500)
BeginChrThread(0x101, 3, 0, 36)
WaitChrThread(0x101, 3)
OP_71(0x1, 0x10, 0x0, 0x0, 0x0)
OP_79(0x1)
Sound(104, 0, 100, 0)
OP_6F(0x1)
ChrTalk(
0x102,
(
"#00106Fふう……\x01",
"なんていうか……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#10300Fフフ、なんだか凄い話を\x01",
"聞かされてしまったね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x103,
(
"#00203Fあのミンネスという男……\x01",
"予想以上の凄腕だったようですね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x104,
(
"#00306F話は小難しかったが、\x01",
"確かに儲かりそうな話だったし……\x02\x03",
"#00301Fしかし、ありゃあ……\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x103, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x104, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x109, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x105, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_64(0x102)
OP_64(0x103)
OP_64(0x104)
OP_64(0x109)
OP_64(0x105)
ChrTalk(
0x109,
(
"#10101F……でも、これで一通りの情報は\x01",
"手に入れられましたね。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fああ……\x01",
"一旦アルモリカ村に戻ろう。\x02\x03",
"#00001Fトルタ村長に報告しなきゃな。\x02",
)
)
CloseMessageWindow()
FadeToDark(500, 0, -1)
OP_0D()
StopBGM(0xBB8)
WaitBGM()
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
SetScenarioFlags(0x22, 1)
NewScene("t0010", 0, 0, 0)
IdleLoop()
Return()
# Function_29_5B19 end
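# Functions 30-35: walk each party member into the suite; Functions 36-41: walk them back out to the corridor. Both sets are driven from Function_29.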
def Function_30_7861(): pass
label("Function_30_7861")
def lambda_7866():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_7866)
def lambda_7877():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_7877)
WaitChrThread(0x101, 1)
OP_95(0x101, 167730, 0, 2860, 2000, 0x0)
OP_93(0x101, 0x0, 0x1F4)
Return()
# Function_30_7861 end
def Function_31_78AC(): pass
label("Function_31_78AC")
def lambda_78B1():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 2, lambda_78B1)
def lambda_78C2():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_78C2)
WaitChrThread(0x102, 1)
OP_95(0x102, 169150, 0, 2870, 2000, 0x0)
Return()
# Function_31_78AC end
def Function_32_78F0(): pass
label("Function_32_78F0")
def lambda_78F5():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 2, lambda_78F5)
def lambda_7906():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_7906)
WaitChrThread(0x103, 1)
OP_95(0x103, 170230, 0, 1900, 2000, 0x0)
OP_93(0x103, 0x0, 0x1F4)
Return()
# Function_32_78F0 end
def Function_33_793B(): pass
label("Function_33_793B")
def lambda_7940():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 2, lambda_7940)
def lambda_7951():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_7951)
WaitChrThread(0x104, 1)
OP_95(0x104, 167400, 0, 1860, 2000, 0x0)
OP_93(0x104, 0x0, 0x1F4)
Return()
# Function_33_793B end
def Function_34_7986(): pass
label("Function_34_7986")
def lambda_798B():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 2, lambda_798B)
def lambda_799C():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x109, 1, lambda_799C)
WaitChrThread(0x109, 1)
OP_95(0x109, 168250, 0, 1200, 2000, 0x0)
OP_93(0x109, 0x0, 0x1F4)
Return()
# Function_34_7986 end
def Function_35_79D1(): pass
label("Function_35_79D1")
def lambda_79D6():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 2, lambda_79D6)
def lambda_79E7():
OP_98(0xFE, 0x0, 0x0, 0x9C4, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 1, lambda_79E7)
WaitChrThread(0x105, 1)
OP_95(0x105, 169670, 0, 1220, 2000, 0x0)
OP_93(0x105, 0x0, 0x1F4)
Return()
# Function_35_79D1 end
def Function_36_7A1C(): pass
label("Function_36_7A1C")
def lambda_7A21():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_7A21)
def lambda_7A32():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_7A32)
WaitChrThread(0x101, 1)
OP_95(0x101, 68440, 0, 10210, 2000, 0x0)
OP_93(0x101, 0xB4, 0x1F4)
Return()
# Function_36_7A1C end
def Function_37_7A67(): pass
label("Function_37_7A67")
def lambda_7A6C():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 2, lambda_7A6C)
def lambda_7A7D():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_7A7D)
WaitChrThread(0x102, 1)
OP_95(0x102, 67120, 0, 8910, 2000, 0x0)
OP_93(0x102, 0x5A, 0x1F4)
Return()
# Function_37_7A67 end
def Function_38_7AB2(): pass
label("Function_38_7AB2")
def lambda_7AB7():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 2, lambda_7AB7)
def lambda_7AC8():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_7AC8)
WaitChrThread(0x103, 1)
OP_95(0x103, 70510, 0, 9150, 2000, 0x0)
OP_93(0x103, 0xE1, 0x1F4)
Return()
# Function_38_7AB2 end
def Function_39_7AFD(): pass
label("Function_39_7AFD")
def lambda_7B02():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 2, lambda_7B02)
def lambda_7B13():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_7B13)
WaitChrThread(0x104, 1)
OP_95(0x104, 67540, 0, 7130, 2000, 0x0)
OP_93(0x104, 0x2D, 0x1F4)
Return()
# Function_39_7AFD end
def Function_40_7B48(): pass
label("Function_40_7B48")
def lambda_7B4D():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 2, lambda_7B4D)
def lambda_7B5E():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x109, 1, lambda_7B5E)
WaitChrThread(0x109, 1)
OP_95(0x109, 69250, 0, 6220, 2000, 0x0)
OP_93(0x109, 0x0, 0x1F4)
Return()
# Function_40_7B48 end
def Function_41_7B93(): pass
label("Function_41_7B93")
def lambda_7B98():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 2, lambda_7B98)
def lambda_7BA9():
OP_98(0xFE, 0x0, 0x0, 0xFFFFF63C, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 1, lambda_7BA9)
WaitChrThread(0x105, 1)
OP_95(0x105, 70730, 0, 7540, 2000, 0x0)
OP_93(0x105, 0x10E, 0x1F4)
Return()
# Function_41_7B93 end
def Function_42_7BDE(): pass
label("Function_42_7BDE")
TalkBegin(0x9)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x198, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_7D75")
ChrTalk(
0x9,
"さて、お掃除お掃除っと……\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003F(彼女なら『メイド』枠で\x01",
" ミスコンに出場できそうだな。)\x02\x03",
"#00000Fあの、すみません。\x01",
"ちょっと相談なのですが……\x02",
)
)
CloseMessageWindow()
SetChrName("")
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"チャリティイベントの\x01",
"ミスコンへの参加を頼んでみた。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
ChrTalk(
0x9,
"ミ、ミスコン……ですか?\x02",
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"あ、あの、すみません……\x01",
"お気持ちはうれしいのですが\x01",
"仕事を抜けられないもので……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#00003Fそうですか……\x01",
"いえ、失礼しました。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x198, 7)
Jump("loc_7DD9")
label("loc_7D75")
ChrTalk(
0x9,
(
"ミスコンへのお誘いは\x01",
"ちょっと……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x9,
(
"お気持ちはうれしいのですが\x01",
"仕事を抜けられないもので……\x02",
)
)
CloseMessageWindow()
label("loc_7DD9")
TalkEnd(0x9)
Return()
# Function_42_7BDE end
SaveToFile()
Try(main)
|
[
"[email protected]"
] | |
e5c95f65e2d375ab804087caa24c1424a0aba734
|
291f0aa9a40eeca26fb08106c952b9347db7dba7
|
/nz_crawl_demo/day2/requests/biquge.py
|
4436df5628c3550c69cfc0f0492fb0cc28404bae
|
[
"Apache-2.0"
] |
permissive
|
gaohj/nzflask_bbs
|
fad10b93f8f495a94d5d6db6f5c60d85c1c85518
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
refs/heads/master
| 2022-12-12T21:43:17.417294 | 2020-03-20T10:28:22 | 2020-03-20T10:28:22 | 239,702,874 | 0 | 2 |
Apache-2.0
| 2022-12-08T03:50:07 | 2020-02-11T07:34:01 |
JavaScript
|
UTF-8
|
Python
| false | false | 569 |
py
|
import requests
url = "http://www.xbiquge.la/login.php?jumpurl=http://www.xbiquge.la/"
data = {
"LoginForm[username]":"kangbazi666",
"LoginForm[password]":'kangbazi666',
}
headers = {
'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0"
}
# log in
session = requests.Session()  # instantiate a Session object; it keeps the login cookies for later requests
session.post(url,data=data,headers=headers)
res = session.get("http://www.xbiquge.la/modules/article/bookcase.php")
with open('biquge.html','w',encoding='utf-8') as fp:
fp.write(res.content.decode('utf-8'))
|
[
"[email protected]"
] | |
3e58c9590ed681e7f501c509360dd7178b5b6355
|
e01fb71c991e57504fa745d0a29b4a84033db438
|
/Market/GetNSECompanyCodes.py
|
fb96a44e9b20abdb8d6a7e83200fe354b102eddc
|
[] |
no_license
|
NandaCj/Machine_Learning1
|
6707f36d71e26dcdca03fc11da27f724e21f265e
|
fc2255f6932d8fd7a0ec002e6885e5a45fd04fe5
|
refs/heads/master
| 2021-06-20T01:32:17.249662 | 2019-05-30T06:42:13 | 2019-05-30T06:42:13 | 134,279,997 | 2 | 0 | null | 2019-05-30T06:42:14 | 2018-05-21T14:25:13 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 548 |
py
|
from nsepy import get_history
from datetime import date
from nsetools import Nse
nse = Nse()
import re
file = open("C:\\Users\\Ranjith\\Desktop\\NSE Scripts\\NSECodesList.txt", 'r')
OutputFile = open("C:\\Users\\Ranjith\\Desktop\\NSE Scripts\\NseHistory.txt", 'a+')
for line in file.readlines()[:1]:
Code = re.sub("'","",line.split(':')[0])
print (Code)
data = get_history(symbol=Code, start=date(2017,11,1), end=date(2017,11,7))
#OutputFile.write(Code+":"+str(data['Close'].to_dict())+"\n")
print (data)
|
[
"[email protected]"
] | |
3292495d148e6dbd5ae87db54b0fbd9e68c1a7cd
|
650026ba1780612868fcaae9c7587f3eb1c61eac
|
/robot_localizer/scripts/particle_marker.py
|
5171f46f4a899ffe2b6dcf9682f7578ed68f192f
|
[] |
no_license
|
anilpatel38/robot_localization
|
b75478b3ece9119c090e9f8679cc0e3f0fdf3c84
|
609f2eab8ec0ea810cf9c8f0cc6445bc27e6ca03
|
refs/heads/master
| 2020-03-30T05:01:56.897608 | 2018-10-16T17:33:40 | 2018-10-16T17:33:40 | 150,776,225 | 0 | 0 | null | 2018-09-28T17:53:19 | 2018-09-28T17:53:19 | null |
UTF-8
|
Python
| false | false | 2,066 |
py
|
#!/usr/bin/env python
from __future__ import print_function
from std_msgs.msg import Header
from neato_node.msg import Bump
from sensor_msgs.msg import LaserScan
from geometry_msgs.msg import Twist
from nav_msgs.msg import Odometry
from visualization_msgs.msg import Marker
from visualization_msgs.msg import MarkerArray
import rospy
import math
import random
class particle_marker(object):
"""adds particles or something"""
def __init__(self):
rospy.init_node('particle_marker')
self.rate = rospy.Rate(2)
self.num_particles = 10
self.particles = None
self.pub = rospy.Publisher('visualization_marker_array', MarkerArray, queue_size = 10)
def create_array(self):
self.particles = []
for i in range(self.num_particles):
x_pos = random.randint(1,100)
x_pos = x_pos/50.0
y_pos = random.randint(1,100)
y_pos = y_pos/50.0
self.particles.append((x_pos, y_pos))
def update_markers(self):
"updates all markers"
self.markerArray = MarkerArray()
id_number = 0
for pos in self.particles:
self.create_particle_marker(pos[0], pos[1])
self.marker.id = id_number
self.markerArray.markers.append(self.marker)
id_number += 1
def sort_add_noise(self):
pass
def create_particle_marker(self, x,y):
"creates marker with position x,y"
self.marker = Marker()
self.marker.header.frame_id = "base_link"
self.marker.type = self.marker.SPHERE
self.marker.action = self.marker.ADD
self.marker.pose.position.x = x
self.marker.pose.position.y = y
self.marker.pose.position.z = 0
scale = .15
self.marker.scale.x = scale
self.marker.scale.y = scale
self.marker.scale.z = scale
self.marker.color.a = 1
self.marker.color.g= 1
def update_current_location(self):
pass
def read_pos(self, data):
self.pos = data.pose.pose.position
self.orientation = data.pose.pose.orientation
def run(self):
while not rospy.is_shutdown():
self.create_array()
self.update_markers()
self.pub.publish(self.markerArray)
self.rate.sleep()
if __name__ == '__main__':
node = particle_marker()
node.run()
|
[
"[email protected]"
] | |
c950deb33595ab7513145a259c0dad0684cff22f
|
e5255d7588b117f000c8e11a57127d7bbb63a6e6
|
/collection/j1/01_getImageJson.py
|
6d1e626ddc17536930faed75ea9b0610302058d2
|
[] |
no_license
|
nakamura196/toyo_images
|
4134e9ae7d5790e04c157195ecdea10f952dbbf2
|
60c71b23b6028c639c4f9b1ee3083c083421a336
|
refs/heads/master
| 2020-08-25T13:46:25.334222 | 2019-10-25T03:15:06 | 2019-10-25T03:15:06 | 216,973,729 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,596 |
py
|
import urllib.request
from bs4 import BeautifulSoup
from time import sleep
import json
import hashlib
import os
from PIL import Image
import requests
import shutil
import urllib.parse
def download_img(url, file_name):
print("img="+url)
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(file_name, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
def dwn(url):
html = requests.get(url).text
soup = BeautifulSoup(html, "html.parser")
img = soup.find("img")
src = urllib.parse.urljoin(url, img.get("src"))
opath = src.replace("http://124.33.215.236/", "../../")
if not os.path.exists(opath):
tmp = os.path.split(opath)
os.makedirs(tmp[0], exist_ok=True)
download_img(src, opath)
url = "http://124.33.215.236/gazou/index_img.php?tg=J1"
html = urllib.request.urlopen(url)
soup = BeautifulSoup(html, "html.parser")
aas = soup.find_all("a")
urls = []
for a in aas:
href = urllib.parse.urljoin(url, a.get("href"))
urls.append(href)
for url0 in sorted(urls):
if "201511" in url0:
print("url0="+url0)
id = url0.split("lstdir=")[1].split("&")[0]
try:
html = requests.get(url0).text
except Exception as e:
print(e)
continue
soup = BeautifulSoup(html, "html.parser")
dwn(url0)
aas = soup.find_all("a")
for a in aas:
href = urllib.parse.urljoin(url0, a.get("href"))
if "201511.php" in href:
dwn(href)
|
[
"[email protected]"
] | |
15f343f6f01d94e5c35889c1a366f57d920911d9
|
c32f561f78f7b4aa39c9f3c9a1876a86551e6542
|
/build/iiwa_stack_examples/two_iiwa/two_iiwa_gazebo/catkin_generated/pkg.develspace.context.pc.py
|
fe4486c9268ed5a231608695bdd83d17acb330da
|
[] |
no_license
|
NexusReflex/Kuka_iiwa_needleForceControl
|
e34d665c0a3eec3acd60b391529751762e6cf5d8
|
fe33ded9106816cc521f90518a16d43f2b3a3464
|
refs/heads/master
| 2021-05-07T07:43:45.165798 | 2017-12-01T13:49:22 | 2017-12-01T13:49:22 | 109,149,868 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 373 |
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "two_iiwa_gazebo"
PROJECT_SPACE_DIR = "/home/laura/ros_ws/devel"
PROJECT_VERSION = "0.0.0"
|
[
"[email protected]"
] | |
e76ac53ae8d12790b226c6ee9266556427575296
|
97aad4adb802cf41be7b79c83beb580759336f0f
|
/enviar_sms/settings.py
|
4ffcc91cc9f95e3737addc0bc51e736e526f2e81
|
[] |
no_license
|
tatianno/enviar_sms_digivoice
|
cec91e8eb5dbb28a6e81c4a4fa03aa8787eb4132
|
a6649f134f82b4520c9f5f81b9b25822b0a38fe6
|
refs/heads/master
| 2023-05-31T06:09:40.812920 | 2021-06-22T13:58:37 | 2021-06-22T13:58:37 | 343,479,475 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 778 |
py
|
# Debug mode
DEBUG = True
log_file = '/var/log/enviar_sms.log'
#log_file = '/home/tatianno/Projetos/enviar_sms_digivoice/enviar_sms.log'
# Monitored directory containing the CSV files with the SMS destination and message
pasta_mensagens = '/media/sms'
#pasta_mensagens = '/home/tatianno/Projetos/enviar_sms_digivoice/sms/prod'
# Accepted file extensions
extensoes_validas = ['txt', 'csv']
# CSV file delimiter
delimitador = ';'
# AMI connection details
ami_login = {
'host' : '127.0.0.1',
'user' : 'user',
'secret' : 'passwd',
}
# Group of GSM ports configured in digivoice.conf
grupo_portas_gsm = '1'
# Database connection details
db = {
'host' : 'localhost',
'user' : 'sms',
'passwd' : 'asJeuqo',
'database' : 'envio_sms',
}
intervalo_verificacoes = 300
|
[
"[email protected]"
] | |
e2d074a9fa072d6fcaa7b8c2def0ff1c94fdbf34
|
eb44b795c7462980fcc831bc4f3c8c4062a55a7c
|
/Raw/Cards.pyw
|
80504c0ceea24cf65dd3186af73ee21d4cab7bef
|
[] |
no_license
|
joshgreatuk/Revision-Helper
|
1a8664b9189ebb6c2ac604d9ede9ce7c704f0297
|
9d92bd6466364349b6f92e21bc22a3b42b7d6f44
|
refs/heads/master
| 2020-08-29T14:40:16.026735 | 2019-10-28T14:45:13 | 2019-10-28T14:45:13 | 218,064,169 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 12,112 |
pyw
|
import PySimpleGUI as gui
import os
import random
#Create Cards window
currentsubject = "No Subject"
gui.ChangeLookAndFeel("BlueMono")
subtreedata = gui.TreeData()
data = []
layout = [[gui.Button("New Subject"),gui.Button("Change Subject"),gui.Button("Delete Subject"),gui.Button("Show Random Card"),gui.Button("New Card"),gui.Button("Modify Card"),gui.Button("Delete Card"),gui.Button("Exit")],[gui.Text(text="Subjects",font=("Consolas",10),size=(60,1),key="subject"), gui.Text(text=currentsubject,font=("Consolas",10),size=(40,1),key="currentsubject")], [gui.Tree(data=subtreedata,headings=[],auto_size_columns=False,col0_width=15,num_rows=27,key="subtree",enable_events=True), gui.Table(values=data[1:][:],headings=["Topic","Question"],col_widths=[20,40],auto_size_columns=False,display_row_numbers=False,num_rows=27,key="cardtable",select_mode="browse",alternating_row_color="LightBlue",justification="centre")]]
window = gui.Window("Revision Helper : Cards",layout=layout,margins=(0,0),resizable=True)
window.read(timeout=1)
#Functions
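# (Illustrative note, not in the original file) On-disk layout assumed by the functions below:
#   Cards/<subject>/<topic>/<question filename>, where the file body is "<question>\n\n<answer>"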
def FlashCard():
#Gets a random topic then question from that topic
if currentsubject != "No Subject":
topics = os.listdir("Cards/"+currentsubject+"/")
questions = []
for i in topics:
for j in os.listdir("Cards/"+currentsubject+"/"+i+"/"):
questions.append([j,i])
questionnum = random.randint(1,len(questions))-1
question = questions[questionnum][0]
questiontopic = questions[questionnum][1]
f = open("Cards/"+currentsubject+"/"+questiontopic+"/"+question)
answer = f.read()
f.close()
flashlayout = [[gui.Text(text="Question:",key="toptext",size=(50,2))], [gui.Text(text=question,key="questiontext",size=(50,5))], [gui.Button("Answer",key="ansbut"),gui.Button("Cancel",key="canbut")]]
flashwindow = gui.Window("Revision Helper : Flash Card",layout=flashlayout,margins=(0,0),resizable=False)
another = False
cardmode = 1
print(question)
print(answer)
while True:
event, values = flashwindow.read()
if event in (None,"canbut"):
flashwindow.close()
break
elif event in ("ansbut"):
if cardmode == 1:
flashwindow["toptext"].Update(value="Answer:")
flashwindow["questiontext"].Update(value=answer)
flashwindow["ansbut"].Update(text="Another")
flashwindow["canbut"].Update(text="Done")
cardmode = 2
elif cardmode == 2:
another = True
flashwindow.close()
break
if another:
FlashCard()
window.Enable()
def UpdateTree():
subfiles = os.listdir("Cards/")
subtreedata = gui.TreeData()
parent = ""
for i in subfiles:
subtreedata.Insert(parent=parent,text=i,values=[],key=i)
window.FindElement("subtree").Update(values=subtreedata)
if currentsubject != "No Subject":
topfiles = os.listdir("Cards/"+currentsubject+"/")
toptabledata = []
for i in range(len(topfiles)):
quefiles = os.listdir("Cards/"+currentsubject+"/"+topfiles[i])
for j in range(len(quefiles)):
toptabledata.append([topfiles[i],quefiles[j]])
window.FindElement("cardtable").Update(values=toptabledata)
UpdateTree()
#Event Loop
while True:
event,values = window.read()
if event in (None,"Cancel") or event in ("Exit"):
break
elif event in ("New Subject"):
new = gui.popup_get_text("Enter New Subject Name")
if new.strip() != "":
new = new.lower()
os.mkdir("Cards/"+new)
window.FindElement("currentsubject").Update(value="Subject : "+new)
currentsubject=new
UpdateTree()
else:
gui.popup_error("Entered Nothing")
elif event in ("Change Subject"):
new = gui.popup_get_text("Enter Subject To Change To")
if new.strip() != "":
new = new.lower()
if os.path.exists("Cards/"+new):
currentsubject=new
window.FindElement("currentsubject").Update(value="Subject : "+new)
UpdateTree()
else:
gui.popup_error("Entered Nothing")
elif event in ("Delete Subject"):
new = gui.popup_get_text("Enter Subject To Delete")
if new.strip() != "":
new = new.lower()
if os.path.exists("Cards/"+new):
for i in os.listdir("Cards/"+new+"/"):
os.rmdir("Cards/"+new+"/"+i+"/")
os.rmdir("Cards/"+new)
currentsubject = "No Subject"
window.FindElement("currentsubject").Update(value="No Subject")
UpdateTree()
else:
gui.popup_error("Entered Nothing")
elif event in ("Show Random Card"):
FlashCard()
elif event in ("New Card"):
if currentsubject != "No Subject":
cardlayout = [[gui.Text(text="Flash Card Creator",key="toptext")], [gui.Text(text="Topic",key="topictext")], [gui.Input("",key="topicentry",enable_events=True)], [gui.Text(text="Question (no special characters '/','?')",key="questiontext")], [gui.Input("",key="questionentry",enable_events=True)], [gui.Text(text="Answer",key="answertext")], [gui.Input("",key="answerentry",enable_events=True)], [gui.Button("Create Card")]]
cardwindow = gui.Window("Revision Helper : Card Creator",layout=cardlayout,margins=(0,0),resizable=False)
cardwindow.read()
while True:
event, values = cardwindow.read()
if event in (None,"Cancel"):
break
elif event in ("Create Card"):
if (cardwindow["topicentry"].get()).strip() != "" and (cardwindow["questionentry"].get()).strip() != "" and (cardwindow["answerentry"].get()).strip() != "":
if os.path.exists("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()) == False:
os.mkdir("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get())
f = open("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get() +"/"+cardwindow["questionentry"].get(),"w")
f.write(cardwindow["questionentry"].get()+"\n\n"+cardwindow["answerentry"].get())
f.close()
UpdateTree()
break
else:
gui.popup_error("Enter Values")
cardwindow.close()
else:
gui.popup_error("Choose A Subject First")
elif event in ("Modify Card"):
if currentsubject != "No Subject":
cardlayout = [[gui.Text(text="Flash Card Modifier",key="toptext")], [gui.Text(text="Topic Of Question",key="topictext")], [gui.Input("",key="topicentry",enable_events=True)], [gui.Text(text="Question To Modify",key="questiontext")], [gui.Input("",key="questionentry",enable_events=True)], [gui.Button("Modify Card")]]
cardwindow = gui.Window("Revision Helper : Card Modifier",layout=cardlayout,margins=(0,0),resizable=False)
cardwindow.read()
while True:
event, values = cardwindow.read()
if event in (None,"Cancel"):
break
elif event in ("Modify Card"):
if (cardwindow["topicentry"].get()).strip() != "" and (cardwindow["questionentry"].get()).strip() != "":
topicmodify = cardwindow["topicentry"].get()
questionmodify = cardwindow["questionentry"].get()
break
#modifier window
cardlayout = [[gui.Text(text="Flash Card Modifier",key="toptext")], [gui.Text(text="Topic",key="topictext")], [gui.Input("",key="topicentry",enable_events=True)], [gui.Text(text="Question (no special characters '/','?')",key="questiontext")], [gui.Input("",key="questionentry",enable_events=True)], [gui.Text(text="Answer",key="answertext")], [gui.Input("",key="answerentry",enable_events=True)], [gui.Button("Modify Card")]]
cardwindow = gui.Window("Revision Helper : Card Modifier",layout=cardlayout,margins=(0,0),resizable=False)
f = open("Cards/"+currentsubject+"/"+topicmodify+"/"+questionmodify,"r")
question = f.readline()
answer = f.readline()
f.close()
cardwindow.read()
cardwindow["topicentry"].Update(value=topicmodify)
cardwindow["questionentry"].Update(value=question)
cardwindow["answerentry"].Update(value=answer)
cardwindow.read()
while True:
event, values = cardwindow.read()
if event in (None,"Cancel"):
break
elif event in ("Modify Card"):
if (cardwindow["topicentry"].get()).strip() != "" and (cardwindow["questionentry"].get()).strip() != "" and (cardwindow["answerentry"].get()).strip() != "":
os.remove("Cards/"+currentsubject+"/"+topicmodify+"/"+questionmodify)
if not os.path.exists("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()):
os.mkdir("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get())
f = open("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get() +"/"+cardwindow["questionentry"].get().strip(),"w")
f.write(cardwindow["questionentry"].get()+"\n\n"+cardwindow["answerentry"].get().strip())
f.close()
UpdateTree()
if len(os.listdir("Cards/"+currentsubject+"/"+topicmodify+"/")) == 0:
os.rmdir("Cards/"+currentsubject+"/"+topicmodify+"/")
break
else:
gui.popup_error("Enter Values")
cardwindow.close()
else:
gui.popup_error("Choose A Subject First")
elif event in ("Delete Card"):
if currentsubject != "No Subject":
cardlayout = [[gui.Text(text="Flash Card Deleter",key="toptext")], [gui.Text(text="Topic",key="topictext")], [gui.Input("",key="topicentry",enable_events=True)], [gui.Text(text="Question (Leave Blank For Whole Topic Deletion",key="questiontext")],[gui.Input("",key="questionentry",enable_events=True)],[gui.Button("Delete Card/Topic")]]
cardwindow = gui.Window("Revision Helper : Card Deleter",layout=cardlayout,margins=(0,0),resizable=False)
cardwindow.read()
while True:
event, values = cardwindow.read()
if event in (None,"Cancel"):
break
elif event in ("Delete Card/Topic"):
if (cardwindow["topicentry"].get()).strip() !="":
if cardwindow["questionentry"].get().strip() == "":
for i in os.listdir("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()+"/"):
os.remove("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()+"/"+i)
os.rmdir("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()+"/")
else:
os.remove("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()+"/"+cardwindow["questionentry"].get())
if len(os.listdir("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()+"/")) == 0:
os.rmdir("Cards/"+currentsubject+"/"+cardwindow["topicentry"].get()+"/")
UpdateTree()
break
else:
gui.popup_error("Enter At Least The Topic")
cardwindow.close()
else:
gui.popup_error("Choose A Subject First")
|
[
"[email protected]"
] | |
08e02c1028708a6f7f2389f245deda91846e826a
|
b26160f064954498a8487c7542c4a6b09731eee5
|
/Supplementary/Holland_MgCa/mg_funks/supp_figures.py
|
587a00e2ed446c3e6784fc61b4c76a60607c3e81
|
[] |
no_license
|
oscarbranson/ForamGeochem
|
1c78ee3055b3558a91a490752e17e514e357563a
|
f5d52b4d9963f9e53d4965e05b0834a9de9dc46c
|
refs/heads/master
| 2021-07-22T23:33:24.883980 | 2020-02-03T17:06:15 | 2020-02-03T17:06:15 | 119,317,528 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 9,628 |
py
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from pandas import IndexSlice as idx
from scipy import stats
from scipy.optimize import curve_fit
import uncertainties as un
from uncertainties.unumpy import nominal_values as nom
from uncertainties.unumpy import std_devs as err
from cbsyst import Csys
from .helpers import isolate_constant_conditions
from .plot import spreadm, angle_of_line
plt.rcParams['axes.labelsize'] = 'small'
# Code for making supplementary Figures
#######################################
def carb_chem(raw, dat, mdict, ldict):
fig, ax = plt.subplots(1, 1)
vlim = dat.loc[:, ('csys_mid', 'DIC')].min(), dat.loc[:, ('csys_mid', 'DIC')].max()
for who in mdict.keys():
ind = raw.Measured.who == who
if who not in ['This Study']:
s = 25
z = -1
c = 'C3'
else:
s = 35
z = 1
c = 'C0'
ma = ax.scatter(raw.loc[ind, ('csys_mid', 'CO3')],
raw.loc[ind, ('csys_mid', 'pHtot')],
marker=mdict[who], label=ldict[who],
color=c, alpha=0.75,
# color=dat.loc[ind, ('csys_mid', 'DIC')],
vmin=vlim[0], vmax=vlim[1], cmap=plt.cm.Blues,
edgecolor='k', lw=0.5, s=s, zorder=z)
ax.set_xlabel('$[CO_3^{2-}]\ (\mu mol\ kg^{-1})$')
ax.set_ylabel('$pH_{Total}$')
# fig.colorbar(ma, label='[DIC]')
ax.legend(loc='upper left', fontsize=8)
ax.set_ylim(ax.get_ylim())
ax.set_xlim(0, 550)
fig.tight_layout()
for DIC in [1000, 2000, 4000, 8000]:
cs = Csys(np.linspace(*ax.get_ylim(), 50), DIC)
line = ax.plot(cs.CO3, cs.pHtot, ls='dashed', color=(0,0,0,0.4), zorder=-1, lw=1)
ind = (line[0].get_xdata() < ax.get_xlim()[1]) & (line[0].get_ydata() < ax.get_ylim()[1])
x = line[0].get_xdata()[ind][-4:]
y = line[0].get_ydata()[ind][-4:]
angle = angle_of_line(x, y, ax)
ax.text(x[0], y[0], 'DIC: {:.0f}'.format(DIC), rotation=angle, ha='center', alpha=0.4)
return fig, ax
def culture_chem(dat, mdict, ldict):
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=[8, 3.6])
for who in mdict.keys():
ind = dat.Measured.who == who
if who in ['Allen', 'Hönisch']:
continue
if who not in ['This Study']:
s = 25
z = -1
c = 'C3'
else:
s = 35
z = 1
c = 'C0'
ma = ax1.scatter(dat.loc[ind, ('csys_mid', 'pHtot')],
dat.loc[ind, ('csys_mid', 'DIC')],
marker=mdict[who], label=ldict[who],
color=c, alpha=0.75,
edgecolor='k', lw=0.5, s=s, zorder=z)
ma = ax2.scatter(dat.loc[ind, ('Measured', '[Ca]sw')],
dat.loc[ind, ('Measured', '[Mg]sw')],
marker=mdict[who], label=ldict[who],
color=c, alpha=0.75,
edgecolor='k', lw=0.5, s=s, zorder=z)
ax1.legend()
ax1.set_xlabel('$pH_{tot}$')
ax1.set_ylabel('DIC $(\mu mol kg^{-1})$')
ax2.set_xlabel('$[Ca]_{SW} ~ (mM)$')
ax2.set_ylabel('$[Mg]_{SW} ~ (mM)$')
fig.tight_layout()
return fig, (ax1, ax2)
# Code for making figures in the Python notebook supplement.
############################################################
def parameter_space(dat):
fig, ax = plt.subplots(1,1)
x_thresh = [19, 21, 23, 25.5, 27]
y_thresh = [1.8, 4, 8, 12]
xt_last = 0
yt_last = 0
vmin = dat.loc[:, ('csys_mid', 'DIC')].min()
vmax = dat.loc[:, ('csys_mid', 'DIC')].max()
for xt in x_thresh:
for yt in y_thresh:
ind = ((dat.loc[:, ('Measured', 'Temp')] >= xt_last) & (dat.loc[:, ('Measured', 'Temp')] <= xt) &
(dat.loc[:, ('Measured', 'Mg/Casw')] >= yt_last) & (dat.loc[:, ('Measured', 'Mg/Casw')] <= yt))
if sum(ind) > 0:
x, y, xm, ym = spreadm(dat.loc[ind, ('Measured', 'Temp')].astype(float).values,
dat.loc[ind, ('Measured', 'Mg/Casw')].astype(float).values,
x_tol=0.15, y_tol=0.17, offset_mult=0.2)
if xm is not None:
for xi, yi in zip(x,y):
c = (.6, .6, .6)
ax.plot([xm, xi], [ym, yi], lw=0.5, color=c, zorder=-1)
ax.scatter(xm, ym, color='w', edgecolor=c, lw=0.5, s=10)
ma = ax.scatter(x, y, c=sorted(dat.loc[ind, ('csys_mid', 'DIC')]), vmin=vmin, vmax=vmax, cmap=plt.cm.Blues, s=20, lw=0.5, edgecolor='k')
yt_last = yt
xt_last = xt
ax.set_xlabel('Temperature ($^{\circ}C$)')
ax.set_ylabel('Mg/Ca$_{SW}$ (mol/mol)')
fig.colorbar(ma, label='[DIC] ($\mu M$)')
return fig, ax
def add_vs_mult(dat, crossplots):
fig, axs = plt.subplots(2, 2)
sub = isolate_constant_conditions(dat, Temp=22, tolerance=0.08)
vmin, vmax = sub.loc[:, ('Measured', 'D_Mg')].min() , sub.loc[:, ('Measured', 'D_Mg')].max() * 1.2
cmap = plt.cm.Blues_r
ax = axs[0,0]
ax.set_title('$D_{Mg} = C_1\ DIC + C_2\ [Ca] + D$', loc='left', fontsize=8)
cm = ax.scatter(sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')], c=sub.loc[:, ('Measured', 'D_Mg')] ,
edgecolors='k', lw=0.5, vmin=vmin, vmax=vmax, cmap=cmap)
ax.set_xlabel('[Ca]sw')
ax.set_ylabel('DIC')
# fit an additive relationship
def ca_dic_fn(x, C1, C2, D):
Ca, DIC = x
return DIC * C1 + C2 * Ca + D
p, cov = curve_fit(ca_dic_fn, (sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')]), sub.loc[:, ('Measured', 'D_Mg')] )
x = np.linspace(*ax.get_xlim(), 100)
y = np.linspace(*ax.get_ylim(), 100)
X, Y = np.meshgrid(x, y)
ax.pcolormesh(X, Y, ca_dic_fn((X, Y), *p), zorder=-1, vmin=vmin, vmax=vmax, cmap=cmap)
rax = axs[1, 0]
rax.set_ylabel('N')
resid = sub.loc[:, ('Measured', 'D_Mg')] - ca_dic_fn((sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')]), *p)
rax.hist(resid, bins=20)
ax = axs[0,1]
ax.set_title('$D_{Mg} = C_1\ DIC\ [Ca] + D$', loc='left', fontsize=8)
cm = ax.scatter(sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')], c=sub.loc[:, ('Measured', 'D_Mg')] ,
edgecolors='k', lw=0.5, vmin=vmin, vmax=vmax, cmap=cmap)
ax.set_xlabel('[Ca]sw')
# fit the relationship
def ca_dic_fn(x, C1, D):
Ca, DIC = x
return DIC * C1 * Ca + D
p, cov = curve_fit(ca_dic_fn, (sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')]), sub.loc[:, ('Measured', 'D_Mg')] )
x = np.linspace(*ax.get_xlim(), 100)
y = np.linspace(*ax.get_ylim(), 100)
X, Y = np.meshgrid(x, y)
ax.pcolormesh(X, Y, ca_dic_fn((X, Y), *p), zorder=-1, vmin=vmin, vmax=vmax, cmap=cmap)
rax = axs[1, 1]
resid = sub.loc[:, ('Measured', 'D_Mg')] - ca_dic_fn((sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')]), *p)
rax.hist(resid, bins=20)
fig.tight_layout(rect=[0, 0, .85, 1])
pos = ax.get_position()
cax = fig.add_axes([.855, pos.y0, .02, pos.height])
fig.colorbar(cm, cax=cax, label='$D_{Mg}$')
for rax in axs[1, :]:
rax.set_ylim(0, 11)
rax.set_xlabel('Residual')
crossplots['Ca_DIC'] = (X, Y, ca_dic_fn((X, Y), *p), sub.loc[:, ('Measured', '[Ca]sw')], sub.loc[:, ('csys_mid', 'DIC')], sub.loc[:, ('Measured', 'D_Mg')])
return fig, axs, crossplots
def C_speciation(raw):
fig, ax = plt.subplots(1, 1)
xvar = 'pHtot'
yvar = 'CO3'
# colors
cvar = '[Mg]sw'
ambient = 52
cmap = plt.cm.RdBu
xs = raw.loc[:, idx[['pitzer', 'MyAMI'], xvar]]
ys = raw.loc[:, idx[['pitzer', 'MyAMI'], yvar]]
pad = 0.05
xmin = np.nanmin(xs.values)
xmax = np.nanmax(xs.values)
xran = xmax - xmin
xlim = (xmin - pad * xran, xmax + pad * xran)
ymin = np.nanmin(ys.values)
ymay = np.nanmax(ys.values)
yran = ymay - ymin
ylim = (ymin - pad * yran, ymay + pad * yran)
for i, r in raw.iterrows():
cv = ((r.loc[('Measured', cvar)] - ambient) / np.max(abs(raw.loc[:, ('Measured', cvar)] - ambient)) + 1) / 2
c = list(cmap(cv))
c[-1] = 0.7
xp = r.loc[('pitzer', xvar)]
xm = r.loc[('MyAMI', xvar)]
yp = r.loc[('pitzer', yvar)]
ym = r.loc[('MyAMI', yvar)]
x = [xp, xm, xm, xp]
y = [yp, yp, ym, ym]
p = Polygon(np.vstack([x, y]).T, facecolor=c, lw=0.5, edgecolor=(0,0,0,0.8))
ax.add_patch(p)
ax.set_xlim(xlim)
ax.set_ylim(ylim)
ax.set_xlabel('$pH_{Total}$')
ax.set_ylabel('$[CO_3]\ (\mu mol\ kg^{-1})$')
fig.subplots_adjust(right=0.8)
pos = ax.get_position()
cax = fig.add_axes([.82, pos.y0, 0.03, pos.height])
cax.yaxis.tick_right()
xs, ys = np.meshgrid([0,1], np.linspace(raw.loc[:, ('Measured', cvar)].min(), raw.loc[:, ('Measured', cvar)].max(), 50))
cax.pcolormesh(xs,ys,ys, cmap=cmap)
cax.set_ylabel('$[Mg]_{SW}\ (mmol\ kg^{-1})$')
cax.yaxis.set_label_position('right')
cax.xaxis.set_visible(False)
return fig
|
[
"[email protected]"
] | |
a6fb19b3c66c25a3ed7314ed3f0a4285ab02fbb3
|
bf6061ce108aed02d1fa20670f1e751303ba689b
|
/granadatiles_project/apps/galleries/migrations/0004_auto_20160219_2112.py
|
f7a38f4f9cc8947b677c074942c75c7a638679b4
|
[] |
no_license
|
rogergaitan/granadatiles
|
1dd320478407627ee940207e3cbcdeb015f8caa0
|
3bf26b77f4b99605a266e168c2c7f18d6b6360d5
|
refs/heads/master
| 2021-04-03T10:24:30.705477 | 2018-03-13T20:06:54 | 2018-03-13T20:06:54 | 125,105,404 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 619 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('galleries', '0003_galleryimage_tiles'),
]
operations = [
migrations.RemoveField(
model_name='galleryimage',
name='galleryCategory',
),
migrations.AddField(
model_name='galleryimage',
name='gallery_categories',
field=models.ManyToManyField(verbose_name='Gallery Categories', to='galleries.GalleryCategory', related_name='images'),
),
]
|
[
"[email protected]"
] | |
bb9562a4ccb28a45756442fc794a6be5bf04f5ae
|
e392092eb5c90259630e2f3f5a463dae830493d1
|
/email_reply_parser/__init__.py
|
c17e27938b6769353c3ef906ab66468431c75b5f
|
[
"MIT"
] |
permissive
|
lukas-manduch/antispam-system
|
2e038e69cfaec5707e7ecedd170ccfc9b5c3df8c
|
1aac94a8fb03cf49564408248606d30a77ff284e
|
refs/heads/master
| 2021-03-22T04:39:42.527534 | 2017-06-14T22:40:45 | 2017-06-14T22:40:51 | 83,908,589 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 98 |
py
|
__version__ = (0, 0, 1)
from .parser import read, parse_reply
__all__ = ('read', 'parse_reply')
|
[
"[email protected]"
] | |
7a78ee18b6b71374ce96f2e7b9225a8918f69ce9
|
7edb4fa01d6325a5042dbcca6b7925513a5221b5
|
/New folder/018_email_sender/email_sender_v2.py
|
ece67aa8d579d862bc40a71ce6d5aadcc10ccde3
|
[] |
no_license
|
KotRom/pythonProject1
|
ae0919727fa3cabc9be5e04948d34d33c54711d7
|
0709802abfe0e998818099662e4cd56c01a545bf
|
refs/heads/master
| 2023-08-30T09:57:57.614352 | 2021-11-05T19:01:00 | 2021-11-05T19:01:00 | 404,008,204 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,161 |
py
|
# password dsf342asda
# [email protected]
# smtp: smtp.zone.eu port 465 SSL/TLS or 587 STARTTLS
import smtplib
from email.message import EmailMessage
smtp = smtplib.SMTP_SSL('smtp.zone.eu', 465)
smtp.login('[email protected]', 'dsf342asda')
msg = EmailMessage()
msg['Subject'] = 'Sample email sent by Python'
msg['From'] = '[email protected]'
msg['To'] = '[email protected]'
msg.set_content('Sample mail sent by Python script. Have fun!')
msg.add_alternative("""\
<!DOCTYPE html>
<html>
<body>
<h1 style="color: red;">What is in a lava lamp and how does it work?</h1>
<p style="color: grey;">This is a test message sent to you by a small script on Python</p>
<p style="color: grey;">The lamp contains blobs of coloured wax inside a glass vessel filled with clear or translucent liquid; the wax rises and falls as its density changes due to heating from an incandescent light bulb underneath the vessel. The appearance of the wax is suggestive of pāhoehoe lava, hence the name.</p>
</body>
</html>
""", subtype='html')
smtp.send_message(msg)
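# Illustrative alternative (not part of the original script): the 587/STARTTLS endpoint
# mentioned in the header comment; same placeholder credentials as above.
# smtp = smtplib.SMTP('smtp.zone.eu', 587)
# smtp.starttls()
# smtp.login('[email protected]', 'dsf342asda')
# smtp.send_message(msg)
# smtp.quit()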
|
[
"[email protected]"
] | |
1f52885f118744862a082fd33704cd5b2e738fe8
|
56166a310098a15f65a6201e69c778e493374577
|
/code/path_poisoning/modules/peeringtb.py
|
23299613320ac78dd5597343506d8af0a6879681
|
[] |
no_license
|
nrodday/TAURIN-21
|
bbb24986a3b7d1cdc9d86b2b8e2ad66c136a2075
|
bbec9a0a9eb9d28164d20a62dffa6c360d2b6972
|
refs/heads/main
| 2023-08-23T23:23:10.772163 | 2021-10-19T17:00:44 | 2021-10-19T17:00:44 | 372,407,431 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,868 |
py
|
import subprocess
import shlex
import configparser
from modules.util import *
config = configparser.ConfigParser()
config.read('config.ini')
dir = config['general']['peering_testbed_dir']
ptb_asn = config['general']['ptb_asn']
if dir[-1] != "/":
dir = dir+"/"
def announce_prefix(prefix):
cmd = 'sudo ' + dir + 'peering prefix announce -m ' + config['general']['mux'] + ' ' + prefix
log("announce_prefix", "DEBUG", cmd)
proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) # Executing
proc.communicate() # Fetching result
def withdraw_prefix(prefix):
cmd = 'sudo ' + dir + 'peering prefix withdraw -m ' + config['general']['mux'] + ' ' + prefix
log("withdraw_prefix", "DEBUG", cmd)
proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) # Executing
proc.communicate() # Fetching result
def announce_poisoned_prefix(prefix, asn):
if len(asn) == 2 and type(asn) is list:
poison = " -p " + str(asn[0]) + " -p " + str(ptb_asn) + " -p " + str(asn[1]) + " -p " + str(ptb_asn)
elif len(asn) == 1 and type(asn) is list:
        poison = " -p " + str(asn[0])  # build the same "-p <ASN>" flag as the two-ASN case; the bare value would corrupt the command string
else:
log("announce_poisoned_prefix", "DEBUG", "AS List must be of type list and of len 1 or 2, it currently is: " + str(asn))
raise ListError("AS List must be of type list and of len 1 or 2")
cmd = 'sudo ' + dir + 'peering prefix announce -m ' + config['general']['mux'] + poison + ' ' + prefix
log("announce_poisoned_prefix", "DEBUG", cmd)
proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) # Executing
proc.communicate() # Fetching result
def check_announce_v4(prefix):
announced = bool(False)
cmd = 'sudo ' + dir + 'peering bgp adv '+config['general']['mux']
log("check_announce_v4", "DEBUG", cmd)
proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) # Executing
out, err = proc.communicate() # Fetching result
for line in out.decode().split('\n'):
log("check_announce_v4", "DEBUG", line)
if config['general']['debug'] == "True":
print(line)
if prefix in line:
announced = bool(True)
return announced
else:
continue
return announced
def check_announce_v6(prefix):
announced = bool(False)
cmd = 'sudo ' + dir + 'peering bgp6 adv '+config['general']['mux']
log("check_announce_v6", "DEBUG", cmd)
proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) # Executing
out, err = proc.communicate() # Fetching result
for line in out.decode().split('\n'):
log("check_announce_v6", "DEBUG", line)
if config['general']['debug'] == "True":
print(line)
if prefix in line:
announced = bool(True)
return announced
else:
continue
return announced
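# Illustrative usage sketch (not part of the original module); the prefix and ASNs below are
# placeholders, not values taken from the project configuration.
# if not check_announce_v4("184.164.224.0/24"):
#     announce_prefix("184.164.224.0/24")
# announce_poisoned_prefix("184.164.224.0/24", [3356])        # poison a single ASN
# announce_poisoned_prefix("184.164.224.0/24", [3356, 174])   # poison two ASNs
# withdraw_prefix("184.164.224.0/24")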
|
[
"[email protected]"
] | |
08592906e415e04754f6338b2ac1811ec11d4c06
|
9b2bea687dbc18cc478318cefcc99122c54193df
|
/search/views.py
|
d0d59a0068f2f247a84e4faeb13b6d667d5aa30d
|
[] |
no_license
|
Rath-san/wagtail_wasteland
|
7669f4491d68d6175ebc2c54a1facac01077f2d2
|
e2117b9abf7a41eead8c590d01344bb52b55a7ef
|
refs/heads/master
| 2021-05-14T17:33:22.091931 | 2018-01-02T19:23:02 | 2018-01-02T19:23:02 | 116,049,150 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,061 |
py
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
[
"[email protected]"
] | |
1de786fcadd53ed8cf3d4a953a6b1b9a234f5ca7
|
2ff4fbef275d02bafc0390fdf826d3819d0b4549
|
/fabadmin/forms.py
|
148bf7f3cc311d2f2077a7562e92140af163658e
|
[
"BSD-3-Clause"
] |
permissive
|
fgallina/django-fabadmin
|
e186729178d8cf8ce685acd5952c56a0de26261c
|
c2fcd154cd57e2f62266b205279cebfed7853caa
|
refs/heads/master
| 2020-05-19T16:17:15.661708 | 2011-11-03T18:41:04 | 2011-11-03T18:41:04 | 2,704,192 | 0 | 0 |
NOASSERTION
| 2021-05-14T15:09:32 | 2011-11-03T18:39:29 |
Python
|
UTF-8
|
Python
| false | false | 2,473 |
py
|
from itertools import chain
from django.utils import simplejson
from django import forms
from django.utils.encoding import force_unicode
from django.utils.html import conditional_escape, escape
from django.core.exceptions import ValidationError
from django.core import validators
from django.utils.encoding import smart_unicode
class FabTaskSelectWidget(forms.Select):
def render_option(self, selected_choices, task):
value = force_unicode(task.value)
selected_html = (value in selected_choices) \
and u' selected="selected"' or ''
return u'<option value="%s"%s title="%s" data="%s">%s</option>' % (
escape(value),
selected_html,
force_unicode(escape(task.short_description)),
escape(force_unicode(simplejson.dumps(task.__dict__))),
conditional_escape(force_unicode(task.name)))
def render_options(self, choices, selected_choices):
# Normalize to strings.
selected_choices = set([force_unicode(v) for v in selected_choices])
output = []
for task in chain(self.choices, choices):
output.append(self.render_option(selected_choices, task))
return u'\n'.join(output)
class FabTaskChoiceField(forms.ChoiceField):
widget = FabTaskSelectWidget
def validate(self, value):
if value in validators.EMPTY_VALUES and self.required:
raise ValidationError(self.error_messages['required'])
if value and not self.valid_value(value):
raise ValidationError(self.error_messages['invalid_choice'] % {'value': value})
def valid_value(self, value):
"Check to see if the provided value is a valid choice"
for task in self.choices:
if value == smart_unicode(task.name):
return True
return False
class FabfileForm(forms.Form):
task = FabTaskChoiceField()
arguments = forms.CharField(required=False)
def __init__(self, *args, **kwargs):
tasks = kwargs.pop('tasks', None)
super(FabfileForm, self).__init__(*args, **kwargs)
self.fields['task'].choices = tasks
def clean_arguments(self):
cleaned_data = self.cleaned_data
arguments = cleaned_data['arguments']
for arg in arguments.split(":"):
if arg.startswith("<<") and arg.endswith(">>"):
raise forms.ValidationError("A required argument is missing")
return arguments
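# Illustrative note (an assumption, not taken from the original project): the arguments
# string is colon-separated and required arguments are advertised as "<<placeholder>>",
# e.g. "master:<<host>>" is rejected by clean_arguments() until the placeholder is
# replaced with a real value such as "master:web01".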
|
[
"fgallina@cuca"
] |
fgallina@cuca
|
047e6f52be4c14be072c722d08822c707bf030c4
|
1c6eb66eb0f67b57a7c4a6d08bcd1ef2df8f436d
|
/website/auth.py
|
5efc3c45ff51873fe47485f366861b0b4d7425ae
|
[] |
no_license
|
TheKing-coder68/Flask-Website
|
4778256b95aa6e8510a23180934876412687f52f
|
24acd2687f81b03eda9bd73a1e2ac4b5eb50867f
|
refs/heads/master
| 2023-04-18T08:58:07.417779 | 2021-04-16T17:31:30 | 2021-04-16T17:31:30 | 354,710,216 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,022 |
py
|
from flask import Blueprint, render_template, request, flash
auth=Blueprint('auth', __name__)
@auth.route('/login', methods=['GET','POST'])
def login():
return render_template("login.html")
@auth.route('/logout')
def logout():
return "This is the logout page"
@auth.route('/sign-up', methods=['GET','POST'])
def sign_up ():
if request.method=='POST':
email = request.form.get('email')
firstName = request.form.get('firstName')
password1 = request.form.get('password1')
password2 = request.form.get('password2')
if len(email)<4:
flash("Email must be greater than 3 characters.", category='error')
elif len(firstName)<2:
flash("First name must be greater than 1 character.", category="error")
elif password1 != password2:
flash("Passwords don't match.", category="error")
elif len(password1) < 7:
flash("Password must be at least 7 characters", category="error")
else:
#add user to the database
flash("Account created!", category="success")
return render_template("sign_up.html")
|
[
""
] | |
e9bb27222c38f40ffe7f88c5cf3722d5dd47c363
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/10/usersdata/124/24836/submittedfiles/testes.py
|
5775a4b04ac6e07e20b13628e10307df3311b756
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,107 |
py
|
# -*- coding: utf-8 -*-
from __future__ import division
import math
def vabsol(x):
if x < 0:
x = -1*x
return x
def calculopi(y):
c = 3
d = 2
for i in range (0, y, 1):
if i%2 != 0:
c = c - (4/(d*(d+1)*(d+2)))
elif i%2 == 0:
c = c + (4/(d*(d+1)*(d+2)))
d = d + 2
return c
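# (Illustrative note, not in the original) calculopi() implements the Nilakantha series
#   pi = 3 + 4/(2*3*4) - 4/(4*5*6) + 4/(6*7*8) - ...
# e.g. calculopi(4) ~ 3.1397, approaching 3.14159... as m grows, and razaurea() uses the
# identity phi = 2*cos(pi/5) for the golden ratio.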
def cos(z, epsilon):
    cosz = 1
    v = 2
    fat = 1
    cont = 0
    d = (z**v)/fat
    while epsilon <= d:
        for i in range (v, 0, -1):
            fat = fat*i
        d = (z**v)/fat  # recompute the current series term; the original never updated d, so the loop could not terminate
        if cont%2 != 0:
            cosz = cosz + d
        elif cont%2 == 0:
            cosz = cosz - d
        v = v + 2
        fat = 1
        cont = cont + 1
    return cosz
def razaurea(m, epsilon):
pi = calculopi(m)
fi = 2*cos(pi/5, epsilon)
return fi
m = int(input('Digite o número m de termos da fórmula de pi: '))
epsilon = input('Digite o epsilon para o cálculo da razão áurea: ')
m = vabsol(m)
print('Valor aproximado de pi: %.15f' %calculopi(m))
print('Valor aproximado da razão áurea: %.15f' %razaurea(m, epsilon))
|
[
"[email protected]"
] | |
4ad8fd01c03a6ae1a29510b7ddaba5625e4d100c
|
2b398353f5b0529ac666ef180e9dc966474a70c0
|
/vspk/v6/nunetworkperformancebinding.py
|
0c35e1c30289cec4db7f7ef8fd9e2d6a7936ffec
|
[
"BSD-3-Clause"
] |
permissive
|
nuagenetworks/vspk-python
|
e0c4570be81da2a4d8946299cb44eaf9559e0170
|
9a44d3015aa6424d0154c8c8a42297669cce11f9
|
refs/heads/master
| 2023-06-01T01:12:47.011489 | 2023-05-12T19:48:52 | 2023-05-12T19:48:52 | 53,171,411 | 21 | 18 |
BSD-3-Clause
| 2020-12-16T12:36:58 | 2016-03-04T23:10:58 |
Python
|
UTF-8
|
Python
| false | false | 12,223 |
py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUNetworkPerformanceBinding(NURESTObject):
""" Represents a NetworkPerformanceBinding in the VSD
Notes:
Association of Network Performance Measurement policies enable the measurement of path SLA metrics between NSGs in the domain.
"""
__rest_name__ = "networkperformancebinding"
__resource_name__ = "networkperformancebindings"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a NetworkPerformanceBinding instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> networkperformancebinding = NUNetworkPerformanceBinding(id=u'xxxx-xxx-xxx-xxx', name=u'NetworkPerformanceBinding')
>>> networkperformancebinding = NUNetworkPerformanceBinding(data=my_dict)
"""
super(NUNetworkPerformanceBinding, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._last_updated_date = None
self._read_only = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._priority = None
self._associated_network_measurement_id = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="read_only", remote_name="readOnly", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=True)
self.expose_attribute(local_name="associated_network_measurement_id", remote_name="associatedNetworkMeasurementID", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def read_only(self):
""" Get read_only value.
Notes:
Determines whether this entity is read only. Read only objects cannot be modified or deleted.
This attribute is named `readOnly` in VSD API.
"""
return self._read_only
@read_only.setter
def read_only(self, value):
""" Set read_only value.
Notes:
Determines whether this entity is read only. Read only objects cannot be modified or deleted.
This attribute is named `readOnly` in VSD API.
"""
self._read_only = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def priority(self):
""" Get priority value.
Notes:
Priority of the associated Network Performance Measurement
"""
return self._priority
@priority.setter
def priority(self, value):
""" Set priority value.
Notes:
Priority of the associated Network Performance Measurement
"""
self._priority = value
@property
def associated_network_measurement_id(self):
""" Get associated_network_measurement_id value.
Notes:
Associated Network Performance Measurement ID
This attribute is named `associatedNetworkMeasurementID` in VSD API.
"""
return self._associated_network_measurement_id
@associated_network_measurement_id.setter
def associated_network_measurement_id(self, value):
""" Set associated_network_measurement_id value.
Notes:
Associated Network Performance Measurement ID
This attribute is named `associatedNetworkMeasurementID` in VSD API.
"""
self._associated_network_measurement_id = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
|
[
"[email protected]"
] | |
4edc4a4117a2f5785f06ed7c041ecc6251e057d3
|
13f900b9dc0c3e838ff788febaa59514b97d1128
|
/Proyecto/apps.py
|
40c7b3b40f6d31687e5ba04a1ee90b01b19feb2f
|
[] |
no_license
|
JorgitoR/App-Proyectos-Slabcode
|
68439c5fe0dbe58a004b9f04be807f6756d84a7f
|
173ea655bf00f8b5ae7fb0eb4ee0cf0ed5e6f3a7
|
refs/heads/main
| 2023-04-12T21:52:16.339073 | 2021-04-10T21:02:57 | 2021-04-10T21:02:57 | 356,660,392 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 148 |
py
|
from django.apps import AppConfig
class ProyectoConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'Proyecto'
|
[
"[email protected]"
] | |
1331463e04eacbba88e7bfe7a6abd22516264f96
|
474cbe0024e4337bacbfe83623d0392fc0d78cb8
|
/tf2lib/specs/spectral_ops.py
|
61e52a6dbf5a972948e3600e4693f804bf2827ae
|
[] |
no_license
|
SoMA-group/style-drumsynth
|
4408fe8fd3f96d381b3ea4578f31cc33ea4ef5cc
|
e6961e9cf85d42de79c4e6b07cb21e2f9d3dec74
|
refs/heads/main
| 2023-08-26T02:19:07.093638 | 2021-11-09T14:55:09 | 2021-11-09T14:55:09 | 416,120,833 | 27 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 9,839 |
py
|
# Copyright 2021 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library of spectral processing functions.
Includes transforming linear to mel frequency scales and phase to instantaneous
frequency.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
# mel spectrum constants.
_MEL_BREAK_FREQUENCY_HERTZ = 700.0
_MEL_HIGH_FREQUENCY_Q = 1127.0
def mel_to_hertz(mel_values):
"""Converts frequencies in `mel_values` from the mel scale to linear scale."""
return _MEL_BREAK_FREQUENCY_HERTZ * (
np.exp(np.array(mel_values) / _MEL_HIGH_FREQUENCY_Q) - 1.0)
def hertz_to_mel(frequencies_hertz):
"""Converts frequencies in `frequencies_hertz` in Hertz to the mel scale."""
return _MEL_HIGH_FREQUENCY_Q * np.log(
1.0 + (np.array(frequencies_hertz) / _MEL_BREAK_FREQUENCY_HERTZ))
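# Illustrative check (not part of the original Magenta file): the two conversions above are
# inverses of each other, e.g.
#   hertz_to_mel(1000.0)               -> ~1000.0 mel
#   mel_to_hertz(hertz_to_mel(440.0))  -> ~440.0 Hz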
def linear_to_mel_weight_matrix(num_mel_bins=20,
num_spectrogram_bins=129,
sample_rate=16000,
lower_edge_hertz=125.0,
upper_edge_hertz=3800.0):
"""Returns a matrix to warp linear scale spectrograms to the mel scale.
Adapted from tf.signal.linear_to_mel_weight_matrix with a minimum
band width (in Hz scale) of 1.5 * freq_bin. To preserve accuracy,
we compute the matrix at float64 precision and then cast to `dtype`
at the end. This function can be constant folded by graph optimization
since there are no Tensor inputs.
Args:
num_mel_bins: Int, number of output frequency dimensions.
num_spectrogram_bins: Int, number of input frequency dimensions.
sample_rate: Int, sample rate of the audio.
lower_edge_hertz: Float, lowest frequency to consider.
upper_edge_hertz: Float, highest frequency to consider.
Returns:
Numpy float32 matrix of shape [num_spectrogram_bins, num_mel_bins].
Raises:
ValueError: Input argument in the wrong range.
"""
# Validate input arguments
if num_mel_bins <= 0:
raise ValueError('num_mel_bins must be positive. Got: %s' % num_mel_bins)
if num_spectrogram_bins <= 0:
raise ValueError(
'num_spectrogram_bins must be positive. Got: %s' % num_spectrogram_bins)
if sample_rate <= 0.0:
raise ValueError('sample_rate must be positive. Got: %s' % sample_rate)
if lower_edge_hertz < 0.0:
raise ValueError(
'lower_edge_hertz must be non-negative. Got: %s' % lower_edge_hertz)
if lower_edge_hertz >= upper_edge_hertz:
raise ValueError('lower_edge_hertz %.1f >= upper_edge_hertz %.1f' %
(lower_edge_hertz, upper_edge_hertz))
if upper_edge_hertz > sample_rate / 2:
raise ValueError('upper_edge_hertz must not be larger than the Nyquist '
'frequency (sample_rate / 2). Got: %s for sample_rate: %s'
% (upper_edge_hertz, sample_rate))
# HTK excludes the spectrogram DC bin.
bands_to_zero = 1
nyquist_hertz = sample_rate / 2.0
linear_frequencies = np.linspace(
0.0, nyquist_hertz, num_spectrogram_bins)[bands_to_zero:, np.newaxis]
# spectrogram_bins_mel = hertz_to_mel(linear_frequencies)
# Compute num_mel_bins triples of (lower_edge, center, upper_edge). The
# center of each band is the lower and upper edge of the adjacent bands.
# Accordingly, we divide [lower_edge_hertz, upper_edge_hertz] into
# num_mel_bins + 2 pieces.
band_edges_mel = np.linspace(
hertz_to_mel(lower_edge_hertz), hertz_to_mel(upper_edge_hertz),
num_mel_bins + 2)
lower_edge_mel = band_edges_mel[0:-2]
center_mel = band_edges_mel[1:-1]
upper_edge_mel = band_edges_mel[2:]
freq_res = nyquist_hertz / float(num_spectrogram_bins)
freq_th = 1.5 * freq_res
for i in range(0, num_mel_bins):
center_hz = mel_to_hertz(center_mel[i])
lower_hz = mel_to_hertz(lower_edge_mel[i])
upper_hz = mel_to_hertz(upper_edge_mel[i])
if upper_hz - lower_hz < freq_th:
rhs = 0.5 * freq_th / (center_hz + _MEL_BREAK_FREQUENCY_HERTZ)
dm = _MEL_HIGH_FREQUENCY_Q * np.log(rhs + np.sqrt(1.0 + rhs**2))
lower_edge_mel[i] = center_mel[i] - dm
upper_edge_mel[i] = center_mel[i] + dm
lower_edge_hz = mel_to_hertz(lower_edge_mel)[np.newaxis, :]
center_hz = mel_to_hertz(center_mel)[np.newaxis, :]
upper_edge_hz = mel_to_hertz(upper_edge_mel)[np.newaxis, :]
# Calculate lower and upper slopes for every spectrogram bin.
# Line segments are linear in the mel domain, not Hertz.
lower_slopes = (linear_frequencies - lower_edge_hz) / (
center_hz - lower_edge_hz)
upper_slopes = (upper_edge_hz - linear_frequencies) / (
upper_edge_hz - center_hz)
# Intersect the line segments with each other and zero.
mel_weights_matrix = np.maximum(0.0, np.minimum(lower_slopes, upper_slopes))
# Re-add the zeroed lower bins we sliced out above.
# [freq, mel]
mel_weights_matrix = np.pad(mel_weights_matrix, [[bands_to_zero, 0], [0, 0]],
'constant')
return mel_weights_matrix
def diff(x, axis=-1):
"""Take the finite difference of a tensor along an axis.
Args:
x: Input tensor of any dimension.
axis: Axis on which to take the finite difference.
Returns:
d: Tensor with size less than x by 1 along the difference dimension.
Raises:
ValueError: Axis out of range for tensor.
"""
shape = x.get_shape()
if axis >= len(shape):
raise ValueError('Invalid axis index: %d for tensor with only %d axes.' %
(axis, len(shape)))
begin_back = [0 for unused_s in range(len(shape))]
begin_front = [0 for unused_s in range(len(shape))]
begin_front[axis] = 1
size = shape.as_list()
size[axis] -= 1
slice_front = tf.slice(x, begin_front, size)
slice_back = tf.slice(x, begin_back, size)
d = slice_front - slice_back
return d
def unwrap(p, discont=np.pi, axis=-1):
"""Unwrap a cyclical phase tensor.
Args:
p: Phase tensor.
discont: Float, size of the cyclic discontinuity.
axis: Axis of which to unwrap.
Returns:
unwrapped: Unwrapped tensor of same size as input.
"""
dd = diff(p, axis=axis)
ddmod = tf.mod(dd + np.pi, 2.0 * np.pi) - np.pi
idx = tf.logical_and(tf.equal(ddmod, -np.pi), tf.greater(dd, 0))
ddmod = tf.where(idx, tf.ones_like(ddmod) * np.pi, ddmod)
ph_correct = ddmod - dd
idx = tf.less(tf.abs(dd), discont)
ddmod = tf.where(idx, tf.zeros_like(ddmod), dd)
ph_cumsum = tf.cumsum(ph_correct, axis=axis)
shape = p.get_shape().as_list()
shape[axis] = 1
ph_cumsum = tf.concat([tf.zeros(shape, dtype=p.dtype), ph_cumsum], axis=axis)
unwrapped = p + ph_cumsum
return unwrapped
def instantaneous_frequency(phase_angle, time_axis=-2, use_unwrap=True):
"""Transform a fft tensor from phase angle to instantaneous frequency.
Take the finite difference of the phase. Pad with initial phase to keep the
tensor the same size.
Args:
phase_angle: Tensor of angles in radians. [Batch, Time, Freqs]
time_axis: Axis over which to unwrap and take finite difference.
use_unwrap: True preserves original GANSynth behavior, whereas False will
guard against loss of precision.
Returns:
dphase: Instantaneous frequency (derivative of phase). Same size as input.
"""
if use_unwrap:
# Can lead to loss of precision.
phase_unwrapped = unwrap(phase_angle, axis=time_axis)
dphase = diff(phase_unwrapped, axis=time_axis)
else:
# Keep dphase bounded. N.B. runs faster than a single mod-2pi expression.
dphase = diff(phase_angle, axis=time_axis)
dphase = tf.where(dphase > np.pi, dphase - 2 * np.pi, dphase)
dphase = tf.where(dphase < -np.pi, dphase + 2 * np.pi, dphase)
# Add an initial phase to dphase.
size = phase_angle.get_shape().as_list()
size[time_axis] = 1
begin = [0 for unused_s in size]
phase_slice = tf.slice(phase_angle, begin, size)
dphase = tf.concat([phase_slice, dphase], axis=time_axis) / np.pi
return dphase
def polar2rect(mag, phase_angle):
"""Convert polar-form complex number to its rectangular form."""
mag = tf.complex(mag, tf.convert_to_tensor(0.0, dtype=mag.dtype))
phase = tf.complex(tf.cos(phase_angle), tf.sin(phase_angle))
return mag * phase
def random_phase_in_radians(shape, dtype):
return np.pi * (2 * tf.random_uniform(shape, dtype=dtype) - 1.0)
def crop_or_pad(waves, length, channels):
"""Crop or pad wave to have shape [N, length, channels].
Args:
waves: A 3D `Tensor` of NLC format.
length: A Python scalar. The output wave size.
channels: Number of output waves channels.
Returns:
A 3D `Tensor` of NLC format with shape [N, length, channels].
"""
waves = tf.convert_to_tensor(waves)
batch_size = int(waves.shape[0])
waves_shape = tf.shape(waves)
# Force audio length.
pad = tf.maximum(0, length - waves_shape[1])
right_pad = tf.to_int32(tf.to_float(pad) / 2.0)
left_pad = pad - right_pad
waves = tf.pad(waves, [[0, 0], [left_pad, right_pad], [0, 0]])
waves = waves[:, :length, :]
# Force number of channels.
num_repeats = tf.to_int32(
tf.ceil(tf.to_float(channels) / tf.to_float(waves_shape[2])))
waves = tf.tile(waves, [1, 1, num_repeats])[:, :, :channels]
waves.set_shape([batch_size, length, channels])
return waves
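# A minimal usage sketch (not part of the original module; the shapes and random
# input below are illustrative assumptions): warp a linear-frequency magnitude
# spectrogram onto the mel scale with the weight matrix defined above.
if __name__ == '__main__':
  mel_matrix = linear_to_mel_weight_matrix(
      num_mel_bins=80, num_spectrogram_bins=1025, sample_rate=16000,
      lower_edge_hertz=20.0, upper_edge_hertz=8000.0)
  mag = np.abs(np.random.randn(100, 1025))  # [time, freq] magnitudes
  mel_mag = np.dot(mag, mel_matrix)         # [time, mel] -> shape (100, 80)
  print('mel spectrogram shape:', mel_mag.shape)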
|
[
"[email protected]"
] | |
6fd2f9cac3bf22b97948b2a190ce4a65f9c488ae
|
4554f8d3ab1a6267b17dad2b4d2c47b0abe8d746
|
/benchmarking/lab_driver.py
|
03e7770f8347f387876b15dba21e7f83f446d948
|
[
"Apache-2.0"
] |
permissive
|
jteller/FAI-PEP
|
44fead3ca26f4844067d455c86ac8c5bfaf79a14
|
73b8a08815675135e9da7d68375d1218cbd04eaa
|
refs/heads/master
| 2020-04-29T06:04:19.197966 | 2019-03-15T23:32:54 | 2019-03-15T23:32:54 | 175,904,011 | 0 | 0 |
Apache-2.0
| 2019-03-15T23:30:04 | 2019-03-15T23:30:04 | null |
UTF-8
|
Python
| false | false | 5,441 |
py
|
#!/usr/bin/env python
##############################################################################
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
##############################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import json
import os
from download_benchmarks.download_benchmarks import DownloadBenchmarks
from run_remote import RunRemote
from run_lab import RunLab
from harness import BenchmarkDriver
from repo_driver import RepoDriver as OSS_RepoDriver
from utils.custom_logger import getLogger, setLoggerLevel
parser = argparse.ArgumentParser(description="Download models from dewey")
parser.add_argument("--app_id",
help="The app id you use to upload/download your file for everstore")
parser.add_argument("-b", "--benchmark_file",
help="Specify the json file for the benchmark or a number of benchmarks")
parser.add_argument("--lab", action="store_true",
help="Indicate whether the run is lab run.")
parser.add_argument("--logger_level", default="warning",
choices=["info", "warning", "error"],
help="Specify the logger level")
parser.add_argument("--remote", action="store_true",
help="Submit the job to remote devices to run the benchmark.")
parser.add_argument("--root_model_dir", required=True,
help="The root model directory if the meta data of the model uses "
"relative directory, i.e. the location field starts with //")
parser.add_argument("--token",
help="The token you use to upload/download your file for everstore")
parser.add_argument("-c", "--custom_binary",
help="Specify the custom binary that you want to run.")
parser.add_argument("--pre_built_binary",
help="Specify the pre_built_binary to bypass the building process.")
parser.add_argument("--user_string",
help="If set, use this instead of the $USER env variable as the user string.")
class LabDriver(object):
def __init__(self, raw_args=None):
self.args, self.unknowns = parser.parse_known_args(raw_args)
setLoggerLevel(self.args.logger_level)
def run(self):
if not self.args.lab and not self.args.remote:
assert self.args.benchmark_file, \
"--benchmark_file (-b) must be specified"
if self.args.benchmark_file:
getLogger().info("Checking benchmark files to download")
dbench = DownloadBenchmarks(self.args,
getLogger())
dbench.run(self.args.benchmark_file)
if self.args.remote:
unique_args = [
"--app_id", self.args.app_id,
"--token", self.args.token,
]
if self.args.benchmark_file:
unique_args.extend([
"--benchmark_file", self.args.benchmark_file,
])
if self.args.pre_built_binary:
unique_args.extend([
"--pre_built_binary", self.args.pre_built_binary,
])
if self.args.user_string:
unique_args.extend([
"--user_string", self.args.user_string,
])
# hack to remove --repo from the argument list since python2
# argparse doesn't support allow_abbrev to be False, and it is
# the prefix of --repo_dir
if '--repo' in self.unknowns:
index = self.unknowns.index('--repo')
new_unknowns = self.unknowns[:index]
new_unknowns.extend(self.unknowns[index + 2:])
self.unknowns = new_unknowns
app_class = RunRemote
elif self.args.lab:
unique_args = [
"--app_id", self.args.app_id,
"--token", self.args.token,
]
app_class = RunLab
elif self.args.custom_binary or self.args.pre_built_binary:
if self.args.custom_binary:
binary = self.args.custom_binary
else:
binary = self.args.pre_built_binary
repo_info = {
"treatment": {
"program": binary, "commit": "-1", "commit_time": 0
}
}
unique_args = [
"--info \'", json.dumps(repo_info) + '\'',
"--benchmark_file", self.args.benchmark_file,
]
app_class = BenchmarkDriver
else:
if self.args.user_string:
usr_string = self.args.user_string
else:
usr_string = os.environ["USER"]
unique_args = [
"--benchmark_file", self.args.benchmark_file,
"--user_string", usr_string,
]
app_class = OSS_RepoDriver
raw_args = []
raw_args.extend(unique_args)
raw_args.extend(["--root_model_dir", self.args.root_model_dir])
raw_args.extend(["--logger_level", self.args.logger_level])
raw_args.extend(self.unknowns)
getLogger().info("Running {} with raw_args {}".format(app_class, raw_args))
app = app_class(raw_args=raw_args)
app.run()
if __name__ == "__main__":
raw_args = None
app = LabDriver(raw_args=raw_args)
app.run()
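# Invocation sketch (the paths are placeholders; only flags parsed in this file
# are shown, and downstream drivers consume additional pass-through arguments):
#   python lab_driver.py -b /path/to/benchmark.json --root_model_dir /path/to/models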
|
[
"[email protected]"
] | |
9165fa645530445bd62b1dd6a0a62069ada7bff7
|
06e34e2dface0b87fa785cab7e65422a5f20ba18
|
/Solutions/900-RLE-Iterator/python.py
|
df44e067f90f609efe109d47495f2673b48fe69d
|
[] |
no_license
|
JerryHu1994/LeetCode-Practice
|
c9841b0ce70451c19c8a429a3898c05b6233e1d4
|
b0ce69985c51a9a794397cd98a996fca0e91d7d1
|
refs/heads/master
| 2022-02-10T04:42:28.033364 | 2022-01-02T04:44:22 | 2022-01-02T04:44:22 | 117,118,143 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 752 |
py
|
class RLEIterator(object):
def __init__(self, A):
"""
:type A: List[int]
"""
self.li = A
def next(self, n):
"""
:type n: int
:rtype: int
"""
if len(self.li) == 0: return -1
cnt = n
while cnt > 0:
if len(self.li) == 0: return -1
if cnt <= self.li[0]:
ret = self.li[1]
self.li[0] -= cnt
return ret
else:
cnt -= self.li[0]
self.li.pop(0)
self.li.pop(0)
return -1
# Your RLEIterator object will be instantiated and called as such:
# obj = RLEIterator(A)
# param_1 = obj.next(n)
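# Illustrative trace, assuming the run-length encoding [count, value, ...] used
# by LeetCode 900 (e.g. [3, 8, 0, 9, 2, 5] encodes the sequence 8, 8, 8, 5, 5):
#   it = RLEIterator([3, 8, 0, 9, 2, 5])
#   it.next(2)  # -> 8 (two 8s consumed)
#   it.next(1)  # -> 8 (last 8 consumed)
#   it.next(1)  # -> 5
#   it.next(2)  # -> -1 (sequence exhausted)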
|
[
"[email protected]"
] | |
207c49b46a02380d179e8e4579110635ef557edb
|
ae2a7e627fa60e625bdaf76163a1f8ac3cced29e
|
/migrations/versions/7ba92c15782e_create_table_questioner.py
|
2808c22260459e1d1843ca0bccfbf881654dcb36
|
[] |
no_license
|
devsummit/backend
|
bcff998091a4ebfe3501d03bf490cead0ba9edc9
|
e36afa9c8786db19b57c78cfc162ab734b653b33
|
refs/heads/develop
| 2021-01-01T16:53:15.428432 | 2017-11-15T07:19:05 | 2017-11-15T07:19:05 | 97,940,235 | 8 | 6 | null | 2017-11-10T04:38:37 | 2017-07-21T11:21:18 |
HTML
|
UTF-8
|
Python
| false | false | 796 |
py
|
"""create table questioner
Revision ID: 7ba92c15782e
Revises: 645494c3895d
Create Date: 2017-11-13 12:28:36.981820
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7ba92c15782e'
down_revision = '645494c3895d'
branch_labels = None
depends_on = None
def upgrade():
op.create_table('questioners',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('booth_id', sa.Integer, sa.ForeignKey(
'booths.id', ondelete='CASCADE')),
sa.Column('questions', sa.Text),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime)
)
def downgrade():
op.drop_table('questioners')
|
[
"[email protected]"
] | |
0fb9e4222db7bfd1ec0943adcdbb63c79283c96f
|
7ebead5cc471a17188dffd55fa922026ad7ef620
|
/zajecia02/00-Podstawy-tablice.py
|
7b6aea48972651342fce85100124afe53fc6e369
|
[] |
no_license
|
wojciechowskimarcin/piatkaCMI
|
4eb71c6730b2e62f4dae50e5dbeda448b1c932e9
|
a0265f2607ff46b0fd3b73b32442467d22616776
|
refs/heads/master
| 2023-01-09T01:33:57.640656 | 2020-11-03T16:26:07 | 2020-11-03T16:26:07 | 300,417,712 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,881 |
py
|
# Arrays are present in practically every programming language (C, C++, C#, JAVA, JavaScript)
# Arrays let us aggregate data very easily - we put values into "drawers"
# Every array is indexed from zero, e.g. tablica[0]. The next elements are tablica[1], tablica[2]
# Below are a few examples of working with arrays (Python lists)
jPolskiOceny = [5,6,4,3,5] # Creating a list and assigning values at the same time
print("My first grade in Polish class is:", jPolskiOceny[0], " - I got it for reciting a poem")
print("The second grade is:", jPolskiOceny[1], " - I got it for class participation")
# Lists support various methods, e.g. we can sort values, add or remove values
# Adding a new grade - the append method
jPolskiOceny.append(2)
# Printing the whole list; note the grade 2 at the end
print("All grades in Polish class:", jPolskiOceny)
# We can add several grades at once - the extend method
jPolskiOceny.extend([5,6,4,6])
print("All grades in Polish class, after adding [5,6,4,6]:", jPolskiOceny)
# We can insert a value at a chosen position, say after the first two grades - the insert method
jPolskiOceny.insert(2, 2) # i.e. we add the grade 2 at index 2 (the third position)
print("All grades in Polish class, after adding 2 at index 2 (third position - 0-1-2):", jPolskiOceny)
# Now we remove the grade 4, which has index 3: jPolskiOceny[3] - the pop method
jPolskiOceny.pop(3)
print("All grades in Polish class, after removing the grade 4:", jPolskiOceny)
# We can also remove the first element with a given value, e.g. the first 5 - the remove method
jPolskiOceny.remove(5)
print("All grades in Polish class, after removing the first encountered 5:", jPolskiOceny)
# We sort all grades from smallest to largest - the sort method
jPolskiOceny.sort()
print("Grades sorted ascending:", jPolskiOceny)
jPolskiOceny.sort(reverse=True)
print("Grades sorted descending:", jPolskiOceny)
# Example of computing the grade average
sumaOcenJpolski = 0 # helper variables for summing and dividing by the number of grades - this gives the arithmetic mean of the Polish grades
sredniaJpolski = 0
liczbaOcenJezykPolski = len(jPolskiOceny) # count how many grades are in the jPolskiOceny list
# A loop that sums all grades - we use the helper variable ocena
for ocena in jPolskiOceny:
    sumaOcenJpolski = sumaOcenJpolski + ocena
print("Sum of Polish grades:", sumaOcenJpolski)
print("Average of Polish grades:", sumaOcenJpolski/liczbaOcenJezykPolski)
print("Average of Polish grades rounded to 2 decimal places:", round(sumaOcenJpolski/liczbaOcenJezykPolski,2))
|
[
"[email protected]"
] | |
11aca772ad2e23f3a0b1e741e91be114c7a129b1
|
2bff702c0ba91580bdf48c79e9e9ba658877ac5c
|
/Unidad 3/ejerciciotarea2.py
|
1a95b2bcf1952e0bb022c1552423aec82019db65
|
[] |
no_license
|
benjaltamirano/testayed2021
|
528356e76269f35d555a8f921c885375f16909b0
|
ff9ef9e270cc617ba5a1a7adeaa8656919a1746f
|
refs/heads/main
| 2023-08-13T18:01:47.098625 | 2021-09-15T21:15:53 | 2021-09-15T21:15:53 | 406,923,095 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 554 |
py
|
"""
El programa debe:
* Pedir al usuario una cantidad de tramos de un viaje
* Pedir al usuario la duración en minutos de cada tramo
* Calcular el tiempo total de viaje
* No deben generar errores
"""
try:
usuario=int(input("ingrese cantidad de tramos del viaje: "))
minutos=float(input("ingrese minutos de cada tramo: "))
viaje_minutos= usuario*minutos
viaje_horas=(usuario*minutos)/60
print(f"El viaje va a durar {viaje_minutos} minutos")
print(f"O el viaje va a durar {viaje_horas} horas")
except:
print("Error")
|
[
"[email protected]"
] | |
818d347d5ad5029e8246fe46f97504bcf6646510
|
8a42be3f930d8a215394a96ad2e91c95c3b7ff86
|
/Build/Instalation/GeneralDb/Marathon/MarathonTests_3.5.2/HSQL_RecordEditor1/TestCases/SaveAs/SaveAsXml1.py
|
7e17d8d59bd69361fc57951c63a851daf3fe52ae
|
[] |
no_license
|
java-tools/jrec
|
742e741418c987baa4350390d126d74c0d7c4689
|
9ece143cdd52832804eca6f3fb4a1490e2a6f891
|
refs/heads/master
| 2021-09-27T19:24:11.979955 | 2017-11-18T06:35:31 | 2017-11-18T06:35:31 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,710 |
py
|
#{{{ Marathon
from default import *
#}}} Marathon
from Modules import commonBits
def test():
set_java_recorded_version("1.6.0_22")
if frame(' - Open File:0'):
select('File', commonBits.sampleDir() + 'DTAR020_tst1.bin')
click('Edit')
close()
if window('Record Editor'):
click('Export')
if frame('Export - DTAR020_tst1.bin:0'):
## select('JTabbedPane_16', 'Xml')
select('File Name_2', 'Xml')
select('Edit Output File', 'true')
click('save file')
close()
if frame('Tree View - DTAR020_tst1.bin.xml:0'):
select('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', 'rows:[9],columns:[Xml~Namespace]')
assert_content('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', [ ['', '', 'UTF-8', '1.0', 'false', '', '', '', '', '', '', ''],
['', '', '', '', '', '', '', '', '', '', '', ''],
['', '', '', '', '63604808', '20', '40118', '170', '1', '4.87', 'True', ''],
['', '', '', '', '69684558', '20', '40118', '280', '1', '19.00', 'True', ''],
['', '', '', '', '69684558', '20', '40118', '280', '-1', '-19.00', 'True', ''],
['', '', '', '', '69694158', '20', '40118', '280', '1', '5.01', 'True', ''],
['', '', '', '', '62684671', '20', '40118', '685', '1', '69.99', 'True', ''],
['', '', '', '', '62684671', '20', '40118', '685', '-1', '-69.99', 'True', ''],
['', '', '', '', '61664713', '59', '40118', '335', '1', '17.99', 'True', ''],
['', '', '', '', '61664713', '59', '40118', '335', '-1', '-17.99', 'True', ''],
['', '', '', '', '61684613', '59', '40118', '335', '1', '12.99', 'True', ''],
['', '', '', '', '68634752', '59', '40118', '410', '1', '8.99', 'True', ''],
['', '', '', '', '60694698', '59', '40118', '620', '1', '3.99', 'True', ''],
['', '', '', '', '60664659', '59', '40118', '620', '1', '3.99', 'True', ''],
['', '', '', '', '60614487', '59', '40118', '878', '1', '5.95', 'True', ''],
['', '', '', '', '68654655', '166', '40118', '60', '1', '5.08', 'True', ''],
['', '', '', '', '69624033', '166', '40118', '80', '1', '18.19', 'True', ''],
['', '', '', '', '60604100', '166', '40118', '80', '1', '13.30', 'True', ''],
['', '', '', '', '68674560', '166', '40118', '170', '1', '5.99', 'True', '']
])
select('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', 'rows:[9],columns:[Xml~Namespace]')
click('Close')
## select('net.sf.RecordEditor.utils.swing.treeTable.JTreeTable_10', 'rows:[9],columns:[Xml~Namespace]')
close()
select_menu('Window>>DTAR020_tst1.bin>>Table: ')
## window_closed('Record Editor')
close()
pass
|
[
"bruce_a_martin@b856f413-25aa-4700-8b60-b3441822b2ec"
] |
bruce_a_martin@b856f413-25aa-4700-8b60-b3441822b2ec
|
aa293e1ff78c775da8ee0d65c93d61dbe77e9ece
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_212/ch38_2020_06_18_18_19_09_636520.py
|
a1acd720111d63b2d1b433ca15896300cc635a3a
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 206 |
py
|
def quantos_uns (n):
soma = 0
i=0
num = str(n)
    while i < len(num):
if num[i] == '1':
soma += 1
i +=1
return soma
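# Quick sanity check (illustrative, not part of the original submission);
# an equivalent one-liner would be str(n).count('1').
assert quantos_uns(121) == 2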
|
[
"[email protected]"
] | |
655c2a2f805cf993afaff63d6a4f19fb5d67fcca
|
8891efac3ca3d7ab7b4034a6e1d0171ca0d48b71
|
/1/stemming/stemming.py
|
b689f8ac1a0220e88e0ec3b5be0f142a7010bdd3
|
[] |
no_license
|
SZhaoBC/TextAnalysis
|
b2b8029b3fbae69ca481b0e3b0af1fd0305b546a
|
28721e757a88ec65c8f080b0536b914815ec6489
|
refs/heads/master
| 2020-04-12T22:05:27.185292 | 2018-12-22T04:30:31 | 2018-12-22T04:30:31 | 162,781,311 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,041 |
py
|
from nltk.stem import PorterStemmer
from nltk.tokenize import word_tokenize
ps = PorterStemmer()
p_words = open('positive.txt','r')
pos_words = [line.rstrip('\n') for line in p_words]
n_words = open('negative.txt','r')
neg_words = [line.rstrip('\n') for line in n_words]
data = open('labeledDataset.csv','r')
data_content = []
for line in data:
line.rstrip('\n')
data_content.append(list(line.strip().split(',')))
#stemming for positive lexicon
filename="positive_words.csv"
f=open(filename,"w")
for w in pos_words:
f.write(ps.stem(w)+"\n")
f.close()
#stemming for negative lexicon
filename="negative_words.csv"
f=open(filename,"w")
for w in neg_words:
f.write(ps.stem(w)+"\n")
f.close()
filename = "stemmingReview.csv"
f=open(filename,"w")
for(content,label) in data_content:
contentWords = word_tokenize(content)
for w in contentWords:
f.write(ps.stem(w).lower()+" ")
f.write(",")
f.write(label+"\n")
print(ps.stem(content))
print(ps.stem("did"))
f.close()
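# Note: word_tokenize requires the NLTK 'punkt' tokenizer models to be available,
# e.g. via `import nltk; nltk.download('punkt')` (an environment assumption, not
# handled by the script itself).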
|
[
"[email protected]"
] | |
484c9e825b88c69ce5fb14f60c927e5703f10386
|
4715951492667ad9597bd559ffc59709952b73ed
|
/Respository/RepositoriesBase/__init__.py
|
a14a639174d7ca788f4339443ccc1fc10e8c585b
|
[] |
no_license
|
AndresJejen/SolidRussell
|
99971930a8104c095175638042473e4f69d469bc
|
88bf8f20289deafb520dedbe65afbe8827977166
|
refs/heads/master
| 2022-07-04T17:25:17.804971 | 2020-05-14T14:54:52 | 2020-05-14T14:54:52 | 262,442,946 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 47 |
py
|
from .MemoryRepository import MemoryRepository
|
[
"[email protected]"
] | |
9b1cc198ba049ed2a1e88ee56531681e0b4e438a
|
f4aec883b8073c4139046590d03907a751db6ab8
|
/tests/snippets/pipeline/pipeline.py
|
3d18442921f639077263c258ec8797f616f848ce
|
[] |
no_license
|
turnhq/nucling
|
1699d2a19154c4332c9836eace03ee21ae72ed41
|
56426954c6ca48e4f6d5314f9a7807dac986bce9
|
refs/heads/master
| 2020-03-28T06:56:30.360598 | 2019-04-10T21:10:33 | 2019-04-10T21:10:33 | 147,871,208 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,429 |
py
|
import unittest
from nucling.snippet.pipelines import (
Pipeline, Pipeline_manager, Transform_keys_camel_case_to_snake,
Remove_nones,
)
class Pipeline_with_FUN( Pipeline ):
def FUN( p, x ):
return x + 15
class Test_pipeline( unittest.TestCase ):
def setUp( self ):
self.p = Pipeline()
self.q = Pipeline()
def test_when_the_class_dont_have_fun_should_raise_no_implemented( self ):
with self.assertRaises( NotImplementedError ):
Pipeline().process( {} )
def test_when_the_instance_is_assing_fun_should_run_the_function( self ):
result = Pipeline( fun=lambda x: x + 10 ).process( 10 )
self.assertEqual( result, 20 )
def test_when_the_pipiline_have_FUN_should_run_the_function( self ):
result = Pipeline_with_FUN().process( 40 )
self.assertEqual( result, 55 )
def test_when_combine_with_another_thing_should_return_a_manaager( self ):
result = self.p | self.q
self.assertIsInstance( result, Pipeline_manager )
def test_the_new_manager_should_contain_the_pipeline_and_the_other( self ):
result = self.p | self.q
self.assertIs( result.children[0], self.p )
self.assertIs( result.children[1], self.q )
def test_do_or_to_the_class_should_be_a_manager_with_both_class( self ):
result = Pipeline | Pipeline
self.assertIsInstance( result, Pipeline_manager )
self.assertIsInstance( result.children[0], type )
self.assertIsInstance( result.children[1], type )
class Test_camel_case( unittest.TestCase ):
def setUp( self ):
self.prev_dict = { 'HelloWorld': 'hello_world' }
self.result_dict = { 'hello_world': 'hello_world' }
def test_transform_key_to_camel_to_sanke_should_transform_the_keys( self ):
result = Transform_keys_camel_case_to_snake().process( self.prev_dict )
self.assertDictEqual( result, self.result_dict )
class Test_remove_nones( unittest.TestCase ):
def setUp( self ):
self.prev_dict = { 'nones': None, 'hello_world': 'hello_world' }
self.result_dict = { 'hello_world': 'hello_world' }
def test_remove_nones_should_no_return_a_none( self ):
result = Remove_nones().process(
{ 'day': None, 'month': None, 'year': '100' } )
result = Remove_nones().process( self.prev_dict )
self.assertDictEqual( result, self.result_dict )
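# Convenience entry point so the tests can also be run directly as a script
# (equivalent to invoking the module through `python -m unittest`):
if __name__ == '__main__':
    unittest.main()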
|
[
"[email protected]"
] | |
bdfaaae3fb459abeb5243fc81224ea68ba5dc7db
|
f986393b6aa4e6557fce735e9f7f1efa76649463
|
/migrations/versions/44e45369770c_.py
|
e86e8c13373479c2a6b18587ee746094a3ead8eb
|
[
"Apache-2.0"
] |
permissive
|
claire0613/Scard
|
6a0e1c8002c02e6806a000537ce2e6c2609b333b
|
11c30084398860bc31326c56424d6794806245eb
|
refs/heads/master
| 2023-08-29T00:38:27.198173 | 2021-10-20T05:40:54 | 2021-10-20T05:40:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,004 |
py
|
"""empty message
Revision ID: 44e45369770c
Revises:
Create Date: 2021-06-12 14:08:33.017959
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '44e45369770c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('verify', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('gender', sa.Enum('male', 'female'), nullable=True),
sa.Column('birthday', sa.Date(), nullable=True),
sa.Column('collage', sa.String(length=255), nullable=True),
sa.Column('department', sa.String(length=255), nullable=True),
sa.Column('scard', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('avatar', sa.String(length=255), server_default='https://d2lzngk4bddvz9.cloudfront.net/avatar/default_avatar.jpeg', nullable=True),
sa.Column('relationship', sa.Enum('secret', 'single', 'in_a_relationship', 'complicated', 'open_relationship', 'no_show'), nullable=True),
sa.Column('interest', sa.Text(), nullable=True),
sa.Column('club', sa.Text(), nullable=True),
sa.Column('course', sa.Text(), nullable=True),
sa.Column('country', sa.Text(), nullable=True),
sa.Column('worry', sa.Text(), nullable=True),
sa.Column('swap', sa.Text(), nullable=True),
sa.Column('want_to_try', sa.Text(), nullable=True),
sa.Column('days_no_open_scard', sa.Integer(), server_default=sa.text('3'), nullable=False),
sa.Column('match_list', sa.JSON(), server_default=sa.text('(JSON_ARRAY())'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
op.create_index('email_pwd_index', 'user', ['email', 'password'], unique=False)
op.create_index('no_open_index', 'user', ['days_no_open_scard'], unique=False)
op.create_table('scard',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('user_1', sa.Integer(), nullable=False),
sa.Column('user_2', sa.Integer(), nullable=False),
sa.Column('create_date', sa.Date(), server_default=sa.text('(NOW())'), nullable=True),
sa.Column('user_1_message', sa.String(length=255), nullable=True),
sa.Column('user_2_message', sa.String(length=255), nullable=True),
sa.Column('is_friend', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.ForeignKeyConstraint(['user_1'], ['user.id'], ),
sa.ForeignKeyConstraint(['user_2'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('user1_date_index', 'scard', ['user_1', 'create_date'], unique=False)
op.create_index('user2_date_index', 'scard', ['user_2', 'create_date'], unique=False)
op.create_table('messages',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('scard_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('create_time', sa.DateTime(), server_default=sa.text('NOW()'), nullable=True),
sa.ForeignKeyConstraint(['scard_id'], ['scard.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('messages')
op.drop_index('user2_date_index', table_name='scard')
op.drop_index('user1_date_index', table_name='scard')
op.drop_table('scard')
op.drop_index('no_open_index', table_name='user')
op.drop_index('email_pwd_index', table_name='user')
op.drop_table('user')
# ### end Alembic commands ###
|
[
"[email protected]"
] | |
350341a7d34ec5a08e73d9e251f0969c31319e0c
|
814ec330f84f8a3e4e03616224b618280a62b72d
|
/env/bin/wheel
|
2d10ddc86e670c5a635a61952b808d513a5496b1
|
[] |
no_license
|
lennykamande/Shopping-List-V2
|
f5cca145b7517c256f377df91c786264b44578a8
|
bf49ea2ac4f3fe69c976b0f039b7a24bce4a539b
|
refs/heads/master
| 2021-07-03T06:44:54.453204 | 2017-09-21T20:43:58 | 2017-09-21T20:43:58 | 104,333,008 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 243 |
#!/home/lennykamande/Shopping_new/env/bin/python2
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"[email protected]"
] | ||
e2f190730eeec9f17a97eb07432140fe4dedc841
|
aa0c083cea57def1543bdefdb39d49555679c365
|
/laba2/t11.3.py
|
ac34abf6471e5ecd4c6608fa1c77486b06f22679
|
[] |
no_license
|
RaiLinKey/EREMEEV-BSBO-08-17-LAB-PYTHON
|
fd2548d3ed7ecfda0b97fe14943e2b27dce01233
|
0f5aaeec4c7fa8ba9ac1d51382b925cc4da81b52
|
refs/heads/master
| 2020-07-31T22:22:58.313286 | 2019-11-28T15:31:31 | 2019-11-28T15:31:31 | 210,772,490 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 224 |
py
|
m = input('Enter a string: ')
n = int(input('Enter a number: '))
b = input('Enter a letter (or several letters): ')
if m[n-1] == b:
    print('Yes')
else:
    print('Error')
|
[
"[email protected]"
] | |
600464c5c8a6c68e94281103bd7f1c7802cb5c31
|
b2da1bdd9134de99db5bb88f343400ec1b09f737
|
/writer-ident/pooling.py
|
53e50ef84cb77703212ea88898b948446fa82020
|
[
"MIT"
] |
permissive
|
lilujunai/dgmp
|
677e9e217749ecbd3f167b7d6d5100b0263470ec
|
b04c0ec4289112f53903eb3429b0d8bc78231ceb
|
refs/heads/master
| 2022-04-04T10:36:53.088987 | 2019-10-30T13:05:20 | 2019-10-30T13:05:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,630 |
py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
def getPool(pool_type='avg', gmp_lambda=1e3, lse_r=10):
"""
# NOTE: this function is not used in writer_ident, s. constructor of
# ResNet50Encoder
params
pool_type: the allowed pool types
gmp_lambda: the initial regularization parameter for GMP
lse_r: the initial regularization parameter for LSE
"""
if pool_type == 'gmp':
pool_layer = GMP(lamb=gmp_lambda)
elif pool_type == 'avg':
pool_layer = nn.AdaptiveAvgPool2d(1)
elif pool_type == 'max':
pool_layer = nn.AdaptiveMaxPool2d(1)
elif pool_type == 'mixed-pool':
pool_layer = MixedPool(0.5)
elif pool_type == 'lse':
pool_layer = LSEPool(lse_r)
else:
raise RuntimeError('{} is not a valid pooling'
' strategy.'.format(pool_type))
return pool_layer
class GMP(nn.Module):
""" Generalized Max Pooling
"""
def __init__(self, lamb):
super().__init__()
self.lamb = nn.Parameter(lamb * torch.ones(1))
#self.inv_lamb = nn.Parameter((1./lamb) * torch.ones(1))
def forward(self, x):
B, D, H, W = x.shape
N = H * W
identity = torch.eye(N).cuda()
# reshape x, s.t. we can use the gmp formulation as a global pooling operation
x = x.view(B, D, N)
x = x.permute(0, 2, 1)
# compute the linear kernel
K = torch.bmm(x, x.permute(0, 2, 1))
# solve the linear system (K + lambda * I) * alpha = ones
A = K + self.lamb * identity
o = torch.ones(B, N, 1).cuda()
alphas, _ = torch.gesv(o, A)
alphas = alphas.view(B, 1, -1)
xi = torch.bmm(alphas, x)
xi = xi.view(B, -1)
return xi
class MixedPool(nn.Module):
def __init__(self, a):
super(MixedPool, self).__init__()
self.a = nn.Parameter(a * torch.ones(1))
def forward(self, x):
return self.a * F.adaptive_max_pool2d(x, 1) + (1 - self.a) * F.adaptive_avg_pool2d(x, 1)
class LSEPool(nn.Module):
"""
Learnable LSE pooling with a shared parameter
"""
def __init__(self, r):
super(LSEPool, self).__init__()
self.r = nn.Parameter(torch.ones(1) * r)
def forward(self, x):
s = (x.size(2) * x.size(3))
x_max = F.adaptive_max_pool2d(x, 1)
exp = torch.exp(self.r * (x - x_max))
sumexp = 1 / s * torch.sum(exp, dim=(2, 3))
sumexp = sumexp.view(sumexp.size(0), -1, 1, 1)
logsumexp = x_max + 1 / self.r * torch.log(sumexp)
return logsumexp
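# Minimal usage sketch (an illustration, not part of the original training code):
# pool a dummy CNN feature map with each CPU-friendly strategy. GMP is skipped
# here because its forward pass calls .cuda() and therefore needs a GPU.
if __name__ == '__main__':
    feats = torch.randn(2, 512, 7, 7)  # [batch, channels, H, W]
    for name in ('avg', 'max', 'mixed-pool', 'lse'):
        pooled = getPool(name)(feats)
        print(name, tuple(pooled.view(2, -1).shape))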
|
[
"[email protected]"
] | |
a704145bfefd1db2a57ecc137381c3adbdb5fbf1
|
be5a3ca6a7030f6fff10a3284d061ed90688c2f6
|
/data_processing/plot_graph.py
|
b29b7f1a2ec8c03306d094ba0c68c4248e37f850
|
[] |
no_license
|
edwardclem/protein-graph-structure-learning
|
db9f61988afca3ca76603524931049002a7ae41f
|
5dedf8b1679f159aff3194d64fba0155b13ca6bb
|
refs/heads/master
| 2021-01-20T12:11:04.981332 | 2017-03-18T23:34:29 | 2017-03-18T23:34:29 | 73,009,098 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,074 |
py
|
#plotting a protein interaction graph
import networkx as nx
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
#returns a dictionary of amino acid type for each amino acid
def get_edge_labels(edgelist):
labels = {}
for edge in edgelist:
if edge[0] not in labels.keys():
labels[edge[0]] = "{}({})".format(edge[1], edge[0])
if edge[2] not in labels.keys():
labels[edge[2]] = "{}({})".format(edge[2], edge[3])
return labels
def run():
edgefile = "../data/graph_files/1aa2_graph.txt"
with open(edgefile, 'r') as edgefile:
edgelist = [edge.split() for edge in edgefile]
edge_labels = get_edge_labels(edgelist)
edges_processed = [(edge[0], edge[2]) for edge in edgelist]
G = nx.Graph()
G.add_edges_from(edges_processed)
pos = nx.spring_layout(G, k=3/np.sqrt(len(edge_labels.keys())))
    print(len(edge_labels.keys()))
    print(len(edgelist))
nx.draw_networkx(G, pos=pos)
plt.title("Amino Acid Contacts for 1AA2")
plt.axis('off')
plt.savefig("1aa2_plot.png")
if __name__ =="__main__":
run()
|
[
"[email protected]"
] | |
2cc6fe236c84cda705a1b8fec0493df1b53fd497
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03026/s973955145.py
|
a2924f6209bebcf3663a5e647cd0aaf7dd7eaa40
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 440 |
py
|
# Copied reference solution, kept here for experimentation
N = int(input())
g = {i: [] for i in range(N)}
for i in range(N - 1):
a, b = map(int, input().split())
g[a - 1].append(b - 1)
g[b - 1].append(a - 1)
c = list(map(int, input().split()))
c.sort()
print(sum(c[:-1]))
nums = [0] * N
stack = [0]
while stack:
d = stack.pop()
nums[d] = c.pop()
for node in g[d]:
if nums[node] == 0:
stack.append(node)
print(' '.join(map(str, nums)))
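# Why the greedy works: root the tree at the node holding the maximum value; each
# edge then scores at most its child's value, so the total is at most
# sum(c) - max(c). Assigning values in decreasing order along the DFS above makes
# every edge score exactly its child's value, which attains that bound.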
|
[
"[email protected]"
] | |
0d85126ab946fc8001e0e2298bbab82fef2f38f3
|
2596290c3add5ed2f67f9524e9e38c8864e79f0c
|
/game_stats.py
|
af00a93d38c7c497b3f95a87eb33ad54b5ef1bac
|
[] |
no_license
|
myrdstom/Alien-Invasion
|
af438f767b208e60c835c5eb74190a1ea9733a05
|
58eab9424b0831d6100e920fa7f62563c7270eef
|
refs/heads/master
| 2021-01-25T13:34:51.908146 | 2019-05-06T07:00:30 | 2019-05-06T07:00:30 | 123,584,377 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 561 |
py
|
class GameStats():
"""Track statistics for alien invasion."""
def __init__(self, ai_settings):
"""Initialize statistics"""
self.ai_settings = ai_settings
self.reset_stats()
#High Score should never be reset.
self.high_score = 0
#Start Alien invasion in an inactive state.
self.game_active = False
def reset_stats(self):
"""Initialize statistics that can change during the game."""
self.ships_left = self.ai_settings.ship_limit
self.score = 0
self.level = 1
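# Minimal usage sketch (ai_settings is normally the project's Settings instance;
# the stub below only provides the single attribute read above):
if __name__ == '__main__':
    class _StubSettings:
        ship_limit = 3
    stats = GameStats(_StubSettings())
    print(stats.ships_left, stats.score, stats.level, stats.game_active)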
|
[
"[email protected]"
] |